xref: /netbsd-src/external/gpl3/gcc/dist/gcc/tree.cc (revision 0a3071956a3a9fdebdbf7f338cf2d439b45fc728)
1 /* Language-independent node constructors for parse phase of GNU compiler.
2    Copyright (C) 1987-2022 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains the low level primitives for operating on tree nodes,
21    including allocation, list operations, interning of identifiers,
22    construction of data type nodes and statement nodes,
23    and construction of type conversion nodes.  It also contains
24    tables index by tree code that describe how to take apart
25    nodes of that code.
26 
27    It is intended to be language-independent but can occasionally
28    calls language-dependent routines.  */
29 
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 
75 /* Tree code classes.  */
76 
77 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
78 #define END_OF_BASE_TREE_CODES tcc_exceptional,
79 
80 const enum tree_code_class tree_code_type[] = {
81 #include "all-tree.def"
82 };
83 
84 #undef DEFTREECODE
85 #undef END_OF_BASE_TREE_CODES
86 
87 /* Table indexed by tree code giving number of expression
88    operands beyond the fixed part of the node structure.
89    Not used for types or decls.  */
90 
91 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
92 #define END_OF_BASE_TREE_CODES 0,
93 
94 const unsigned char tree_code_length[] = {
95 #include "all-tree.def"
96 };
97 
98 #undef DEFTREECODE
99 #undef END_OF_BASE_TREE_CODES
100 
101 /* Names of tree components.
102    Used for printing out the tree and error messages.  */
103 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
104 #define END_OF_BASE_TREE_CODES "@dummy",
105 
106 static const char *const tree_code_name[] = {
107 #include "all-tree.def"
108 };
109 
110 #undef DEFTREECODE
111 #undef END_OF_BASE_TREE_CODES
112 
113 /* Each tree code class has an associated string representation.
114    These must correspond to the tree_code_class entries.  */
115 
116 const char *const tree_code_class_strings[] =
117 {
118   "exceptional",
119   "constant",
120   "type",
121   "declaration",
122   "reference",
123   "comparison",
124   "unary",
125   "binary",
126   "statement",
127   "vl_exp",
128   "expression"
129 };
130 
131 /* obstack.[ch] explicitly declined to prototype this.  */
132 extern int _obstack_allocated_p (struct obstack *h, void *obj);
133 
134 /* Statistics-gathering stuff.  */
135 
136 static uint64_t tree_code_counts[MAX_TREE_CODES];
137 uint64_t tree_node_counts[(int) all_kinds];
138 uint64_t tree_node_sizes[(int) all_kinds];
139 
140 /* Keep in sync with tree.h:enum tree_node_kind.  */
141 static const char * const tree_node_kind_names[] = {
142   "decls",
143   "types",
144   "blocks",
145   "stmts",
146   "refs",
147   "exprs",
148   "constants",
149   "identifiers",
150   "vecs",
151   "binfos",
152   "ssa names",
153   "constructors",
154   "random kinds",
155   "lang_decl kinds",
156   "lang_type kinds",
157   "omp clauses",
158 };
159 
160 /* Unique id for next decl created.  */
161 static GTY(()) int next_decl_uid;
162 /* Unique id for next type created.  */
163 static GTY(()) unsigned next_type_uid = 1;
164 /* Unique id for next debug decl created.  Use negative numbers,
165    to catch erroneous uses.  */
166 static GTY(()) int next_debug_decl_uid;
167 
168 /* Since we cannot rehash a type after it is in the table, we have to
169    keep the hash code.  */
170 
171 struct GTY((for_user)) type_hash {
172   unsigned long hash;
173   tree type;
174 };
175 
176 /* Initial size of the hash table (rounded to next prime).  */
177 #define TYPE_HASH_INITIAL_SIZE 1000
178 
179 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
180 {
hashtype_cache_hasher181   static hashval_t hash (type_hash *t) { return t->hash; }
182   static bool equal (type_hash *a, type_hash *b);
183 
184   static int
keep_cache_entrytype_cache_hasher185   keep_cache_entry (type_hash *&t)
186   {
187     return ggc_marked_p (t->type);
188   }
189 };
190 
191 /* Now here is the hash table.  When recording a type, it is added to
192    the slot whose index is the hash code.  Note that the hash table is
193    used for several kinds of types (function types, array types and
194    array index range types, for now).  While all these live in the
195    same table, they are completely independent, and the hash code is
196    computed differently for each of these.  */
197 
198 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
199 
200 /* Hash table and temporary node for larger integer const values.  */
201 static GTY (()) tree int_cst_node;
202 
203 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
204 {
205   static hashval_t hash (tree t);
206   static bool equal (tree x, tree y);
207 };
208 
209 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
210 
211 /* Class and variable for making sure that there is a single POLY_INT_CST
212    for a given value.  */
213 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
214 {
215   typedef std::pair<tree, const poly_wide_int *> compare_type;
216   static hashval_t hash (tree t);
217   static bool equal (tree x, const compare_type &y);
218 };
219 
220 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
221 
222 /* Hash table for optimization flags and target option flags.  Use the same
223    hash table for both sets of options.  Nodes for building the current
224    optimization and target option nodes.  The assumption is most of the time
225    the options created will already be in the hash table, so we avoid
226    allocating and freeing up a node repeatably.  */
227 static GTY (()) tree cl_optimization_node;
228 static GTY (()) tree cl_target_option_node;
229 
230 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
231 {
232   static hashval_t hash (tree t);
233   static bool equal (tree x, tree y);
234 };
235 
236 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
237 
238 /* General tree->tree mapping  structure for use in hash tables.  */
239 
240 
241 static GTY ((cache))
242      hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
243 
244 static GTY ((cache))
245      hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
246 
247 static GTY ((cache))
248      hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
249 
250 static void set_type_quals (tree, int);
251 static void print_type_hash_statistics (void);
252 static void print_debug_expr_statistics (void);
253 static void print_value_expr_statistics (void);
254 
255 tree global_trees[TI_MAX];
256 tree integer_types[itk_none];
257 
258 bool int_n_enabled_p[NUM_INT_N_ENTS];
259 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
260 
261 bool tree_contains_struct[MAX_TREE_CODES][64];
262 
263 /* Number of operands for each OMP clause.  */
264 unsigned const char omp_clause_num_ops[] =
265 {
266   0, /* OMP_CLAUSE_ERROR  */
267   1, /* OMP_CLAUSE_PRIVATE  */
268   1, /* OMP_CLAUSE_SHARED  */
269   1, /* OMP_CLAUSE_FIRSTPRIVATE  */
270   2, /* OMP_CLAUSE_LASTPRIVATE  */
271   5, /* OMP_CLAUSE_REDUCTION  */
272   5, /* OMP_CLAUSE_TASK_REDUCTION  */
273   5, /* OMP_CLAUSE_IN_REDUCTION  */
274   1, /* OMP_CLAUSE_COPYIN  */
275   1, /* OMP_CLAUSE_COPYPRIVATE  */
276   3, /* OMP_CLAUSE_LINEAR  */
277   1, /* OMP_CLAUSE_AFFINITY  */
278   2, /* OMP_CLAUSE_ALIGNED  */
279   3, /* OMP_CLAUSE_ALLOCATE  */
280   1, /* OMP_CLAUSE_DEPEND  */
281   1, /* OMP_CLAUSE_NONTEMPORAL  */
282   1, /* OMP_CLAUSE_UNIFORM  */
283   1, /* OMP_CLAUSE_TO_DECLARE  */
284   1, /* OMP_CLAUSE_LINK  */
285   1, /* OMP_CLAUSE_DETACH  */
286   1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
287   1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
288   1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
289   1, /* OMP_CLAUSE_INCLUSIVE  */
290   1, /* OMP_CLAUSE_EXCLUSIVE  */
291   2, /* OMP_CLAUSE_FROM  */
292   2, /* OMP_CLAUSE_TO  */
293   2, /* OMP_CLAUSE_MAP  */
294   1, /* OMP_CLAUSE_HAS_DEVICE_ADDR  */
295   2, /* OMP_CLAUSE__CACHE_  */
296   2, /* OMP_CLAUSE_GANG  */
297   1, /* OMP_CLAUSE_ASYNC  */
298   1, /* OMP_CLAUSE_WAIT  */
299   0, /* OMP_CLAUSE_AUTO  */
300   0, /* OMP_CLAUSE_SEQ  */
301   1, /* OMP_CLAUSE__LOOPTEMP_  */
302   1, /* OMP_CLAUSE__REDUCTEMP_  */
303   1, /* OMP_CLAUSE__CONDTEMP_  */
304   1, /* OMP_CLAUSE__SCANTEMP_  */
305   1, /* OMP_CLAUSE_IF  */
306   1, /* OMP_CLAUSE_NUM_THREADS  */
307   1, /* OMP_CLAUSE_SCHEDULE  */
308   0, /* OMP_CLAUSE_NOWAIT  */
309   1, /* OMP_CLAUSE_ORDERED  */
310   0, /* OMP_CLAUSE_DEFAULT  */
311   3, /* OMP_CLAUSE_COLLAPSE  */
312   0, /* OMP_CLAUSE_UNTIED   */
313   1, /* OMP_CLAUSE_FINAL  */
314   0, /* OMP_CLAUSE_MERGEABLE  */
315   1, /* OMP_CLAUSE_DEVICE  */
316   1, /* OMP_CLAUSE_DIST_SCHEDULE  */
317   0, /* OMP_CLAUSE_INBRANCH  */
318   0, /* OMP_CLAUSE_NOTINBRANCH  */
319   2, /* OMP_CLAUSE_NUM_TEAMS  */
320   1, /* OMP_CLAUSE_THREAD_LIMIT  */
321   0, /* OMP_CLAUSE_PROC_BIND  */
322   1, /* OMP_CLAUSE_SAFELEN  */
323   1, /* OMP_CLAUSE_SIMDLEN  */
324   0, /* OMP_CLAUSE_DEVICE_TYPE  */
325   0, /* OMP_CLAUSE_FOR  */
326   0, /* OMP_CLAUSE_PARALLEL  */
327   0, /* OMP_CLAUSE_SECTIONS  */
328   0, /* OMP_CLAUSE_TASKGROUP  */
329   1, /* OMP_CLAUSE_PRIORITY  */
330   1, /* OMP_CLAUSE_GRAINSIZE  */
331   1, /* OMP_CLAUSE_NUM_TASKS  */
332   0, /* OMP_CLAUSE_NOGROUP  */
333   0, /* OMP_CLAUSE_THREADS  */
334   0, /* OMP_CLAUSE_SIMD  */
335   1, /* OMP_CLAUSE_HINT  */
336   0, /* OMP_CLAUSE_DEFAULTMAP  */
337   0, /* OMP_CLAUSE_ORDER  */
338   0, /* OMP_CLAUSE_BIND  */
339   1, /* OMP_CLAUSE_FILTER  */
340   1, /* OMP_CLAUSE__SIMDUID_  */
341   0, /* OMP_CLAUSE__SIMT_  */
342   0, /* OMP_CLAUSE_INDEPENDENT  */
343   1, /* OMP_CLAUSE_WORKER  */
344   1, /* OMP_CLAUSE_VECTOR  */
345   1, /* OMP_CLAUSE_NUM_GANGS  */
346   1, /* OMP_CLAUSE_NUM_WORKERS  */
347   1, /* OMP_CLAUSE_VECTOR_LENGTH  */
348   3, /* OMP_CLAUSE_TILE  */
349   0, /* OMP_CLAUSE_IF_PRESENT */
350   0, /* OMP_CLAUSE_FINALIZE */
351   0, /* OMP_CLAUSE_NOHOST */
352 };
353 
354 const char * const omp_clause_code_name[] =
355 {
356   "error_clause",
357   "private",
358   "shared",
359   "firstprivate",
360   "lastprivate",
361   "reduction",
362   "task_reduction",
363   "in_reduction",
364   "copyin",
365   "copyprivate",
366   "linear",
367   "affinity",
368   "aligned",
369   "allocate",
370   "depend",
371   "nontemporal",
372   "uniform",
373   "to",
374   "link",
375   "detach",
376   "use_device_ptr",
377   "use_device_addr",
378   "is_device_ptr",
379   "inclusive",
380   "exclusive",
381   "from",
382   "to",
383   "map",
384   "has_device_addr",
385   "_cache_",
386   "gang",
387   "async",
388   "wait",
389   "auto",
390   "seq",
391   "_looptemp_",
392   "_reductemp_",
393   "_condtemp_",
394   "_scantemp_",
395   "if",
396   "num_threads",
397   "schedule",
398   "nowait",
399   "ordered",
400   "default",
401   "collapse",
402   "untied",
403   "final",
404   "mergeable",
405   "device",
406   "dist_schedule",
407   "inbranch",
408   "notinbranch",
409   "num_teams",
410   "thread_limit",
411   "proc_bind",
412   "safelen",
413   "simdlen",
414   "device_type",
415   "for",
416   "parallel",
417   "sections",
418   "taskgroup",
419   "priority",
420   "grainsize",
421   "num_tasks",
422   "nogroup",
423   "threads",
424   "simd",
425   "hint",
426   "defaultmap",
427   "order",
428   "bind",
429   "filter",
430   "_simduid_",
431   "_simt_",
432   "independent",
433   "worker",
434   "vector",
435   "num_gangs",
436   "num_workers",
437   "vector_length",
438   "tile",
439   "if_present",
440   "finalize",
441   "nohost",
442 };
443 
444 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
445    clause names, but for use in diagnostics etc. would like to use the "user"
446    clause names.  */
447 
448 const char *
user_omp_clause_code_name(tree clause,bool oacc)449 user_omp_clause_code_name (tree clause, bool oacc)
450 {
451   /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
452      distinguish clauses as seen by the user.  See also where front ends do
453      'build_omp_clause' with 'OMP_CLAUSE_MAP'.  */
454   if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
455     switch (OMP_CLAUSE_MAP_KIND (clause))
456       {
457       case GOMP_MAP_FORCE_ALLOC:
458       case GOMP_MAP_ALLOC: return "create";
459       case GOMP_MAP_FORCE_TO:
460       case GOMP_MAP_TO: return "copyin";
461       case GOMP_MAP_FORCE_FROM:
462       case GOMP_MAP_FROM: return "copyout";
463       case GOMP_MAP_FORCE_TOFROM:
464       case GOMP_MAP_TOFROM: return "copy";
465       case GOMP_MAP_RELEASE: return "delete";
466       case GOMP_MAP_FORCE_PRESENT: return "present";
467       case GOMP_MAP_ATTACH: return "attach";
468       case GOMP_MAP_FORCE_DETACH:
469       case GOMP_MAP_DETACH: return "detach";
470       case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
471       case GOMP_MAP_LINK: return "link";
472       case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
473       default: break;
474       }
475 
476   return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
477 }
478 
479 
480 /* Return the tree node structure used by tree code CODE.  */
481 
482 static inline enum tree_node_structure_enum
tree_node_structure_for_code(enum tree_code code)483 tree_node_structure_for_code (enum tree_code code)
484 {
485   switch (TREE_CODE_CLASS (code))
486     {
487     case tcc_declaration:
488       switch (code)
489 	{
490 	case CONST_DECL:	return TS_CONST_DECL;
491 	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
492 	case FIELD_DECL:	return TS_FIELD_DECL;
493 	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
494 	case LABEL_DECL:	return TS_LABEL_DECL;
495 	case PARM_DECL:		return TS_PARM_DECL;
496 	case RESULT_DECL:	return TS_RESULT_DECL;
497 	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
498 	case TYPE_DECL:		return TS_TYPE_DECL;
499 	case VAR_DECL:		return TS_VAR_DECL;
500 	default: 		return TS_DECL_NON_COMMON;
501 	}
502 
503     case tcc_type:		return TS_TYPE_NON_COMMON;
504 
505     case tcc_binary:
506     case tcc_comparison:
507     case tcc_expression:
508     case tcc_reference:
509     case tcc_statement:
510     case tcc_unary:
511     case tcc_vl_exp:		return TS_EXP;
512 
513     default:  /* tcc_constant and tcc_exceptional */
514       break;
515     }
516 
517   switch (code)
518     {
519       /* tcc_constant cases.  */
520     case COMPLEX_CST:		return TS_COMPLEX;
521     case FIXED_CST:		return TS_FIXED_CST;
522     case INTEGER_CST:		return TS_INT_CST;
523     case POLY_INT_CST:		return TS_POLY_INT_CST;
524     case REAL_CST:		return TS_REAL_CST;
525     case STRING_CST:		return TS_STRING;
526     case VECTOR_CST:		return TS_VECTOR;
527     case VOID_CST:		return TS_TYPED;
528 
529       /* tcc_exceptional cases.  */
530     case BLOCK:			return TS_BLOCK;
531     case CONSTRUCTOR:		return TS_CONSTRUCTOR;
532     case ERROR_MARK:		return TS_COMMON;
533     case IDENTIFIER_NODE:	return TS_IDENTIFIER;
534     case OMP_CLAUSE:		return TS_OMP_CLAUSE;
535     case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
536     case PLACEHOLDER_EXPR:	return TS_COMMON;
537     case SSA_NAME:		return TS_SSA_NAME;
538     case STATEMENT_LIST:	return TS_STATEMENT_LIST;
539     case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
540     case TREE_BINFO:		return TS_BINFO;
541     case TREE_LIST:		return TS_LIST;
542     case TREE_VEC:		return TS_VEC;
543 
544     default:
545       gcc_unreachable ();
546     }
547 }
548 
549 
550 /* Initialize tree_contains_struct to describe the hierarchy of tree
551    nodes.  */
552 
553 static void
initialize_tree_contains_struct(void)554 initialize_tree_contains_struct (void)
555 {
556   unsigned i;
557 
558   for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
559     {
560       enum tree_code code;
561       enum tree_node_structure_enum ts_code;
562 
563       code = (enum tree_code) i;
564       ts_code = tree_node_structure_for_code (code);
565 
566       /* Mark the TS structure itself.  */
567       tree_contains_struct[code][ts_code] = 1;
568 
569       /* Mark all the structures that TS is derived from.  */
570       switch (ts_code)
571 	{
572 	case TS_TYPED:
573 	case TS_BLOCK:
574 	case TS_OPTIMIZATION:
575 	case TS_TARGET_OPTION:
576 	  MARK_TS_BASE (code);
577 	  break;
578 
579 	case TS_COMMON:
580 	case TS_INT_CST:
581 	case TS_POLY_INT_CST:
582 	case TS_REAL_CST:
583 	case TS_FIXED_CST:
584 	case TS_VECTOR:
585 	case TS_STRING:
586 	case TS_COMPLEX:
587 	case TS_SSA_NAME:
588 	case TS_CONSTRUCTOR:
589 	case TS_EXP:
590 	case TS_STATEMENT_LIST:
591 	  MARK_TS_TYPED (code);
592 	  break;
593 
594 	case TS_IDENTIFIER:
595 	case TS_DECL_MINIMAL:
596 	case TS_TYPE_COMMON:
597 	case TS_LIST:
598 	case TS_VEC:
599 	case TS_BINFO:
600 	case TS_OMP_CLAUSE:
601 	  MARK_TS_COMMON (code);
602 	  break;
603 
604 	case TS_TYPE_WITH_LANG_SPECIFIC:
605 	  MARK_TS_TYPE_COMMON (code);
606 	  break;
607 
608 	case TS_TYPE_NON_COMMON:
609 	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
610 	  break;
611 
612 	case TS_DECL_COMMON:
613 	  MARK_TS_DECL_MINIMAL (code);
614 	  break;
615 
616 	case TS_DECL_WRTL:
617 	case TS_CONST_DECL:
618 	  MARK_TS_DECL_COMMON (code);
619 	  break;
620 
621 	case TS_DECL_NON_COMMON:
622 	  MARK_TS_DECL_WITH_VIS (code);
623 	  break;
624 
625 	case TS_DECL_WITH_VIS:
626 	case TS_PARM_DECL:
627 	case TS_LABEL_DECL:
628 	case TS_RESULT_DECL:
629 	  MARK_TS_DECL_WRTL (code);
630 	  break;
631 
632 	case TS_FIELD_DECL:
633 	  MARK_TS_DECL_COMMON (code);
634 	  break;
635 
636 	case TS_VAR_DECL:
637 	  MARK_TS_DECL_WITH_VIS (code);
638 	  break;
639 
640 	case TS_TYPE_DECL:
641 	case TS_FUNCTION_DECL:
642 	  MARK_TS_DECL_NON_COMMON (code);
643 	  break;
644 
645 	case TS_TRANSLATION_UNIT_DECL:
646 	  MARK_TS_DECL_COMMON (code);
647 	  break;
648 
649 	default:
650 	  gcc_unreachable ();
651 	}
652     }
653 
654   /* Basic consistency checks for attributes used in fold.  */
655   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
656   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
657   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
658   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
659   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
660   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
661   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
662   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
663   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
664   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
665   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
666   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
667   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
668   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
669   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
670   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
671   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
672   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
673   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
674   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
675   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
676   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
677   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
678   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
679   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
680   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
681   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
682   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
683   gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
684   gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
685   gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
686   gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
687   gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
688   gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
689   gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
690   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
691   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
692   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
693   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
694   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
695 }
696 
697 
698 /* Init tree.cc.  */
699 
700 void
init_ttree(void)701 init_ttree (void)
702 {
703   /* Initialize the hash table of types.  */
704   type_hash_table
705     = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
706 
707   debug_expr_for_decl
708     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
709 
710   value_expr_for_decl
711     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
712 
713   int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
714 
715   poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
716 
717   int_cst_node = make_int_cst (1, 1);
718 
719   cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
720 
721   cl_optimization_node = make_node (OPTIMIZATION_NODE);
722   cl_target_option_node = make_node (TARGET_OPTION_NODE);
723 
724   /* Initialize the tree_contains_struct array.  */
725   initialize_tree_contains_struct ();
726   lang_hooks.init_ts ();
727 }
728 
729 
730 /* The name of the object as the assembler will see it (but before any
731    translations made by ASM_OUTPUT_LABELREF).  Often this is the same
732    as DECL_NAME.  It is an IDENTIFIER_NODE.  */
733 tree
decl_assembler_name(tree decl)734 decl_assembler_name (tree decl)
735 {
736   if (!DECL_ASSEMBLER_NAME_SET_P (decl))
737     lang_hooks.set_decl_assembler_name (decl);
738   return DECL_ASSEMBLER_NAME_RAW (decl);
739 }
740 
741 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
742    (either of which may be NULL).  Inform the FE, if this changes the
743    name.  */
744 
745 void
overwrite_decl_assembler_name(tree decl,tree name)746 overwrite_decl_assembler_name (tree decl, tree name)
747 {
748   if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
749     lang_hooks.overwrite_decl_assembler_name (decl, name);
750 }
751 
752 /* Return true if DECL may need an assembler name to be set.  */
753 
754 static inline bool
need_assembler_name_p(tree decl)755 need_assembler_name_p (tree decl)
756 {
757   /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
758      Rule merging.  This makes type_odr_p to return true on those types during
759      LTO and by comparing the mangled name, we can say what types are intended
760      to be equivalent across compilation unit.
761 
762      We do not store names of type_in_anonymous_namespace_p.
763 
764      Record, union and enumeration type have linkage that allows use
765      to check type_in_anonymous_namespace_p. We do not mangle compound types
766      that always can be compared structurally.
767 
768      Similarly for builtin types, we compare properties of their main variant.
769      A special case are integer types where mangling do make differences
770      between char/signed char/unsigned char etc.  Storing name for these makes
771      e.g.  -fno-signed-char/-fsigned-char mismatches to be handled well.
772      See cp/mangle.cc:write_builtin_type for details.  */
773 
774   if (TREE_CODE (decl) == TYPE_DECL)
775     {
776       if (DECL_NAME (decl)
777 	  && decl == TYPE_NAME (TREE_TYPE (decl))
778 	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
779 	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
780 	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
781 	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
782 	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
783 	  && (type_with_linkage_p (TREE_TYPE (decl))
784 	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
785 	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
786 	return !DECL_ASSEMBLER_NAME_SET_P (decl);
787       return false;
788     }
789   /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
790   if (!VAR_OR_FUNCTION_DECL_P (decl))
791     return false;
792 
793   /* If DECL already has its assembler name set, it does not need a
794      new one.  */
795   if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
796       || DECL_ASSEMBLER_NAME_SET_P (decl))
797     return false;
798 
799   /* Abstract decls do not need an assembler name.  */
800   if (DECL_ABSTRACT_P (decl))
801     return false;
802 
803   /* For VAR_DECLs, only static, public and external symbols need an
804      assembler name.  */
805   if (VAR_P (decl)
806       && !TREE_STATIC (decl)
807       && !TREE_PUBLIC (decl)
808       && !DECL_EXTERNAL (decl))
809     return false;
810 
811   if (TREE_CODE (decl) == FUNCTION_DECL)
812     {
813       /* Do not set assembler name on builtins.  Allow RTL expansion to
814 	 decide whether to expand inline or via a regular call.  */
815       if (fndecl_built_in_p (decl)
816 	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
817 	return false;
818 
819       /* Functions represented in the callgraph need an assembler name.  */
820       if (cgraph_node::get (decl) != NULL)
821 	return true;
822 
823       /* Unused and not public functions don't need an assembler name.  */
824       if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
825 	return false;
826     }
827 
828   return true;
829 }
830 
831 /* If T needs an assembler name, have one created for it.  */
832 
833 void
assign_assembler_name_if_needed(tree t)834 assign_assembler_name_if_needed (tree t)
835 {
836   if (need_assembler_name_p (t))
837     {
838       /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
839 	 diagnostics that use input_location to show locus
840 	 information.  The problem here is that, at this point,
841 	 input_location is generally anchored to the end of the file
842 	 (since the parser is long gone), so we don't have a good
843 	 position to pin it to.
844 
845 	 To alleviate this problem, this uses the location of T's
846 	 declaration.  Examples of this are
847 	 testsuite/g++.dg/template/cond2.C and
848 	 testsuite/g++.dg/template/pr35240.C.  */
849       location_t saved_location = input_location;
850       input_location = DECL_SOURCE_LOCATION (t);
851 
852       decl_assembler_name (t);
853 
854       input_location = saved_location;
855     }
856 }
857 
858 /* When the target supports COMDAT groups, this indicates which group the
859    DECL is associated with.  This can be either an IDENTIFIER_NODE or a
860    decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */
861 tree
decl_comdat_group(const_tree node)862 decl_comdat_group (const_tree node)
863 {
864   struct symtab_node *snode = symtab_node::get (node);
865   if (!snode)
866     return NULL;
867   return snode->get_comdat_group ();
868 }
869 
870 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */
871 tree
decl_comdat_group_id(const_tree node)872 decl_comdat_group_id (const_tree node)
873 {
874   struct symtab_node *snode = symtab_node::get (node);
875   if (!snode)
876     return NULL;
877   return snode->get_comdat_group_id ();
878 }
879 
880 /* When the target supports named section, return its name as IDENTIFIER_NODE
881    or NULL if it is in no section.  */
882 const char *
decl_section_name(const_tree node)883 decl_section_name (const_tree node)
884 {
885   struct symtab_node *snode = symtab_node::get (node);
886   if (!snode)
887     return NULL;
888   return snode->get_section ();
889 }
890 
891 /* Set section name of NODE to VALUE (that is expected to be
892    identifier node) */
893 void
set_decl_section_name(tree node,const char * value)894 set_decl_section_name (tree node, const char *value)
895 {
896   struct symtab_node *snode;
897 
898   if (value == NULL)
899     {
900       snode = symtab_node::get (node);
901       if (!snode)
902 	return;
903     }
904   else if (VAR_P (node))
905     snode = varpool_node::get_create (node);
906   else
907     snode = cgraph_node::get_create (node);
908   snode->set_section (value);
909 }
910 
911 /* Set section name of NODE to match the section name of OTHER.
912 
913    set_decl_section_name (decl, other) is equivalent to
914    set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
915    efficient.  */
916 void
set_decl_section_name(tree decl,const_tree other)917 set_decl_section_name (tree decl, const_tree other)
918 {
919   struct symtab_node *other_node = symtab_node::get (other);
920   if (other_node)
921     {
922       struct symtab_node *decl_node;
923       if (VAR_P (decl))
924     decl_node = varpool_node::get_create (decl);
925       else
926     decl_node = cgraph_node::get_create (decl);
927       decl_node->set_section (*other_node);
928     }
929   else
930     {
931       struct symtab_node *decl_node = symtab_node::get (decl);
932       if (!decl_node)
933     return;
934       decl_node->set_section (NULL);
935     }
936 }
937 
938 /* Return TLS model of a variable NODE.  */
939 enum tls_model
decl_tls_model(const_tree node)940 decl_tls_model (const_tree node)
941 {
942   struct varpool_node *snode = varpool_node::get (node);
943   if (!snode)
944     return TLS_MODEL_NONE;
945   return snode->tls_model;
946 }
947 
948 /* Set TLS model of variable NODE to MODEL.  */
949 void
set_decl_tls_model(tree node,enum tls_model model)950 set_decl_tls_model (tree node, enum tls_model model)
951 {
952   struct varpool_node *vnode;
953 
954   if (model == TLS_MODEL_NONE)
955     {
956       vnode = varpool_node::get (node);
957       if (!vnode)
958 	return;
959     }
960   else
961     vnode = varpool_node::get_create (node);
962   vnode->tls_model = model;
963 }
964 
965 /* Compute the number of bytes occupied by a tree with code CODE.
966    This function cannot be used for nodes that have variable sizes,
967    including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
968 size_t
tree_code_size(enum tree_code code)969 tree_code_size (enum tree_code code)
970 {
971   switch (TREE_CODE_CLASS (code))
972     {
973     case tcc_declaration:  /* A decl node */
974       switch (code)
975 	{
976 	case FIELD_DECL:	return sizeof (tree_field_decl);
977 	case PARM_DECL:		return sizeof (tree_parm_decl);
978 	case VAR_DECL:		return sizeof (tree_var_decl);
979 	case LABEL_DECL:	return sizeof (tree_label_decl);
980 	case RESULT_DECL:	return sizeof (tree_result_decl);
981 	case CONST_DECL:	return sizeof (tree_const_decl);
982 	case TYPE_DECL:		return sizeof (tree_type_decl);
983 	case FUNCTION_DECL:	return sizeof (tree_function_decl);
984 	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
985 	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
986 	case NAMESPACE_DECL:
987 	case IMPORTED_DECL:
988 	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
989 	default:
990 	  gcc_checking_assert (code >= NUM_TREE_CODES);
991 	  return lang_hooks.tree_size (code);
992 	}
993 
994     case tcc_type:  /* a type node */
995       switch (code)
996 	{
997 	case OFFSET_TYPE:
998 	case ENUMERAL_TYPE:
999 	case BOOLEAN_TYPE:
1000 	case INTEGER_TYPE:
1001 	case REAL_TYPE:
1002 	case OPAQUE_TYPE:
1003 	case POINTER_TYPE:
1004 	case REFERENCE_TYPE:
1005 	case NULLPTR_TYPE:
1006 	case FIXED_POINT_TYPE:
1007 	case COMPLEX_TYPE:
1008 	case VECTOR_TYPE:
1009 	case ARRAY_TYPE:
1010 	case RECORD_TYPE:
1011 	case UNION_TYPE:
1012 	case QUAL_UNION_TYPE:
1013 	case VOID_TYPE:
1014 	case FUNCTION_TYPE:
1015 	case METHOD_TYPE:
1016 	case LANG_TYPE:		return sizeof (tree_type_non_common);
1017 	default:
1018 	  gcc_checking_assert (code >= NUM_TREE_CODES);
1019 	  return lang_hooks.tree_size (code);
1020 	}
1021 
1022     case tcc_reference:   /* a reference */
1023     case tcc_expression:  /* an expression */
1024     case tcc_statement:   /* an expression with side effects */
1025     case tcc_comparison:  /* a comparison expression */
1026     case tcc_unary:       /* a unary arithmetic expression */
1027     case tcc_binary:      /* a binary arithmetic expression */
1028       return (sizeof (struct tree_exp)
1029 	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1030 
1031     case tcc_constant:  /* a constant */
1032       switch (code)
1033 	{
1034 	case VOID_CST:		return sizeof (tree_typed);
1035 	case INTEGER_CST:	gcc_unreachable ();
1036 	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
1037 	case REAL_CST:		return sizeof (tree_real_cst);
1038 	case FIXED_CST:		return sizeof (tree_fixed_cst);
1039 	case COMPLEX_CST:	return sizeof (tree_complex);
1040 	case VECTOR_CST:	gcc_unreachable ();
1041 	case STRING_CST:	gcc_unreachable ();
1042 	default:
1043 	  gcc_checking_assert (code >= NUM_TREE_CODES);
1044 	  return lang_hooks.tree_size (code);
1045 	}
1046 
1047     case tcc_exceptional:  /* something random, like an identifier.  */
1048       switch (code)
1049 	{
1050 	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
1051 	case TREE_LIST:		return sizeof (tree_list);
1052 
1053 	case ERROR_MARK:
1054 	case PLACEHOLDER_EXPR:	return sizeof (tree_common);
1055 
1056 	case TREE_VEC:		gcc_unreachable ();
1057 	case OMP_CLAUSE:	gcc_unreachable ();
1058 
1059 	case SSA_NAME:		return sizeof (tree_ssa_name);
1060 
1061 	case STATEMENT_LIST:	return sizeof (tree_statement_list);
1062 	case BLOCK:		return sizeof (struct tree_block);
1063 	case CONSTRUCTOR:	return sizeof (tree_constructor);
1064 	case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1065 	case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1066 
1067 	default:
1068 	  gcc_checking_assert (code >= NUM_TREE_CODES);
1069 	  return lang_hooks.tree_size (code);
1070 	}
1071 
1072     default:
1073       gcc_unreachable ();
1074     }
1075 }
1076 
1077 /* Compute the number of bytes occupied by NODE.  This routine only
1078    looks at TREE_CODE, except for those nodes that have variable sizes.  */
1079 size_t
tree_size(const_tree node)1080 tree_size (const_tree node)
1081 {
1082   const enum tree_code code = TREE_CODE (node);
1083   switch (code)
1084     {
1085     case INTEGER_CST:
1086       return (sizeof (struct tree_int_cst)
1087 	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1088 
1089     case TREE_BINFO:
1090       return (offsetof (struct tree_binfo, base_binfos)
1091 	      + vec<tree, va_gc>
1092 		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1093 
1094     case TREE_VEC:
1095       return (sizeof (struct tree_vec)
1096 	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1097 
1098     case VECTOR_CST:
1099       return (sizeof (struct tree_vector)
1100 	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1101 
1102     case STRING_CST:
1103       return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1104 
1105     case OMP_CLAUSE:
1106       return (sizeof (struct tree_omp_clause)
1107 	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1108 	        * sizeof (tree));
1109 
1110     default:
1111       if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1112 	return (sizeof (struct tree_exp)
1113 		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1114       else
1115 	return tree_code_size (code);
1116     }
1117 }
1118 
1119 /* Return tree node kind based on tree CODE.  */
1120 
1121 static tree_node_kind
get_stats_node_kind(enum tree_code code)1122 get_stats_node_kind (enum tree_code code)
1123 {
1124   enum tree_code_class type = TREE_CODE_CLASS (code);
1125 
1126   switch (type)
1127     {
1128     case tcc_declaration:  /* A decl node */
1129       return d_kind;
1130     case tcc_type:  /* a type node */
1131       return t_kind;
1132     case tcc_statement:  /* an expression with side effects */
1133       return s_kind;
1134     case tcc_reference:  /* a reference */
1135       return r_kind;
1136     case tcc_expression:  /* an expression */
1137     case tcc_comparison:  /* a comparison expression */
1138     case tcc_unary:  /* a unary arithmetic expression */
1139     case tcc_binary:  /* a binary arithmetic expression */
1140       return e_kind;
1141     case tcc_constant:  /* a constant */
1142       return c_kind;
1143     case tcc_exceptional:  /* something random, like an identifier.  */
1144       switch (code)
1145 	{
1146 	case IDENTIFIER_NODE:
1147 	  return id_kind;
1148 	case TREE_VEC:
1149 	  return vec_kind;
1150 	case TREE_BINFO:
1151 	  return binfo_kind;
1152 	case SSA_NAME:
1153 	  return ssa_name_kind;
1154 	case BLOCK:
1155 	  return b_kind;
1156 	case CONSTRUCTOR:
1157 	  return constr_kind;
1158 	case OMP_CLAUSE:
1159 	  return omp_clause_kind;
1160 	default:
1161 	  return x_kind;
1162 	}
1163       break;
1164     case tcc_vl_exp:
1165       return e_kind;
1166     default:
1167       gcc_unreachable ();
1168     }
1169 }
1170 
1171 /* Record interesting allocation statistics for a tree node with CODE
1172    and LENGTH.  */
1173 
1174 static void
record_node_allocation_statistics(enum tree_code code,size_t length)1175 record_node_allocation_statistics (enum tree_code code, size_t length)
1176 {
1177   if (!GATHER_STATISTICS)
1178     return;
1179 
1180   tree_node_kind kind = get_stats_node_kind (code);
1181 
1182   tree_code_counts[(int) code]++;
1183   tree_node_counts[(int) kind]++;
1184   tree_node_sizes[(int) kind] += length;
1185 }
1186 
1187 /* Allocate and return a new UID from the DECL_UID namespace.  */
1188 
1189 int
allocate_decl_uid(void)1190 allocate_decl_uid (void)
1191 {
1192   return next_decl_uid++;
1193 }
1194 
1195 /* Return a newly allocated node of code CODE.  For decl and type
1196    nodes, some other fields are initialized.  The rest of the node is
1197    initialized to zero.  This function cannot be used for TREE_VEC,
1198    INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1199    tree_code_size.
1200 
1201    Achoo!  I got a code in the node.  */
1202 
1203 tree
make_node(enum tree_code code MEM_STAT_DECL)1204 make_node (enum tree_code code MEM_STAT_DECL)
1205 {
1206   tree t;
1207   enum tree_code_class type = TREE_CODE_CLASS (code);
1208   size_t length = tree_code_size (code);
1209 
1210   record_node_allocation_statistics (code, length);
1211 
1212   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1213   TREE_SET_CODE (t, code);
1214 
1215   switch (type)
1216     {
1217     case tcc_statement:
1218       if (code != DEBUG_BEGIN_STMT)
1219 	TREE_SIDE_EFFECTS (t) = 1;
1220       break;
1221 
1222     case tcc_declaration:
1223       if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1224 	{
1225 	  if (code == FUNCTION_DECL)
1226 	    {
1227 	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1228 	      SET_DECL_MODE (t, FUNCTION_MODE);
1229 	    }
1230 	  else
1231 	    SET_DECL_ALIGN (t, 1);
1232 	}
1233       DECL_SOURCE_LOCATION (t) = input_location;
1234       if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1235 	DECL_UID (t) = --next_debug_decl_uid;
1236       else
1237 	{
1238 	  DECL_UID (t) = allocate_decl_uid ();
1239 	  SET_DECL_PT_UID (t, -1);
1240 	}
1241       if (TREE_CODE (t) == LABEL_DECL)
1242 	LABEL_DECL_UID (t) = -1;
1243 
1244       break;
1245 
1246     case tcc_type:
1247       TYPE_UID (t) = next_type_uid++;
1248       SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1249       TYPE_USER_ALIGN (t) = 0;
1250       TYPE_MAIN_VARIANT (t) = t;
1251       TYPE_CANONICAL (t) = t;
1252 
1253       /* Default to no attributes for type, but let target change that.  */
1254       TYPE_ATTRIBUTES (t) = NULL_TREE;
1255       targetm.set_default_type_attributes (t);
1256 
1257       /* We have not yet computed the alias set for this type.  */
1258       TYPE_ALIAS_SET (t) = -1;
1259       break;
1260 
1261     case tcc_constant:
1262       TREE_CONSTANT (t) = 1;
1263       break;
1264 
1265     case tcc_expression:
1266       switch (code)
1267 	{
1268 	case INIT_EXPR:
1269 	case MODIFY_EXPR:
1270 	case VA_ARG_EXPR:
1271 	case PREDECREMENT_EXPR:
1272 	case PREINCREMENT_EXPR:
1273 	case POSTDECREMENT_EXPR:
1274 	case POSTINCREMENT_EXPR:
1275 	  /* All of these have side-effects, no matter what their
1276 	     operands are.  */
1277 	  TREE_SIDE_EFFECTS (t) = 1;
1278 	  break;
1279 
1280 	default:
1281 	  break;
1282 	}
1283       break;
1284 
1285     case tcc_exceptional:
1286       switch (code)
1287         {
1288 	case TARGET_OPTION_NODE:
1289 	  TREE_TARGET_OPTION(t)
1290 			    = ggc_cleared_alloc<struct cl_target_option> ();
1291 	  break;
1292 
1293 	case OPTIMIZATION_NODE:
1294 	  TREE_OPTIMIZATION (t)
1295 			    = ggc_cleared_alloc<struct cl_optimization> ();
1296 	  break;
1297 
1298 	default:
1299 	  break;
1300 	}
1301       break;
1302 
1303     default:
1304       /* Other classes need no special treatment.  */
1305       break;
1306     }
1307 
1308   return t;
1309 }
1310 
1311 /* Free tree node.  */
1312 
1313 void
free_node(tree node)1314 free_node (tree node)
1315 {
1316   enum tree_code code = TREE_CODE (node);
1317   if (GATHER_STATISTICS)
1318     {
1319       enum tree_node_kind kind = get_stats_node_kind (code);
1320 
1321       gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1322       gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1323       gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1324 
1325       tree_code_counts[(int) TREE_CODE (node)]--;
1326       tree_node_counts[(int) kind]--;
1327       tree_node_sizes[(int) kind] -= tree_size (node);
1328     }
1329   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1330     vec_free (CONSTRUCTOR_ELTS (node));
1331   else if (code == BLOCK)
1332     vec_free (BLOCK_NONLOCALIZED_VARS (node));
1333   else if (code == TREE_BINFO)
1334     vec_free (BINFO_BASE_ACCESSES (node));
1335   else if (code == OPTIMIZATION_NODE)
1336     cl_optimization_option_free (TREE_OPTIMIZATION (node));
1337   else if (code == TARGET_OPTION_NODE)
1338     cl_target_option_free (TREE_TARGET_OPTION (node));
1339   ggc_free (node);
1340 }
1341 
1342 /* Return a new node with the same contents as NODE except that its
1343    TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */
1344 
1345 tree
copy_node(tree node MEM_STAT_DECL)1346 copy_node (tree node MEM_STAT_DECL)
1347 {
1348   tree t;
1349   enum tree_code code = TREE_CODE (node);
1350   size_t length;
1351 
1352   gcc_assert (code != STATEMENT_LIST);
1353 
1354   length = tree_size (node);
1355   record_node_allocation_statistics (code, length);
1356   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1357   memcpy (t, node, length);
1358 
1359   if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1360     TREE_CHAIN (t) = 0;
1361   TREE_ASM_WRITTEN (t) = 0;
1362   TREE_VISITED (t) = 0;
1363 
1364   if (TREE_CODE_CLASS (code) == tcc_declaration)
1365     {
1366       if (code == DEBUG_EXPR_DECL)
1367 	DECL_UID (t) = --next_debug_decl_uid;
1368       else
1369 	{
1370 	  DECL_UID (t) = allocate_decl_uid ();
1371 	  if (DECL_PT_UID_SET_P (node))
1372 	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
1373 	}
1374       if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1375 	  && DECL_HAS_VALUE_EXPR_P (node))
1376 	{
1377 	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1378 	  DECL_HAS_VALUE_EXPR_P (t) = 1;
1379 	}
1380       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1381       if (VAR_P (node))
1382 	{
1383 	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
1384 	  t->decl_with_vis.symtab_node = NULL;
1385 	}
1386       if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1387 	{
1388 	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1389 	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
1390 	}
1391       if (TREE_CODE (node) == FUNCTION_DECL)
1392 	{
1393 	  DECL_STRUCT_FUNCTION (t) = NULL;
1394 	  t->decl_with_vis.symtab_node = NULL;
1395 	}
1396     }
1397   else if (TREE_CODE_CLASS (code) == tcc_type)
1398     {
1399       TYPE_UID (t) = next_type_uid++;
1400       /* The following is so that the debug code for
1401 	 the copy is different from the original type.
1402 	 The two statements usually duplicate each other
1403 	 (because they clear fields of the same union),
1404 	 but the optimizer should catch that.  */
1405       TYPE_SYMTAB_ADDRESS (t) = 0;
1406       TYPE_SYMTAB_DIE (t) = 0;
1407 
1408       /* Do not copy the values cache.  */
1409       if (TYPE_CACHED_VALUES_P (t))
1410 	{
1411 	  TYPE_CACHED_VALUES_P (t) = 0;
1412 	  TYPE_CACHED_VALUES (t) = NULL_TREE;
1413 	}
1414     }
1415     else if (code == TARGET_OPTION_NODE)
1416       {
1417 	TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1418 	memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1419 		sizeof (struct cl_target_option));
1420       }
1421     else if (code == OPTIMIZATION_NODE)
1422       {
1423 	TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1424 	memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1425 		sizeof (struct cl_optimization));
1426       }
1427 
1428   return t;
1429 }
1430 
1431 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1432    For example, this can copy a list made of TREE_LIST nodes.  */
1433 
1434 tree
copy_list(tree list)1435 copy_list (tree list)
1436 {
1437   tree head;
1438   tree prev, next;
1439 
1440   if (list == 0)
1441     return 0;
1442 
1443   head = prev = copy_node (list);
1444   next = TREE_CHAIN (list);
1445   while (next)
1446     {
1447       TREE_CHAIN (prev) = copy_node (next);
1448       prev = TREE_CHAIN (prev);
1449       next = TREE_CHAIN (next);
1450     }
1451   return head;
1452 }
1453 
1454 
1455 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1456    INTEGER_CST with value CST and type TYPE.   */
1457 
1458 static unsigned int
get_int_cst_ext_nunits(tree type,const wide_int & cst)1459 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1460 {
1461   gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1462   /* We need extra HWIs if CST is an unsigned integer with its
1463      upper bit set.  */
1464   if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1465     return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1466   return cst.get_len ();
1467 }
1468 
1469 /* Return a new INTEGER_CST with value CST and type TYPE.  */
1470 
1471 static tree
build_new_int_cst(tree type,const wide_int & cst)1472 build_new_int_cst (tree type, const wide_int &cst)
1473 {
1474   unsigned int len = cst.get_len ();
1475   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1476   tree nt = make_int_cst (len, ext_len);
1477 
1478   if (len < ext_len)
1479     {
1480       --ext_len;
1481       TREE_INT_CST_ELT (nt, ext_len)
1482 	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1483       for (unsigned int i = len; i < ext_len; ++i)
1484 	TREE_INT_CST_ELT (nt, i) = -1;
1485     }
1486   else if (TYPE_UNSIGNED (type)
1487 	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1488     {
1489       len--;
1490       TREE_INT_CST_ELT (nt, len)
1491 	= zext_hwi (cst.elt (len),
1492 		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1493     }
1494 
1495   for (unsigned int i = 0; i < len; i++)
1496     TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1497   TREE_TYPE (nt) = type;
1498   return nt;
1499 }
1500 
1501 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE.  */
1502 
1503 static tree
build_new_poly_int_cst(tree type,tree (& coeffs)[NUM_POLY_INT_COEFFS]CXX_MEM_STAT_INFO)1504 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1505 			CXX_MEM_STAT_INFO)
1506 {
1507   size_t length = sizeof (struct tree_poly_int_cst);
1508   record_node_allocation_statistics (POLY_INT_CST, length);
1509 
1510   tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1511 
1512   TREE_SET_CODE (t, POLY_INT_CST);
1513   TREE_CONSTANT (t) = 1;
1514   TREE_TYPE (t) = type;
1515   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1516     POLY_INT_CST_COEFF (t, i) = coeffs[i];
1517   return t;
1518 }
1519 
1520 /* Create a constant tree that contains CST sign-extended to TYPE.  */
1521 
1522 tree
build_int_cst(tree type,poly_int64 cst)1523 build_int_cst (tree type, poly_int64 cst)
1524 {
1525   /* Support legacy code.  */
1526   if (!type)
1527     type = integer_type_node;
1528 
1529   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1530 }
1531 
1532 /* Create a constant tree that contains CST zero-extended to TYPE.  */
1533 
1534 tree
build_int_cstu(tree type,poly_uint64 cst)1535 build_int_cstu (tree type, poly_uint64 cst)
1536 {
1537   return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1538 }
1539 
1540 /* Create a constant tree that contains CST sign-extended to TYPE.  */
1541 
1542 tree
build_int_cst_type(tree type,poly_int64 cst)1543 build_int_cst_type (tree type, poly_int64 cst)
1544 {
1545   gcc_assert (type);
1546   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1547 }
1548 
1549 /* Constructs tree in type TYPE from with value given by CST.  Signedness
1550    of CST is assumed to be the same as the signedness of TYPE.  */
1551 
1552 tree
double_int_to_tree(tree type,double_int cst)1553 double_int_to_tree (tree type, double_int cst)
1554 {
1555   return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1556 }
1557 
1558 /* We force the wide_int CST to the range of the type TYPE by sign or
1559    zero extending it.  OVERFLOWABLE indicates if we are interested in
1560    overflow of the value, when >0 we are only interested in signed
1561    overflow, for <0 we are interested in any overflow.  OVERFLOWED
1562    indicates whether overflow has already occurred.  CONST_OVERFLOWED
1563    indicates whether constant overflow has already occurred.  We force
1564    T's value to be within range of T's type (by setting to 0 or 1 all
1565    the bits outside the type's range).  We set TREE_OVERFLOWED if,
1566         OVERFLOWED is nonzero,
1567         or OVERFLOWABLE is >0 and signed overflow occurs
1568         or OVERFLOWABLE is <0 and any overflow occurs
1569    We return a new tree node for the extended wide_int.  The node
1570    is shared if no overflow flags are set.  */
1571 
1572 
1573 tree
force_fit_type(tree type,const poly_wide_int_ref & cst,int overflowable,bool overflowed)1574 force_fit_type (tree type, const poly_wide_int_ref &cst,
1575 		int overflowable, bool overflowed)
1576 {
1577   signop sign = TYPE_SIGN (type);
1578 
1579   /* If we need to set overflow flags, return a new unshared node.  */
1580   if (overflowed || !wi::fits_to_tree_p (cst, type))
1581     {
1582       if (overflowed
1583 	  || overflowable < 0
1584 	  || (overflowable > 0 && sign == SIGNED))
1585 	{
1586 	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1587 						   sign);
1588 	  tree t;
1589 	  if (tmp.is_constant ())
1590 	    t = build_new_int_cst (type, tmp.coeffs[0]);
1591 	  else
1592 	    {
1593 	      tree coeffs[NUM_POLY_INT_COEFFS];
1594 	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1595 		{
1596 		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1597 		  TREE_OVERFLOW (coeffs[i]) = 1;
1598 		}
1599 	      t = build_new_poly_int_cst (type, coeffs);
1600 	    }
1601 	  TREE_OVERFLOW (t) = 1;
1602 	  return t;
1603 	}
1604     }
1605 
1606   /* Else build a shared node.  */
1607   return wide_int_to_tree (type, cst);
1608 }
1609 
1610 /* These are the hash table functions for the hash table of INTEGER_CST
1611    nodes of a sizetype.  */
1612 
1613 /* Return the hash code X, an INTEGER_CST.  */
1614 
1615 hashval_t
hash(tree x)1616 int_cst_hasher::hash (tree x)
1617 {
1618   const_tree const t = x;
1619   hashval_t code = TYPE_UID (TREE_TYPE (t));
1620   int i;
1621 
1622   for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1623     code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1624 
1625   return code;
1626 }
1627 
1628 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1629    is the same as that given by *Y, which is the same.  */
1630 
1631 bool
equal(tree x,tree y)1632 int_cst_hasher::equal (tree x, tree y)
1633 {
1634   const_tree const xt = x;
1635   const_tree const yt = y;
1636 
1637   if (TREE_TYPE (xt) != TREE_TYPE (yt)
1638       || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1639       || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1640     return false;
1641 
1642   for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1643     if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1644       return false;
1645 
1646   return true;
1647 }
1648 
1649 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1650    SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1651    number of slots that can be cached for the type.  */
1652 
1653 static inline tree
cache_wide_int_in_type_cache(tree type,const wide_int & cst,int slot,int max_slots)1654 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1655 			      int slot, int max_slots)
1656 {
1657   gcc_checking_assert (slot >= 0);
1658   /* Initialize cache.  */
1659   if (!TYPE_CACHED_VALUES_P (type))
1660     {
1661       TYPE_CACHED_VALUES_P (type) = 1;
1662       TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1663     }
1664   tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1665   if (!t)
1666     {
1667       /* Create a new shared int.  */
1668       t = build_new_int_cst (type, cst);
1669       TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1670     }
1671   return t;
1672 }
1673 
1674 /* Create an INT_CST node of TYPE and value CST.
1675    The returned node is always shared.  For small integers we use a
1676    per-type vector cache, for larger ones we use a single hash table.
1677    The value is extended from its precision according to the sign of
1678    the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
1679    the upper bits and ensures that hashing and value equality based
1680    upon the underlying HOST_WIDE_INTs works without masking.  */
1681 
1682 static tree
wide_int_to_tree_1(tree type,const wide_int_ref & pcst)1683 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1684 {
1685   tree t;
1686   int ix = -1;
1687   int limit = 0;
1688 
1689   gcc_assert (type);
1690   unsigned int prec = TYPE_PRECISION (type);
1691   signop sgn = TYPE_SIGN (type);
1692 
1693   /* Verify that everything is canonical.  */
1694   int l = pcst.get_len ();
1695   if (l > 1)
1696     {
1697       if (pcst.elt (l - 1) == 0)
1698 	gcc_checking_assert (pcst.elt (l - 2) < 0);
1699       if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1700 	gcc_checking_assert (pcst.elt (l - 2) >= 0);
1701     }
1702 
1703   wide_int cst = wide_int::from (pcst, prec, sgn);
1704   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1705 
1706   enum tree_code code = TREE_CODE (type);
1707   if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1708     {
1709       /* Cache NULL pointer and zero bounds.  */
1710       if (cst == 0)
1711 	ix = 0;
1712       /* Cache upper bounds of pointers.  */
1713       else if (cst == wi::max_value (prec, sgn))
1714 	ix = 1;
1715       /* Cache 1 which is used for a non-zero range.  */
1716       else if (cst == 1)
1717 	ix = 2;
1718 
1719       if (ix >= 0)
1720 	{
1721 	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1722 	  /* Make sure no one is clobbering the shared constant.  */
1723 	  gcc_checking_assert (TREE_TYPE (t) == type
1724 			       && cst == wi::to_wide (t));
1725 	  return t;
1726 	}
1727     }
1728   if (ext_len == 1)
1729     {
1730       /* We just need to store a single HOST_WIDE_INT.  */
1731       HOST_WIDE_INT hwi;
1732       if (TYPE_UNSIGNED (type))
1733 	hwi = cst.to_uhwi ();
1734       else
1735 	hwi = cst.to_shwi ();
1736 
1737       switch (code)
1738 	{
1739 	case NULLPTR_TYPE:
1740 	  gcc_assert (hwi == 0);
1741 	  /* Fallthru.  */
1742 
1743 	case POINTER_TYPE:
1744 	case REFERENCE_TYPE:
1745 	  /* Ignore pointers, as they were already handled above.  */
1746 	  break;
1747 
1748 	case BOOLEAN_TYPE:
1749 	  /* Cache false or true.  */
1750 	  limit = 2;
1751 	  if (IN_RANGE (hwi, 0, 1))
1752 	    ix = hwi;
1753 	  break;
1754 
1755 	case INTEGER_TYPE:
1756 	case OFFSET_TYPE:
1757 	  if (TYPE_SIGN (type) == UNSIGNED)
1758 	    {
1759 	      /* Cache [0, N).  */
1760 	      limit = param_integer_share_limit;
1761 	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1762 		ix = hwi;
1763 	    }
1764 	  else
1765 	    {
1766 	      /* Cache [-1, N).  */
1767 	      limit = param_integer_share_limit + 1;
1768 	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1769 		ix = hwi + 1;
1770 	    }
1771 	  break;
1772 
1773 	case ENUMERAL_TYPE:
1774 	  break;
1775 
1776 	default:
1777 	  gcc_unreachable ();
1778 	}
1779 
1780       if (ix >= 0)
1781 	{
1782 	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1783 	  /* Make sure no one is clobbering the shared constant.  */
1784 	  gcc_checking_assert (TREE_TYPE (t) == type
1785 			       && TREE_INT_CST_NUNITS (t) == 1
1786 			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1787 			       && TREE_INT_CST_EXT_NUNITS (t) == 1
1788 			       && TREE_INT_CST_ELT (t, 0) == hwi);
1789 	  return t;
1790 	}
1791       else
1792 	{
1793 	  /* Use the cache of larger shared ints, using int_cst_node as
1794 	     a temporary.  */
1795 
1796 	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1797 	  TREE_TYPE (int_cst_node) = type;
1798 
1799 	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1800 	  t = *slot;
1801 	  if (!t)
1802 	    {
1803 	      /* Insert this one into the hash table.  */
1804 	      t = int_cst_node;
1805 	      *slot = t;
1806 	      /* Make a new node for next time round.  */
1807 	      int_cst_node = make_int_cst (1, 1);
1808 	    }
1809 	}
1810     }
1811   else
1812     {
1813       /* The value either hashes properly or we drop it on the floor
1814 	 for the gc to take care of.  There will not be enough of them
1815 	 to worry about.  */
1816 
1817       tree nt = build_new_int_cst (type, cst);
1818       tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1819       t = *slot;
1820       if (!t)
1821 	{
1822 	  /* Insert this one into the hash table.  */
1823 	  t = nt;
1824 	  *slot = t;
1825 	}
1826       else
1827 	ggc_free (nt);
1828     }
1829 
1830   return t;
1831 }
1832 
1833 hashval_t
hash(tree t)1834 poly_int_cst_hasher::hash (tree t)
1835 {
1836   inchash::hash hstate;
1837 
1838   hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1839   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1840     hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1841 
1842   return hstate.end ();
1843 }
1844 
1845 bool
equal(tree x,const compare_type & y)1846 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1847 {
1848   if (TREE_TYPE (x) != y.first)
1849     return false;
1850   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1851     if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1852       return false;
1853   return true;
1854 }
1855 
1856 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1857    The elements must also have type TYPE.  */
1858 
1859 tree
build_poly_int_cst(tree type,const poly_wide_int_ref & values)1860 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1861 {
1862   unsigned int prec = TYPE_PRECISION (type);
1863   gcc_assert (prec <= values.coeffs[0].get_precision ());
1864   poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1865 
1866   inchash::hash h;
1867   h.add_int (TYPE_UID (type));
1868   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1869     h.add_wide_int (c.coeffs[i]);
1870   poly_int_cst_hasher::compare_type comp (type, &c);
1871   tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1872 							     INSERT);
1873   if (*slot == NULL_TREE)
1874     {
1875       tree coeffs[NUM_POLY_INT_COEFFS];
1876       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1877 	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1878       *slot = build_new_poly_int_cst (type, coeffs);
1879     }
1880   return *slot;
1881 }
1882 
1883 /* Create a constant tree with value VALUE in type TYPE.  */
1884 
1885 tree
wide_int_to_tree(tree type,const poly_wide_int_ref & value)1886 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1887 {
1888   if (value.is_constant ())
1889     return wide_int_to_tree_1 (type, value.coeffs[0]);
1890   return build_poly_int_cst (type, value);
1891 }
1892 
1893 /* Insert INTEGER_CST T into a cache of integer constants.  And return
1894    the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
1895    is false, and T falls into the type's 'smaller values' range, there
1896    cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
1897    or the value is large, should an existing entry exist, it is
1898    returned (rather than inserting T).  */
1899 
1900 tree
cache_integer_cst(tree t,bool might_duplicate ATTRIBUTE_UNUSED)1901 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1902 {
1903   tree type = TREE_TYPE (t);
1904   int ix = -1;
1905   int limit = 0;
1906   int prec = TYPE_PRECISION (type);
1907 
1908   gcc_assert (!TREE_OVERFLOW (t));
1909 
1910   /* The caching indices here must match those in
1911      wide_int_to_type_1.  */
1912   switch (TREE_CODE (type))
1913     {
1914     case NULLPTR_TYPE:
1915       gcc_checking_assert (integer_zerop (t));
1916       /* Fallthru.  */
1917 
1918     case POINTER_TYPE:
1919     case REFERENCE_TYPE:
1920       {
1921 	if (integer_zerop (t))
1922 	  ix = 0;
1923 	else if (integer_onep (t))
1924 	  ix = 2;
1925 
1926 	if (ix >= 0)
1927 	  limit = 3;
1928       }
1929       break;
1930 
1931     case BOOLEAN_TYPE:
1932       /* Cache false or true.  */
1933       limit = 2;
1934       if (wi::ltu_p (wi::to_wide (t), 2))
1935 	ix = TREE_INT_CST_ELT (t, 0);
1936       break;
1937 
1938     case INTEGER_TYPE:
1939     case OFFSET_TYPE:
1940       if (TYPE_UNSIGNED (type))
1941 	{
1942 	  /* Cache 0..N */
1943 	  limit = param_integer_share_limit;
1944 
1945 	  /* This is a little hokie, but if the prec is smaller than
1946 	     what is necessary to hold param_integer_share_limit, then the
1947 	     obvious test will not get the correct answer.  */
1948 	  if (prec < HOST_BITS_PER_WIDE_INT)
1949 	    {
1950 	      if (tree_to_uhwi (t)
1951 		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1952 		ix = tree_to_uhwi (t);
1953 	    }
1954 	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1955 	    ix = tree_to_uhwi (t);
1956 	}
1957       else
1958 	{
1959 	  /* Cache -1..N */
1960 	  limit = param_integer_share_limit + 1;
1961 
1962 	  if (integer_minus_onep (t))
1963 	    ix = 0;
1964 	  else if (!wi::neg_p (wi::to_wide (t)))
1965 	    {
1966 	      if (prec < HOST_BITS_PER_WIDE_INT)
1967 		{
1968 		  if (tree_to_shwi (t) < param_integer_share_limit)
1969 		    ix = tree_to_shwi (t) + 1;
1970 		}
1971 	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1972 		ix = tree_to_shwi (t) + 1;
1973 	    }
1974 	}
1975       break;
1976 
1977     case ENUMERAL_TYPE:
1978       /* The slot used by TYPE_CACHED_VALUES is used for the enum
1979 	 members.  */
1980       break;
1981 
1982     default:
1983       gcc_unreachable ();
1984     }
1985 
1986   if (ix >= 0)
1987     {
1988       /* Look for it in the type's vector of small shared ints.  */
1989       if (!TYPE_CACHED_VALUES_P (type))
1990 	{
1991 	  TYPE_CACHED_VALUES_P (type) = 1;
1992 	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1993 	}
1994 
1995       if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1996 	{
1997 	  gcc_checking_assert (might_duplicate);
1998 	  t = r;
1999 	}
2000       else
2001 	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
2002     }
2003   else
2004     {
2005       /* Use the cache of larger shared ints.  */
2006       tree *slot = int_cst_hash_table->find_slot (t, INSERT);
2007       if (tree r = *slot)
2008 	{
2009 	  /* If there is already an entry for the number verify it's the
2010 	     same value.  */
2011 	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
2012 	  /* And return the cached value.  */
2013 	  t = r;
2014 	}
2015       else
2016 	/* Otherwise insert this one into the hash table.  */
2017 	*slot = t;
2018     }
2019 
2020   return t;
2021 }
2022 
2023 
2024 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2025    and the rest are zeros.  */
2026 
2027 tree
build_low_bits_mask(tree type,unsigned bits)2028 build_low_bits_mask (tree type, unsigned bits)
2029 {
2030   gcc_assert (bits <= TYPE_PRECISION (type));
2031 
2032   return wide_int_to_tree (type, wi::mask (bits, false,
2033 					   TYPE_PRECISION (type)));
2034 }
2035 
2036 /* Checks that X is integer constant that can be expressed in (unsigned)
2037    HOST_WIDE_INT without loss of precision.  */
2038 
2039 bool
cst_and_fits_in_hwi(const_tree x)2040 cst_and_fits_in_hwi (const_tree x)
2041 {
2042   return (TREE_CODE (x) == INTEGER_CST
2043 	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2044 }
2045 
2046 /* Build a newly constructed VECTOR_CST with the given values of
2047    (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN.  */
2048 
2049 tree
make_vector(unsigned log2_npatterns,unsigned int nelts_per_pattern MEM_STAT_DECL)2050 make_vector (unsigned log2_npatterns,
2051 	     unsigned int nelts_per_pattern MEM_STAT_DECL)
2052 {
2053   gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2054   tree t;
2055   unsigned npatterns = 1 << log2_npatterns;
2056   unsigned encoded_nelts = npatterns * nelts_per_pattern;
2057   unsigned length = (sizeof (struct tree_vector)
2058 		     + (encoded_nelts - 1) * sizeof (tree));
2059 
2060   record_node_allocation_statistics (VECTOR_CST, length);
2061 
2062   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2063 
2064   TREE_SET_CODE (t, VECTOR_CST);
2065   TREE_CONSTANT (t) = 1;
2066   VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2067   VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2068 
2069   return t;
2070 }
2071 
2072 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2073    are extracted from V, a vector of CONSTRUCTOR_ELT.  */
2074 
2075 tree
build_vector_from_ctor(tree type,const vec<constructor_elt,va_gc> * v)2076 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2077 {
2078   if (vec_safe_length (v) == 0)
2079     return build_zero_cst (type);
2080 
2081   unsigned HOST_WIDE_INT idx, nelts;
2082   tree value;
2083 
2084   /* We can't construct a VECTOR_CST for a variable number of elements.  */
2085   nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2086   tree_vector_builder vec (type, nelts, 1);
2087   FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2088     {
2089       if (TREE_CODE (value) == VECTOR_CST)
2090 	{
2091 	  /* If NELTS is constant then this must be too.  */
2092 	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2093 	  for (unsigned i = 0; i < sub_nelts; ++i)
2094 	    vec.quick_push (VECTOR_CST_ELT (value, i));
2095 	}
2096       else
2097 	vec.quick_push (value);
2098     }
2099   while (vec.length () < nelts)
2100     vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2101 
2102   return vec.build ();
2103 }
2104 
2105 /* Build a vector of type VECTYPE where all the elements are SCs.  */
2106 tree
build_vector_from_val(tree vectype,tree sc)2107 build_vector_from_val (tree vectype, tree sc)
2108 {
2109   unsigned HOST_WIDE_INT i, nunits;
2110 
2111   if (sc == error_mark_node)
2112     return sc;
2113 
2114   /* Verify that the vector type is suitable for SC.  Note that there
2115      is some inconsistency in the type-system with respect to restrict
2116      qualifications of pointers.  Vector types always have a main-variant
2117      element type and the qualification is applied to the vector-type.
2118      So TREE_TYPE (vector-type) does not return a properly qualified
2119      vector element-type.  */
2120   gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2121 					   TREE_TYPE (vectype)));
2122 
2123   if (CONSTANT_CLASS_P (sc))
2124     {
2125       tree_vector_builder v (vectype, 1, 1);
2126       v.quick_push (sc);
2127       return v.build ();
2128     }
2129   else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2130     return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2131   else
2132     {
2133       vec<constructor_elt, va_gc> *v;
2134       vec_alloc (v, nunits);
2135       for (i = 0; i < nunits; ++i)
2136 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2137       return build_constructor (vectype, v);
2138     }
2139 }
2140 
2141 /* If TYPE is not a vector type, just return SC, otherwise return
2142    build_vector_from_val (TYPE, SC).  */
2143 
2144 tree
build_uniform_cst(tree type,tree sc)2145 build_uniform_cst (tree type, tree sc)
2146 {
2147   if (!VECTOR_TYPE_P (type))
2148     return sc;
2149 
2150   return build_vector_from_val (type, sc);
2151 }
2152 
2153 /* Build a vector series of type TYPE in which element I has the value
2154    BASE + I * STEP.  The result is a constant if BASE and STEP are constant
2155    and a VEC_SERIES_EXPR otherwise.  */
2156 
2157 tree
build_vec_series(tree type,tree base,tree step)2158 build_vec_series (tree type, tree base, tree step)
2159 {
2160   if (integer_zerop (step))
2161     return build_vector_from_val (type, base);
2162   if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2163     {
2164       tree_vector_builder builder (type, 1, 3);
2165       tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2166 				    wi::to_wide (base) + wi::to_wide (step));
2167       tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2168 				    wi::to_wide (elt1) + wi::to_wide (step));
2169       builder.quick_push (base);
2170       builder.quick_push (elt1);
2171       builder.quick_push (elt2);
2172       return builder.build ();
2173     }
2174   return build2 (VEC_SERIES_EXPR, type, base, step);
2175 }
2176 
2177 /* Return a vector with the same number of units and number of bits
2178    as VEC_TYPE, but in which the elements are a linear series of unsigned
2179    integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */
2180 
2181 tree
build_index_vector(tree vec_type,poly_uint64 base,poly_uint64 step)2182 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2183 {
2184   tree index_vec_type = vec_type;
2185   tree index_elt_type = TREE_TYPE (vec_type);
2186   poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2187   if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2188     {
2189       index_elt_type = build_nonstandard_integer_type
2190 	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2191       index_vec_type = build_vector_type (index_elt_type, nunits);
2192     }
2193 
2194   tree_vector_builder v (index_vec_type, 1, 3);
2195   for (unsigned int i = 0; i < 3; ++i)
2196     v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2197   return v.build ();
2198 }
2199 
2200 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2201    elements are A and the rest are B.  */
2202 
2203 tree
build_vector_a_then_b(tree vec_type,unsigned int num_a,tree a,tree b)2204 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2205 {
2206   gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2207   unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2208   /* Optimize the constant case.  */
2209   if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2210     count /= 2;
2211   tree_vector_builder builder (vec_type, count, 2);
2212   for (unsigned int i = 0; i < count * 2; ++i)
2213     builder.quick_push (i < num_a ? a : b);
2214   return builder.build ();
2215 }
2216 
2217 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2218    calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */
2219 
2220 void
recompute_constructor_flags(tree c)2221 recompute_constructor_flags (tree c)
2222 {
2223   unsigned int i;
2224   tree val;
2225   bool constant_p = true;
2226   bool side_effects_p = false;
2227   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2228 
2229   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2230     {
2231       /* Mostly ctors will have elts that don't have side-effects, so
2232 	 the usual case is to scan all the elements.  Hence a single
2233 	 loop for both const and side effects, rather than one loop
2234 	 each (with early outs).  */
2235       if (!TREE_CONSTANT (val))
2236 	constant_p = false;
2237       if (TREE_SIDE_EFFECTS (val))
2238 	side_effects_p = true;
2239     }
2240 
2241   TREE_SIDE_EFFECTS (c) = side_effects_p;
2242   TREE_CONSTANT (c) = constant_p;
2243 }
2244 
2245 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2246    CONSTRUCTOR C.  */
2247 
2248 void
verify_constructor_flags(tree c)2249 verify_constructor_flags (tree c)
2250 {
2251   unsigned int i;
2252   tree val;
2253   bool constant_p = TREE_CONSTANT (c);
2254   bool side_effects_p = TREE_SIDE_EFFECTS (c);
2255   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2256 
2257   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2258     {
2259       if (constant_p && !TREE_CONSTANT (val))
2260 	internal_error ("non-constant element in constant CONSTRUCTOR");
2261       if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2262 	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2263     }
2264 }
2265 
2266 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2267    are in the vec pointed to by VALS.  */
2268 tree
build_constructor(tree type,vec<constructor_elt,va_gc> * vals MEM_STAT_DECL)2269 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2270 {
2271   tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2272 
2273   TREE_TYPE (c) = type;
2274   CONSTRUCTOR_ELTS (c) = vals;
2275 
2276   recompute_constructor_flags (c);
2277 
2278   return c;
2279 }
2280 
2281 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2282    INDEX and VALUE.  */
2283 tree
build_constructor_single(tree type,tree index,tree value)2284 build_constructor_single (tree type, tree index, tree value)
2285 {
2286   vec<constructor_elt, va_gc> *v;
2287   constructor_elt elt = {index, value};
2288 
2289   vec_alloc (v, 1);
2290   v->quick_push (elt);
2291 
2292   return build_constructor (type, v);
2293 }
2294 
2295 
2296 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2297    are in a list pointed to by VALS.  */
2298 tree
build_constructor_from_list(tree type,tree vals)2299 build_constructor_from_list (tree type, tree vals)
2300 {
2301   tree t;
2302   vec<constructor_elt, va_gc> *v = NULL;
2303 
2304   if (vals)
2305     {
2306       vec_alloc (v, list_length (vals));
2307       for (t = vals; t; t = TREE_CHAIN (t))
2308 	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2309     }
2310 
2311   return build_constructor (type, v);
2312 }
2313 
2314 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2315    are in a vector pointed to by VALS.  Note that the TREE_PURPOSE
2316    fields in the constructor remain null.  */
2317 
2318 tree
build_constructor_from_vec(tree type,const vec<tree,va_gc> * vals)2319 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2320 {
2321   vec<constructor_elt, va_gc> *v = NULL;
2322 
2323   for (tree t : vals)
2324     CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2325 
2326   return build_constructor (type, v);
2327 }
2328 
2329 /* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
2330    of elements, provided as index/value pairs.  */
2331 
2332 tree
build_constructor_va(tree type,int nelts,...)2333 build_constructor_va (tree type, int nelts, ...)
2334 {
2335   vec<constructor_elt, va_gc> *v = NULL;
2336   va_list p;
2337 
2338   va_start (p, nelts);
2339   vec_alloc (v, nelts);
2340   while (nelts--)
2341     {
2342       tree index = va_arg (p, tree);
2343       tree value = va_arg (p, tree);
2344       CONSTRUCTOR_APPEND_ELT (v, index, value);
2345     }
2346   va_end (p);
2347   return build_constructor (type, v);
2348 }
2349 
2350 /* Return a node of type TYPE for which TREE_CLOBBER_P is true.  */
2351 
2352 tree
build_clobber(tree type,enum clobber_kind kind)2353 build_clobber (tree type, enum clobber_kind kind)
2354 {
2355   tree clobber = build_constructor (type, NULL);
2356   TREE_THIS_VOLATILE (clobber) = true;
2357   CLOBBER_KIND (clobber) = kind;
2358   return clobber;
2359 }
2360 
2361 /* Return a new FIXED_CST node whose type is TYPE and value is F.  */
2362 
2363 tree
build_fixed(tree type,FIXED_VALUE_TYPE f)2364 build_fixed (tree type, FIXED_VALUE_TYPE f)
2365 {
2366   tree v;
2367   FIXED_VALUE_TYPE *fp;
2368 
2369   v = make_node (FIXED_CST);
2370   fp = ggc_alloc<fixed_value> ();
2371   memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2372 
2373   TREE_TYPE (v) = type;
2374   TREE_FIXED_CST_PTR (v) = fp;
2375   return v;
2376 }
2377 
2378 /* Return a new REAL_CST node whose type is TYPE and value is D.  */
2379 
2380 tree
build_real(tree type,REAL_VALUE_TYPE d)2381 build_real (tree type, REAL_VALUE_TYPE d)
2382 {
2383   tree v;
2384   REAL_VALUE_TYPE *dp;
2385   int overflow = 0;
2386 
2387   /* dconst{1,2,m1,half} are used in various places in
2388      the middle-end and optimizers, allow them here
2389      even for decimal floating point types as an exception
2390      by converting them to decimal.  */
2391   if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2392       && d.cl == rvc_normal
2393       && !d.decimal)
2394     {
2395       if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2396 	decimal_real_from_string (&d, "1");
2397       else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2398 	decimal_real_from_string (&d, "2");
2399       else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2400 	decimal_real_from_string (&d, "-1");
2401       else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2402 	decimal_real_from_string (&d, "0.5");
2403       else
2404 	gcc_unreachable ();
2405     }
2406 
2407   /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2408      Consider doing it via real_convert now.  */
2409 
2410   v = make_node (REAL_CST);
2411   dp = ggc_alloc<real_value> ();
2412   memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2413 
2414   TREE_TYPE (v) = type;
2415   TREE_REAL_CST_PTR (v) = dp;
2416   TREE_OVERFLOW (v) = overflow;
2417   return v;
2418 }
2419 
2420 /* Like build_real, but first truncate D to the type.  */
2421 
2422 tree
build_real_truncate(tree type,REAL_VALUE_TYPE d)2423 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2424 {
2425   return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2426 }
2427 
2428 /* Return a new REAL_CST node whose type is TYPE
2429    and whose value is the integer value of the INTEGER_CST node I.  */
2430 
2431 REAL_VALUE_TYPE
real_value_from_int_cst(const_tree type,const_tree i)2432 real_value_from_int_cst (const_tree type, const_tree i)
2433 {
2434   REAL_VALUE_TYPE d;
2435 
2436   /* Clear all bits of the real value type so that we can later do
2437      bitwise comparisons to see if two values are the same.  */
2438   memset (&d, 0, sizeof d);
2439 
2440   real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2441 		     TYPE_SIGN (TREE_TYPE (i)));
2442   return d;
2443 }
2444 
2445 /* Given a tree representing an integer constant I, return a tree
2446    representing the same value as a floating-point constant of type TYPE.  */
2447 
2448 tree
build_real_from_int_cst(tree type,const_tree i)2449 build_real_from_int_cst (tree type, const_tree i)
2450 {
2451   tree v;
2452   int overflow = TREE_OVERFLOW (i);
2453 
2454   v = build_real (type, real_value_from_int_cst (type, i));
2455 
2456   TREE_OVERFLOW (v) |= overflow;
2457   return v;
2458 }
2459 
2460 /* Return a new REAL_CST node whose type is TYPE
2461    and whose value is the integer value I which has sign SGN.  */
2462 
2463 tree
build_real_from_wide(tree type,const wide_int_ref & i,signop sgn)2464 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2465 {
2466   REAL_VALUE_TYPE d;
2467 
2468   /* Clear all bits of the real value type so that we can later do
2469      bitwise comparisons to see if two values are the same.  */
2470   memset (&d, 0, sizeof d);
2471 
2472   real_from_integer (&d, TYPE_MODE (type), i, sgn);
2473   return build_real (type, d);
2474 }
2475 
2476 /* Return a newly constructed STRING_CST node whose value is the LEN
2477    characters at STR when STR is nonnull, or all zeros otherwise.
2478    Note that for a C string literal, LEN should include the trailing NUL.
2479    The TREE_TYPE is not initialized.  */
2480 
2481 tree
build_string(unsigned len,const char * str)2482 build_string (unsigned len, const char *str /*= NULL */)
2483 {
2484   /* Do not waste bytes provided by padding of struct tree_string.  */
2485   unsigned size = len + offsetof (struct tree_string, str) + 1;
2486 
2487   record_node_allocation_statistics (STRING_CST, size);
2488 
2489   tree s = (tree) ggc_internal_alloc (size);
2490 
2491   memset (s, 0, sizeof (struct tree_typed));
2492   TREE_SET_CODE (s, STRING_CST);
2493   TREE_CONSTANT (s) = 1;
2494   TREE_STRING_LENGTH (s) = len;
2495   if (str)
2496     memcpy (s->string.str, str, len);
2497   else
2498     memset (s->string.str, 0, len);
2499   s->string.str[len] = '\0';
2500 
2501   return s;
2502 }
2503 
2504 /* Return a newly constructed COMPLEX_CST node whose value is
2505    specified by the real and imaginary parts REAL and IMAG.
2506    Both REAL and IMAG should be constant nodes.  TYPE, if specified,
2507    will be the type of the COMPLEX_CST; otherwise a new type will be made.  */
2508 
2509 tree
build_complex(tree type,tree real,tree imag)2510 build_complex (tree type, tree real, tree imag)
2511 {
2512   gcc_assert (CONSTANT_CLASS_P (real));
2513   gcc_assert (CONSTANT_CLASS_P (imag));
2514 
2515   tree t = make_node (COMPLEX_CST);
2516 
2517   TREE_REALPART (t) = real;
2518   TREE_IMAGPART (t) = imag;
2519   TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2520   TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2521   return t;
2522 }
2523 
2524 /* Build a complex (inf +- 0i), such as for the result of cproj.
2525    TYPE is the complex tree type of the result.  If NEG is true, the
2526    imaginary zero is negative.  */
2527 
2528 tree
build_complex_inf(tree type,bool neg)2529 build_complex_inf (tree type, bool neg)
2530 {
2531   REAL_VALUE_TYPE rinf, rzero = dconst0;
2532 
2533   real_inf (&rinf);
2534   rzero.sign = neg;
2535   return build_complex (type, build_real (TREE_TYPE (type), rinf),
2536 			build_real (TREE_TYPE (type), rzero));
2537 }
2538 
2539 /* Return the constant 1 in type TYPE.  If TYPE has several elements, each
2540    element is set to 1.  In particular, this is 1 + i for complex types.  */
2541 
2542 tree
build_each_one_cst(tree type)2543 build_each_one_cst (tree type)
2544 {
2545   if (TREE_CODE (type) == COMPLEX_TYPE)
2546     {
2547       tree scalar = build_one_cst (TREE_TYPE (type));
2548       return build_complex (type, scalar, scalar);
2549     }
2550   else
2551     return build_one_cst (type);
2552 }
2553 
2554 /* Return a constant of arithmetic type TYPE which is the
2555    multiplicative identity of the set TYPE.  */
2556 
2557 tree
build_one_cst(tree type)2558 build_one_cst (tree type)
2559 {
2560   switch (TREE_CODE (type))
2561     {
2562     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2563     case POINTER_TYPE: case REFERENCE_TYPE:
2564     case OFFSET_TYPE:
2565       return build_int_cst (type, 1);
2566 
2567     case REAL_TYPE:
2568       return build_real (type, dconst1);
2569 
2570     case FIXED_POINT_TYPE:
2571       /* We can only generate 1 for accum types.  */
2572       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2573       return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2574 
2575     case VECTOR_TYPE:
2576       {
2577 	tree scalar = build_one_cst (TREE_TYPE (type));
2578 
2579 	return build_vector_from_val (type, scalar);
2580       }
2581 
2582     case COMPLEX_TYPE:
2583       return build_complex (type,
2584 			    build_one_cst (TREE_TYPE (type)),
2585 			    build_zero_cst (TREE_TYPE (type)));
2586 
2587     default:
2588       gcc_unreachable ();
2589     }
2590 }
2591 
2592 /* Return an integer of type TYPE containing all 1's in as much precision as
2593    it contains, or a complex or vector whose subparts are such integers.  */
2594 
2595 tree
build_all_ones_cst(tree type)2596 build_all_ones_cst (tree type)
2597 {
2598   if (TREE_CODE (type) == COMPLEX_TYPE)
2599     {
2600       tree scalar = build_all_ones_cst (TREE_TYPE (type));
2601       return build_complex (type, scalar, scalar);
2602     }
2603   else
2604     return build_minus_one_cst (type);
2605 }
2606 
2607 /* Return a constant of arithmetic type TYPE which is the
2608    opposite of the multiplicative identity of the set TYPE.  */
2609 
2610 tree
build_minus_one_cst(tree type)2611 build_minus_one_cst (tree type)
2612 {
2613   switch (TREE_CODE (type))
2614     {
2615     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2616     case POINTER_TYPE: case REFERENCE_TYPE:
2617     case OFFSET_TYPE:
2618       return build_int_cst (type, -1);
2619 
2620     case REAL_TYPE:
2621       return build_real (type, dconstm1);
2622 
2623     case FIXED_POINT_TYPE:
2624       /* We can only generate 1 for accum types.  */
2625       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2626       return build_fixed (type,
2627 			  fixed_from_double_int (double_int_minus_one,
2628 						 SCALAR_TYPE_MODE (type)));
2629 
2630     case VECTOR_TYPE:
2631       {
2632 	tree scalar = build_minus_one_cst (TREE_TYPE (type));
2633 
2634 	return build_vector_from_val (type, scalar);
2635       }
2636 
2637     case COMPLEX_TYPE:
2638       return build_complex (type,
2639 			    build_minus_one_cst (TREE_TYPE (type)),
2640 			    build_zero_cst (TREE_TYPE (type)));
2641 
2642     default:
2643       gcc_unreachable ();
2644     }
2645 }
2646 
2647 /* Build 0 constant of type TYPE.  This is used by constructor folding
2648    and thus the constant should be represented in memory by
2649    zero(es).  */
2650 
2651 tree
build_zero_cst(tree type)2652 build_zero_cst (tree type)
2653 {
2654   switch (TREE_CODE (type))
2655     {
2656     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2657     case POINTER_TYPE: case REFERENCE_TYPE:
2658     case OFFSET_TYPE: case NULLPTR_TYPE:
2659       return build_int_cst (type, 0);
2660 
2661     case REAL_TYPE:
2662       return build_real (type, dconst0);
2663 
2664     case FIXED_POINT_TYPE:
2665       return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2666 
2667     case VECTOR_TYPE:
2668       {
2669 	tree scalar = build_zero_cst (TREE_TYPE (type));
2670 
2671 	return build_vector_from_val (type, scalar);
2672       }
2673 
2674     case COMPLEX_TYPE:
2675       {
2676 	tree zero = build_zero_cst (TREE_TYPE (type));
2677 
2678 	return build_complex (type, zero, zero);
2679       }
2680 
2681     default:
2682       if (!AGGREGATE_TYPE_P (type))
2683 	return fold_convert (type, integer_zero_node);
2684       return build_constructor (type, NULL);
2685     }
2686 }
2687 
2688 
2689 /* Build a BINFO with LEN language slots.  */
2690 
2691 tree
make_tree_binfo(unsigned base_binfos MEM_STAT_DECL)2692 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2693 {
2694   tree t;
2695   size_t length = (offsetof (struct tree_binfo, base_binfos)
2696 		   + vec<tree, va_gc>::embedded_size (base_binfos));
2697 
2698   record_node_allocation_statistics (TREE_BINFO, length);
2699 
2700   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2701 
2702   memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2703 
2704   TREE_SET_CODE (t, TREE_BINFO);
2705 
2706   BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2707 
2708   return t;
2709 }
2710 
2711 /* Create a CASE_LABEL_EXPR tree node and return it.  */
2712 
2713 tree
build_case_label(tree low_value,tree high_value,tree label_decl)2714 build_case_label (tree low_value, tree high_value, tree label_decl)
2715 {
2716   tree t = make_node (CASE_LABEL_EXPR);
2717 
2718   TREE_TYPE (t) = void_type_node;
2719   SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2720 
2721   CASE_LOW (t) = low_value;
2722   CASE_HIGH (t) = high_value;
2723   CASE_LABEL (t) = label_decl;
2724   CASE_CHAIN (t) = NULL_TREE;
2725 
2726   return t;
2727 }
2728 
2729 /* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
2730    values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2731    The latter determines the length of the HOST_WIDE_INT vector.  */
2732 
2733 tree
make_int_cst(int len,int ext_len MEM_STAT_DECL)2734 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2735 {
2736   tree t;
2737   int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2738 		+ sizeof (struct tree_int_cst));
2739 
2740   gcc_assert (len);
2741   record_node_allocation_statistics (INTEGER_CST, length);
2742 
2743   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2744 
2745   TREE_SET_CODE (t, INTEGER_CST);
2746   TREE_INT_CST_NUNITS (t) = len;
2747   TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2748   /* to_offset can only be applied to trees that are offset_int-sized
2749      or smaller.  EXT_LEN is correct if it fits, otherwise the constant
2750      must be exactly the precision of offset_int and so LEN is correct.  */
2751   if (ext_len <= OFFSET_INT_ELTS)
2752     TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2753   else
2754     TREE_INT_CST_OFFSET_NUNITS (t) = len;
2755 
2756   TREE_CONSTANT (t) = 1;
2757 
2758   return t;
2759 }
2760 
2761 /* Build a newly constructed TREE_VEC node of length LEN.  */
2762 
2763 tree
make_tree_vec(int len MEM_STAT_DECL)2764 make_tree_vec (int len MEM_STAT_DECL)
2765 {
2766   tree t;
2767   size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2768 
2769   record_node_allocation_statistics (TREE_VEC, length);
2770 
2771   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2772 
2773   TREE_SET_CODE (t, TREE_VEC);
2774   TREE_VEC_LENGTH (t) = len;
2775 
2776   return t;
2777 }
2778 
2779 /* Grow a TREE_VEC node to new length LEN.  */
2780 
2781 tree
grow_tree_vec(tree v,int len MEM_STAT_DECL)2782 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2783 {
2784   gcc_assert (TREE_CODE (v) == TREE_VEC);
2785 
2786   int oldlen = TREE_VEC_LENGTH (v);
2787   gcc_assert (len > oldlen);
2788 
2789   size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2790   size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2791 
2792   record_node_allocation_statistics (TREE_VEC, length - oldlength);
2793 
2794   v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2795 
2796   TREE_VEC_LENGTH (v) = len;
2797 
2798   return v;
2799 }
2800 
2801 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2802    fixed, and scalar, complex or vector.  */
2803 
2804 bool
zerop(const_tree expr)2805 zerop (const_tree expr)
2806 {
2807   return (integer_zerop (expr)
2808 	  || real_zerop (expr)
2809 	  || fixed_zerop (expr));
2810 }
2811 
2812 /* Return 1 if EXPR is the integer constant zero or a complex constant
2813    of zero, or a location wrapper for such a constant.  */
2814 
2815 bool
integer_zerop(const_tree expr)2816 integer_zerop (const_tree expr)
2817 {
2818   STRIP_ANY_LOCATION_WRAPPER (expr);
2819 
2820   switch (TREE_CODE (expr))
2821     {
2822     case INTEGER_CST:
2823       return wi::to_wide (expr) == 0;
2824     case COMPLEX_CST:
2825       return (integer_zerop (TREE_REALPART (expr))
2826 	      && integer_zerop (TREE_IMAGPART (expr)));
2827     case VECTOR_CST:
2828       return (VECTOR_CST_NPATTERNS (expr) == 1
2829 	      && VECTOR_CST_DUPLICATE_P (expr)
2830 	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2831     default:
2832       return false;
2833     }
2834 }
2835 
2836 /* Return 1 if EXPR is the integer constant one or the corresponding
2837    complex constant, or a location wrapper for such a constant.  */
2838 
2839 bool
integer_onep(const_tree expr)2840 integer_onep (const_tree expr)
2841 {
2842   STRIP_ANY_LOCATION_WRAPPER (expr);
2843 
2844   switch (TREE_CODE (expr))
2845     {
2846     case INTEGER_CST:
2847       return wi::eq_p (wi::to_widest (expr), 1);
2848     case COMPLEX_CST:
2849       return (integer_onep (TREE_REALPART (expr))
2850 	      && integer_zerop (TREE_IMAGPART (expr)));
2851     case VECTOR_CST:
2852       return (VECTOR_CST_NPATTERNS (expr) == 1
2853 	      && VECTOR_CST_DUPLICATE_P (expr)
2854 	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2855     default:
2856       return false;
2857     }
2858 }
2859 
2860 /* Return 1 if EXPR is the integer constant one.  For complex and vector,
2861    return 1 if every piece is the integer constant one.
2862    Also return 1 for location wrappers for such a constant.  */
2863 
2864 bool
integer_each_onep(const_tree expr)2865 integer_each_onep (const_tree expr)
2866 {
2867   STRIP_ANY_LOCATION_WRAPPER (expr);
2868 
2869   if (TREE_CODE (expr) == COMPLEX_CST)
2870     return (integer_onep (TREE_REALPART (expr))
2871 	    && integer_onep (TREE_IMAGPART (expr)));
2872   else
2873     return integer_onep (expr);
2874 }
2875 
2876 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2877    it contains, or a complex or vector whose subparts are such integers,
2878    or a location wrapper for such a constant.  */
2879 
2880 bool
integer_all_onesp(const_tree expr)2881 integer_all_onesp (const_tree expr)
2882 {
2883   STRIP_ANY_LOCATION_WRAPPER (expr);
2884 
2885   if (TREE_CODE (expr) == COMPLEX_CST
2886       && integer_all_onesp (TREE_REALPART (expr))
2887       && integer_all_onesp (TREE_IMAGPART (expr)))
2888     return true;
2889 
2890   else if (TREE_CODE (expr) == VECTOR_CST)
2891     return (VECTOR_CST_NPATTERNS (expr) == 1
2892 	    && VECTOR_CST_DUPLICATE_P (expr)
2893 	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2894 
2895   else if (TREE_CODE (expr) != INTEGER_CST)
2896     return false;
2897 
2898   return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2899 	  == wi::to_wide (expr));
2900 }
2901 
2902 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2903    for such a constant.  */
2904 
2905 bool
integer_minus_onep(const_tree expr)2906 integer_minus_onep (const_tree expr)
2907 {
2908   STRIP_ANY_LOCATION_WRAPPER (expr);
2909 
2910   if (TREE_CODE (expr) == COMPLEX_CST)
2911     return (integer_all_onesp (TREE_REALPART (expr))
2912 	    && integer_zerop (TREE_IMAGPART (expr)));
2913   else
2914     return integer_all_onesp (expr);
2915 }
2916 
2917 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2918    one bit on), or a location wrapper for such a constant.  */
2919 
2920 bool
integer_pow2p(const_tree expr)2921 integer_pow2p (const_tree expr)
2922 {
2923   STRIP_ANY_LOCATION_WRAPPER (expr);
2924 
2925   if (TREE_CODE (expr) == COMPLEX_CST
2926       && integer_pow2p (TREE_REALPART (expr))
2927       && integer_zerop (TREE_IMAGPART (expr)))
2928     return true;
2929 
2930   if (TREE_CODE (expr) != INTEGER_CST)
2931     return false;
2932 
2933   return wi::popcount (wi::to_wide (expr)) == 1;
2934 }
2935 
2936 /* Return 1 if EXPR is an integer constant other than zero or a
2937    complex constant other than zero, or a location wrapper for such a
2938    constant.  */
2939 
2940 bool
integer_nonzerop(const_tree expr)2941 integer_nonzerop (const_tree expr)
2942 {
2943   STRIP_ANY_LOCATION_WRAPPER (expr);
2944 
2945   return ((TREE_CODE (expr) == INTEGER_CST
2946 	   && wi::to_wide (expr) != 0)
2947 	  || (TREE_CODE (expr) == COMPLEX_CST
2948 	      && (integer_nonzerop (TREE_REALPART (expr))
2949 		  || integer_nonzerop (TREE_IMAGPART (expr)))));
2950 }
2951 
2952 /* Return 1 if EXPR is the integer constant one.  For vector,
2953    return 1 if every piece is the integer constant minus one
2954    (representing the value TRUE).
2955    Also return 1 for location wrappers for such a constant.  */
2956 
2957 bool
integer_truep(const_tree expr)2958 integer_truep (const_tree expr)
2959 {
2960   STRIP_ANY_LOCATION_WRAPPER (expr);
2961 
2962   if (TREE_CODE (expr) == VECTOR_CST)
2963     return integer_all_onesp (expr);
2964   return integer_onep (expr);
2965 }
2966 
2967 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2968    for such a constant.  */
2969 
2970 bool
fixed_zerop(const_tree expr)2971 fixed_zerop (const_tree expr)
2972 {
2973   STRIP_ANY_LOCATION_WRAPPER (expr);
2974 
2975   return (TREE_CODE (expr) == FIXED_CST
2976 	  && TREE_FIXED_CST (expr).data.is_zero ());
2977 }
2978 
2979 /* Return the power of two represented by a tree node known to be a
2980    power of two.  */
2981 
2982 int
tree_log2(const_tree expr)2983 tree_log2 (const_tree expr)
2984 {
2985   if (TREE_CODE (expr) == COMPLEX_CST)
2986     return tree_log2 (TREE_REALPART (expr));
2987 
2988   return wi::exact_log2 (wi::to_wide (expr));
2989 }
2990 
2991 /* Similar, but return the largest integer Y such that 2 ** Y is less
2992    than or equal to EXPR.  */
2993 
2994 int
tree_floor_log2(const_tree expr)2995 tree_floor_log2 (const_tree expr)
2996 {
2997   if (TREE_CODE (expr) == COMPLEX_CST)
2998     return tree_log2 (TREE_REALPART (expr));
2999 
3000   return wi::floor_log2 (wi::to_wide (expr));
3001 }
3002 
3003 /* Return number of known trailing zero bits in EXPR, or, if the value of
3004    EXPR is known to be zero, the precision of it's type.  */
3005 
3006 unsigned int
tree_ctz(const_tree expr)3007 tree_ctz (const_tree expr)
3008 {
3009   if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3010       && !POINTER_TYPE_P (TREE_TYPE (expr)))
3011     return 0;
3012 
3013   unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3014   switch (TREE_CODE (expr))
3015     {
3016     case INTEGER_CST:
3017       ret1 = wi::ctz (wi::to_wide (expr));
3018       return MIN (ret1, prec);
3019     case SSA_NAME:
3020       ret1 = wi::ctz (get_nonzero_bits (expr));
3021       return MIN (ret1, prec);
3022     case PLUS_EXPR:
3023     case MINUS_EXPR:
3024     case BIT_IOR_EXPR:
3025     case BIT_XOR_EXPR:
3026     case MIN_EXPR:
3027     case MAX_EXPR:
3028       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3029       if (ret1 == 0)
3030 	return ret1;
3031       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3032       return MIN (ret1, ret2);
3033     case POINTER_PLUS_EXPR:
3034       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3035       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3036       /* Second operand is sizetype, which could be in theory
3037 	 wider than pointer's precision.  Make sure we never
3038 	 return more than prec.  */
3039       ret2 = MIN (ret2, prec);
3040       return MIN (ret1, ret2);
3041     case BIT_AND_EXPR:
3042       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3043       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3044       return MAX (ret1, ret2);
3045     case MULT_EXPR:
3046       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3047       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3048       return MIN (ret1 + ret2, prec);
3049     case LSHIFT_EXPR:
3050       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3051       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3052 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3053 	{
3054 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3055 	  return MIN (ret1 + ret2, prec);
3056 	}
3057       return ret1;
3058     case RSHIFT_EXPR:
3059       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3060 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3061 	{
3062 	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3063 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3064 	  if (ret1 > ret2)
3065 	    return ret1 - ret2;
3066 	}
3067       return 0;
3068     case TRUNC_DIV_EXPR:
3069     case CEIL_DIV_EXPR:
3070     case FLOOR_DIV_EXPR:
3071     case ROUND_DIV_EXPR:
3072     case EXACT_DIV_EXPR:
3073       if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3074 	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3075 	{
3076 	  int l = tree_log2 (TREE_OPERAND (expr, 1));
3077 	  if (l >= 0)
3078 	    {
3079 	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3080 	      ret2 = l;
3081 	      if (ret1 > ret2)
3082 		return ret1 - ret2;
3083 	    }
3084 	}
3085       return 0;
3086     CASE_CONVERT:
3087       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3088       if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3089 	ret1 = prec;
3090       return MIN (ret1, prec);
3091     case SAVE_EXPR:
3092       return tree_ctz (TREE_OPERAND (expr, 0));
3093     case COND_EXPR:
3094       ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3095       if (ret1 == 0)
3096 	return 0;
3097       ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3098       return MIN (ret1, ret2);
3099     case COMPOUND_EXPR:
3100       return tree_ctz (TREE_OPERAND (expr, 1));
3101     case ADDR_EXPR:
3102       ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3103       if (ret1 > BITS_PER_UNIT)
3104 	{
3105 	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3106 	  return MIN (ret1, prec);
3107 	}
3108       return 0;
3109     default:
3110       return 0;
3111     }
3112 }
3113 
3114 /* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
3115    decimal float constants, so don't return 1 for them.
3116    Also return 1 for location wrappers around such a constant.  */
3117 
3118 bool
real_zerop(const_tree expr)3119 real_zerop (const_tree expr)
3120 {
3121   STRIP_ANY_LOCATION_WRAPPER (expr);
3122 
3123   switch (TREE_CODE (expr))
3124     {
3125     case REAL_CST:
3126       return real_equal (&TREE_REAL_CST (expr), &dconst0)
3127 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3128     case COMPLEX_CST:
3129       return real_zerop (TREE_REALPART (expr))
3130 	     && real_zerop (TREE_IMAGPART (expr));
3131     case VECTOR_CST:
3132       {
3133 	/* Don't simply check for a duplicate because the predicate
3134 	   accepts both +0.0 and -0.0.  */
3135 	unsigned count = vector_cst_encoded_nelts (expr);
3136 	for (unsigned int i = 0; i < count; ++i)
3137 	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3138 	    return false;
3139 	return true;
3140       }
3141     default:
3142       return false;
3143     }
3144 }
3145 
3146 /* Return 1 if EXPR is the real constant one in real or complex form.
3147    Trailing zeroes matter for decimal float constants, so don't return
3148    1 for them.
3149    Also return 1 for location wrappers around such a constant.  */
3150 
3151 bool
real_onep(const_tree expr)3152 real_onep (const_tree expr)
3153 {
3154   STRIP_ANY_LOCATION_WRAPPER (expr);
3155 
3156   switch (TREE_CODE (expr))
3157     {
3158     case REAL_CST:
3159       return real_equal (&TREE_REAL_CST (expr), &dconst1)
3160 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3161     case COMPLEX_CST:
3162       return real_onep (TREE_REALPART (expr))
3163 	     && real_zerop (TREE_IMAGPART (expr));
3164     case VECTOR_CST:
3165       return (VECTOR_CST_NPATTERNS (expr) == 1
3166 	      && VECTOR_CST_DUPLICATE_P (expr)
3167 	      && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3168     default:
3169       return false;
3170     }
3171 }
3172 
3173 /* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
3174    matter for decimal float constants, so don't return 1 for them.
3175    Also return 1 for location wrappers around such a constant.  */
3176 
3177 bool
real_minus_onep(const_tree expr)3178 real_minus_onep (const_tree expr)
3179 {
3180   STRIP_ANY_LOCATION_WRAPPER (expr);
3181 
3182   switch (TREE_CODE (expr))
3183     {
3184     case REAL_CST:
3185       return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3186 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3187     case COMPLEX_CST:
3188       return real_minus_onep (TREE_REALPART (expr))
3189 	     && real_zerop (TREE_IMAGPART (expr));
3190     case VECTOR_CST:
3191       return (VECTOR_CST_NPATTERNS (expr) == 1
3192 	      && VECTOR_CST_DUPLICATE_P (expr)
3193 	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3194     default:
3195       return false;
3196     }
3197 }
3198 
3199 /* Return true if T could be a floating point zero.  */
3200 
3201 bool
real_maybe_zerop(const_tree expr)3202 real_maybe_zerop (const_tree expr)
3203 {
3204   switch (TREE_CODE (expr))
3205     {
3206     case REAL_CST:
3207       /* Can't use real_zerop here, as it always returns false for decimal
3208 	 floats.  And can't use TREE_REAL_CST (expr).cl == rvc_zero
3209 	 either, as decimal zeros are rvc_normal.  */
3210       return real_equal (&TREE_REAL_CST (expr), &dconst0);
3211     case COMPLEX_CST:
3212       return (real_maybe_zerop (TREE_REALPART (expr))
3213 	      || real_maybe_zerop (TREE_IMAGPART (expr)));
3214     case VECTOR_CST:
3215       {
3216 	unsigned count = vector_cst_encoded_nelts (expr);
3217 	for (unsigned int i = 0; i < count; ++i)
3218 	  if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3219 	    return true;
3220 	return false;
3221       }
3222     default:
3223       /* Perhaps for SSA_NAMEs we could query frange.  */
3224       return true;
3225     }
3226 }
3227 
3228 /* Nonzero if EXP is a constant or a cast of a constant.  */
3229 
3230 bool
really_constant_p(const_tree exp)3231 really_constant_p (const_tree exp)
3232 {
3233   /* This is not quite the same as STRIP_NOPS.  It does more.  */
3234   while (CONVERT_EXPR_P (exp)
3235 	 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3236     exp = TREE_OPERAND (exp, 0);
3237   return TREE_CONSTANT (exp);
3238 }
3239 
3240 /* Return true if T holds a polynomial pointer difference, storing it in
3241    *VALUE if so.  A true return means that T's precision is no greater
3242    than 64 bits, which is the largest address space we support, so *VALUE
3243    never loses precision.  However, the signedness of the result does
3244    not necessarily match the signedness of T: sometimes an unsigned type
3245    like sizetype is used to encode a value that is actually negative.  */
3246 
3247 bool
ptrdiff_tree_p(const_tree t,poly_int64_pod * value)3248 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3249 {
3250   if (!t)
3251     return false;
3252   if (TREE_CODE (t) == INTEGER_CST)
3253     {
3254       if (!cst_and_fits_in_hwi (t))
3255 	return false;
3256       *value = int_cst_value (t);
3257       return true;
3258     }
3259   if (POLY_INT_CST_P (t))
3260     {
3261       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3262 	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3263 	  return false;
3264       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3265 	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3266       return true;
3267     }
3268   return false;
3269 }
3270 
3271 poly_int64
tree_to_poly_int64(const_tree t)3272 tree_to_poly_int64 (const_tree t)
3273 {
3274   gcc_assert (tree_fits_poly_int64_p (t));
3275   if (POLY_INT_CST_P (t))
3276     return poly_int_cst_value (t).force_shwi ();
3277   return TREE_INT_CST_LOW (t);
3278 }
3279 
3280 poly_uint64
tree_to_poly_uint64(const_tree t)3281 tree_to_poly_uint64 (const_tree t)
3282 {
3283   gcc_assert (tree_fits_poly_uint64_p (t));
3284   if (POLY_INT_CST_P (t))
3285     return poly_int_cst_value (t).force_uhwi ();
3286   return TREE_INT_CST_LOW (t);
3287 }
3288 
3289 /* Return first list element whose TREE_VALUE is ELEM.
3290    Return 0 if ELEM is not in LIST.  */
3291 
3292 tree
value_member(tree elem,tree list)3293 value_member (tree elem, tree list)
3294 {
3295   while (list)
3296     {
3297       if (elem == TREE_VALUE (list))
3298 	return list;
3299       list = TREE_CHAIN (list);
3300     }
3301   return NULL_TREE;
3302 }
3303 
3304 /* Return first list element whose TREE_PURPOSE is ELEM.
3305    Return 0 if ELEM is not in LIST.  */
3306 
3307 tree
purpose_member(const_tree elem,tree list)3308 purpose_member (const_tree elem, tree list)
3309 {
3310   while (list)
3311     {
3312       if (elem == TREE_PURPOSE (list))
3313 	return list;
3314       list = TREE_CHAIN (list);
3315     }
3316   return NULL_TREE;
3317 }
3318 
3319 /* Return true if ELEM is in V.  */
3320 
3321 bool
vec_member(const_tree elem,vec<tree,va_gc> * v)3322 vec_member (const_tree elem, vec<tree, va_gc> *v)
3323 {
3324   unsigned ix;
3325   tree t;
3326   FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3327     if (elem == t)
3328       return true;
3329   return false;
3330 }
3331 
3332 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3333    NULL_TREE.  */
3334 
3335 tree
chain_index(int idx,tree chain)3336 chain_index (int idx, tree chain)
3337 {
3338   for (; chain && idx > 0; --idx)
3339     chain = TREE_CHAIN (chain);
3340   return chain;
3341 }
3342 
3343 /* Return nonzero if ELEM is part of the chain CHAIN.  */
3344 
3345 bool
chain_member(const_tree elem,const_tree chain)3346 chain_member (const_tree elem, const_tree chain)
3347 {
3348   while (chain)
3349     {
3350       if (elem == chain)
3351 	return true;
3352       chain = DECL_CHAIN (chain);
3353     }
3354 
3355   return false;
3356 }
3357 
3358 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3359    We expect a null pointer to mark the end of the chain.
3360    This is the Lisp primitive `length'.  */
3361 
3362 int
list_length(const_tree t)3363 list_length (const_tree t)
3364 {
3365   const_tree p = t;
3366 #ifdef ENABLE_TREE_CHECKING
3367   const_tree q = t;
3368 #endif
3369   int len = 0;
3370 
3371   while (p)
3372     {
3373       p = TREE_CHAIN (p);
3374 #ifdef ENABLE_TREE_CHECKING
3375       if (len % 2)
3376 	q = TREE_CHAIN (q);
3377       gcc_assert (p != q);
3378 #endif
3379       len++;
3380     }
3381 
3382   return len;
3383 }
3384 
3385 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3386    UNION_TYPE TYPE, or NULL_TREE if none.  */
3387 
3388 tree
first_field(const_tree type)3389 first_field (const_tree type)
3390 {
3391   tree t = TYPE_FIELDS (type);
3392   while (t && TREE_CODE (t) != FIELD_DECL)
3393     t = TREE_CHAIN (t);
3394   return t;
3395 }
3396 
3397 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3398    UNION_TYPE TYPE, or NULL_TREE if none.  */
3399 
3400 tree
last_field(const_tree type)3401 last_field (const_tree type)
3402 {
3403   tree last = NULL_TREE;
3404 
3405   for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3406     {
3407       if (TREE_CODE (fld) != FIELD_DECL)
3408 	continue;
3409 
3410       last = fld;
3411     }
3412 
3413   return last;
3414 }
3415 
3416 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3417    by modifying the last node in chain 1 to point to chain 2.
3418    This is the Lisp primitive `nconc'.  */
3419 
3420 tree
chainon(tree op1,tree op2)3421 chainon (tree op1, tree op2)
3422 {
3423   tree t1;
3424 
3425   if (!op1)
3426     return op2;
3427   if (!op2)
3428     return op1;
3429 
3430   for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3431     continue;
3432   TREE_CHAIN (t1) = op2;
3433 
3434 #ifdef ENABLE_TREE_CHECKING
3435   {
3436     tree t2;
3437     for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3438       gcc_assert (t2 != t1);
3439   }
3440 #endif
3441 
3442   return op1;
3443 }
3444 
3445 /* Return the last node in a chain of nodes (chained through TREE_CHAIN).  */
3446 
3447 tree
tree_last(tree chain)3448 tree_last (tree chain)
3449 {
3450   tree next;
3451   if (chain)
3452     while ((next = TREE_CHAIN (chain)))
3453       chain = next;
3454   return chain;
3455 }
3456 
3457 /* Reverse the order of elements in the chain T,
3458    and return the new head of the chain (old last element).  */
3459 
3460 tree
nreverse(tree t)3461 nreverse (tree t)
3462 {
3463   tree prev = 0, decl, next;
3464   for (decl = t; decl; decl = next)
3465     {
3466       /* We shouldn't be using this function to reverse BLOCK chains; we
3467 	 have blocks_nreverse for that.  */
3468       gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3469       next = TREE_CHAIN (decl);
3470       TREE_CHAIN (decl) = prev;
3471       prev = decl;
3472     }
3473   return prev;
3474 }
3475 
3476 /* Return a newly created TREE_LIST node whose
3477    purpose and value fields are PARM and VALUE.  */
3478 
3479 tree
build_tree_list(tree parm,tree value MEM_STAT_DECL)3480 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3481 {
3482   tree t = make_node (TREE_LIST PASS_MEM_STAT);
3483   TREE_PURPOSE (t) = parm;
3484   TREE_VALUE (t) = value;
3485   return t;
3486 }
3487 
3488 /* Build a chain of TREE_LIST nodes from a vector.  */
3489 
3490 tree
build_tree_list_vec(const vec<tree,va_gc> * vec MEM_STAT_DECL)3491 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3492 {
3493   tree ret = NULL_TREE;
3494   tree *pp = &ret;
3495   unsigned int i;
3496   tree t;
3497   FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3498     {
3499       *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3500       pp = &TREE_CHAIN (*pp);
3501     }
3502   return ret;
3503 }
3504 
3505 /* Return a newly created TREE_LIST node whose
3506    purpose and value fields are PURPOSE and VALUE
3507    and whose TREE_CHAIN is CHAIN.  */
3508 
3509 tree
tree_cons(tree purpose,tree value,tree chain MEM_STAT_DECL)3510 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3511 {
3512   tree node;
3513 
3514   node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3515   memset (node, 0, sizeof (struct tree_common));
3516 
3517   record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3518 
3519   TREE_SET_CODE (node, TREE_LIST);
3520   TREE_CHAIN (node) = chain;
3521   TREE_PURPOSE (node) = purpose;
3522   TREE_VALUE (node) = value;
3523   return node;
3524 }
3525 
3526 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3527    trees.  */
3528 
3529 vec<tree, va_gc> *
ctor_to_vec(tree ctor)3530 ctor_to_vec (tree ctor)
3531 {
3532   vec<tree, va_gc> *vec;
3533   vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3534   unsigned int ix;
3535   tree val;
3536 
3537   FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3538     vec->quick_push (val);
3539 
3540   return vec;
3541 }
3542 
3543 /* Return the size nominally occupied by an object of type TYPE
3544    when it resides in memory.  The value is measured in units of bytes,
3545    and its data type is that normally used for type sizes
3546    (which is the first type created by make_signed_type or
3547    make_unsigned_type).  */
3548 
3549 tree
size_in_bytes_loc(location_t loc,const_tree type)3550 size_in_bytes_loc (location_t loc, const_tree type)
3551 {
3552   tree t;
3553 
3554   if (type == error_mark_node)
3555     return integer_zero_node;
3556 
3557   type = TYPE_MAIN_VARIANT (type);
3558   t = TYPE_SIZE_UNIT (type);
3559 
3560   if (t == 0)
3561     {
3562       lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3563       return size_zero_node;
3564     }
3565 
3566   return t;
3567 }
3568 
3569 /* Return the size of TYPE (in bytes) as a wide integer
3570    or return -1 if the size can vary or is larger than an integer.  */
3571 
3572 HOST_WIDE_INT
int_size_in_bytes(const_tree type)3573 int_size_in_bytes (const_tree type)
3574 {
3575   tree t;
3576 
3577   if (type == error_mark_node)
3578     return 0;
3579 
3580   type = TYPE_MAIN_VARIANT (type);
3581   t = TYPE_SIZE_UNIT (type);
3582 
3583   if (t && tree_fits_uhwi_p (t))
3584     return TREE_INT_CST_LOW (t);
3585   else
3586     return -1;
3587 }
3588 
3589 /* Return the maximum size of TYPE (in bytes) as a wide integer
3590    or return -1 if the size can vary or is larger than an integer.  */
3591 
3592 HOST_WIDE_INT
max_int_size_in_bytes(const_tree type)3593 max_int_size_in_bytes (const_tree type)
3594 {
3595   HOST_WIDE_INT size = -1;
3596   tree size_tree;
3597 
3598   /* If this is an array type, check for a possible MAX_SIZE attached.  */
3599 
3600   if (TREE_CODE (type) == ARRAY_TYPE)
3601     {
3602       size_tree = TYPE_ARRAY_MAX_SIZE (type);
3603 
3604       if (size_tree && tree_fits_uhwi_p (size_tree))
3605 	size = tree_to_uhwi (size_tree);
3606     }
3607 
3608   /* If we still haven't been able to get a size, see if the language
3609      can compute a maximum size.  */
3610 
3611   if (size == -1)
3612     {
3613       size_tree = lang_hooks.types.max_size (type);
3614 
3615       if (size_tree && tree_fits_uhwi_p (size_tree))
3616 	size = tree_to_uhwi (size_tree);
3617     }
3618 
3619   return size;
3620 }
3621 
3622 /* Return the bit position of FIELD, in bits from the start of the record.
3623    This is a tree of type bitsizetype.  */
3624 
3625 tree
bit_position(const_tree field)3626 bit_position (const_tree field)
3627 {
3628   return bit_from_pos (DECL_FIELD_OFFSET (field),
3629 		       DECL_FIELD_BIT_OFFSET (field));
3630 }
3631 
3632 /* Return the byte position of FIELD, in bytes from the start of the record.
3633    This is a tree of type sizetype.  */
3634 
3635 tree
byte_position(const_tree field)3636 byte_position (const_tree field)
3637 {
3638   return byte_from_pos (DECL_FIELD_OFFSET (field),
3639 			DECL_FIELD_BIT_OFFSET (field));
3640 }
3641 
3642 /* Likewise, but return as an integer.  It must be representable in
3643    that way (since it could be a signed value, we don't have the
3644    option of returning -1 like int_size_in_byte can.  */
3645 
3646 HOST_WIDE_INT
int_byte_position(const_tree field)3647 int_byte_position (const_tree field)
3648 {
3649   return tree_to_shwi (byte_position (field));
3650 }
3651 
3652 /* Return, as a tree node, the number of elements for TYPE (which is an
3653    ARRAY_TYPE) minus one. This counts only elements of the top array.  */
3654 
3655 tree
array_type_nelts(const_tree type)3656 array_type_nelts (const_tree type)
3657 {
3658   tree index_type, min, max;
3659 
3660   /* If they did it with unspecified bounds, then we should have already
3661      given an error about it before we got here.  */
3662   if (! TYPE_DOMAIN (type))
3663     return error_mark_node;
3664 
3665   index_type = TYPE_DOMAIN (type);
3666   min = TYPE_MIN_VALUE (index_type);
3667   max = TYPE_MAX_VALUE (index_type);
3668 
3669   /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
3670   if (!max)
3671     {
3672       /* zero sized arrays are represented from C FE as complete types with
3673 	 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
3674 	 them as min 0, max -1.  */
3675       if (COMPLETE_TYPE_P (type)
3676 	  && integer_zerop (TYPE_SIZE (type))
3677 	  && integer_zerop (min))
3678 	return build_int_cst (TREE_TYPE (min), -1);
3679 
3680       return error_mark_node;
3681     }
3682 
3683   return (integer_zerop (min)
3684 	  ? max
3685 	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3686 }
3687 
3688 /* If arg is static -- a reference to an object in static storage -- then
3689    return the object.  This is not the same as the C meaning of `static'.
3690    If arg isn't static, return NULL.  */
3691 
3692 tree
staticp(tree arg)3693 staticp (tree arg)
3694 {
3695   switch (TREE_CODE (arg))
3696     {
3697     case FUNCTION_DECL:
3698       /* Nested functions are static, even though taking their address will
3699 	 involve a trampoline as we unnest the nested function and create
3700 	 the trampoline on the tree level.  */
3701       return arg;
3702 
3703     case VAR_DECL:
3704       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3705 	      && ! DECL_THREAD_LOCAL_P (arg)
3706 	      && ! DECL_DLLIMPORT_P (arg)
3707 	      ? arg : NULL);
3708 
3709     case CONST_DECL:
3710       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3711 	      ? arg : NULL);
3712 
3713     case CONSTRUCTOR:
3714       return TREE_STATIC (arg) ? arg : NULL;
3715 
3716     case LABEL_DECL:
3717     case STRING_CST:
3718       return arg;
3719 
3720     case COMPONENT_REF:
3721       /* If the thing being referenced is not a field, then it is
3722 	 something language specific.  */
3723       gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3724 
3725       /* If we are referencing a bitfield, we can't evaluate an
3726 	 ADDR_EXPR at compile time and so it isn't a constant.  */
3727       if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3728 	return NULL;
3729 
3730       return staticp (TREE_OPERAND (arg, 0));
3731 
3732     case BIT_FIELD_REF:
3733       return NULL;
3734 
3735     case INDIRECT_REF:
3736       return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3737 
3738     case ARRAY_REF:
3739     case ARRAY_RANGE_REF:
3740       if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3741 	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3742 	return staticp (TREE_OPERAND (arg, 0));
3743       else
3744 	return NULL;
3745 
3746     case COMPOUND_LITERAL_EXPR:
3747       return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3748 
3749     default:
3750       return NULL;
3751     }
3752 }
3753 
3754 
3755 
3756 
3757 /* Return whether OP is a DECL whose address is function-invariant.  */
3758 
3759 bool
decl_address_invariant_p(const_tree op)3760 decl_address_invariant_p (const_tree op)
3761 {
3762   /* The conditions below are slightly less strict than the one in
3763      staticp.  */
3764 
3765   switch (TREE_CODE (op))
3766     {
3767     case PARM_DECL:
3768     case RESULT_DECL:
3769     case LABEL_DECL:
3770     case FUNCTION_DECL:
3771       return true;
3772 
3773     case VAR_DECL:
3774       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3775           || DECL_THREAD_LOCAL_P (op)
3776           || DECL_CONTEXT (op) == current_function_decl
3777           || decl_function_context (op) == current_function_decl)
3778         return true;
3779       break;
3780 
3781     case CONST_DECL:
3782       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3783           || decl_function_context (op) == current_function_decl)
3784         return true;
3785       break;
3786 
3787     default:
3788       break;
3789     }
3790 
3791   return false;
3792 }
3793 
3794 /* Return whether OP is a DECL whose address is interprocedural-invariant.  */
3795 
3796 bool
decl_address_ip_invariant_p(const_tree op)3797 decl_address_ip_invariant_p (const_tree op)
3798 {
3799   /* The conditions below are slightly less strict than the one in
3800      staticp.  */
3801 
3802   switch (TREE_CODE (op))
3803     {
3804     case LABEL_DECL:
3805     case FUNCTION_DECL:
3806     case STRING_CST:
3807       return true;
3808 
3809     case VAR_DECL:
3810       if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3811            && !DECL_DLLIMPORT_P (op))
3812           || DECL_THREAD_LOCAL_P (op))
3813         return true;
3814       break;
3815 
3816     case CONST_DECL:
3817       if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3818         return true;
3819       break;
3820 
3821     default:
3822       break;
3823     }
3824 
3825   return false;
3826 }
3827 
3828 
3829 /* Return true if T is function-invariant (internal function, does
3830    not handle arithmetic; that's handled in skip_simple_arithmetic and
3831    tree_invariant_p).  */
3832 
3833 static bool
tree_invariant_p_1(tree t)3834 tree_invariant_p_1 (tree t)
3835 {
3836   tree op;
3837 
3838   if (TREE_CONSTANT (t)
3839       || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3840     return true;
3841 
3842   switch (TREE_CODE (t))
3843     {
3844     case SAVE_EXPR:
3845       return true;
3846 
3847     case ADDR_EXPR:
3848       op = TREE_OPERAND (t, 0);
3849       while (handled_component_p (op))
3850 	{
3851 	  switch (TREE_CODE (op))
3852 	    {
3853 	    case ARRAY_REF:
3854 	    case ARRAY_RANGE_REF:
3855 	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
3856 		  || TREE_OPERAND (op, 2) != NULL_TREE
3857 		  || TREE_OPERAND (op, 3) != NULL_TREE)
3858 		return false;
3859 	      break;
3860 
3861 	    case COMPONENT_REF:
3862 	      if (TREE_OPERAND (op, 2) != NULL_TREE)
3863 		return false;
3864 	      break;
3865 
3866 	    default:;
3867 	    }
3868 	  op = TREE_OPERAND (op, 0);
3869 	}
3870 
3871       return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3872 
3873     default:
3874       break;
3875     }
3876 
3877   return false;
3878 }
3879 
3880 /* Return true if T is function-invariant.  */
3881 
3882 bool
tree_invariant_p(tree t)3883 tree_invariant_p (tree t)
3884 {
3885   tree inner = skip_simple_arithmetic (t);
3886   return tree_invariant_p_1 (inner);
3887 }
3888 
3889 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3890    Do this to any expression which may be used in more than one place,
3891    but must be evaluated only once.
3892 
3893    Normally, expand_expr would reevaluate the expression each time.
3894    Calling save_expr produces something that is evaluated and recorded
3895    the first time expand_expr is called on it.  Subsequent calls to
3896    expand_expr just reuse the recorded value.
3897 
3898    The call to expand_expr that generates code that actually computes
3899    the value is the first call *at compile time*.  Subsequent calls
3900    *at compile time* generate code to use the saved value.
3901    This produces correct result provided that *at run time* control
3902    always flows through the insns made by the first expand_expr
3903    before reaching the other places where the save_expr was evaluated.
3904    You, the caller of save_expr, must make sure this is so.
3905 
3906    Constants, and certain read-only nodes, are returned with no
3907    SAVE_EXPR because that is safe.  Expressions containing placeholders
3908    are not touched; see tree.def for an explanation of what these
3909    are used for.  */
3910 
3911 tree
save_expr(tree expr)3912 save_expr (tree expr)
3913 {
3914   tree inner;
3915 
3916   /* If the tree evaluates to a constant, then we don't want to hide that
3917      fact (i.e. this allows further folding, and direct checks for constants).
3918      However, a read-only object that has side effects cannot be bypassed.
3919      Since it is no problem to reevaluate literals, we just return the
3920      literal node.  */
3921   inner = skip_simple_arithmetic (expr);
3922   if (TREE_CODE (inner) == ERROR_MARK)
3923     return inner;
3924 
3925   if (tree_invariant_p_1 (inner))
3926     return expr;
3927 
3928   /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3929      it means that the size or offset of some field of an object depends on
3930      the value within another field.
3931 
3932      Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3933      and some variable since it would then need to be both evaluated once and
3934      evaluated more than once.  Front-ends must assure this case cannot
3935      happen by surrounding any such subexpressions in their own SAVE_EXPR
3936      and forcing evaluation at the proper time.  */
3937   if (contains_placeholder_p (inner))
3938     return expr;
3939 
3940   expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3941 
3942   /* This expression might be placed ahead of a jump to ensure that the
3943      value was computed on both sides of the jump.  So make sure it isn't
3944      eliminated as dead.  */
3945   TREE_SIDE_EFFECTS (expr) = 1;
3946   return expr;
3947 }
3948 
3949 /* Look inside EXPR into any simple arithmetic operations.  Return the
3950    outermost non-arithmetic or non-invariant node.  */
3951 
3952 tree
skip_simple_arithmetic(tree expr)3953 skip_simple_arithmetic (tree expr)
3954 {
3955   /* We don't care about whether this can be used as an lvalue in this
3956      context.  */
3957   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3958     expr = TREE_OPERAND (expr, 0);
3959 
3960   /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3961      a constant, it will be more efficient to not make another SAVE_EXPR since
3962      it will allow better simplification and GCSE will be able to merge the
3963      computations if they actually occur.  */
3964   while (true)
3965     {
3966       if (UNARY_CLASS_P (expr))
3967 	expr = TREE_OPERAND (expr, 0);
3968       else if (BINARY_CLASS_P (expr))
3969 	{
3970 	  if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3971 	    expr = TREE_OPERAND (expr, 0);
3972 	  else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3973 	    expr = TREE_OPERAND (expr, 1);
3974 	  else
3975 	    break;
3976 	}
3977       else
3978 	break;
3979     }
3980 
3981   return expr;
3982 }
3983 
3984 /* Look inside EXPR into simple arithmetic operations involving constants.
3985    Return the outermost non-arithmetic or non-constant node.  */
3986 
3987 tree
skip_simple_constant_arithmetic(tree expr)3988 skip_simple_constant_arithmetic (tree expr)
3989 {
3990   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3991     expr = TREE_OPERAND (expr, 0);
3992 
3993   while (true)
3994     {
3995       if (UNARY_CLASS_P (expr))
3996 	expr = TREE_OPERAND (expr, 0);
3997       else if (BINARY_CLASS_P (expr))
3998 	{
3999 	  if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
4000 	    expr = TREE_OPERAND (expr, 0);
4001 	  else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
4002 	    expr = TREE_OPERAND (expr, 1);
4003 	  else
4004 	    break;
4005 	}
4006       else
4007 	break;
4008     }
4009 
4010   return expr;
4011 }
4012 
4013 /* Return which tree structure is used by T.  */
4014 
4015 enum tree_node_structure_enum
tree_node_structure(const_tree t)4016 tree_node_structure (const_tree t)
4017 {
4018   const enum tree_code code = TREE_CODE (t);
4019   return tree_node_structure_for_code (code);
4020 }
4021 
4022 /* Set various status flags when building a CALL_EXPR object T.  */
4023 
4024 static void
process_call_operands(tree t)4025 process_call_operands (tree t)
4026 {
4027   bool side_effects = TREE_SIDE_EFFECTS (t);
4028   bool read_only = false;
4029   int i = call_expr_flags (t);
4030 
4031   /* Calls have side-effects, except those to const or pure functions.  */
4032   if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4033     side_effects = true;
4034   /* Propagate TREE_READONLY of arguments for const functions.  */
4035   if (i & ECF_CONST)
4036     read_only = true;
4037 
4038   if (!side_effects || read_only)
4039     for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4040       {
4041 	tree op = TREE_OPERAND (t, i);
4042 	if (op && TREE_SIDE_EFFECTS (op))
4043 	  side_effects = true;
4044 	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4045 	  read_only = false;
4046       }
4047 
4048   TREE_SIDE_EFFECTS (t) = side_effects;
4049   TREE_READONLY (t) = read_only;
4050 }
4051 
4052 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4053    size or offset that depends on a field within a record.  */
4054 
4055 bool
contains_placeholder_p(const_tree exp)4056 contains_placeholder_p (const_tree exp)
4057 {
4058   enum tree_code code;
4059 
4060   if (!exp)
4061     return 0;
4062 
4063   code = TREE_CODE (exp);
4064   if (code == PLACEHOLDER_EXPR)
4065     return 1;
4066 
4067   switch (TREE_CODE_CLASS (code))
4068     {
4069     case tcc_reference:
4070       /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4071 	 position computations since they will be converted into a
4072 	 WITH_RECORD_EXPR involving the reference, which will assume
4073 	 here will be valid.  */
4074       return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4075 
4076     case tcc_exceptional:
4077       if (code == TREE_LIST)
4078 	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4079 		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4080       break;
4081 
4082     case tcc_unary:
4083     case tcc_binary:
4084     case tcc_comparison:
4085     case tcc_expression:
4086       switch (code)
4087 	{
4088 	case COMPOUND_EXPR:
4089 	  /* Ignoring the first operand isn't quite right, but works best.  */
4090 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4091 
4092 	case COND_EXPR:
4093 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4094 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4095 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4096 
4097 	case SAVE_EXPR:
4098 	  /* The save_expr function never wraps anything containing
4099 	     a PLACEHOLDER_EXPR. */
4100 	  return 0;
4101 
4102 	default:
4103 	  break;
4104 	}
4105 
4106       switch (TREE_CODE_LENGTH (code))
4107 	{
4108 	case 1:
4109 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4110 	case 2:
4111 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4112 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4113 	default:
4114 	  return 0;
4115 	}
4116 
4117     case tcc_vl_exp:
4118       switch (code)
4119 	{
4120 	case CALL_EXPR:
4121 	  {
4122 	    const_tree arg;
4123 	    const_call_expr_arg_iterator iter;
4124 	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4125 	      if (CONTAINS_PLACEHOLDER_P (arg))
4126 		return 1;
4127 	    return 0;
4128 	  }
4129 	default:
4130 	  return 0;
4131 	}
4132 
4133     default:
4134       return 0;
4135     }
4136   return 0;
4137 }
4138 
4139 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4140    directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4141    field positions.  */
4142 
4143 static bool
type_contains_placeholder_1(const_tree type)4144 type_contains_placeholder_1 (const_tree type)
4145 {
4146   /* If the size contains a placeholder or the parent type (component type in
4147      the case of arrays) type involves a placeholder, this type does.  */
4148   if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4149       || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4150       || (!POINTER_TYPE_P (type)
4151 	  && TREE_TYPE (type)
4152 	  && type_contains_placeholder_p (TREE_TYPE (type))))
4153     return true;
4154 
4155   /* Now do type-specific checks.  Note that the last part of the check above
4156      greatly limits what we have to do below.  */
4157   switch (TREE_CODE (type))
4158     {
4159     case VOID_TYPE:
4160     case OPAQUE_TYPE:
4161     case COMPLEX_TYPE:
4162     case ENUMERAL_TYPE:
4163     case BOOLEAN_TYPE:
4164     case POINTER_TYPE:
4165     case OFFSET_TYPE:
4166     case REFERENCE_TYPE:
4167     case METHOD_TYPE:
4168     case FUNCTION_TYPE:
4169     case VECTOR_TYPE:
4170     case NULLPTR_TYPE:
4171       return false;
4172 
4173     case INTEGER_TYPE:
4174     case REAL_TYPE:
4175     case FIXED_POINT_TYPE:
4176       /* Here we just check the bounds.  */
4177       return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4178 	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4179 
4180     case ARRAY_TYPE:
4181       /* We have already checked the component type above, so just check
4182 	 the domain type.  Flexible array members have a null domain.  */
4183       return TYPE_DOMAIN (type) ?
4184 	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4185 
4186     case RECORD_TYPE:
4187     case UNION_TYPE:
4188     case QUAL_UNION_TYPE:
4189       {
4190 	tree field;
4191 
4192 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4193 	  if (TREE_CODE (field) == FIELD_DECL
4194 	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4195 		  || (TREE_CODE (type) == QUAL_UNION_TYPE
4196 		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4197 		  || type_contains_placeholder_p (TREE_TYPE (field))))
4198 	    return true;
4199 
4200 	return false;
4201       }
4202 
4203     default:
4204       gcc_unreachable ();
4205     }
4206 }
4207 
4208 /* Wrapper around above function used to cache its result.  */
4209 
4210 bool
type_contains_placeholder_p(tree type)4211 type_contains_placeholder_p (tree type)
4212 {
4213   bool result;
4214 
4215   /* If the contains_placeholder_bits field has been initialized,
4216      then we know the answer.  */
4217   if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4218     return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4219 
4220   /* Indicate that we've seen this type node, and the answer is false.
4221      This is what we want to return if we run into recursion via fields.  */
4222   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4223 
4224   /* Compute the real value.  */
4225   result = type_contains_placeholder_1 (type);
4226 
4227   /* Store the real value.  */
4228   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4229 
4230   return result;
4231 }
4232 
4233 /* Push tree EXP onto vector QUEUE if it is not already present.  */
4234 
4235 static void
push_without_duplicates(tree exp,vec<tree> * queue)4236 push_without_duplicates (tree exp, vec<tree> *queue)
4237 {
4238   unsigned int i;
4239   tree iter;
4240 
4241   FOR_EACH_VEC_ELT (*queue, i, iter)
4242     if (simple_cst_equal (iter, exp) == 1)
4243       break;
4244 
4245   if (!iter)
4246     queue->safe_push (exp);
4247 }
4248 
4249 /* Given a tree EXP, find all occurrences of references to fields
4250    in a PLACEHOLDER_EXPR and place them in vector REFS without
4251    duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
4252    we assume here that EXP contains only arithmetic expressions
4253    or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4254    argument list.  */
4255 
4256 void
find_placeholder_in_expr(tree exp,vec<tree> * refs)4257 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4258 {
4259   enum tree_code code = TREE_CODE (exp);
4260   tree inner;
4261   int i;
4262 
4263   /* We handle TREE_LIST and COMPONENT_REF separately.  */
4264   if (code == TREE_LIST)
4265     {
4266       FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4267       FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4268     }
4269   else if (code == COMPONENT_REF)
4270     {
4271       for (inner = TREE_OPERAND (exp, 0);
4272 	   REFERENCE_CLASS_P (inner);
4273 	   inner = TREE_OPERAND (inner, 0))
4274 	;
4275 
4276       if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4277 	push_without_duplicates (exp, refs);
4278       else
4279 	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4280    }
4281   else
4282     switch (TREE_CODE_CLASS (code))
4283       {
4284       case tcc_constant:
4285 	break;
4286 
4287       case tcc_declaration:
4288 	/* Variables allocated to static storage can stay.  */
4289         if (!TREE_STATIC (exp))
4290 	  push_without_duplicates (exp, refs);
4291 	break;
4292 
4293       case tcc_expression:
4294 	/* This is the pattern built in ada/make_aligning_type.  */
4295 	if (code == ADDR_EXPR
4296 	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4297 	  {
4298 	    push_without_duplicates (exp, refs);
4299 	    break;
4300 	  }
4301 
4302         /* Fall through.  */
4303 
4304       case tcc_exceptional:
4305       case tcc_unary:
4306       case tcc_binary:
4307       case tcc_comparison:
4308       case tcc_reference:
4309 	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4310 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4311 	break;
4312 
4313       case tcc_vl_exp:
4314 	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4315 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4316 	break;
4317 
4318       default:
4319 	gcc_unreachable ();
4320       }
4321 }
4322 
4323 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4324    return a tree with all occurrences of references to F in a
4325    PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
4326    CONST_DECLs.  Note that we assume here that EXP contains only
4327    arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4328    occurring only in their argument list.  */
4329 
4330 tree
substitute_in_expr(tree exp,tree f,tree r)4331 substitute_in_expr (tree exp, tree f, tree r)
4332 {
4333   enum tree_code code = TREE_CODE (exp);
4334   tree op0, op1, op2, op3;
4335   tree new_tree;
4336 
4337   /* We handle TREE_LIST and COMPONENT_REF separately.  */
4338   if (code == TREE_LIST)
4339     {
4340       op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4341       op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4342       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4343 	return exp;
4344 
4345       return tree_cons (TREE_PURPOSE (exp), op1, op0);
4346     }
4347   else if (code == COMPONENT_REF)
4348     {
4349       tree inner;
4350 
4351       /* If this expression is getting a value from a PLACEHOLDER_EXPR
4352 	 and it is the right field, replace it with R.  */
4353       for (inner = TREE_OPERAND (exp, 0);
4354 	   REFERENCE_CLASS_P (inner);
4355 	   inner = TREE_OPERAND (inner, 0))
4356 	;
4357 
4358       /* The field.  */
4359       op1 = TREE_OPERAND (exp, 1);
4360 
4361       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4362 	return r;
4363 
4364       /* If this expression hasn't been completed let, leave it alone.  */
4365       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4366 	return exp;
4367 
4368       op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4369       if (op0 == TREE_OPERAND (exp, 0))
4370 	return exp;
4371 
4372       new_tree
4373 	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4374    }
4375   else
4376     switch (TREE_CODE_CLASS (code))
4377       {
4378       case tcc_constant:
4379 	return exp;
4380 
4381       case tcc_declaration:
4382 	if (exp == f)
4383 	  return r;
4384 	else
4385 	  return exp;
4386 
4387       case tcc_expression:
4388 	if (exp == f)
4389 	  return r;
4390 
4391         /* Fall through.  */
4392 
4393       case tcc_exceptional:
4394       case tcc_unary:
4395       case tcc_binary:
4396       case tcc_comparison:
4397       case tcc_reference:
4398 	switch (TREE_CODE_LENGTH (code))
4399 	  {
4400 	  case 0:
4401 	    return exp;
4402 
4403 	  case 1:
4404 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4405 	    if (op0 == TREE_OPERAND (exp, 0))
4406 	      return exp;
4407 
4408 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4409 	    break;
4410 
4411 	  case 2:
4412 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4413 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4414 
4415 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4416 	      return exp;
4417 
4418 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4419 	    break;
4420 
4421 	  case 3:
4422 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4423 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4424 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4425 
4426 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4427 		&& op2 == TREE_OPERAND (exp, 2))
4428 	      return exp;
4429 
4430 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4431 	    break;
4432 
4433 	  case 4:
4434 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4435 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4436 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4437 	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4438 
4439 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4440 		&& op2 == TREE_OPERAND (exp, 2)
4441 		&& op3 == TREE_OPERAND (exp, 3))
4442 	      return exp;
4443 
4444 	    new_tree
4445 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4446 	    break;
4447 
4448 	  default:
4449 	    gcc_unreachable ();
4450 	  }
4451 	break;
4452 
4453       case tcc_vl_exp:
4454 	{
4455 	  int i;
4456 
4457 	  new_tree = NULL_TREE;
4458 
4459 	  /* If we are trying to replace F with a constant or with another
4460 	     instance of one of the arguments of the call, inline back
4461 	     functions which do nothing else than computing a value from
4462 	     the arguments they are passed.  This makes it possible to
4463 	     fold partially or entirely the replacement expression.  */
4464 	  if (code == CALL_EXPR)
4465 	    {
4466 	      bool maybe_inline = false;
4467 	      if (CONSTANT_CLASS_P (r))
4468 		maybe_inline = true;
4469 	      else
4470 		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4471 		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4472 		    {
4473 		      maybe_inline = true;
4474 		      break;
4475 		    }
4476 	      if (maybe_inline)
4477 		{
4478 		  tree t = maybe_inline_call_in_expr (exp);
4479 		  if (t)
4480 		    return SUBSTITUTE_IN_EXPR (t, f, r);
4481 		}
4482 	    }
4483 
4484 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4485 	    {
4486 	      tree op = TREE_OPERAND (exp, i);
4487 	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4488 	      if (new_op != op)
4489 		{
4490 		  if (!new_tree)
4491 		    new_tree = copy_node (exp);
4492 		  TREE_OPERAND (new_tree, i) = new_op;
4493 		}
4494 	    }
4495 
4496 	  if (new_tree)
4497 	    {
4498 	      new_tree = fold (new_tree);
4499 	      if (TREE_CODE (new_tree) == CALL_EXPR)
4500 		process_call_operands (new_tree);
4501 	    }
4502 	  else
4503 	    return exp;
4504 	}
4505 	break;
4506 
4507       default:
4508 	gcc_unreachable ();
4509       }
4510 
4511   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4512 
4513   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4514     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4515 
4516   return new_tree;
4517 }
4518 
4519 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4520    for it within OBJ, a tree that is an object or a chain of references.  */
4521 
4522 tree
substitute_placeholder_in_expr(tree exp,tree obj)4523 substitute_placeholder_in_expr (tree exp, tree obj)
4524 {
4525   enum tree_code code = TREE_CODE (exp);
4526   tree op0, op1, op2, op3;
4527   tree new_tree;
4528 
4529   /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4530      in the chain of OBJ.  */
4531   if (code == PLACEHOLDER_EXPR)
4532     {
4533       tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4534       tree elt;
4535 
4536       for (elt = obj; elt != 0;
4537 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4538 		   || TREE_CODE (elt) == COND_EXPR)
4539 		  ? TREE_OPERAND (elt, 1)
4540 		  : (REFERENCE_CLASS_P (elt)
4541 		     || UNARY_CLASS_P (elt)
4542 		     || BINARY_CLASS_P (elt)
4543 		     || VL_EXP_CLASS_P (elt)
4544 		     || EXPRESSION_CLASS_P (elt))
4545 		  ? TREE_OPERAND (elt, 0) : 0))
4546 	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4547 	  return elt;
4548 
4549       for (elt = obj; elt != 0;
4550 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4551 		   || TREE_CODE (elt) == COND_EXPR)
4552 		  ? TREE_OPERAND (elt, 1)
4553 		  : (REFERENCE_CLASS_P (elt)
4554 		     || UNARY_CLASS_P (elt)
4555 		     || BINARY_CLASS_P (elt)
4556 		     || VL_EXP_CLASS_P (elt)
4557 		     || EXPRESSION_CLASS_P (elt))
4558 		  ? TREE_OPERAND (elt, 0) : 0))
4559 	if (POINTER_TYPE_P (TREE_TYPE (elt))
4560 	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4561 		== need_type))
4562 	  return fold_build1 (INDIRECT_REF, need_type, elt);
4563 
4564       /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
4565 	 survives until RTL generation, there will be an error.  */
4566       return exp;
4567     }
4568 
4569   /* TREE_LIST is special because we need to look at TREE_VALUE
4570      and TREE_CHAIN, not TREE_OPERANDS.  */
4571   else if (code == TREE_LIST)
4572     {
4573       op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4574       op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4575       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4576 	return exp;
4577 
4578       return tree_cons (TREE_PURPOSE (exp), op1, op0);
4579     }
4580   else
4581     switch (TREE_CODE_CLASS (code))
4582       {
4583       case tcc_constant:
4584       case tcc_declaration:
4585 	return exp;
4586 
4587       case tcc_exceptional:
4588       case tcc_unary:
4589       case tcc_binary:
4590       case tcc_comparison:
4591       case tcc_expression:
4592       case tcc_reference:
4593       case tcc_statement:
4594 	switch (TREE_CODE_LENGTH (code))
4595 	  {
4596 	  case 0:
4597 	    return exp;
4598 
4599 	  case 1:
4600 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4601 	    if (op0 == TREE_OPERAND (exp, 0))
4602 	      return exp;
4603 
4604 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4605 	    break;
4606 
4607 	  case 2:
4608 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4609 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4610 
4611 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4612 	      return exp;
4613 
4614 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4615 	    break;
4616 
4617 	  case 3:
4618 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4619 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4620 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4621 
4622 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4623 		&& op2 == TREE_OPERAND (exp, 2))
4624 	      return exp;
4625 
4626 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4627 	    break;
4628 
4629 	  case 4:
4630 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4631 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4632 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4633 	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4634 
4635 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4636 		&& op2 == TREE_OPERAND (exp, 2)
4637 		&& op3 == TREE_OPERAND (exp, 3))
4638 	      return exp;
4639 
4640 	    new_tree
4641 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4642 	    break;
4643 
4644 	  default:
4645 	    gcc_unreachable ();
4646 	  }
4647 	break;
4648 
4649       case tcc_vl_exp:
4650 	{
4651 	  int i;
4652 
4653 	  new_tree = NULL_TREE;
4654 
4655 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4656 	    {
4657 	      tree op = TREE_OPERAND (exp, i);
4658 	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4659 	      if (new_op != op)
4660 		{
4661 		  if (!new_tree)
4662 		    new_tree = copy_node (exp);
4663 		  TREE_OPERAND (new_tree, i) = new_op;
4664 		}
4665 	    }
4666 
4667 	  if (new_tree)
4668 	    {
4669 	      new_tree = fold (new_tree);
4670 	      if (TREE_CODE (new_tree) == CALL_EXPR)
4671 		process_call_operands (new_tree);
4672 	    }
4673 	  else
4674 	    return exp;
4675 	}
4676 	break;
4677 
4678       default:
4679 	gcc_unreachable ();
4680       }
4681 
4682   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4683 
4684   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4685     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4686 
4687   return new_tree;
4688 }
4689 
4690 
4691 /* Subroutine of stabilize_reference; this is called for subtrees of
4692    references.  Any expression with side-effects must be put in a SAVE_EXPR
4693    to ensure that it is only evaluated once.
4694 
4695    We don't put SAVE_EXPR nodes around everything, because assigning very
4696    simple expressions to temporaries causes us to miss good opportunities
4697    for optimizations.  Among other things, the opportunity to fold in the
4698    addition of a constant into an addressing mode often gets lost, e.g.
4699    "y[i+1] += x;".  In general, we take the approach that we should not make
4700    an assignment unless we are forced into it - i.e., that any non-side effect
4701    operator should be allowed, and that cse should take care of coalescing
4702    multiple utterances of the same expression should that prove fruitful.  */
4703 
4704 static tree
stabilize_reference_1(tree e)4705 stabilize_reference_1 (tree e)
4706 {
4707   tree result;
4708   enum tree_code code = TREE_CODE (e);
4709 
4710   /* We cannot ignore const expressions because it might be a reference
4711      to a const array but whose index contains side-effects.  But we can
4712      ignore things that are actual constant or that already have been
4713      handled by this function.  */
4714 
4715   if (tree_invariant_p (e))
4716     return e;
4717 
4718   switch (TREE_CODE_CLASS (code))
4719     {
4720     case tcc_exceptional:
4721       /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4722 	 have side-effects.  */
4723       if (code == STATEMENT_LIST)
4724 	return save_expr (e);
4725       /* FALLTHRU */
4726     case tcc_type:
4727     case tcc_declaration:
4728     case tcc_comparison:
4729     case tcc_statement:
4730     case tcc_expression:
4731     case tcc_reference:
4732     case tcc_vl_exp:
4733       /* If the expression has side-effects, then encase it in a SAVE_EXPR
4734 	 so that it will only be evaluated once.  */
4735       /* The reference (r) and comparison (<) classes could be handled as
4736 	 below, but it is generally faster to only evaluate them once.  */
4737       if (TREE_SIDE_EFFECTS (e))
4738 	return save_expr (e);
4739       return e;
4740 
4741     case tcc_constant:
4742       /* Constants need no processing.  In fact, we should never reach
4743 	 here.  */
4744       return e;
4745 
4746     case tcc_binary:
4747       /* Division is slow and tends to be compiled with jumps,
4748 	 especially the division by powers of 2 that is often
4749 	 found inside of an array reference.  So do it just once.  */
4750       if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4751 	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4752 	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4753 	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4754 	return save_expr (e);
4755       /* Recursively stabilize each operand.  */
4756       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4757 			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4758       break;
4759 
4760     case tcc_unary:
4761       /* Recursively stabilize each operand.  */
4762       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4763       break;
4764 
4765     default:
4766       gcc_unreachable ();
4767     }
4768 
4769   TREE_TYPE (result) = TREE_TYPE (e);
4770   TREE_READONLY (result) = TREE_READONLY (e);
4771   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4772   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4773 
4774   return result;
4775 }
4776 
4777 /* Stabilize a reference so that we can use it any number of times
4778    without causing its operands to be evaluated more than once.
4779    Returns the stabilized reference.  This works by means of save_expr,
4780    so see the caveats in the comments about save_expr.
4781 
4782    Also allows conversion expressions whose operands are references.
4783    Any other kind of expression is returned unchanged.  */
4784 
4785 tree
stabilize_reference(tree ref)4786 stabilize_reference (tree ref)
4787 {
4788   tree result;
4789   enum tree_code code = TREE_CODE (ref);
4790 
4791   switch (code)
4792     {
4793     case VAR_DECL:
4794     case PARM_DECL:
4795     case RESULT_DECL:
4796       /* No action is needed in this case.  */
4797       return ref;
4798 
4799     CASE_CONVERT:
4800     case FLOAT_EXPR:
4801     case FIX_TRUNC_EXPR:
4802       result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4803       break;
4804 
4805     case INDIRECT_REF:
4806       result = build_nt (INDIRECT_REF,
4807 			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4808       break;
4809 
4810     case COMPONENT_REF:
4811       result = build_nt (COMPONENT_REF,
4812 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4813 			 TREE_OPERAND (ref, 1), NULL_TREE);
4814       break;
4815 
4816     case BIT_FIELD_REF:
4817       result = build_nt (BIT_FIELD_REF,
4818 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4819 			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4820       REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4821       break;
4822 
4823     case ARRAY_REF:
4824       result = build_nt (ARRAY_REF,
4825 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4826 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4827 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4828       break;
4829 
4830     case ARRAY_RANGE_REF:
4831       result = build_nt (ARRAY_RANGE_REF,
4832 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4833 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4834 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4835       break;
4836 
4837     case COMPOUND_EXPR:
4838       /* We cannot wrap the first expression in a SAVE_EXPR, as then
4839 	 it wouldn't be ignored.  This matters when dealing with
4840 	 volatiles.  */
4841       return stabilize_reference_1 (ref);
4842 
4843       /* If arg isn't a kind of lvalue we recognize, make no change.
4844 	 Caller should recognize the error for an invalid lvalue.  */
4845     default:
4846       return ref;
4847 
4848     case ERROR_MARK:
4849       return error_mark_node;
4850     }
4851 
4852   TREE_TYPE (result) = TREE_TYPE (ref);
4853   TREE_READONLY (result) = TREE_READONLY (ref);
4854   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4855   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4856   protected_set_expr_location (result, EXPR_LOCATION (ref));
4857 
4858   return result;
4859 }
4860 
4861 /* Low-level constructors for expressions.  */
4862 
4863 /* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
4864    and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */
4865 
4866 void
recompute_tree_invariant_for_addr_expr(tree t)4867 recompute_tree_invariant_for_addr_expr (tree t)
4868 {
4869   tree node;
4870   bool tc = true, se = false;
4871 
4872   gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4873 
4874   /* We started out assuming this address is both invariant and constant, but
4875      does not have side effects.  Now go down any handled components and see if
4876      any of them involve offsets that are either non-constant or non-invariant.
4877      Also check for side-effects.
4878 
4879      ??? Note that this code makes no attempt to deal with the case where
4880      taking the address of something causes a copy due to misalignment.  */
4881 
4882 #define UPDATE_FLAGS(NODE)  \
4883 do { tree _node = (NODE); \
4884      if (_node && !TREE_CONSTANT (_node)) tc = false; \
4885      if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4886 
4887   for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4888        node = TREE_OPERAND (node, 0))
4889     {
4890       /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4891 	 array reference (probably made temporarily by the G++ front end),
4892 	 so ignore all the operands.  */
4893       if ((TREE_CODE (node) == ARRAY_REF
4894 	   || TREE_CODE (node) == ARRAY_RANGE_REF)
4895 	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4896 	{
4897 	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
4898 	  if (TREE_OPERAND (node, 2))
4899 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4900 	  if (TREE_OPERAND (node, 3))
4901 	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
4902 	}
4903       /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4904 	 FIELD_DECL, apparently.  The G++ front end can put something else
4905 	 there, at least temporarily.  */
4906       else if (TREE_CODE (node) == COMPONENT_REF
4907 	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4908 	{
4909 	  if (TREE_OPERAND (node, 2))
4910 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4911 	}
4912     }
4913 
4914   node = lang_hooks.expr_to_decl (node, &tc, &se);
4915 
4916   /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
4917      the address, since &(*a)->b is a form of addition.  If it's a constant, the
4918      address is constant too.  If it's a decl, its address is constant if the
4919      decl is static.  Everything else is not constant and, furthermore,
4920      taking the address of a volatile variable is not volatile.  */
4921   if (TREE_CODE (node) == INDIRECT_REF
4922       || TREE_CODE (node) == MEM_REF)
4923     UPDATE_FLAGS (TREE_OPERAND (node, 0));
4924   else if (CONSTANT_CLASS_P (node))
4925     ;
4926   else if (DECL_P (node))
4927     tc &= (staticp (node) != NULL_TREE);
4928   else
4929     {
4930       tc = false;
4931       se |= TREE_SIDE_EFFECTS (node);
4932     }
4933 
4934 
4935   TREE_CONSTANT (t) = tc;
4936   TREE_SIDE_EFFECTS (t) = se;
4937 #undef UPDATE_FLAGS
4938 }
4939 
4940 /* Build an expression of code CODE, data type TYPE, and operands as
4941    specified.  Expressions and reference nodes can be created this way.
4942    Constants, decls, types and misc nodes cannot be.
4943 
4944    We define 5 non-variadic functions, from 0 to 4 arguments.  This is
4945    enough for all extant tree codes.  */
4946 
4947 tree
build0(enum tree_code code,tree tt MEM_STAT_DECL)4948 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4949 {
4950   tree t;
4951 
4952   gcc_assert (TREE_CODE_LENGTH (code) == 0);
4953 
4954   t = make_node (code PASS_MEM_STAT);
4955   TREE_TYPE (t) = tt;
4956 
4957   return t;
4958 }
4959 
4960 tree
build1(enum tree_code code,tree type,tree node MEM_STAT_DECL)4961 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4962 {
4963   int length = sizeof (struct tree_exp);
4964   tree t;
4965 
4966   record_node_allocation_statistics (code, length);
4967 
4968   gcc_assert (TREE_CODE_LENGTH (code) == 1);
4969 
4970   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4971 
4972   memset (t, 0, sizeof (struct tree_common));
4973 
4974   TREE_SET_CODE (t, code);
4975 
4976   TREE_TYPE (t) = type;
4977   SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4978   TREE_OPERAND (t, 0) = node;
4979   if (node && !TYPE_P (node))
4980     {
4981       TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4982       TREE_READONLY (t) = TREE_READONLY (node);
4983     }
4984 
4985   if (TREE_CODE_CLASS (code) == tcc_statement)
4986     {
4987       if (code != DEBUG_BEGIN_STMT)
4988 	TREE_SIDE_EFFECTS (t) = 1;
4989     }
4990   else switch (code)
4991     {
4992     case VA_ARG_EXPR:
4993       /* All of these have side-effects, no matter what their
4994 	 operands are.  */
4995       TREE_SIDE_EFFECTS (t) = 1;
4996       TREE_READONLY (t) = 0;
4997       break;
4998 
4999     case INDIRECT_REF:
5000       /* Whether a dereference is readonly has nothing to do with whether
5001 	 its operand is readonly.  */
5002       TREE_READONLY (t) = 0;
5003       break;
5004 
5005     case ADDR_EXPR:
5006       if (node)
5007 	recompute_tree_invariant_for_addr_expr (t);
5008       break;
5009 
5010     default:
5011       if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
5012 	  && node && !TYPE_P (node)
5013 	  && TREE_CONSTANT (node))
5014 	TREE_CONSTANT (t) = 1;
5015       if (TREE_CODE_CLASS (code) == tcc_reference
5016 	  && node && TREE_THIS_VOLATILE (node))
5017 	TREE_THIS_VOLATILE (t) = 1;
5018       break;
5019     }
5020 
5021   return t;
5022 }
5023 
5024 #define PROCESS_ARG(N)				\
5025   do {						\
5026     TREE_OPERAND (t, N) = arg##N;		\
5027     if (arg##N &&!TYPE_P (arg##N))		\
5028       {						\
5029         if (TREE_SIDE_EFFECTS (arg##N))		\
5030 	  side_effects = 1;			\
5031         if (!TREE_READONLY (arg##N)		\
5032 	    && !CONSTANT_CLASS_P (arg##N))	\
5033 	  (void) (read_only = 0);		\
5034         if (!TREE_CONSTANT (arg##N))		\
5035 	  (void) (constant = 0);		\
5036       }						\
5037   } while (0)
5038 
5039 tree
build2(enum tree_code code,tree tt,tree arg0,tree arg1 MEM_STAT_DECL)5040 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5041 {
5042   bool constant, read_only, side_effects, div_by_zero;
5043   tree t;
5044 
5045   gcc_assert (TREE_CODE_LENGTH (code) == 2);
5046 
5047   if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5048       && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5049       /* When sizetype precision doesn't match that of pointers
5050          we need to be able to build explicit extensions or truncations
5051 	 of the offset argument.  */
5052       && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5053     gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5054 		&& TREE_CODE (arg1) == INTEGER_CST);
5055 
5056   if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5057     gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5058 		&& ptrofftype_p (TREE_TYPE (arg1)));
5059 
5060   t = make_node (code PASS_MEM_STAT);
5061   TREE_TYPE (t) = tt;
5062 
5063   /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5064      result based on those same flags for the arguments.  But if the
5065      arguments aren't really even `tree' expressions, we shouldn't be trying
5066      to do this.  */
5067 
5068   /* Expressions without side effects may be constant if their
5069      arguments are as well.  */
5070   constant = (TREE_CODE_CLASS (code) == tcc_comparison
5071 	      || TREE_CODE_CLASS (code) == tcc_binary);
5072   read_only = 1;
5073   side_effects = TREE_SIDE_EFFECTS (t);
5074 
5075   switch (code)
5076     {
5077     case TRUNC_DIV_EXPR:
5078     case CEIL_DIV_EXPR:
5079     case FLOOR_DIV_EXPR:
5080     case ROUND_DIV_EXPR:
5081     case EXACT_DIV_EXPR:
5082     case CEIL_MOD_EXPR:
5083     case FLOOR_MOD_EXPR:
5084     case ROUND_MOD_EXPR:
5085     case TRUNC_MOD_EXPR:
5086       div_by_zero = integer_zerop (arg1);
5087       break;
5088     default:
5089       div_by_zero = false;
5090     }
5091 
5092   PROCESS_ARG (0);
5093   PROCESS_ARG (1);
5094 
5095   TREE_SIDE_EFFECTS (t) = side_effects;
5096   if (code == MEM_REF)
5097     {
5098       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5099 	{
5100 	  tree o = TREE_OPERAND (arg0, 0);
5101 	  TREE_READONLY (t) = TREE_READONLY (o);
5102 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5103 	}
5104     }
5105   else
5106     {
5107       TREE_READONLY (t) = read_only;
5108       /* Don't mark X / 0 as constant.  */
5109       TREE_CONSTANT (t) = constant && !div_by_zero;
5110       TREE_THIS_VOLATILE (t)
5111 	= (TREE_CODE_CLASS (code) == tcc_reference
5112 	   && arg0 && TREE_THIS_VOLATILE (arg0));
5113     }
5114 
5115   return t;
5116 }
5117 
5118 
5119 tree
build3(enum tree_code code,tree tt,tree arg0,tree arg1,tree arg2 MEM_STAT_DECL)5120 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5121 	tree arg2 MEM_STAT_DECL)
5122 {
5123   bool constant, read_only, side_effects;
5124   tree t;
5125 
5126   gcc_assert (TREE_CODE_LENGTH (code) == 3);
5127   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5128 
5129   t = make_node (code PASS_MEM_STAT);
5130   TREE_TYPE (t) = tt;
5131 
5132   read_only = 1;
5133 
5134   /* As a special exception, if COND_EXPR has NULL branches, we
5135      assume that it is a gimple statement and always consider
5136      it to have side effects.  */
5137   if (code == COND_EXPR
5138       && tt == void_type_node
5139       && arg1 == NULL_TREE
5140       && arg2 == NULL_TREE)
5141     side_effects = true;
5142   else
5143     side_effects = TREE_SIDE_EFFECTS (t);
5144 
5145   PROCESS_ARG (0);
5146   PROCESS_ARG (1);
5147   PROCESS_ARG (2);
5148 
5149   if (code == COND_EXPR)
5150     TREE_READONLY (t) = read_only;
5151 
5152   TREE_SIDE_EFFECTS (t) = side_effects;
5153   TREE_THIS_VOLATILE (t)
5154     = (TREE_CODE_CLASS (code) == tcc_reference
5155        && arg0 && TREE_THIS_VOLATILE (arg0));
5156 
5157   return t;
5158 }
5159 
5160 tree
build4(enum tree_code code,tree tt,tree arg0,tree arg1,tree arg2,tree arg3 MEM_STAT_DECL)5161 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5162 	tree arg2, tree arg3 MEM_STAT_DECL)
5163 {
5164   bool constant, read_only, side_effects;
5165   tree t;
5166 
5167   gcc_assert (TREE_CODE_LENGTH (code) == 4);
5168 
5169   t = make_node (code PASS_MEM_STAT);
5170   TREE_TYPE (t) = tt;
5171 
5172   side_effects = TREE_SIDE_EFFECTS (t);
5173 
5174   PROCESS_ARG (0);
5175   PROCESS_ARG (1);
5176   PROCESS_ARG (2);
5177   PROCESS_ARG (3);
5178 
5179   TREE_SIDE_EFFECTS (t) = side_effects;
5180   TREE_THIS_VOLATILE (t)
5181     = (TREE_CODE_CLASS (code) == tcc_reference
5182        && arg0 && TREE_THIS_VOLATILE (arg0));
5183 
5184   return t;
5185 }
5186 
5187 tree
build5(enum tree_code code,tree tt,tree arg0,tree arg1,tree arg2,tree arg3,tree arg4 MEM_STAT_DECL)5188 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5189 	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5190 {
5191   bool constant, read_only, side_effects;
5192   tree t;
5193 
5194   gcc_assert (TREE_CODE_LENGTH (code) == 5);
5195 
5196   t = make_node (code PASS_MEM_STAT);
5197   TREE_TYPE (t) = tt;
5198 
5199   side_effects = TREE_SIDE_EFFECTS (t);
5200 
5201   PROCESS_ARG (0);
5202   PROCESS_ARG (1);
5203   PROCESS_ARG (2);
5204   PROCESS_ARG (3);
5205   PROCESS_ARG (4);
5206 
5207   TREE_SIDE_EFFECTS (t) = side_effects;
5208   if (code == TARGET_MEM_REF)
5209     {
5210       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5211 	{
5212 	  tree o = TREE_OPERAND (arg0, 0);
5213 	  TREE_READONLY (t) = TREE_READONLY (o);
5214 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5215 	}
5216     }
5217   else
5218     TREE_THIS_VOLATILE (t)
5219       = (TREE_CODE_CLASS (code) == tcc_reference
5220 	 && arg0 && TREE_THIS_VOLATILE (arg0));
5221 
5222   return t;
5223 }
5224 
5225 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
5226    on the pointer PTR.  */
5227 
5228 tree
build_simple_mem_ref_loc(location_t loc,tree ptr)5229 build_simple_mem_ref_loc (location_t loc, tree ptr)
5230 {
5231   poly_int64 offset = 0;
5232   tree ptype = TREE_TYPE (ptr);
5233   tree tem;
5234   /* For convenience allow addresses that collapse to a simple base
5235      and offset.  */
5236   if (TREE_CODE (ptr) == ADDR_EXPR
5237       && (handled_component_p (TREE_OPERAND (ptr, 0))
5238 	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5239     {
5240       ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5241       gcc_assert (ptr);
5242       if (TREE_CODE (ptr) == MEM_REF)
5243 	{
5244 	  offset += mem_ref_offset (ptr).force_shwi ();
5245 	  ptr = TREE_OPERAND (ptr, 0);
5246 	}
5247       else
5248 	ptr = build_fold_addr_expr (ptr);
5249       gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5250     }
5251   tem = build2 (MEM_REF, TREE_TYPE (ptype),
5252 		ptr, build_int_cst (ptype, offset));
5253   SET_EXPR_LOCATION (tem, loc);
5254   return tem;
5255 }
5256 
5257 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */
5258 
5259 poly_offset_int
mem_ref_offset(const_tree t)5260 mem_ref_offset (const_tree t)
5261 {
5262   return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5263 				SIGNED);
5264 }
5265 
5266 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5267    offsetted by OFFSET units.  */
5268 
5269 tree
build_invariant_address(tree type,tree base,poly_int64 offset)5270 build_invariant_address (tree type, tree base, poly_int64 offset)
5271 {
5272   tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5273 			  build_fold_addr_expr (base),
5274 			  build_int_cst (ptr_type_node, offset));
5275   tree addr = build1 (ADDR_EXPR, type, ref);
5276   recompute_tree_invariant_for_addr_expr (addr);
5277   return addr;
5278 }
5279 
5280 /* Similar except don't specify the TREE_TYPE
5281    and leave the TREE_SIDE_EFFECTS as 0.
5282    It is permissible for arguments to be null,
5283    or even garbage if their values do not matter.  */
5284 
5285 tree
build_nt(enum tree_code code,...)5286 build_nt (enum tree_code code, ...)
5287 {
5288   tree t;
5289   int length;
5290   int i;
5291   va_list p;
5292 
5293   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5294 
5295   va_start (p, code);
5296 
5297   t = make_node (code);
5298   length = TREE_CODE_LENGTH (code);
5299 
5300   for (i = 0; i < length; i++)
5301     TREE_OPERAND (t, i) = va_arg (p, tree);
5302 
5303   va_end (p);
5304   return t;
5305 }
5306 
5307 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5308    tree vec.  */
5309 
5310 tree
build_nt_call_vec(tree fn,vec<tree,va_gc> * args)5311 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5312 {
5313   tree ret, t;
5314   unsigned int ix;
5315 
5316   ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5317   CALL_EXPR_FN (ret) = fn;
5318   CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5319   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5320     CALL_EXPR_ARG (ret, ix) = t;
5321   return ret;
5322 }
5323 
5324 /* Create a DECL_... node of code CODE, name NAME  (if non-null)
5325    and data type TYPE.
5326    We do NOT enter this node in any sort of symbol table.
5327 
5328    LOC is the location of the decl.
5329 
5330    layout_decl is used to set up the decl's storage layout.
5331    Other slots are initialized to 0 or null pointers.  */
5332 
5333 tree
build_decl(location_t loc,enum tree_code code,tree name,tree type MEM_STAT_DECL)5334 build_decl (location_t loc, enum tree_code code, tree name,
5335     		 tree type MEM_STAT_DECL)
5336 {
5337   tree t;
5338 
5339   t = make_node (code PASS_MEM_STAT);
5340   DECL_SOURCE_LOCATION (t) = loc;
5341 
5342 /*  if (type == error_mark_node)
5343     type = integer_type_node; */
5344 /* That is not done, deliberately, so that having error_mark_node
5345    as the type can suppress useless errors in the use of this variable.  */
5346 
5347   DECL_NAME (t) = name;
5348   TREE_TYPE (t) = type;
5349 
5350   if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5351     layout_decl (t, 0);
5352 
5353   return t;
5354 }
5355 
5356 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE.  */
5357 
5358 tree
build_debug_expr_decl(tree type)5359 build_debug_expr_decl (tree type)
5360 {
5361   tree vexpr = make_node (DEBUG_EXPR_DECL);
5362   DECL_ARTIFICIAL (vexpr) = 1;
5363   TREE_TYPE (vexpr) = type;
5364   SET_DECL_MODE (vexpr, TYPE_MODE (type));
5365   return vexpr;
5366 }
5367 
5368 /* Builds and returns function declaration with NAME and TYPE.  */
5369 
5370 tree
build_fn_decl(const char * name,tree type)5371 build_fn_decl (const char *name, tree type)
5372 {
5373   tree id = get_identifier (name);
5374   tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5375 
5376   DECL_EXTERNAL (decl) = 1;
5377   TREE_PUBLIC (decl) = 1;
5378   DECL_ARTIFICIAL (decl) = 1;
5379   TREE_NOTHROW (decl) = 1;
5380 
5381   return decl;
5382 }
5383 
5384 vec<tree, va_gc> *all_translation_units;
5385 
5386 /* Builds a new translation-unit decl with name NAME, queues it in the
5387    global list of translation-unit decls and returns it.   */
5388 
5389 tree
build_translation_unit_decl(tree name)5390 build_translation_unit_decl (tree name)
5391 {
5392   tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5393 			name, NULL_TREE);
5394   TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5395   vec_safe_push (all_translation_units, tu);
5396   return tu;
5397 }
5398 
5399 
5400 /* BLOCK nodes are used to represent the structure of binding contours
5401    and declarations, once those contours have been exited and their contents
5402    compiled.  This information is used for outputting debugging info.  */
5403 
5404 tree
build_block(tree vars,tree subblocks,tree supercontext,tree chain)5405 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5406 {
5407   tree block = make_node (BLOCK);
5408 
5409   BLOCK_VARS (block) = vars;
5410   BLOCK_SUBBLOCKS (block) = subblocks;
5411   BLOCK_SUPERCONTEXT (block) = supercontext;
5412   BLOCK_CHAIN (block) = chain;
5413   return block;
5414 }
5415 
5416 
5417 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5418 
5419    LOC is the location to use in tree T.  */
5420 
5421 void
protected_set_expr_location(tree t,location_t loc)5422 protected_set_expr_location (tree t, location_t loc)
5423 {
5424   if (CAN_HAVE_LOCATION_P (t))
5425     SET_EXPR_LOCATION (t, loc);
5426   else if (t && TREE_CODE (t) == STATEMENT_LIST)
5427     {
5428       t = expr_single (t);
5429       if (t && CAN_HAVE_LOCATION_P (t))
5430 	SET_EXPR_LOCATION (t, loc);
5431     }
5432 }
5433 
5434 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5435    UNKNOWN_LOCATION.  */
5436 
5437 void
protected_set_expr_location_if_unset(tree t,location_t loc)5438 protected_set_expr_location_if_unset (tree t, location_t loc)
5439 {
5440   t = expr_single (t);
5441   if (t && !EXPR_HAS_LOCATION (t))
5442     protected_set_expr_location (t, loc);
5443 }
5444 
5445 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5446    of the various TYPE_QUAL values.  */
5447 
5448 static void
set_type_quals(tree type,int type_quals)5449 set_type_quals (tree type, int type_quals)
5450 {
5451   TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5452   TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5453   TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5454   TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5455   TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5456 }
5457 
5458 /* Returns true iff CAND and BASE have equivalent language-specific
5459    qualifiers.  */
5460 
5461 bool
check_lang_type(const_tree cand,const_tree base)5462 check_lang_type (const_tree cand, const_tree base)
5463 {
5464   if (lang_hooks.types.type_hash_eq == NULL)
5465     return true;
5466   /* type_hash_eq currently only applies to these types.  */
5467   if (TREE_CODE (cand) != FUNCTION_TYPE
5468       && TREE_CODE (cand) != METHOD_TYPE)
5469     return true;
5470   return lang_hooks.types.type_hash_eq (cand, base);
5471 }
5472 
5473 /* This function checks to see if TYPE matches the size one of the built-in
5474    atomic types, and returns that core atomic type.  */
5475 
5476 static tree
find_atomic_core_type(const_tree type)5477 find_atomic_core_type (const_tree type)
5478 {
5479   tree base_atomic_type;
5480 
5481   /* Only handle complete types.  */
5482   if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5483     return NULL_TREE;
5484 
5485   switch (tree_to_uhwi (TYPE_SIZE (type)))
5486     {
5487     case 8:
5488       base_atomic_type = atomicQI_type_node;
5489       break;
5490 
5491     case 16:
5492       base_atomic_type = atomicHI_type_node;
5493       break;
5494 
5495     case 32:
5496       base_atomic_type = atomicSI_type_node;
5497       break;
5498 
5499     case 64:
5500       base_atomic_type = atomicDI_type_node;
5501       break;
5502 
5503     case 128:
5504       base_atomic_type = atomicTI_type_node;
5505       break;
5506 
5507     default:
5508       base_atomic_type = NULL_TREE;
5509     }
5510 
5511   return base_atomic_type;
5512 }
5513 
5514 /* Returns true iff unqualified CAND and BASE are equivalent.  */
5515 
5516 bool
check_base_type(const_tree cand,const_tree base)5517 check_base_type (const_tree cand, const_tree base)
5518 {
5519   if (TYPE_NAME (cand) != TYPE_NAME (base)
5520       /* Apparently this is needed for Objective-C.  */
5521       || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5522       || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5523 			        TYPE_ATTRIBUTES (base)))
5524     return false;
5525   /* Check alignment.  */
5526   if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5527       && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5528     return true;
5529   /* Atomic types increase minimal alignment.  We must to do so as well
5530      or we get duplicated canonical types. See PR88686.  */
5531   if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5532     {
5533       /* See if this object can map to a basic atomic type.  */
5534       tree atomic_type = find_atomic_core_type (cand);
5535       if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5536        return true;
5537     }
5538   return false;
5539 }
5540 
5541 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */
5542 
5543 bool
check_qualified_type(const_tree cand,const_tree base,int type_quals)5544 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5545 {
5546   return (TYPE_QUALS (cand) == type_quals
5547 	  && check_base_type (cand, base)
5548 	  && check_lang_type (cand, base));
5549 }
5550 
5551 /* Returns true iff CAND is equivalent to BASE with ALIGN.  */
5552 
5553 static bool
check_aligned_type(const_tree cand,const_tree base,unsigned int align)5554 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5555 {
5556   return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5557 	  && TYPE_NAME (cand) == TYPE_NAME (base)
5558 	  /* Apparently this is needed for Objective-C.  */
5559 	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5560 	  /* Check alignment.  */
5561 	  && TYPE_ALIGN (cand) == align
5562 	  /* Check this is a user-aligned type as build_aligned_type
5563 	     would create.  */
5564 	  && TYPE_USER_ALIGN (cand)
5565 	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5566 				   TYPE_ATTRIBUTES (base))
5567 	  && check_lang_type (cand, base));
5568 }
5569 
5570 /* Return a version of the TYPE, qualified as indicated by the
5571    TYPE_QUALS, if one exists.  If no qualified version exists yet,
5572    return NULL_TREE.  */
5573 
5574 tree
get_qualified_type(tree type,int type_quals)5575 get_qualified_type (tree type, int type_quals)
5576 {
5577   if (TYPE_QUALS (type) == type_quals)
5578     return type;
5579 
5580   tree mv = TYPE_MAIN_VARIANT (type);
5581   if (check_qualified_type (mv, type, type_quals))
5582     return mv;
5583 
5584   /* Search the chain of variants to see if there is already one there just
5585      like the one we need to have.  If so, use that existing one.  We must
5586      preserve the TYPE_NAME, since there is code that depends on this.  */
5587   for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5588     if (check_qualified_type (*tp, type, type_quals))
5589       {
5590 	/* Put the found variant at the head of the variant list so
5591 	   frequently searched variants get found faster.  The C++ FE
5592 	   benefits greatly from this.  */
5593 	tree t = *tp;
5594 	*tp = TYPE_NEXT_VARIANT (t);
5595 	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5596 	TYPE_NEXT_VARIANT (mv) = t;
5597 	return t;
5598       }
5599 
5600   return NULL_TREE;
5601 }
5602 
5603 /* Like get_qualified_type, but creates the type if it does not
5604    exist.  This function never returns NULL_TREE.  */
5605 
5606 tree
build_qualified_type(tree type,int type_quals MEM_STAT_DECL)5607 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5608 {
5609   tree t;
5610 
5611   /* See if we already have the appropriate qualified variant.  */
5612   t = get_qualified_type (type, type_quals);
5613 
5614   /* If not, build it.  */
5615   if (!t)
5616     {
5617       t = build_variant_type_copy (type PASS_MEM_STAT);
5618       set_type_quals (t, type_quals);
5619 
5620       if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5621 	{
5622 	  /* See if this object can map to a basic atomic type.  */
5623 	  tree atomic_type = find_atomic_core_type (type);
5624 	  if (atomic_type)
5625 	    {
5626 	      /* Ensure the alignment of this type is compatible with
5627 		 the required alignment of the atomic type.  */
5628 	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5629 		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5630 	    }
5631 	}
5632 
5633       if (TYPE_STRUCTURAL_EQUALITY_P (type))
5634 	/* Propagate structural equality. */
5635 	SET_TYPE_STRUCTURAL_EQUALITY (t);
5636       else if (TYPE_CANONICAL (type) != type)
5637 	/* Build the underlying canonical type, since it is different
5638 	   from TYPE. */
5639 	{
5640 	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5641 	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5642 	}
5643       else
5644 	/* T is its own canonical type. */
5645 	TYPE_CANONICAL (t) = t;
5646 
5647     }
5648 
5649   return t;
5650 }
5651 
5652 /* Create a variant of type T with alignment ALIGN which
5653    is measured in bits.  */
5654 
5655 tree
build_aligned_type(tree type,unsigned int align)5656 build_aligned_type (tree type, unsigned int align)
5657 {
5658   tree t;
5659 
5660   if (TYPE_PACKED (type)
5661       || TYPE_ALIGN (type) == align)
5662     return type;
5663 
5664   for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5665     if (check_aligned_type (t, type, align))
5666       return t;
5667 
5668   t = build_variant_type_copy (type);
5669   SET_TYPE_ALIGN (t, align);
5670   TYPE_USER_ALIGN (t) = 1;
5671 
5672   return t;
5673 }
5674 
5675 /* Create a new distinct copy of TYPE.  The new type is made its own
5676    MAIN_VARIANT. If TYPE requires structural equality checks, the
5677    resulting type requires structural equality checks; otherwise, its
5678    TYPE_CANONICAL points to itself. */
5679 
5680 tree
build_distinct_type_copy(tree type MEM_STAT_DECL)5681 build_distinct_type_copy (tree type MEM_STAT_DECL)
5682 {
5683   tree t = copy_node (type PASS_MEM_STAT);
5684 
5685   TYPE_POINTER_TO (t) = 0;
5686   TYPE_REFERENCE_TO (t) = 0;
5687 
5688   /* Set the canonical type either to a new equivalence class, or
5689      propagate the need for structural equality checks. */
5690   if (TYPE_STRUCTURAL_EQUALITY_P (type))
5691     SET_TYPE_STRUCTURAL_EQUALITY (t);
5692   else
5693     TYPE_CANONICAL (t) = t;
5694 
5695   /* Make it its own variant.  */
5696   TYPE_MAIN_VARIANT (t) = t;
5697   TYPE_NEXT_VARIANT (t) = 0;
5698 
5699   /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5700      whose TREE_TYPE is not t.  This can also happen in the Ada
5701      frontend when using subtypes.  */
5702 
5703   return t;
5704 }
5705 
5706 /* Create a new variant of TYPE, equivalent but distinct.  This is so
5707    the caller can modify it. TYPE_CANONICAL for the return type will
5708    be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5709    are considered equal by the language itself (or that both types
5710    require structural equality checks). */
5711 
5712 tree
build_variant_type_copy(tree type MEM_STAT_DECL)5713 build_variant_type_copy (tree type MEM_STAT_DECL)
5714 {
5715   tree t, m = TYPE_MAIN_VARIANT (type);
5716 
5717   t = build_distinct_type_copy (type PASS_MEM_STAT);
5718 
5719   /* Since we're building a variant, assume that it is a non-semantic
5720      variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5721   TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5722   /* Type variants have no alias set defined.  */
5723   TYPE_ALIAS_SET (t) = -1;
5724 
5725   /* Add the new type to the chain of variants of TYPE.  */
5726   TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5727   TYPE_NEXT_VARIANT (m) = t;
5728   TYPE_MAIN_VARIANT (t) = m;
5729 
5730   return t;
5731 }
5732 
5733 /* Return true if the from tree in both tree maps are equal.  */
5734 
5735 int
tree_map_base_eq(const void * va,const void * vb)5736 tree_map_base_eq (const void *va, const void *vb)
5737 {
5738   const struct tree_map_base  *const a = (const struct tree_map_base *) va,
5739     *const b = (const struct tree_map_base *) vb;
5740   return (a->from == b->from);
5741 }
5742 
5743 /* Hash a from tree in a tree_base_map.  */
5744 
5745 unsigned int
tree_map_base_hash(const void * item)5746 tree_map_base_hash (const void *item)
5747 {
5748   return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5749 }
5750 
5751 /* Return true if this tree map structure is marked for garbage collection
5752    purposes.  We simply return true if the from tree is marked, so that this
5753    structure goes away when the from tree goes away.  */
5754 
5755 int
tree_map_base_marked_p(const void * p)5756 tree_map_base_marked_p (const void *p)
5757 {
5758   return ggc_marked_p (((const struct tree_map_base *) p)->from);
5759 }
5760 
5761 /* Hash a from tree in a tree_map.  */
5762 
5763 unsigned int
tree_map_hash(const void * item)5764 tree_map_hash (const void *item)
5765 {
5766   return (((const struct tree_map *) item)->hash);
5767 }
5768 
5769 /* Hash a from tree in a tree_decl_map.  */
5770 
5771 unsigned int
tree_decl_map_hash(const void * item)5772 tree_decl_map_hash (const void *item)
5773 {
5774   return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5775 }
5776 
5777 /* Return the initialization priority for DECL.  */
5778 
5779 priority_type
decl_init_priority_lookup(tree decl)5780 decl_init_priority_lookup (tree decl)
5781 {
5782   symtab_node *snode = symtab_node::get (decl);
5783 
5784   if (!snode)
5785     return DEFAULT_INIT_PRIORITY;
5786   return
5787     snode->get_init_priority ();
5788 }
5789 
5790 /* Return the finalization priority for DECL.  */
5791 
5792 priority_type
decl_fini_priority_lookup(tree decl)5793 decl_fini_priority_lookup (tree decl)
5794 {
5795   cgraph_node *node = cgraph_node::get (decl);
5796 
5797   if (!node)
5798     return DEFAULT_INIT_PRIORITY;
5799   return
5800     node->get_fini_priority ();
5801 }
5802 
5803 /* Set the initialization priority for DECL to PRIORITY.  */
5804 
5805 void
decl_init_priority_insert(tree decl,priority_type priority)5806 decl_init_priority_insert (tree decl, priority_type priority)
5807 {
5808   struct symtab_node *snode;
5809 
5810   if (priority == DEFAULT_INIT_PRIORITY)
5811     {
5812       snode = symtab_node::get (decl);
5813       if (!snode)
5814 	return;
5815     }
5816   else if (VAR_P (decl))
5817     snode = varpool_node::get_create (decl);
5818   else
5819     snode = cgraph_node::get_create (decl);
5820   snode->set_init_priority (priority);
5821 }
5822 
5823 /* Set the finalization priority for DECL to PRIORITY.  */
5824 
5825 void
decl_fini_priority_insert(tree decl,priority_type priority)5826 decl_fini_priority_insert (tree decl, priority_type priority)
5827 {
5828   struct cgraph_node *node;
5829 
5830   if (priority == DEFAULT_INIT_PRIORITY)
5831     {
5832       node = cgraph_node::get (decl);
5833       if (!node)
5834 	return;
5835     }
5836   else
5837     node = cgraph_node::get_create (decl);
5838   node->set_fini_priority (priority);
5839 }
5840 
5841 /* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */
5842 
5843 static void
print_debug_expr_statistics(void)5844 print_debug_expr_statistics (void)
5845 {
5846   fprintf (stderr, "DECL_DEBUG_EXPR  hash: size %ld, %ld elements, %f collisions\n",
5847 	   (long) debug_expr_for_decl->size (),
5848 	   (long) debug_expr_for_decl->elements (),
5849 	   debug_expr_for_decl->collisions ());
5850 }
5851 
5852 /* Print out the statistics for the DECL_VALUE_EXPR hash table.  */
5853 
5854 static void
print_value_expr_statistics(void)5855 print_value_expr_statistics (void)
5856 {
5857   fprintf (stderr, "DECL_VALUE_EXPR  hash: size %ld, %ld elements, %f collisions\n",
5858 	   (long) value_expr_for_decl->size (),
5859 	   (long) value_expr_for_decl->elements (),
5860 	   value_expr_for_decl->collisions ());
5861 }
5862 
5863 /* Lookup a debug expression for FROM, and return it if we find one.  */
5864 
5865 tree
decl_debug_expr_lookup(tree from)5866 decl_debug_expr_lookup (tree from)
5867 {
5868   struct tree_decl_map *h, in;
5869   in.base.from = from;
5870 
5871   h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5872   if (h)
5873     return h->to;
5874   return NULL_TREE;
5875 }
5876 
5877 /* Insert a mapping FROM->TO in the debug expression hashtable.  */
5878 
5879 void
decl_debug_expr_insert(tree from,tree to)5880 decl_debug_expr_insert (tree from, tree to)
5881 {
5882   struct tree_decl_map *h;
5883 
5884   h = ggc_alloc<tree_decl_map> ();
5885   h->base.from = from;
5886   h->to = to;
5887   *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5888 }
5889 
5890 /* Lookup a value expression for FROM, and return it if we find one.  */
5891 
5892 tree
decl_value_expr_lookup(tree from)5893 decl_value_expr_lookup (tree from)
5894 {
5895   struct tree_decl_map *h, in;
5896   in.base.from = from;
5897 
5898   h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5899   if (h)
5900     return h->to;
5901   return NULL_TREE;
5902 }
5903 
5904 /* Insert a mapping FROM->TO in the value expression hashtable.  */
5905 
5906 void
decl_value_expr_insert(tree from,tree to)5907 decl_value_expr_insert (tree from, tree to)
5908 {
5909   struct tree_decl_map *h;
5910 
5911   h = ggc_alloc<tree_decl_map> ();
5912   h->base.from = from;
5913   h->to = to;
5914   *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5915 }
5916 
5917 /* Lookup a vector of debug arguments for FROM, and return it if we
5918    find one.  */
5919 
5920 vec<tree, va_gc> **
decl_debug_args_lookup(tree from)5921 decl_debug_args_lookup (tree from)
5922 {
5923   struct tree_vec_map *h, in;
5924 
5925   if (!DECL_HAS_DEBUG_ARGS_P (from))
5926     return NULL;
5927   gcc_checking_assert (debug_args_for_decl != NULL);
5928   in.base.from = from;
5929   h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5930   if (h)
5931     return &h->to;
5932   return NULL;
5933 }
5934 
5935 /* Insert a mapping FROM->empty vector of debug arguments in the value
5936    expression hashtable.  */
5937 
5938 vec<tree, va_gc> **
decl_debug_args_insert(tree from)5939 decl_debug_args_insert (tree from)
5940 {
5941   struct tree_vec_map *h;
5942   tree_vec_map **loc;
5943 
5944   if (DECL_HAS_DEBUG_ARGS_P (from))
5945     return decl_debug_args_lookup (from);
5946   if (debug_args_for_decl == NULL)
5947     debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5948   h = ggc_alloc<tree_vec_map> ();
5949   h->base.from = from;
5950   h->to = NULL;
5951   loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5952   *loc = h;
5953   DECL_HAS_DEBUG_ARGS_P (from) = 1;
5954   return &h->to;
5955 }
5956 
5957 /* Hashing of types so that we don't make duplicates.
5958    The entry point is `type_hash_canon'.  */
5959 
5960 /* Generate the default hash code for TYPE.  This is designed for
5961    speed, rather than maximum entropy.  */
5962 
5963 hashval_t
type_hash_canon_hash(tree type)5964 type_hash_canon_hash (tree type)
5965 {
5966   inchash::hash hstate;
5967 
5968   hstate.add_int (TREE_CODE (type));
5969 
5970   if (TREE_TYPE (type))
5971     hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
5972 
5973   for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
5974     /* Just the identifier is adequate to distinguish.  */
5975     hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
5976 
5977   switch (TREE_CODE (type))
5978     {
5979     case METHOD_TYPE:
5980       hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
5981       /* FALLTHROUGH. */
5982     case FUNCTION_TYPE:
5983       for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
5984 	if (TREE_VALUE (t) != error_mark_node)
5985 	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
5986       break;
5987 
5988     case OFFSET_TYPE:
5989       hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
5990       break;
5991 
5992     case ARRAY_TYPE:
5993       {
5994 	if (TYPE_DOMAIN (type))
5995 	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
5996 	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
5997 	  {
5998 	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
5999 	    hstate.add_object (typeless);
6000 	  }
6001       }
6002       break;
6003 
6004     case INTEGER_TYPE:
6005       {
6006 	tree t = TYPE_MAX_VALUE (type);
6007 	if (!t)
6008 	  t = TYPE_MIN_VALUE (type);
6009 	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6010 	  hstate.add_object (TREE_INT_CST_ELT (t, i));
6011 	break;
6012       }
6013 
6014     case REAL_TYPE:
6015     case FIXED_POINT_TYPE:
6016       {
6017 	unsigned prec = TYPE_PRECISION (type);
6018 	hstate.add_object (prec);
6019 	break;
6020       }
6021 
6022     case VECTOR_TYPE:
6023       hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6024       break;
6025 
6026     default:
6027       break;
6028     }
6029 
6030   return hstate.end ();
6031 }
6032 
6033 /* These are the Hashtable callback functions.  */
6034 
6035 /* Returns true iff the types are equivalent.  */
6036 
6037 bool
equal(type_hash * a,type_hash * b)6038 type_cache_hasher::equal (type_hash *a, type_hash *b)
6039 {
6040   /* First test the things that are the same for all types.  */
6041   if (a->hash != b->hash
6042       || TREE_CODE (a->type) != TREE_CODE (b->type)
6043       || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6044       || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6045 				 TYPE_ATTRIBUTES (b->type))
6046       || (TREE_CODE (a->type) != COMPLEX_TYPE
6047           && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6048     return 0;
6049 
6050   /* Be careful about comparing arrays before and after the element type
6051      has been completed; don't compare TYPE_ALIGN unless both types are
6052      complete.  */
6053   if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6054       && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6055 	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6056     return 0;
6057 
6058   switch (TREE_CODE (a->type))
6059     {
6060     case VOID_TYPE:
6061     case OPAQUE_TYPE:
6062     case COMPLEX_TYPE:
6063     case POINTER_TYPE:
6064     case REFERENCE_TYPE:
6065     case NULLPTR_TYPE:
6066       return 1;
6067 
6068     case VECTOR_TYPE:
6069       return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6070 		       TYPE_VECTOR_SUBPARTS (b->type));
6071 
6072     case ENUMERAL_TYPE:
6073       if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6074 	  && !(TYPE_VALUES (a->type)
6075 	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6076 	       && TYPE_VALUES (b->type)
6077 	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6078 	       && type_list_equal (TYPE_VALUES (a->type),
6079 				   TYPE_VALUES (b->type))))
6080 	return 0;
6081 
6082       /* fall through */
6083 
6084     case INTEGER_TYPE:
6085     case REAL_TYPE:
6086     case BOOLEAN_TYPE:
6087       if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6088 	return false;
6089       return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6090 	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6091 				      TYPE_MAX_VALUE (b->type)))
6092 	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6093 		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6094 					 TYPE_MIN_VALUE (b->type))));
6095 
6096     case FIXED_POINT_TYPE:
6097       return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6098 
6099     case OFFSET_TYPE:
6100       return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6101 
6102     case METHOD_TYPE:
6103       if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6104 	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6105 	      || (TYPE_ARG_TYPES (a->type)
6106 		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6107 		  && TYPE_ARG_TYPES (b->type)
6108 		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6109 		  && type_list_equal (TYPE_ARG_TYPES (a->type),
6110 				      TYPE_ARG_TYPES (b->type)))))
6111         break;
6112       return 0;
6113     case ARRAY_TYPE:
6114       /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6115 	 where the flag should be inherited from the element type
6116 	 and can change after ARRAY_TYPEs are created; on non-aggregates
6117 	 compare it and hash it, scalars will never have that flag set
6118 	 and we need to differentiate between arrays created by different
6119 	 front-ends or middle-end created arrays.  */
6120       return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6121 	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6122 		  || (TYPE_TYPELESS_STORAGE (a->type)
6123 		      == TYPE_TYPELESS_STORAGE (b->type))));
6124 
6125     case RECORD_TYPE:
6126     case UNION_TYPE:
6127     case QUAL_UNION_TYPE:
6128       return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6129 	      || (TYPE_FIELDS (a->type)
6130 		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6131 		  && TYPE_FIELDS (b->type)
6132 		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6133 		  && type_list_equal (TYPE_FIELDS (a->type),
6134 				      TYPE_FIELDS (b->type))));
6135 
6136     case FUNCTION_TYPE:
6137       if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6138 	  || (TYPE_ARG_TYPES (a->type)
6139 	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6140 	      && TYPE_ARG_TYPES (b->type)
6141 	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6142 	      && type_list_equal (TYPE_ARG_TYPES (a->type),
6143 				  TYPE_ARG_TYPES (b->type))))
6144 	break;
6145       return 0;
6146 
6147     default:
6148       return 0;
6149     }
6150 
6151   if (lang_hooks.types.type_hash_eq != NULL)
6152     return lang_hooks.types.type_hash_eq (a->type, b->type);
6153 
6154   return 1;
6155 }
6156 
6157 /* Given TYPE, and HASHCODE its hash code, return the canonical
6158    object for an identical type if one already exists.
6159    Otherwise, return TYPE, and record it as the canonical object.
6160 
6161    To use this function, first create a type of the sort you want.
6162    Then compute its hash code from the fields of the type that
6163    make it different from other similar types.
6164    Then call this function and use the value.  */
6165 
6166 tree
type_hash_canon(unsigned int hashcode,tree type)6167 type_hash_canon (unsigned int hashcode, tree type)
6168 {
6169   type_hash in;
6170   type_hash **loc;
6171 
6172   /* The hash table only contains main variants, so ensure that's what we're
6173      being passed.  */
6174   gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6175 
6176   /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6177      must call that routine before comparing TYPE_ALIGNs.  */
6178   layout_type (type);
6179 
6180   in.hash = hashcode;
6181   in.type = type;
6182 
6183   loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6184   if (*loc)
6185     {
6186       tree t1 = ((type_hash *) *loc)->type;
6187       gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6188 		  && t1 != type);
6189       if (TYPE_UID (type) + 1 == next_type_uid)
6190 	--next_type_uid;
6191       /* Free also min/max values and the cache for integer
6192 	 types.  This can't be done in free_node, as LTO frees
6193 	 those on its own.  */
6194       if (TREE_CODE (type) == INTEGER_TYPE)
6195 	{
6196 	  if (TYPE_MIN_VALUE (type)
6197 	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6198 	    {
6199 	      /* Zero is always in TYPE_CACHED_VALUES.  */
6200 	      if (! TYPE_UNSIGNED (type))
6201 		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6202 	      ggc_free (TYPE_MIN_VALUE (type));
6203 	    }
6204 	  if (TYPE_MAX_VALUE (type)
6205 	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6206 	    {
6207 	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6208 	      ggc_free (TYPE_MAX_VALUE (type));
6209 	    }
6210 	  if (TYPE_CACHED_VALUES_P (type))
6211 	    ggc_free (TYPE_CACHED_VALUES (type));
6212 	}
6213       free_node (type);
6214       return t1;
6215     }
6216   else
6217     {
6218       struct type_hash *h;
6219 
6220       h = ggc_alloc<type_hash> ();
6221       h->hash = hashcode;
6222       h->type = type;
6223       *loc = h;
6224 
6225       return type;
6226     }
6227 }
6228 
6229 static void
print_type_hash_statistics(void)6230 print_type_hash_statistics (void)
6231 {
6232   fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6233 	   (long) type_hash_table->size (),
6234 	   (long) type_hash_table->elements (),
6235 	   type_hash_table->collisions ());
6236 }
6237 
6238 /* Given two lists of types
6239    (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6240    return 1 if the lists contain the same types in the same order.
6241    Also, the TREE_PURPOSEs must match.  */
6242 
6243 bool
type_list_equal(const_tree l1,const_tree l2)6244 type_list_equal (const_tree l1, const_tree l2)
6245 {
6246   const_tree t1, t2;
6247 
6248   for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6249     if (TREE_VALUE (t1) != TREE_VALUE (t2)
6250 	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6251 	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6252 		  && (TREE_TYPE (TREE_PURPOSE (t1))
6253 		      == TREE_TYPE (TREE_PURPOSE (t2))))))
6254       return false;
6255 
6256   return t1 == t2;
6257 }
6258 
6259 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6260    given by TYPE.  If the argument list accepts variable arguments,
6261    then this function counts only the ordinary arguments.  */
6262 
6263 int
type_num_arguments(const_tree fntype)6264 type_num_arguments (const_tree fntype)
6265 {
6266   int i = 0;
6267 
6268   for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6269     /* If the function does not take a variable number of arguments,
6270        the last element in the list will have type `void'.  */
6271     if (VOID_TYPE_P (TREE_VALUE (t)))
6272       break;
6273     else
6274       ++i;
6275 
6276   return i;
6277 }
6278 
6279 /* Return the type of the function TYPE's argument ARGNO if known.
6280    For vararg function's where ARGNO refers to one of the variadic
6281    arguments return null.  Otherwise, return a void_type_node for
6282    out-of-bounds ARGNO.  */
6283 
6284 tree
type_argument_type(const_tree fntype,unsigned argno)6285 type_argument_type (const_tree fntype, unsigned argno)
6286 {
6287   /* Treat zero the same as an out-of-bounds argument number.  */
6288   if (!argno)
6289     return void_type_node;
6290 
6291   function_args_iterator iter;
6292 
6293   tree argtype;
6294   unsigned i = 1;
6295   FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6296     {
6297       /* A vararg function's argument list ends in a null.  Otherwise,
6298 	 an ordinary function's argument list ends with void.  Return
6299 	 null if ARGNO refers to a vararg argument, void_type_node if
6300 	 it's out of bounds, and the formal argument type otherwise.  */
6301       if (!argtype)
6302 	break;
6303 
6304       if (i == argno || VOID_TYPE_P (argtype))
6305 	return argtype;
6306 
6307       ++i;
6308     }
6309 
6310   return NULL_TREE;
6311 }
6312 
6313 /* Nonzero if integer constants T1 and T2
6314    represent the same constant value.  */
6315 
6316 int
tree_int_cst_equal(const_tree t1,const_tree t2)6317 tree_int_cst_equal (const_tree t1, const_tree t2)
6318 {
6319   if (t1 == t2)
6320     return 1;
6321 
6322   if (t1 == 0 || t2 == 0)
6323     return 0;
6324 
6325   STRIP_ANY_LOCATION_WRAPPER (t1);
6326   STRIP_ANY_LOCATION_WRAPPER (t2);
6327 
6328   if (TREE_CODE (t1) == INTEGER_CST
6329       && TREE_CODE (t2) == INTEGER_CST
6330       && wi::to_widest (t1) == wi::to_widest (t2))
6331     return 1;
6332 
6333   return 0;
6334 }
6335 
6336 /* Return true if T is an INTEGER_CST whose numerical value (extended
6337    according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */
6338 
6339 bool
tree_fits_shwi_p(const_tree t)6340 tree_fits_shwi_p (const_tree t)
6341 {
6342   return (t != NULL_TREE
6343 	  && TREE_CODE (t) == INTEGER_CST
6344 	  && wi::fits_shwi_p (wi::to_widest (t)));
6345 }
6346 
6347 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6348    value (extended according to TYPE_UNSIGNED) fits in a poly_int64.  */
6349 
6350 bool
tree_fits_poly_int64_p(const_tree t)6351 tree_fits_poly_int64_p (const_tree t)
6352 {
6353   if (t == NULL_TREE)
6354     return false;
6355   if (POLY_INT_CST_P (t))
6356     {
6357       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6358 	if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6359 	  return false;
6360       return true;
6361     }
6362   return (TREE_CODE (t) == INTEGER_CST
6363 	  && wi::fits_shwi_p (wi::to_widest (t)));
6364 }
6365 
6366 /* Return true if T is an INTEGER_CST whose numerical value (extended
6367    according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */
6368 
6369 bool
tree_fits_uhwi_p(const_tree t)6370 tree_fits_uhwi_p (const_tree t)
6371 {
6372   return (t != NULL_TREE
6373 	  && TREE_CODE (t) == INTEGER_CST
6374 	  && wi::fits_uhwi_p (wi::to_widest (t)));
6375 }
6376 
6377 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6378    value (extended according to TYPE_UNSIGNED) fits in a poly_uint64.  */
6379 
6380 bool
tree_fits_poly_uint64_p(const_tree t)6381 tree_fits_poly_uint64_p (const_tree t)
6382 {
6383   if (t == NULL_TREE)
6384     return false;
6385   if (POLY_INT_CST_P (t))
6386     {
6387       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6388 	if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6389 	  return false;
6390       return true;
6391     }
6392   return (TREE_CODE (t) == INTEGER_CST
6393 	  && wi::fits_uhwi_p (wi::to_widest (t)));
6394 }
6395 
6396 /* T is an INTEGER_CST whose numerical value (extended according to
6397    TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
6398    HOST_WIDE_INT.  */
6399 
6400 HOST_WIDE_INT
tree_to_shwi(const_tree t)6401 tree_to_shwi (const_tree t)
6402 {
6403   gcc_assert (tree_fits_shwi_p (t));
6404   return TREE_INT_CST_LOW (t);
6405 }
6406 
6407 /* T is an INTEGER_CST whose numerical value (extended according to
6408    TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
6409    HOST_WIDE_INT.  */
6410 
6411 unsigned HOST_WIDE_INT
tree_to_uhwi(const_tree t)6412 tree_to_uhwi (const_tree t)
6413 {
6414   gcc_assert (tree_fits_uhwi_p (t));
6415   return TREE_INT_CST_LOW (t);
6416 }
6417 
6418 /* Return the most significant (sign) bit of T.  */
6419 
6420 int
tree_int_cst_sign_bit(const_tree t)6421 tree_int_cst_sign_bit (const_tree t)
6422 {
6423   unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6424 
6425   return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6426 }
6427 
6428 /* Return an indication of the sign of the integer constant T.
6429    The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6430    Note that -1 will never be returned if T's type is unsigned.  */
6431 
6432 int
tree_int_cst_sgn(const_tree t)6433 tree_int_cst_sgn (const_tree t)
6434 {
6435   if (wi::to_wide (t) == 0)
6436     return 0;
6437   else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6438     return 1;
6439   else if (wi::neg_p (wi::to_wide (t)))
6440     return -1;
6441   else
6442     return 1;
6443 }
6444 
6445 /* Return the minimum number of bits needed to represent VALUE in a
6446    signed or unsigned type, UNSIGNEDP says which.  */
6447 
6448 unsigned int
tree_int_cst_min_precision(tree value,signop sgn)6449 tree_int_cst_min_precision (tree value, signop sgn)
6450 {
6451   /* If the value is negative, compute its negative minus 1.  The latter
6452      adjustment is because the absolute value of the largest negative value
6453      is one larger than the largest positive value.  This is equivalent to
6454      a bit-wise negation, so use that operation instead.  */
6455 
6456   if (tree_int_cst_sgn (value) < 0)
6457     value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6458 
6459   /* Return the number of bits needed, taking into account the fact
6460      that we need one more bit for a signed than unsigned type.
6461      If value is 0 or -1, the minimum precision is 1 no matter
6462      whether unsignedp is true or false.  */
6463 
6464   if (integer_zerop (value))
6465     return 1;
6466   else
6467     return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
6468 }
6469 
6470 /* Return truthvalue of whether T1 is the same tree structure as T2.
6471    Return 1 if they are the same.
6472    Return 0 if they are understandably different.
6473    Return -1 if either contains tree structure not understood by
6474    this function.  */
6475 
6476 int
simple_cst_equal(const_tree t1,const_tree t2)6477 simple_cst_equal (const_tree t1, const_tree t2)
6478 {
6479   enum tree_code code1, code2;
6480   int cmp;
6481   int i;
6482 
6483   if (t1 == t2)
6484     return 1;
6485   if (t1 == 0 || t2 == 0)
6486     return 0;
6487 
6488   /* For location wrappers to be the same, they must be at the same
6489      source location (and wrap the same thing).  */
6490   if (location_wrapper_p (t1) && location_wrapper_p (t2))
6491     {
6492       if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6493 	return 0;
6494       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6495     }
6496 
6497   code1 = TREE_CODE (t1);
6498   code2 = TREE_CODE (t2);
6499 
6500   if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6501     {
6502       if (CONVERT_EXPR_CODE_P (code2)
6503 	  || code2 == NON_LVALUE_EXPR)
6504 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6505       else
6506 	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6507     }
6508 
6509   else if (CONVERT_EXPR_CODE_P (code2)
6510 	   || code2 == NON_LVALUE_EXPR)
6511     return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6512 
6513   if (code1 != code2)
6514     return 0;
6515 
6516   switch (code1)
6517     {
6518     case INTEGER_CST:
6519       return wi::to_widest (t1) == wi::to_widest (t2);
6520 
6521     case REAL_CST:
6522       return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6523 
6524     case FIXED_CST:
6525       return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6526 
6527     case STRING_CST:
6528       return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6529 	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6530 			 TREE_STRING_LENGTH (t1)));
6531 
6532     case CONSTRUCTOR:
6533       {
6534 	unsigned HOST_WIDE_INT idx;
6535 	vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6536 	vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6537 
6538 	if (vec_safe_length (v1) != vec_safe_length (v2))
6539 	  return false;
6540 
6541         for (idx = 0; idx < vec_safe_length (v1); ++idx)
6542 	  /* ??? Should we handle also fields here? */
6543 	  if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6544 	    return false;
6545 	return true;
6546       }
6547 
6548     case SAVE_EXPR:
6549       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6550 
6551     case CALL_EXPR:
6552       cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6553       if (cmp <= 0)
6554 	return cmp;
6555       if (call_expr_nargs (t1) != call_expr_nargs (t2))
6556 	return 0;
6557       {
6558 	const_tree arg1, arg2;
6559 	const_call_expr_arg_iterator iter1, iter2;
6560 	for (arg1 = first_const_call_expr_arg (t1, &iter1),
6561 	       arg2 = first_const_call_expr_arg (t2, &iter2);
6562 	     arg1 && arg2;
6563 	     arg1 = next_const_call_expr_arg (&iter1),
6564 	       arg2 = next_const_call_expr_arg (&iter2))
6565 	  {
6566 	    cmp = simple_cst_equal (arg1, arg2);
6567 	    if (cmp <= 0)
6568 	      return cmp;
6569 	  }
6570 	return arg1 == arg2;
6571       }
6572 
6573     case TARGET_EXPR:
6574       /* Special case: if either target is an unallocated VAR_DECL,
6575 	 it means that it's going to be unified with whatever the
6576 	 TARGET_EXPR is really supposed to initialize, so treat it
6577 	 as being equivalent to anything.  */
6578       if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6579 	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6580 	   && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6581 	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6582 	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6583 	      && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6584 	cmp = 1;
6585       else
6586 	cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6587 
6588       if (cmp <= 0)
6589 	return cmp;
6590 
6591       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6592 
6593     case WITH_CLEANUP_EXPR:
6594       cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6595       if (cmp <= 0)
6596 	return cmp;
6597 
6598       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
6599 
6600     case COMPONENT_REF:
6601       if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6602 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6603 
6604       return 0;
6605 
6606     case VAR_DECL:
6607     case PARM_DECL:
6608     case CONST_DECL:
6609     case FUNCTION_DECL:
6610       return 0;
6611 
6612     default:
6613       if (POLY_INT_CST_P (t1))
6614 	/* A false return means maybe_ne rather than known_ne.  */
6615 	return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6616 						TYPE_SIGN (TREE_TYPE (t1))),
6617 			 poly_widest_int::from (poly_int_cst_value (t2),
6618 						TYPE_SIGN (TREE_TYPE (t2))));
6619       break;
6620     }
6621 
6622   /* This general rule works for most tree codes.  All exceptions should be
6623      handled above.  If this is a language-specific tree code, we can't
6624      trust what might be in the operand, so say we don't know
6625      the situation.  */
6626   if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6627     return -1;
6628 
6629   switch (TREE_CODE_CLASS (code1))
6630     {
6631     case tcc_unary:
6632     case tcc_binary:
6633     case tcc_comparison:
6634     case tcc_expression:
6635     case tcc_reference:
6636     case tcc_statement:
6637       cmp = 1;
6638       for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6639 	{
6640 	  cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6641 	  if (cmp <= 0)
6642 	    return cmp;
6643 	}
6644 
6645       return cmp;
6646 
6647     default:
6648       return -1;
6649     }
6650 }
6651 
6652 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6653    Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6654    than U, respectively.  */
6655 
6656 int
compare_tree_int(const_tree t,unsigned HOST_WIDE_INT u)6657 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6658 {
6659   if (tree_int_cst_sgn (t) < 0)
6660     return -1;
6661   else if (!tree_fits_uhwi_p (t))
6662     return 1;
6663   else if (TREE_INT_CST_LOW (t) == u)
6664     return 0;
6665   else if (TREE_INT_CST_LOW (t) < u)
6666     return -1;
6667   else
6668     return 1;
6669 }
6670 
6671 /* Return true if SIZE represents a constant size that is in bounds of
6672    what the middle-end and the backend accepts (covering not more than
6673    half of the address-space).
6674    When PERR is non-null, set *PERR on failure to the description of
6675    why SIZE is not valid.  */
6676 
6677 bool
valid_constant_size_p(const_tree size,cst_size_error * perr)6678 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6679 {
6680   if (POLY_INT_CST_P (size))
6681     {
6682       if (TREE_OVERFLOW (size))
6683 	return false;
6684       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6685 	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6686 	  return false;
6687       return true;
6688     }
6689 
6690   cst_size_error error;
6691   if (!perr)
6692     perr = &error;
6693 
6694   if (TREE_CODE (size) != INTEGER_CST)
6695     {
6696       *perr = cst_size_not_constant;
6697       return false;
6698     }
6699 
6700   if (TREE_OVERFLOW_P (size))
6701     {
6702       *perr = cst_size_overflow;
6703       return false;
6704     }
6705 
6706   if (tree_int_cst_sgn (size) < 0)
6707     {
6708       *perr = cst_size_negative;
6709       return false;
6710     }
6711   if (!tree_fits_uhwi_p (size)
6712       || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6713 	  < wi::to_widest (size) * 2))
6714     {
6715       *perr = cst_size_too_big;
6716       return false;
6717     }
6718 
6719   return true;
6720 }
6721 
6722 /* Return the precision of the type, or for a complex or vector type the
6723    precision of the type of its elements.  */
6724 
6725 unsigned int
element_precision(const_tree type)6726 element_precision (const_tree type)
6727 {
6728   if (!TYPE_P (type))
6729     type = TREE_TYPE (type);
6730   enum tree_code code = TREE_CODE (type);
6731   if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6732     type = TREE_TYPE (type);
6733 
6734   return TYPE_PRECISION (type);
6735 }
6736 
6737 /* Return true if CODE represents an associative tree code.  Otherwise
6738    return false.  */
6739 bool
associative_tree_code(enum tree_code code)6740 associative_tree_code (enum tree_code code)
6741 {
6742   switch (code)
6743     {
6744     case BIT_IOR_EXPR:
6745     case BIT_AND_EXPR:
6746     case BIT_XOR_EXPR:
6747     case PLUS_EXPR:
6748     case MULT_EXPR:
6749     case MIN_EXPR:
6750     case MAX_EXPR:
6751       return true;
6752 
6753     default:
6754       break;
6755     }
6756   return false;
6757 }
6758 
6759 /* Return true if CODE represents a commutative tree code.  Otherwise
6760    return false.  */
6761 bool
commutative_tree_code(enum tree_code code)6762 commutative_tree_code (enum tree_code code)
6763 {
6764   switch (code)
6765     {
6766     case PLUS_EXPR:
6767     case MULT_EXPR:
6768     case MULT_HIGHPART_EXPR:
6769     case MIN_EXPR:
6770     case MAX_EXPR:
6771     case BIT_IOR_EXPR:
6772     case BIT_XOR_EXPR:
6773     case BIT_AND_EXPR:
6774     case NE_EXPR:
6775     case EQ_EXPR:
6776     case UNORDERED_EXPR:
6777     case ORDERED_EXPR:
6778     case UNEQ_EXPR:
6779     case LTGT_EXPR:
6780     case TRUTH_AND_EXPR:
6781     case TRUTH_XOR_EXPR:
6782     case TRUTH_OR_EXPR:
6783     case WIDEN_MULT_EXPR:
6784     case VEC_WIDEN_MULT_HI_EXPR:
6785     case VEC_WIDEN_MULT_LO_EXPR:
6786     case VEC_WIDEN_MULT_EVEN_EXPR:
6787     case VEC_WIDEN_MULT_ODD_EXPR:
6788       return true;
6789 
6790     default:
6791       break;
6792     }
6793   return false;
6794 }
6795 
6796 /* Return true if CODE represents a ternary tree code for which the
6797    first two operands are commutative.  Otherwise return false.  */
6798 bool
commutative_ternary_tree_code(enum tree_code code)6799 commutative_ternary_tree_code (enum tree_code code)
6800 {
6801   switch (code)
6802     {
6803     case WIDEN_MULT_PLUS_EXPR:
6804     case WIDEN_MULT_MINUS_EXPR:
6805     case DOT_PROD_EXPR:
6806       return true;
6807 
6808     default:
6809       break;
6810     }
6811   return false;
6812 }
6813 
6814 /* Returns true if CODE can overflow.  */
6815 
6816 bool
operation_can_overflow(enum tree_code code)6817 operation_can_overflow (enum tree_code code)
6818 {
6819   switch (code)
6820     {
6821     case PLUS_EXPR:
6822     case MINUS_EXPR:
6823     case MULT_EXPR:
6824     case LSHIFT_EXPR:
6825       /* Can overflow in various ways.  */
6826       return true;
6827     case TRUNC_DIV_EXPR:
6828     case EXACT_DIV_EXPR:
6829     case FLOOR_DIV_EXPR:
6830     case CEIL_DIV_EXPR:
6831       /* For INT_MIN / -1.  */
6832       return true;
6833     case NEGATE_EXPR:
6834     case ABS_EXPR:
6835       /* For -INT_MIN.  */
6836       return true;
6837     default:
6838       /* These operators cannot overflow.  */
6839       return false;
6840     }
6841 }
6842 
6843 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6844    ftrapv doesn't generate trapping insns for CODE.  */
6845 
6846 bool
operation_no_trapping_overflow(tree type,enum tree_code code)6847 operation_no_trapping_overflow (tree type, enum tree_code code)
6848 {
6849   gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6850 
6851   /* We don't generate instructions that trap on overflow for complex or vector
6852      types.  */
6853   if (!INTEGRAL_TYPE_P (type))
6854     return true;
6855 
6856   if (!TYPE_OVERFLOW_TRAPS (type))
6857     return true;
6858 
6859   switch (code)
6860     {
6861     case PLUS_EXPR:
6862     case MINUS_EXPR:
6863     case MULT_EXPR:
6864     case NEGATE_EXPR:
6865     case ABS_EXPR:
6866       /* These operators can overflow, and -ftrapv generates trapping code for
6867 	 these.  */
6868       return false;
6869     case TRUNC_DIV_EXPR:
6870     case EXACT_DIV_EXPR:
6871     case FLOOR_DIV_EXPR:
6872     case CEIL_DIV_EXPR:
6873     case LSHIFT_EXPR:
6874       /* These operators can overflow, but -ftrapv does not generate trapping
6875 	 code for these.  */
6876       return true;
6877     default:
6878       /* These operators cannot overflow.  */
6879       return true;
6880     }
6881 }
6882 
6883 /* Constructors for pointer, array and function types.
6884    (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6885    constructed by language-dependent code, not here.)  */
6886 
6887 /* Construct, lay out and return the type of pointers to TO_TYPE with
6888    mode MODE.  If MODE is VOIDmode, a pointer mode for the address
6889    space of TO_TYPE will be picked.  If CAN_ALIAS_ALL is TRUE,
6890    indicate this type can reference all of memory. If such a type has
6891    already been constructed, reuse it.  */
6892 
6893 tree
build_pointer_type_for_mode(tree to_type,machine_mode mode,bool can_alias_all)6894 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6895 			     bool can_alias_all)
6896 {
6897   tree t;
6898   bool could_alias = can_alias_all;
6899 
6900   if (to_type == error_mark_node)
6901     return error_mark_node;
6902 
6903   if (mode == VOIDmode)
6904     {
6905       addr_space_t as = TYPE_ADDR_SPACE (to_type);
6906       mode = targetm.addr_space.pointer_mode (as);
6907     }
6908 
6909   /* If the pointed-to type has the may_alias attribute set, force
6910      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
6911   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6912     can_alias_all = true;
6913 
6914   /* In some cases, languages will have things that aren't a POINTER_TYPE
6915      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6916      In that case, return that type without regard to the rest of our
6917      operands.
6918 
6919      ??? This is a kludge, but consistent with the way this function has
6920      always operated and there doesn't seem to be a good way to avoid this
6921      at the moment.  */
6922   if (TYPE_POINTER_TO (to_type) != 0
6923       && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6924     return TYPE_POINTER_TO (to_type);
6925 
6926   /* First, if we already have a type for pointers to TO_TYPE and it's
6927      the proper mode, use it.  */
6928   for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6929     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6930       return t;
6931 
6932   t = make_node (POINTER_TYPE);
6933 
6934   TREE_TYPE (t) = to_type;
6935   SET_TYPE_MODE (t, mode);
6936   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6937   TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6938   TYPE_POINTER_TO (to_type) = t;
6939 
6940   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
6941   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6942     SET_TYPE_STRUCTURAL_EQUALITY (t);
6943   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6944     TYPE_CANONICAL (t)
6945       = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6946 				     mode, false);
6947 
6948   /* Lay out the type.  This function has many callers that are concerned
6949      with expression-construction, and this simplifies them all.  */
6950   layout_type (t);
6951 
6952   return t;
6953 }
6954 
6955 /* By default build pointers in ptr_mode.  */
6956 
6957 tree
build_pointer_type(tree to_type)6958 build_pointer_type (tree to_type)
6959 {
6960   return build_pointer_type_for_mode (to_type, VOIDmode, false);
6961 }
6962 
6963 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */
6964 
6965 tree
build_reference_type_for_mode(tree to_type,machine_mode mode,bool can_alias_all)6966 build_reference_type_for_mode (tree to_type, machine_mode mode,
6967 			       bool can_alias_all)
6968 {
6969   tree t;
6970   bool could_alias = can_alias_all;
6971 
6972   if (to_type == error_mark_node)
6973     return error_mark_node;
6974 
6975   if (mode == VOIDmode)
6976     {
6977       addr_space_t as = TYPE_ADDR_SPACE (to_type);
6978       mode = targetm.addr_space.pointer_mode (as);
6979     }
6980 
6981   /* If the pointed-to type has the may_alias attribute set, force
6982      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
6983   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6984     can_alias_all = true;
6985 
6986   /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6987      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6988      In that case, return that type without regard to the rest of our
6989      operands.
6990 
6991      ??? This is a kludge, but consistent with the way this function has
6992      always operated and there doesn't seem to be a good way to avoid this
6993      at the moment.  */
6994   if (TYPE_REFERENCE_TO (to_type) != 0
6995       && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
6996     return TYPE_REFERENCE_TO (to_type);
6997 
6998   /* First, if we already have a type for pointers to TO_TYPE and it's
6999      the proper mode, use it.  */
7000   for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7001     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7002       return t;
7003 
7004   t = make_node (REFERENCE_TYPE);
7005 
7006   TREE_TYPE (t) = to_type;
7007   SET_TYPE_MODE (t, mode);
7008   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7009   TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7010   TYPE_REFERENCE_TO (to_type) = t;
7011 
7012   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
7013   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7014     SET_TYPE_STRUCTURAL_EQUALITY (t);
7015   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7016     TYPE_CANONICAL (t)
7017       = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7018 				       mode, false);
7019 
7020   layout_type (t);
7021 
7022   return t;
7023 }
7024 
7025 
7026 /* Build the node for the type of references-to-TO_TYPE by default
7027    in ptr_mode.  */
7028 
7029 tree
build_reference_type(tree to_type)7030 build_reference_type (tree to_type)
7031 {
7032   return build_reference_type_for_mode (to_type, VOIDmode, false);
7033 }
7034 
7035 #define MAX_INT_CACHED_PREC \
7036   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7037 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7038 
7039 static void
clear_nonstandard_integer_type_cache(void)7040 clear_nonstandard_integer_type_cache (void)
7041 {
7042   for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7043   {
7044     nonstandard_integer_type_cache[i] = NULL;
7045   }
7046 }
7047 
7048 /* Builds a signed or unsigned integer type of precision PRECISION.
7049    Used for C bitfields whose precision does not match that of
7050    built-in target types.  */
7051 tree
build_nonstandard_integer_type(unsigned HOST_WIDE_INT precision,int unsignedp)7052 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7053 				int unsignedp)
7054 {
7055   tree itype, ret;
7056 
7057   if (unsignedp)
7058     unsignedp = MAX_INT_CACHED_PREC + 1;
7059 
7060   if (precision <= MAX_INT_CACHED_PREC)
7061     {
7062       itype = nonstandard_integer_type_cache[precision + unsignedp];
7063       if (itype)
7064 	return itype;
7065     }
7066 
7067   itype = make_node (INTEGER_TYPE);
7068   TYPE_PRECISION (itype) = precision;
7069 
7070   if (unsignedp)
7071     fixup_unsigned_type (itype);
7072   else
7073     fixup_signed_type (itype);
7074 
7075   inchash::hash hstate;
7076   inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7077   ret = type_hash_canon (hstate.end (), itype);
7078   if (precision <= MAX_INT_CACHED_PREC)
7079     nonstandard_integer_type_cache[precision + unsignedp] = ret;
7080 
7081   return ret;
7082 }
7083 
7084 #define MAX_BOOL_CACHED_PREC \
7085   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7086 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7087 
7088 /* Builds a boolean type of precision PRECISION.
7089    Used for boolean vectors to choose proper vector element size.  */
7090 tree
build_nonstandard_boolean_type(unsigned HOST_WIDE_INT precision)7091 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7092 {
7093   tree type;
7094 
7095   if (precision <= MAX_BOOL_CACHED_PREC)
7096     {
7097       type = nonstandard_boolean_type_cache[precision];
7098       if (type)
7099 	return type;
7100     }
7101 
7102   type = make_node (BOOLEAN_TYPE);
7103   TYPE_PRECISION (type) = precision;
7104   fixup_signed_type (type);
7105 
7106   if (precision <= MAX_INT_CACHED_PREC)
7107     nonstandard_boolean_type_cache[precision] = type;
7108 
7109   return type;
7110 }
7111 
7112 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7113    or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
7114    is true, reuse such a type that has already been constructed.  */
7115 
7116 static tree
build_range_type_1(tree type,tree lowval,tree highval,bool shared)7117 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7118 {
7119   tree itype = make_node (INTEGER_TYPE);
7120 
7121   TREE_TYPE (itype) = type;
7122 
7123   TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7124   TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7125 
7126   TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7127   SET_TYPE_MODE (itype, TYPE_MODE (type));
7128   TYPE_SIZE (itype) = TYPE_SIZE (type);
7129   TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7130   SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7131   TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7132   SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7133 
7134   if (!shared)
7135     return itype;
7136 
7137   if ((TYPE_MIN_VALUE (itype)
7138        && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7139       || (TYPE_MAX_VALUE (itype)
7140 	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7141     {
7142       /* Since we cannot reliably merge this type, we need to compare it using
7143 	 structural equality checks.  */
7144       SET_TYPE_STRUCTURAL_EQUALITY (itype);
7145       return itype;
7146     }
7147 
7148   hashval_t hash = type_hash_canon_hash (itype);
7149   itype = type_hash_canon (hash, itype);
7150 
7151   return itype;
7152 }
7153 
7154 /* Wrapper around build_range_type_1 with SHARED set to true.  */
7155 
7156 tree
build_range_type(tree type,tree lowval,tree highval)7157 build_range_type (tree type, tree lowval, tree highval)
7158 {
7159   return build_range_type_1 (type, lowval, highval, true);
7160 }
7161 
7162 /* Wrapper around build_range_type_1 with SHARED set to false.  */
7163 
7164 tree
build_nonshared_range_type(tree type,tree lowval,tree highval)7165 build_nonshared_range_type (tree type, tree lowval, tree highval)
7166 {
7167   return build_range_type_1 (type, lowval, highval, false);
7168 }
7169 
7170 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7171    MAXVAL should be the maximum value in the domain
7172    (one less than the length of the array).
7173 
7174    The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7175    We don't enforce this limit, that is up to caller (e.g. language front end).
7176    The limit exists because the result is a signed type and we don't handle
7177    sizes that use more than one HOST_WIDE_INT.  */
7178 
7179 tree
build_index_type(tree maxval)7180 build_index_type (tree maxval)
7181 {
7182   return build_range_type (sizetype, size_zero_node, maxval);
7183 }
7184 
7185 /* Return true if the debug information for TYPE, a subtype, should be emitted
7186    as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
7187    high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
7188    debug info and doesn't reflect the source code.  */
7189 
7190 bool
subrange_type_for_debug_p(const_tree type,tree * lowval,tree * highval)7191 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7192 {
7193   tree base_type = TREE_TYPE (type), low, high;
7194 
7195   /* Subrange types have a base type which is an integral type.  */
7196   if (!INTEGRAL_TYPE_P (base_type))
7197     return false;
7198 
7199   /* Get the real bounds of the subtype.  */
7200   if (lang_hooks.types.get_subrange_bounds)
7201     lang_hooks.types.get_subrange_bounds (type, &low, &high);
7202   else
7203     {
7204       low = TYPE_MIN_VALUE (type);
7205       high = TYPE_MAX_VALUE (type);
7206     }
7207 
7208   /* If the type and its base type have the same representation and the same
7209      name, then the type is not a subrange but a copy of the base type.  */
7210   if ((TREE_CODE (base_type) == INTEGER_TYPE
7211        || TREE_CODE (base_type) == BOOLEAN_TYPE)
7212       && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7213       && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7214       && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7215       && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7216     return false;
7217 
7218   if (lowval)
7219     *lowval = low;
7220   if (highval)
7221     *highval = high;
7222   return true;
7223 }
7224 
7225 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7226    and number of elements specified by the range of values of INDEX_TYPE.
7227    If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7228    If SHARED is true, reuse such a type that has already been constructed.
7229    If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */
7230 
7231 tree
build_array_type_1(tree elt_type,tree index_type,bool typeless_storage,bool shared,bool set_canonical)7232 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7233 		    bool shared, bool set_canonical)
7234 {
7235   tree t;
7236 
7237   if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7238     {
7239       error ("arrays of functions are not meaningful");
7240       elt_type = integer_type_node;
7241     }
7242 
7243   t = make_node (ARRAY_TYPE);
7244   TREE_TYPE (t) = elt_type;
7245   TYPE_DOMAIN (t) = index_type;
7246   TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7247   TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7248   layout_type (t);
7249 
7250   if (shared)
7251     {
7252       hashval_t hash = type_hash_canon_hash (t);
7253       t = type_hash_canon (hash, t);
7254     }
7255 
7256   if (TYPE_CANONICAL (t) == t && set_canonical)
7257     {
7258       if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7259 	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7260 	  || in_lto_p)
7261 	SET_TYPE_STRUCTURAL_EQUALITY (t);
7262       else if (TYPE_CANONICAL (elt_type) != elt_type
7263 	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
7264 	TYPE_CANONICAL (t)
7265 	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
7266 				index_type
7267 				? TYPE_CANONICAL (index_type) : NULL_TREE,
7268 				typeless_storage, shared, set_canonical);
7269     }
7270 
7271   return t;
7272 }
7273 
7274 /* Wrapper around build_array_type_1 with SHARED set to true.  */
7275 
7276 tree
build_array_type(tree elt_type,tree index_type,bool typeless_storage)7277 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7278 {
7279   return
7280     build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7281 }
7282 
7283 /* Wrapper around build_array_type_1 with SHARED set to false.  */
7284 
7285 tree
build_nonshared_array_type(tree elt_type,tree index_type)7286 build_nonshared_array_type (tree elt_type, tree index_type)
7287 {
7288   return build_array_type_1 (elt_type, index_type, false, false, true);
7289 }
7290 
7291 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7292    sizetype.  */
7293 
7294 tree
build_array_type_nelts(tree elt_type,poly_uint64 nelts)7295 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7296 {
7297   return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7298 }
7299 
7300 /* Recursively examines the array elements of TYPE, until a non-array
7301    element type is found.  */
7302 
7303 tree
strip_array_types(tree type)7304 strip_array_types (tree type)
7305 {
7306   while (TREE_CODE (type) == ARRAY_TYPE)
7307     type = TREE_TYPE (type);
7308 
7309   return type;
7310 }
7311 
7312 /* Computes the canonical argument types from the argument type list
7313    ARGTYPES.
7314 
7315    Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7316    on entry to this function, or if any of the ARGTYPES are
7317    structural.
7318 
7319    Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7320    true on entry to this function, or if any of the ARGTYPES are
7321    non-canonical.
7322 
7323    Returns a canonical argument list, which may be ARGTYPES when the
7324    canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7325    true) or would not differ from ARGTYPES.  */
7326 
7327 static tree
maybe_canonicalize_argtypes(tree argtypes,bool * any_structural_p,bool * any_noncanonical_p)7328 maybe_canonicalize_argtypes (tree argtypes,
7329 			     bool *any_structural_p,
7330 			     bool *any_noncanonical_p)
7331 {
7332   tree arg;
7333   bool any_noncanonical_argtypes_p = false;
7334 
7335   for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7336     {
7337       if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7338 	/* Fail gracefully by stating that the type is structural.  */
7339 	*any_structural_p = true;
7340       else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7341 	*any_structural_p = true;
7342       else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7343 	       || TREE_PURPOSE (arg))
7344 	/* If the argument has a default argument, we consider it
7345 	   non-canonical even though the type itself is canonical.
7346 	   That way, different variants of function and method types
7347 	   with default arguments will all point to the variant with
7348 	   no defaults as their canonical type.  */
7349         any_noncanonical_argtypes_p = true;
7350     }
7351 
7352   if (*any_structural_p)
7353     return argtypes;
7354 
7355   if (any_noncanonical_argtypes_p)
7356     {
7357       /* Build the canonical list of argument types.  */
7358       tree canon_argtypes = NULL_TREE;
7359       bool is_void = false;
7360 
7361       for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7362         {
7363           if (arg == void_list_node)
7364             is_void = true;
7365           else
7366             canon_argtypes = tree_cons (NULL_TREE,
7367                                         TYPE_CANONICAL (TREE_VALUE (arg)),
7368                                         canon_argtypes);
7369         }
7370 
7371       canon_argtypes = nreverse (canon_argtypes);
7372       if (is_void)
7373         canon_argtypes = chainon (canon_argtypes, void_list_node);
7374 
7375       /* There is a non-canonical type.  */
7376       *any_noncanonical_p = true;
7377       return canon_argtypes;
7378     }
7379 
7380   /* The canonical argument types are the same as ARGTYPES.  */
7381   return argtypes;
7382 }
7383 
7384 /* Construct, lay out and return
7385    the type of functions returning type VALUE_TYPE
7386    given arguments of types ARG_TYPES.
7387    ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7388    are data type nodes for the arguments of the function.
7389    If such a type has already been constructed, reuse it.  */
7390 
7391 tree
build_function_type(tree value_type,tree arg_types)7392 build_function_type (tree value_type, tree arg_types)
7393 {
7394   tree t;
7395   inchash::hash hstate;
7396   bool any_structural_p, any_noncanonical_p;
7397   tree canon_argtypes;
7398 
7399   gcc_assert (arg_types != error_mark_node);
7400 
7401   if (TREE_CODE (value_type) == FUNCTION_TYPE)
7402     {
7403       error ("function return type cannot be function");
7404       value_type = integer_type_node;
7405     }
7406 
7407   /* Make a node of the sort we want.  */
7408   t = make_node (FUNCTION_TYPE);
7409   TREE_TYPE (t) = value_type;
7410   TYPE_ARG_TYPES (t) = arg_types;
7411 
7412   /* If we already have such a type, use the old one.  */
7413   hashval_t hash = type_hash_canon_hash (t);
7414   t = type_hash_canon (hash, t);
7415 
7416   /* Set up the canonical type. */
7417   any_structural_p   = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7418   any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7419   canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7420 						&any_structural_p,
7421 						&any_noncanonical_p);
7422   if (any_structural_p)
7423     SET_TYPE_STRUCTURAL_EQUALITY (t);
7424   else if (any_noncanonical_p)
7425     TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7426 					      canon_argtypes);
7427 
7428   if (!COMPLETE_TYPE_P (t))
7429     layout_type (t);
7430   return t;
7431 }
7432 
7433 /* Build a function type.  The RETURN_TYPE is the type returned by the
7434    function.  If VAARGS is set, no void_type_node is appended to the
7435    list.  ARGP must be always be terminated be a NULL_TREE.  */
7436 
7437 static tree
build_function_type_list_1(bool vaargs,tree return_type,va_list argp)7438 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7439 {
7440   tree t, args, last;
7441 
7442   t = va_arg (argp, tree);
7443   for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7444     args = tree_cons (NULL_TREE, t, args);
7445 
7446   if (vaargs)
7447     {
7448       last = args;
7449       if (args != NULL_TREE)
7450 	args = nreverse (args);
7451       gcc_assert (last != void_list_node);
7452     }
7453   else if (args == NULL_TREE)
7454     args = void_list_node;
7455   else
7456     {
7457       last = args;
7458       args = nreverse (args);
7459       TREE_CHAIN (last) = void_list_node;
7460     }
7461   args = build_function_type (return_type, args);
7462 
7463   return args;
7464 }
7465 
7466 /* Build a function type.  The RETURN_TYPE is the type returned by the
7467    function.  If additional arguments are provided, they are
7468    additional argument types.  The list of argument types must always
7469    be terminated by NULL_TREE.  */
7470 
7471 tree
build_function_type_list(tree return_type,...)7472 build_function_type_list (tree return_type, ...)
7473 {
7474   tree args;
7475   va_list p;
7476 
7477   va_start (p, return_type);
7478   args = build_function_type_list_1 (false, return_type, p);
7479   va_end (p);
7480   return args;
7481 }
7482 
7483 /* Build a variable argument function type.  The RETURN_TYPE is the
7484    type returned by the function.  If additional arguments are provided,
7485    they are additional argument types.  The list of argument types must
7486    always be terminated by NULL_TREE.  */
7487 
7488 tree
build_varargs_function_type_list(tree return_type,...)7489 build_varargs_function_type_list (tree return_type, ...)
7490 {
7491   tree args;
7492   va_list p;
7493 
7494   va_start (p, return_type);
7495   args = build_function_type_list_1 (true, return_type, p);
7496   va_end (p);
7497 
7498   return args;
7499 }
7500 
7501 /* Build a function type.  RETURN_TYPE is the type returned by the
7502    function; VAARGS indicates whether the function takes varargs.  The
7503    function takes N named arguments, the types of which are provided in
7504    ARG_TYPES.  */
7505 
7506 static tree
build_function_type_array_1(bool vaargs,tree return_type,int n,tree * arg_types)7507 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7508 			     tree *arg_types)
7509 {
7510   int i;
7511   tree t = vaargs ? NULL_TREE : void_list_node;
7512 
7513   for (i = n - 1; i >= 0; i--)
7514     t = tree_cons (NULL_TREE, arg_types[i], t);
7515 
7516   return build_function_type (return_type, t);
7517 }
7518 
7519 /* Build a function type.  RETURN_TYPE is the type returned by the
7520    function.  The function takes N named arguments, the types of which
7521    are provided in ARG_TYPES.  */
7522 
7523 tree
build_function_type_array(tree return_type,int n,tree * arg_types)7524 build_function_type_array (tree return_type, int n, tree *arg_types)
7525 {
7526   return build_function_type_array_1 (false, return_type, n, arg_types);
7527 }
7528 
7529 /* Build a variable argument function type.  RETURN_TYPE is the type
7530    returned by the function.  The function takes N named arguments, the
7531    types of which are provided in ARG_TYPES.  */
7532 
7533 tree
build_varargs_function_type_array(tree return_type,int n,tree * arg_types)7534 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7535 {
7536   return build_function_type_array_1 (true, return_type, n, arg_types);
7537 }
7538 
7539 /* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
7540    and ARGTYPES (a TREE_LIST) are the return type and arguments types
7541    for the method.  An implicit additional parameter (of type
7542    pointer-to-BASETYPE) is added to the ARGTYPES.  */
7543 
7544 tree
build_method_type_directly(tree basetype,tree rettype,tree argtypes)7545 build_method_type_directly (tree basetype,
7546 			    tree rettype,
7547 			    tree argtypes)
7548 {
7549   tree t;
7550   tree ptype;
7551   bool any_structural_p, any_noncanonical_p;
7552   tree canon_argtypes;
7553 
7554   /* Make a node of the sort we want.  */
7555   t = make_node (METHOD_TYPE);
7556 
7557   TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7558   TREE_TYPE (t) = rettype;
7559   ptype = build_pointer_type (basetype);
7560 
7561   /* The actual arglist for this function includes a "hidden" argument
7562      which is "this".  Put it into the list of argument types.  */
7563   argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7564   TYPE_ARG_TYPES (t) = argtypes;
7565 
7566   /* If we already have such a type, use the old one.  */
7567   hashval_t hash = type_hash_canon_hash (t);
7568   t = type_hash_canon (hash, t);
7569 
7570   /* Set up the canonical type. */
7571   any_structural_p
7572     = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7573        || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7574   any_noncanonical_p
7575     = (TYPE_CANONICAL (basetype) != basetype
7576        || TYPE_CANONICAL (rettype) != rettype);
7577   canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7578 						&any_structural_p,
7579 						&any_noncanonical_p);
7580   if (any_structural_p)
7581     SET_TYPE_STRUCTURAL_EQUALITY (t);
7582   else if (any_noncanonical_p)
7583     TYPE_CANONICAL (t)
7584       = build_method_type_directly (TYPE_CANONICAL (basetype),
7585 				    TYPE_CANONICAL (rettype),
7586 				    canon_argtypes);
7587   if (!COMPLETE_TYPE_P (t))
7588     layout_type (t);
7589 
7590   return t;
7591 }
7592 
7593 /* Construct, lay out and return the type of methods belonging to class
7594    BASETYPE and whose arguments and values are described by TYPE.
7595    If that type exists already, reuse it.
7596    TYPE must be a FUNCTION_TYPE node.  */
7597 
7598 tree
build_method_type(tree basetype,tree type)7599 build_method_type (tree basetype, tree type)
7600 {
7601   gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7602 
7603   return build_method_type_directly (basetype,
7604 				     TREE_TYPE (type),
7605 				     TYPE_ARG_TYPES (type));
7606 }
7607 
7608 /* Construct, lay out and return the type of offsets to a value
7609    of type TYPE, within an object of type BASETYPE.
7610    If a suitable offset type exists already, reuse it.  */
7611 
7612 tree
build_offset_type(tree basetype,tree type)7613 build_offset_type (tree basetype, tree type)
7614 {
7615   tree t;
7616 
7617   /* Make a node of the sort we want.  */
7618   t = make_node (OFFSET_TYPE);
7619 
7620   TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7621   TREE_TYPE (t) = type;
7622 
7623   /* If we already have such a type, use the old one.  */
7624   hashval_t hash = type_hash_canon_hash (t);
7625   t = type_hash_canon (hash, t);
7626 
7627   if (!COMPLETE_TYPE_P (t))
7628     layout_type (t);
7629 
7630   if (TYPE_CANONICAL (t) == t)
7631     {
7632       if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7633 	  || TYPE_STRUCTURAL_EQUALITY_P (type))
7634 	SET_TYPE_STRUCTURAL_EQUALITY (t);
7635       else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7636 	       || TYPE_CANONICAL (type) != type)
7637 	TYPE_CANONICAL (t)
7638 	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7639 			       TYPE_CANONICAL (type));
7640     }
7641 
7642   return t;
7643 }
7644 
7645 /* Create a complex type whose components are COMPONENT_TYPE.
7646 
7647    If NAMED is true, the type is given a TYPE_NAME.  We do not always
7648    do so because this creates a DECL node and thus make the DECL_UIDs
7649    dependent on the type canonicalization hashtable, which is GC-ed,
7650    so the DECL_UIDs would not be stable wrt garbage collection.  */
7651 
7652 tree
build_complex_type(tree component_type,bool named)7653 build_complex_type (tree component_type, bool named)
7654 {
7655   gcc_assert (INTEGRAL_TYPE_P (component_type)
7656 	      || SCALAR_FLOAT_TYPE_P (component_type)
7657 	      || FIXED_POINT_TYPE_P (component_type));
7658 
7659   /* Make a node of the sort we want.  */
7660   tree probe = make_node (COMPLEX_TYPE);
7661 
7662   TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7663 
7664   /* If we already have such a type, use the old one.  */
7665   hashval_t hash = type_hash_canon_hash (probe);
7666   tree t = type_hash_canon (hash, probe);
7667 
7668   if (t == probe)
7669     {
7670       /* We created a new type.  The hash insertion will have laid
7671 	 out the type.  We need to check the canonicalization and
7672 	 maybe set the name.  */
7673       gcc_checking_assert (COMPLETE_TYPE_P (t)
7674 			   && !TYPE_NAME (t)
7675 			   && TYPE_CANONICAL (t) == t);
7676 
7677       if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7678 	SET_TYPE_STRUCTURAL_EQUALITY (t);
7679       else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7680 	TYPE_CANONICAL (t)
7681 	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7682 
7683       /* We need to create a name, since complex is a fundamental type.  */
7684       if (named)
7685 	{
7686 	  const char *name = NULL;
7687 
7688 	  if (TREE_TYPE (t) == char_type_node)
7689 	    name = "complex char";
7690 	  else if (TREE_TYPE (t) == signed_char_type_node)
7691 	    name = "complex signed char";
7692 	  else if (TREE_TYPE (t) == unsigned_char_type_node)
7693 	    name = "complex unsigned char";
7694 	  else if (TREE_TYPE (t) == short_integer_type_node)
7695 	    name = "complex short int";
7696 	  else if (TREE_TYPE (t) == short_unsigned_type_node)
7697 	    name = "complex short unsigned int";
7698 	  else if (TREE_TYPE (t) == integer_type_node)
7699 	    name = "complex int";
7700 	  else if (TREE_TYPE (t) == unsigned_type_node)
7701 	    name = "complex unsigned int";
7702 	  else if (TREE_TYPE (t) == long_integer_type_node)
7703 	    name = "complex long int";
7704 	  else if (TREE_TYPE (t) == long_unsigned_type_node)
7705 	    name = "complex long unsigned int";
7706 	  else if (TREE_TYPE (t) == long_long_integer_type_node)
7707 	    name = "complex long long int";
7708 	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7709 	    name = "complex long long unsigned int";
7710 
7711 	  if (name != NULL)
7712 	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7713 					get_identifier (name), t);
7714 	}
7715     }
7716 
7717   return build_qualified_type (t, TYPE_QUALS (component_type));
7718 }
7719 
7720 /* If TYPE is a real or complex floating-point type and the target
7721    does not directly support arithmetic on TYPE then return the wider
7722    type to be used for arithmetic on TYPE.  Otherwise, return
7723    NULL_TREE.  */
7724 
7725 tree
excess_precision_type(tree type)7726 excess_precision_type (tree type)
7727 {
7728   /* The target can give two different responses to the question of
7729      which excess precision mode it would like depending on whether we
7730      are in -fexcess-precision=standard or -fexcess-precision=fast.  */
7731 
7732   enum excess_precision_type requested_type
7733     = (flag_excess_precision == EXCESS_PRECISION_FAST
7734        ? EXCESS_PRECISION_TYPE_FAST
7735        : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7736 	  ? EXCESS_PRECISION_TYPE_FLOAT16 :EXCESS_PRECISION_TYPE_STANDARD));
7737 
7738   enum flt_eval_method target_flt_eval_method
7739     = targetm.c.excess_precision (requested_type);
7740 
7741   /* The target should not ask for unpredictable float evaluation (though
7742      it might advertise that implicitly the evaluation is unpredictable,
7743      but we don't care about that here, it will have been reported
7744      elsewhere).  If it does ask for unpredictable evaluation, we have
7745      nothing to do here.  */
7746   gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7747 
7748   /* Nothing to do.  The target has asked for all types we know about
7749      to be computed with their native precision and range.  */
7750   if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7751     return NULL_TREE;
7752 
7753   /* The target will promote this type in a target-dependent way, so excess
7754      precision ought to leave it alone.  */
7755   if (targetm.promoted_type (type) != NULL_TREE)
7756     return NULL_TREE;
7757 
7758   machine_mode float16_type_mode = (float16_type_node
7759 				    ? TYPE_MODE (float16_type_node)
7760 				    : VOIDmode);
7761   machine_mode float_type_mode = TYPE_MODE (float_type_node);
7762   machine_mode double_type_mode = TYPE_MODE (double_type_node);
7763 
7764   switch (TREE_CODE (type))
7765     {
7766     case REAL_TYPE:
7767       {
7768 	machine_mode type_mode = TYPE_MODE (type);
7769 	switch (target_flt_eval_method)
7770 	  {
7771 	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7772 	    if (type_mode == float16_type_mode)
7773 	      return float_type_node;
7774 	    break;
7775 	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7776 	    if (type_mode == float16_type_mode
7777 		|| type_mode == float_type_mode)
7778 	      return double_type_node;
7779 	    break;
7780 	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7781 	    if (type_mode == float16_type_mode
7782 		|| type_mode == float_type_mode
7783 		|| type_mode == double_type_mode)
7784 	      return long_double_type_node;
7785 	    break;
7786 	  default:
7787 	    gcc_unreachable ();
7788 	  }
7789 	break;
7790       }
7791     case COMPLEX_TYPE:
7792       {
7793 	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7794 	  return NULL_TREE;
7795 	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7796 	switch (target_flt_eval_method)
7797 	  {
7798 	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7799 	    if (type_mode == float16_type_mode)
7800 	      return complex_float_type_node;
7801 	    break;
7802 	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7803 	    if (type_mode == float16_type_mode
7804 		|| type_mode == float_type_mode)
7805 	      return complex_double_type_node;
7806 	    break;
7807 	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7808 	    if (type_mode == float16_type_mode
7809 		|| type_mode == float_type_mode
7810 		|| type_mode == double_type_mode)
7811 	      return complex_long_double_type_node;
7812 	    break;
7813 	  default:
7814 	    gcc_unreachable ();
7815 	  }
7816 	break;
7817       }
7818     default:
7819       break;
7820     }
7821 
7822   return NULL_TREE;
7823 }
7824 
7825 /* Return OP, stripped of any conversions to wider types as much as is safe.
7826    Converting the value back to OP's type makes a value equivalent to OP.
7827 
7828    If FOR_TYPE is nonzero, we return a value which, if converted to
7829    type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7830 
7831    OP must have integer, real or enumeral type.  Pointers are not allowed!
7832 
7833    There are some cases where the obvious value we could return
7834    would regenerate to OP if converted to OP's type,
7835    but would not extend like OP to wider types.
7836    If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7837    For example, if OP is (unsigned short)(signed char)-1,
7838    we avoid returning (signed char)-1 if FOR_TYPE is int,
7839    even though extending that to an unsigned short would regenerate OP,
7840    since the result of extending (signed char)-1 to (int)
7841    is different from (int) OP.  */
7842 
7843 tree
get_unwidened(tree op,tree for_type)7844 get_unwidened (tree op, tree for_type)
7845 {
7846   /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
7847   tree type = TREE_TYPE (op);
7848   unsigned final_prec
7849     = TYPE_PRECISION (for_type != 0 ? for_type : type);
7850   int uns
7851     = (for_type != 0 && for_type != type
7852        && final_prec > TYPE_PRECISION (type)
7853        && TYPE_UNSIGNED (type));
7854   tree win = op;
7855 
7856   while (CONVERT_EXPR_P (op))
7857     {
7858       int bitschange;
7859 
7860       /* TYPE_PRECISION on vector types has different meaning
7861 	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7862 	 so avoid them here.  */
7863       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7864 	break;
7865 
7866       bitschange = TYPE_PRECISION (TREE_TYPE (op))
7867 		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7868 
7869       /* Truncations are many-one so cannot be removed.
7870 	 Unless we are later going to truncate down even farther.  */
7871       if (bitschange < 0
7872 	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7873 	break;
7874 
7875       /* See what's inside this conversion.  If we decide to strip it,
7876 	 we will set WIN.  */
7877       op = TREE_OPERAND (op, 0);
7878 
7879       /* If we have not stripped any zero-extensions (uns is 0),
7880 	 we can strip any kind of extension.
7881 	 If we have previously stripped a zero-extension,
7882 	 only zero-extensions can safely be stripped.
7883 	 Any extension can be stripped if the bits it would produce
7884 	 are all going to be discarded later by truncating to FOR_TYPE.  */
7885 
7886       if (bitschange > 0)
7887 	{
7888 	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7889 	    win = op;
7890 	  /* TYPE_UNSIGNED says whether this is a zero-extension.
7891 	     Let's avoid computing it if it does not affect WIN
7892 	     and if UNS will not be needed again.  */
7893 	  if ((uns
7894 	       || CONVERT_EXPR_P (op))
7895 	      && TYPE_UNSIGNED (TREE_TYPE (op)))
7896 	    {
7897 	      uns = 1;
7898 	      win = op;
7899 	    }
7900 	}
7901     }
7902 
7903   /* If we finally reach a constant see if it fits in sth smaller and
7904      in that case convert it.  */
7905   if (TREE_CODE (win) == INTEGER_CST)
7906     {
7907       tree wtype = TREE_TYPE (win);
7908       unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
7909       if (for_type)
7910 	prec = MAX (prec, final_prec);
7911       if (prec < TYPE_PRECISION (wtype))
7912 	{
7913 	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
7914 	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
7915 	    win = fold_convert (t, win);
7916 	}
7917     }
7918 
7919   return win;
7920 }
7921 
7922 /* Return OP or a simpler expression for a narrower value
7923    which can be sign-extended or zero-extended to give back OP.
7924    Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7925    or 0 if the value should be sign-extended.  */
7926 
7927 tree
get_narrower(tree op,int * unsignedp_ptr)7928 get_narrower (tree op, int *unsignedp_ptr)
7929 {
7930   int uns = 0;
7931   int first = 1;
7932   tree win = op;
7933   bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
7934 
7935   if (TREE_CODE (op) == COMPOUND_EXPR)
7936     {
7937       do
7938 	op = TREE_OPERAND (op, 1);
7939       while (TREE_CODE (op) == COMPOUND_EXPR);
7940       tree ret = get_narrower (op, unsignedp_ptr);
7941       if (ret == op)
7942 	return win;
7943       auto_vec <tree, 16> v;
7944       unsigned int i;
7945       for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
7946 	   op = TREE_OPERAND (op, 1))
7947 	v.safe_push (op);
7948       FOR_EACH_VEC_ELT_REVERSE (v, i, op)
7949 	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
7950 			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
7951 			  ret);
7952       return ret;
7953     }
7954   while (TREE_CODE (op) == NOP_EXPR)
7955     {
7956       int bitschange
7957 	= (TYPE_PRECISION (TREE_TYPE (op))
7958 	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
7959 
7960       /* Truncations are many-one so cannot be removed.  */
7961       if (bitschange < 0)
7962 	break;
7963 
7964       /* See what's inside this conversion.  If we decide to strip it,
7965 	 we will set WIN.  */
7966 
7967       if (bitschange > 0)
7968 	{
7969 	  op = TREE_OPERAND (op, 0);
7970 	  /* An extension: the outermost one can be stripped,
7971 	     but remember whether it is zero or sign extension.  */
7972 	  if (first)
7973 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
7974 	  /* Otherwise, if a sign extension has been stripped,
7975 	     only sign extensions can now be stripped;
7976 	     if a zero extension has been stripped, only zero-extensions.  */
7977 	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
7978 	    break;
7979 	  first = 0;
7980 	}
7981       else /* bitschange == 0 */
7982 	{
7983 	  /* A change in nominal type can always be stripped, but we must
7984 	     preserve the unsignedness.  */
7985 	  if (first)
7986 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
7987 	  first = 0;
7988 	  op = TREE_OPERAND (op, 0);
7989 	  /* Keep trying to narrow, but don't assign op to win if it
7990 	     would turn an integral type into something else.  */
7991 	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
7992 	    continue;
7993 	}
7994 
7995       win = op;
7996     }
7997 
7998   if (TREE_CODE (op) == COMPONENT_REF
7999       /* Since type_for_size always gives an integer type.  */
8000       && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8001       && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8002       /* Ensure field is laid out already.  */
8003       && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8004       && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8005     {
8006       unsigned HOST_WIDE_INT innerprec
8007 	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8008       int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8009 		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8010       tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8011 
8012       /* We can get this structure field in a narrower type that fits it,
8013 	 but the resulting extension to its nominal type (a fullword type)
8014 	 must satisfy the same conditions as for other extensions.
8015 
8016 	 Do this only for fields that are aligned (not bit-fields),
8017 	 because when bit-field insns will be used there is no
8018 	 advantage in doing this.  */
8019 
8020       if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8021 	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8022 	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8023 	  && type != 0)
8024 	{
8025 	  if (first)
8026 	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8027 	  win = fold_convert (type, op);
8028 	}
8029     }
8030 
8031   *unsignedp_ptr = uns;
8032   return win;
8033 }
8034 
8035 /* Return true if integer constant C has a value that is permissible
8036    for TYPE, an integral type.  */
8037 
8038 bool
int_fits_type_p(const_tree c,const_tree type)8039 int_fits_type_p (const_tree c, const_tree type)
8040 {
8041   tree type_low_bound, type_high_bound;
8042   bool ok_for_low_bound, ok_for_high_bound;
8043   signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8044 
8045   /* Non-standard boolean types can have arbitrary precision but various
8046      transformations assume that they can only take values 0 and +/-1.  */
8047   if (TREE_CODE (type) == BOOLEAN_TYPE)
8048     return wi::fits_to_boolean_p (wi::to_wide (c), type);
8049 
8050 retry:
8051   type_low_bound = TYPE_MIN_VALUE (type);
8052   type_high_bound = TYPE_MAX_VALUE (type);
8053 
8054   /* If at least one bound of the type is a constant integer, we can check
8055      ourselves and maybe make a decision. If no such decision is possible, but
8056      this type is a subtype, try checking against that.  Otherwise, use
8057      fits_to_tree_p, which checks against the precision.
8058 
8059      Compute the status for each possibly constant bound, and return if we see
8060      one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8061      for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8062      for "constant known to fit".  */
8063 
8064   /* Check if c >= type_low_bound.  */
8065   if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8066     {
8067       if (tree_int_cst_lt (c, type_low_bound))
8068 	return false;
8069       ok_for_low_bound = true;
8070     }
8071   else
8072     ok_for_low_bound = false;
8073 
8074   /* Check if c <= type_high_bound.  */
8075   if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8076     {
8077       if (tree_int_cst_lt (type_high_bound, c))
8078 	return false;
8079       ok_for_high_bound = true;
8080     }
8081   else
8082     ok_for_high_bound = false;
8083 
8084   /* If the constant fits both bounds, the result is known.  */
8085   if (ok_for_low_bound && ok_for_high_bound)
8086     return true;
8087 
8088   /* Perform some generic filtering which may allow making a decision
8089      even if the bounds are not constant.  First, negative integers
8090      never fit in unsigned types, */
8091   if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8092     return false;
8093 
8094   /* Second, narrower types always fit in wider ones.  */
8095   if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8096     return true;
8097 
8098   /* Third, unsigned integers with top bit set never fit signed types.  */
8099   if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8100     {
8101       int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8102       if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8103 	{
8104 	  /* When a tree_cst is converted to a wide-int, the precision
8105 	     is taken from the type.  However, if the precision of the
8106 	     mode underneath the type is smaller than that, it is
8107 	     possible that the value will not fit.  The test below
8108 	     fails if any bit is set between the sign bit of the
8109 	     underlying mode and the top bit of the type.  */
8110 	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8111 	    return false;
8112 	}
8113       else if (wi::neg_p (wi::to_wide (c)))
8114 	return false;
8115     }
8116 
8117   /* If we haven't been able to decide at this point, there nothing more we
8118      can check ourselves here.  Look at the base type if we have one and it
8119      has the same precision.  */
8120   if (TREE_CODE (type) == INTEGER_TYPE
8121       && TREE_TYPE (type) != 0
8122       && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8123     {
8124       type = TREE_TYPE (type);
8125       goto retry;
8126     }
8127 
8128   /* Or to fits_to_tree_p, if nothing else.  */
8129   return wi::fits_to_tree_p (wi::to_wide (c), type);
8130 }
8131 
8132 /* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
8133    bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8134    represented (assuming two's-complement arithmetic) within the bit
8135    precision of the type are returned instead.  */
8136 
8137 void
get_type_static_bounds(const_tree type,mpz_t min,mpz_t max)8138 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8139 {
8140   if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8141       && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8142     wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8143   else
8144     {
8145       if (TYPE_UNSIGNED (type))
8146 	mpz_set_ui (min, 0);
8147       else
8148 	{
8149 	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8150 	  wi::to_mpz (mn, min, SIGNED);
8151 	}
8152     }
8153 
8154   if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8155       && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8156     wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8157   else
8158     {
8159       wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8160       wi::to_mpz (mn, max, TYPE_SIGN (type));
8161     }
8162 }
8163 
8164 /* Return true if VAR is an automatic variable.  */
8165 
8166 bool
auto_var_p(const_tree var)8167 auto_var_p (const_tree var)
8168 {
8169   return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8170 	    || TREE_CODE (var) == PARM_DECL)
8171 	   && ! TREE_STATIC (var))
8172 	  || TREE_CODE (var) == RESULT_DECL);
8173 }
8174 
8175 /* Return true if VAR is an automatic variable defined in function FN.  */
8176 
8177 bool
auto_var_in_fn_p(const_tree var,const_tree fn)8178 auto_var_in_fn_p (const_tree var, const_tree fn)
8179 {
8180   return (DECL_P (var) && DECL_CONTEXT (var) == fn
8181 	  && (auto_var_p (var)
8182 	      || TREE_CODE (var) == LABEL_DECL));
8183 }
8184 
8185 /* Subprogram of following function.  Called by walk_tree.
8186 
8187    Return *TP if it is an automatic variable or parameter of the
8188    function passed in as DATA.  */
8189 
8190 static tree
find_var_from_fn(tree * tp,int * walk_subtrees,void * data)8191 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8192 {
8193   tree fn = (tree) data;
8194 
8195   if (TYPE_P (*tp))
8196     *walk_subtrees = 0;
8197 
8198   else if (DECL_P (*tp)
8199 	   && auto_var_in_fn_p (*tp, fn))
8200     return *tp;
8201 
8202   return NULL_TREE;
8203 }
8204 
8205 /* Returns true if T is, contains, or refers to a type with variable
8206    size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8207    arguments, but not the return type.  If FN is nonzero, only return
8208    true if a modifier of the type or position of FN is a variable or
8209    parameter inside FN.
8210 
8211    This concept is more general than that of C99 'variably modified types':
8212    in C99, a struct type is never variably modified because a VLA may not
8213    appear as a structure member.  However, in GNU C code like:
8214 
8215      struct S { int i[f()]; };
8216 
8217    is valid, and other languages may define similar constructs.  */
8218 
8219 bool
variably_modified_type_p(tree type,tree fn)8220 variably_modified_type_p (tree type, tree fn)
8221 {
8222   tree t;
8223 
8224 /* Test if T is either variable (if FN is zero) or an expression containing
8225    a variable in FN.  If TYPE isn't gimplified, return true also if
8226    gimplify_one_sizepos would gimplify the expression into a local
8227    variable.  */
8228 #define RETURN_TRUE_IF_VAR(T)						\
8229   do { tree _t = (T);							\
8230     if (_t != NULL_TREE							\
8231 	&& _t != error_mark_node					\
8232 	&& !CONSTANT_CLASS_P (_t)					\
8233 	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
8234 	&& (!fn								\
8235 	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
8236 		&& (TREE_CODE (_t) != VAR_DECL				\
8237 		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
8238 	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
8239       return true;  } while (0)
8240 
8241   if (type == error_mark_node)
8242     return false;
8243 
8244   /* If TYPE itself has variable size, it is variably modified.  */
8245   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8246   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8247 
8248   switch (TREE_CODE (type))
8249     {
8250     case POINTER_TYPE:
8251     case REFERENCE_TYPE:
8252     case VECTOR_TYPE:
8253       /* Ada can have pointer types refering to themselves indirectly.  */
8254       if (TREE_VISITED (type))
8255 	return false;
8256       TREE_VISITED (type) = true;
8257       if (variably_modified_type_p (TREE_TYPE (type), fn))
8258 	{
8259 	  TREE_VISITED (type) = false;
8260 	  return true;
8261 	}
8262       TREE_VISITED (type) = false;
8263       break;
8264 
8265     case FUNCTION_TYPE:
8266     case METHOD_TYPE:
8267       /* If TYPE is a function type, it is variably modified if the
8268 	 return type is variably modified.  */
8269       if (variably_modified_type_p (TREE_TYPE (type), fn))
8270 	  return true;
8271       break;
8272 
8273     case INTEGER_TYPE:
8274     case REAL_TYPE:
8275     case FIXED_POINT_TYPE:
8276     case ENUMERAL_TYPE:
8277     case BOOLEAN_TYPE:
8278       /* Scalar types are variably modified if their end points
8279 	 aren't constant.  */
8280       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8281       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8282       break;
8283 
8284     case RECORD_TYPE:
8285     case UNION_TYPE:
8286     case QUAL_UNION_TYPE:
8287       /* We can't see if any of the fields are variably-modified by the
8288 	 definition we normally use, since that would produce infinite
8289 	 recursion via pointers.  */
8290       /* This is variably modified if some field's type is.  */
8291       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8292 	if (TREE_CODE (t) == FIELD_DECL)
8293 	  {
8294 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8295 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8296 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8297 
8298 	    /* If the type is a qualified union, then the DECL_QUALIFIER
8299 	       of fields can also be an expression containing a variable.  */
8300 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
8301 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8302 
8303 	    /* If the field is a qualified union, then it's only a container
8304 	       for what's inside so we look into it.  That's necessary in LTO
8305 	       mode because the sizes of the field tested above have been set
8306 	       to PLACEHOLDER_EXPRs by free_lang_data.  */
8307 	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8308 		&& variably_modified_type_p (TREE_TYPE (t), fn))
8309 	      return true;
8310 	  }
8311       break;
8312 
8313     case ARRAY_TYPE:
8314       /* Do not call ourselves to avoid infinite recursion.  This is
8315 	 variably modified if the element type is.  */
8316       RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8317       RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8318       break;
8319 
8320     default:
8321       break;
8322     }
8323 
8324   /* The current language may have other cases to check, but in general,
8325      all other types are not variably modified.  */
8326   return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8327 
8328 #undef RETURN_TRUE_IF_VAR
8329 }
8330 
8331 /* Given a DECL or TYPE, return the scope in which it was declared, or
8332    NULL_TREE if there is no containing scope.  */
8333 
8334 tree
get_containing_scope(const_tree t)8335 get_containing_scope (const_tree t)
8336 {
8337   return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8338 }
8339 
8340 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL.  */
8341 
8342 const_tree
get_ultimate_context(const_tree decl)8343 get_ultimate_context (const_tree decl)
8344 {
8345   while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8346     {
8347       if (TREE_CODE (decl) == BLOCK)
8348 	decl = BLOCK_SUPERCONTEXT (decl);
8349       else
8350 	decl = get_containing_scope (decl);
8351     }
8352   return decl;
8353 }
8354 
8355 /* Return the innermost context enclosing DECL that is
8356    a FUNCTION_DECL, or zero if none.  */
8357 
8358 tree
decl_function_context(const_tree decl)8359 decl_function_context (const_tree decl)
8360 {
8361   tree context;
8362 
8363   if (TREE_CODE (decl) == ERROR_MARK)
8364     return 0;
8365 
8366   /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8367      where we look up the function at runtime.  Such functions always take
8368      a first argument of type 'pointer to real context'.
8369 
8370      C++ should really be fixed to use DECL_CONTEXT for the real context,
8371      and use something else for the "virtual context".  */
8372   else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8373     context
8374       = TYPE_MAIN_VARIANT
8375 	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8376   else
8377     context = DECL_CONTEXT (decl);
8378 
8379   while (context && TREE_CODE (context) != FUNCTION_DECL)
8380     {
8381       if (TREE_CODE (context) == BLOCK)
8382 	context = BLOCK_SUPERCONTEXT (context);
8383       else
8384 	context = get_containing_scope (context);
8385     }
8386 
8387   return context;
8388 }
8389 
8390 /* Return the innermost context enclosing DECL that is
8391    a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8392    TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */
8393 
8394 tree
decl_type_context(const_tree decl)8395 decl_type_context (const_tree decl)
8396 {
8397   tree context = DECL_CONTEXT (decl);
8398 
8399   while (context)
8400     switch (TREE_CODE (context))
8401       {
8402       case NAMESPACE_DECL:
8403       case TRANSLATION_UNIT_DECL:
8404 	return NULL_TREE;
8405 
8406       case RECORD_TYPE:
8407       case UNION_TYPE:
8408       case QUAL_UNION_TYPE:
8409 	return context;
8410 
8411       case TYPE_DECL:
8412       case FUNCTION_DECL:
8413 	context = DECL_CONTEXT (context);
8414 	break;
8415 
8416       case BLOCK:
8417 	context = BLOCK_SUPERCONTEXT (context);
8418 	break;
8419 
8420       default:
8421 	gcc_unreachable ();
8422       }
8423 
8424   return NULL_TREE;
8425 }
8426 
8427 /* CALL is a CALL_EXPR.  Return the declaration for the function
8428    called, or NULL_TREE if the called function cannot be
8429    determined.  */
8430 
8431 tree
get_callee_fndecl(const_tree call)8432 get_callee_fndecl (const_tree call)
8433 {
8434   tree addr;
8435 
8436   if (call == error_mark_node)
8437     return error_mark_node;
8438 
8439   /* It's invalid to call this function with anything but a
8440      CALL_EXPR.  */
8441   gcc_assert (TREE_CODE (call) == CALL_EXPR);
8442 
8443   /* The first operand to the CALL is the address of the function
8444      called.  */
8445   addr = CALL_EXPR_FN (call);
8446 
8447   /* If there is no function, return early.  */
8448   if (addr == NULL_TREE)
8449     return NULL_TREE;
8450 
8451   STRIP_NOPS (addr);
8452 
8453   /* If this is a readonly function pointer, extract its initial value.  */
8454   if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8455       && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8456       && DECL_INITIAL (addr))
8457     addr = DECL_INITIAL (addr);
8458 
8459   /* If the address is just `&f' for some function `f', then we know
8460      that `f' is being called.  */
8461   if (TREE_CODE (addr) == ADDR_EXPR
8462       && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8463     return TREE_OPERAND (addr, 0);
8464 
8465   /* We couldn't figure out what was being called.  */
8466   return NULL_TREE;
8467 }
8468 
8469 /* Return true when STMTs arguments and return value match those of FNDECL,
8470    a decl of a builtin function.  */
8471 
8472 static bool
tree_builtin_call_types_compatible_p(const_tree call,tree fndecl)8473 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8474 {
8475   gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8476 
8477   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8478     if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8479       fndecl = decl;
8480 
8481   bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8482   if (gimple_form
8483       ? !useless_type_conversion_p (TREE_TYPE (call),
8484 				    TREE_TYPE (TREE_TYPE (fndecl)))
8485       : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8486 	 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8487     return false;
8488 
8489   tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8490   unsigned nargs = call_expr_nargs (call);
8491   for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8492     {
8493       /* Variadic args follow.  */
8494       if (!targs)
8495 	return true;
8496       tree arg = CALL_EXPR_ARG (call, i);
8497       tree type = TREE_VALUE (targs);
8498       if (gimple_form
8499 	  ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8500 	  : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8501 	{
8502 	  /* For pointer arguments be more forgiving, e.g. due to
8503 	     FILE * vs. fileptr_type_node, or say char * vs. const char *
8504 	     differences etc.  */
8505 	  if (!gimple_form
8506 	      && POINTER_TYPE_P (type)
8507 	      && POINTER_TYPE_P (TREE_TYPE (arg))
8508 	      && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8509 	    continue;
8510 	  /* char/short integral arguments are promoted to int
8511 	     by several frontends if targetm.calls.promote_prototypes
8512 	     is true.  Allow such promotion too.  */
8513 	  if (INTEGRAL_TYPE_P (type)
8514 	      && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8515 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8516 	      && !TYPE_UNSIGNED (TREE_TYPE (arg))
8517 	      && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8518 	      && (gimple_form
8519 		  ? useless_type_conversion_p (integer_type_node,
8520 					       TREE_TYPE (arg))
8521 		  : tree_nop_conversion_p (integer_type_node,
8522 					   TREE_TYPE (arg))))
8523 	    continue;
8524 	  return false;
8525 	}
8526     }
8527   if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8528     return false;
8529   return true;
8530 }
8531 
8532 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8533    return the associated function code, otherwise return CFN_LAST.  */
8534 
8535 combined_fn
get_call_combined_fn(const_tree call)8536 get_call_combined_fn (const_tree call)
8537 {
8538   /* It's invalid to call this function with anything but a CALL_EXPR.  */
8539   gcc_assert (TREE_CODE (call) == CALL_EXPR);
8540 
8541   if (!CALL_EXPR_FN (call))
8542     return as_combined_fn (CALL_EXPR_IFN (call));
8543 
8544   tree fndecl = get_callee_fndecl (call);
8545   if (fndecl
8546       && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8547       && tree_builtin_call_types_compatible_p (call, fndecl))
8548     return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8549 
8550   return CFN_LAST;
8551 }
8552 
8553 /* Comparator of indices based on tree_node_counts.  */
8554 
8555 static int
tree_nodes_cmp(const void * p1,const void * p2)8556 tree_nodes_cmp (const void *p1, const void *p2)
8557 {
8558   const unsigned *n1 = (const unsigned *)p1;
8559   const unsigned *n2 = (const unsigned *)p2;
8560 
8561   return tree_node_counts[*n1] - tree_node_counts[*n2];
8562 }
8563 
8564 /* Comparator of indices based on tree_code_counts.  */
8565 
8566 static int
tree_codes_cmp(const void * p1,const void * p2)8567 tree_codes_cmp (const void *p1, const void *p2)
8568 {
8569   const unsigned *n1 = (const unsigned *)p1;
8570   const unsigned *n2 = (const unsigned *)p2;
8571 
8572   return tree_code_counts[*n1] - tree_code_counts[*n2];
8573 }
8574 
8575 #define TREE_MEM_USAGE_SPACES 40
8576 
8577 /* Print debugging information about tree nodes generated during the compile,
8578    and any language-specific information.  */
8579 
8580 void
dump_tree_statistics(void)8581 dump_tree_statistics (void)
8582 {
8583   if (GATHER_STATISTICS)
8584     {
8585       uint64_t total_nodes, total_bytes;
8586       fprintf (stderr, "\nKind                   Nodes      Bytes\n");
8587       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8588       total_nodes = total_bytes = 0;
8589 
8590       {
8591 	auto_vec<unsigned> indices (all_kinds);
8592 	for (unsigned i = 0; i < all_kinds; i++)
8593 	  indices.quick_push (i);
8594 	indices.qsort (tree_nodes_cmp);
8595 
8596 	for (unsigned i = 0; i < (int) all_kinds; i++)
8597 	  {
8598 	    unsigned j = indices[i];
8599 	    fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8600 		     tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8601 		     SIZE_AMOUNT (tree_node_sizes[j]));
8602 	    total_nodes += tree_node_counts[j];
8603 	    total_bytes += tree_node_sizes[j];
8604 	  }
8605 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8606 	fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8607 		 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8608 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8609       }
8610 
8611       {
8612 	fprintf (stderr, "Code                              Nodes\n");
8613 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8614 
8615 	auto_vec<unsigned> indices (MAX_TREE_CODES);
8616 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8617 	  indices.quick_push (i);
8618 	indices.qsort (tree_codes_cmp);
8619 
8620 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8621 	  {
8622 	    unsigned j = indices[i];
8623 	    fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8624 		     get_tree_code_name ((enum tree_code) j),
8625 		     SIZE_AMOUNT (tree_code_counts[j]));
8626 	  }
8627 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8628 	fprintf (stderr, "\n");
8629 	ssanames_print_statistics ();
8630 	fprintf (stderr, "\n");
8631 	phinodes_print_statistics ();
8632 	fprintf (stderr, "\n");
8633       }
8634     }
8635   else
8636     fprintf (stderr, "(No per-node statistics)\n");
8637 
8638   print_type_hash_statistics ();
8639   print_debug_expr_statistics ();
8640   print_value_expr_statistics ();
8641   lang_hooks.print_statistics ();
8642 }
8643 
8644 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8645 
8646 /* Generate a crc32 of the low BYTES bytes of VALUE.  */
8647 
8648 unsigned
crc32_unsigned_n(unsigned chksum,unsigned value,unsigned bytes)8649 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8650 {
8651   /* This relies on the raw feedback's top 4 bits being zero.  */
8652 #define FEEDBACK(X) ((X) * 0x04c11db7)
8653 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8654 		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8655   static const unsigned syndromes[16] =
8656     {
8657       SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8658       SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8659       SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8660       SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8661     };
8662 #undef FEEDBACK
8663 #undef SYNDROME
8664 
8665   value <<= (32 - bytes * 8);
8666   for (unsigned ix = bytes * 2; ix--; value <<= 4)
8667     {
8668       unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8669 
8670       chksum = (chksum << 4) ^ feedback;
8671     }
8672 
8673   return chksum;
8674 }
8675 
8676 /* Generate a crc32 of a string.  */
8677 
8678 unsigned
crc32_string(unsigned chksum,const char * string)8679 crc32_string (unsigned chksum, const char *string)
8680 {
8681   do
8682     chksum = crc32_byte (chksum, *string);
8683   while (*string++);
8684   return chksum;
8685 }
8686 
8687 /* P is a string that will be used in a symbol.  Mask out any characters
8688    that are not valid in that context.  */
8689 
8690 void
clean_symbol_name(char * p)8691 clean_symbol_name (char *p)
8692 {
8693   for (; *p; p++)
8694     if (! (ISALNUM (*p)
8695 #ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
8696 	    || *p == '$'
8697 #endif
8698 #ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
8699 	    || *p == '.'
8700 #endif
8701 	   ))
8702       *p = '_';
8703 }
8704 
8705 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */
8706 
8707 /* Create a unique anonymous identifier.  The identifier is still a
8708    valid assembly label.  */
8709 
8710 tree
make_anon_name()8711 make_anon_name ()
8712 {
8713   const char *fmt =
8714 #if !defined (NO_DOT_IN_LABEL)
8715     "."
8716 #elif !defined (NO_DOLLAR_IN_LABEL)
8717     "$"
8718 #else
8719     "_"
8720 #endif
8721     "_anon_%d";
8722 
8723   char buf[24];
8724   int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8725   gcc_checking_assert (len < int (sizeof (buf)));
8726 
8727   tree id = get_identifier_with_length (buf, len);
8728   IDENTIFIER_ANON_P (id) = true;
8729 
8730   return id;
8731 }
8732 
8733 /* Generate a name for a special-purpose function.
8734    The generated name may need to be unique across the whole link.
8735    Changes to this function may also require corresponding changes to
8736    xstrdup_mask_random.
8737    TYPE is some string to identify the purpose of this function to the
8738    linker or collect2; it must start with an uppercase letter,
8739    one of:
8740    I - for constructors
8741    D - for destructors
8742    N - for C++ anonymous namespaces
8743    F - for DWARF unwind frame information.  */
8744 
8745 tree
get_file_function_name(const char * type)8746 get_file_function_name (const char *type)
8747 {
8748   char *buf;
8749   const char *p;
8750   char *q;
8751 
8752   /* If we already have a name we know to be unique, just use that.  */
8753   if (first_global_object_name)
8754     p = q = ASTRDUP (first_global_object_name);
8755   /* If the target is handling the constructors/destructors, they
8756      will be local to this file and the name is only necessary for
8757      debugging purposes.
8758      We also assign sub_I and sub_D sufixes to constructors called from
8759      the global static constructors.  These are always local.  */
8760   else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8761 	   || (startswith (type, "sub_")
8762 	       && (type[4] == 'I' || type[4] == 'D')))
8763     {
8764       const char *file = main_input_filename;
8765       if (! file)
8766 	file = LOCATION_FILE (input_location);
8767       /* Just use the file's basename, because the full pathname
8768 	 might be quite long.  */
8769       p = q = ASTRDUP (lbasename (file));
8770     }
8771   else
8772     {
8773       /* Otherwise, the name must be unique across the entire link.
8774 	 We don't have anything that we know to be unique to this translation
8775 	 unit, so use what we do have and throw in some randomness.  */
8776       unsigned len;
8777       const char *name = weak_global_object_name;
8778       const char *file = main_input_filename;
8779 
8780       if (! name)
8781 	name = "";
8782       if (! file)
8783 	file = LOCATION_FILE (input_location);
8784 
8785       len = strlen (file);
8786       q = (char *) alloca (9 + 19 + len + 1);
8787       memcpy (q, file, len + 1);
8788 
8789       snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8790 		crc32_string (0, name), get_random_seed (false));
8791 
8792       p = q;
8793     }
8794 
8795   clean_symbol_name (q);
8796   buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8797 			 + strlen (type));
8798 
8799   /* Set up the name of the file-level functions we may need.
8800      Use a global object (which is already required to be unique over
8801      the program) rather than the file name (which imposes extra
8802      constraints).  */
8803   sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8804 
8805   return get_identifier (buf);
8806 }
8807 
8808 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8809 
8810 /* Complain that the tree code of NODE does not match the expected 0
8811    terminated list of trailing codes. The trailing code list can be
8812    empty, for a more vague error message.  FILE, LINE, and FUNCTION
8813    are of the caller.  */
8814 
8815 void
tree_check_failed(const_tree node,const char * file,int line,const char * function,...)8816 tree_check_failed (const_tree node, const char *file,
8817 		   int line, const char *function, ...)
8818 {
8819   va_list args;
8820   const char *buffer;
8821   unsigned length = 0;
8822   enum tree_code code;
8823 
8824   va_start (args, function);
8825   while ((code = (enum tree_code) va_arg (args, int)))
8826     length += 4 + strlen (get_tree_code_name (code));
8827   va_end (args);
8828   if (length)
8829     {
8830       char *tmp;
8831       va_start (args, function);
8832       length += strlen ("expected ");
8833       buffer = tmp = (char *) alloca (length);
8834       length = 0;
8835       while ((code = (enum tree_code) va_arg (args, int)))
8836 	{
8837 	  const char *prefix = length ? " or " : "expected ";
8838 
8839 	  strcpy (tmp + length, prefix);
8840 	  length += strlen (prefix);
8841 	  strcpy (tmp + length, get_tree_code_name (code));
8842 	  length += strlen (get_tree_code_name (code));
8843 	}
8844       va_end (args);
8845     }
8846   else
8847     buffer = "unexpected node";
8848 
8849   internal_error ("tree check: %s, have %s in %s, at %s:%d",
8850 		  buffer, get_tree_code_name (TREE_CODE (node)),
8851 		  function, trim_filename (file), line);
8852 }
8853 
8854 /* Complain that the tree code of NODE does match the expected 0
8855    terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8856    the caller.  */
8857 
8858 void
tree_not_check_failed(const_tree node,const char * file,int line,const char * function,...)8859 tree_not_check_failed (const_tree node, const char *file,
8860 		       int line, const char *function, ...)
8861 {
8862   va_list args;
8863   char *buffer;
8864   unsigned length = 0;
8865   enum tree_code code;
8866 
8867   va_start (args, function);
8868   while ((code = (enum tree_code) va_arg (args, int)))
8869     length += 4 + strlen (get_tree_code_name (code));
8870   va_end (args);
8871   va_start (args, function);
8872   buffer = (char *) alloca (length);
8873   length = 0;
8874   while ((code = (enum tree_code) va_arg (args, int)))
8875     {
8876       if (length)
8877 	{
8878 	  strcpy (buffer + length, " or ");
8879 	  length += 4;
8880 	}
8881       strcpy (buffer + length, get_tree_code_name (code));
8882       length += strlen (get_tree_code_name (code));
8883     }
8884   va_end (args);
8885 
8886   internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8887 		  buffer, get_tree_code_name (TREE_CODE (node)),
8888 		  function, trim_filename (file), line);
8889 }
8890 
8891 /* Similar to tree_check_failed, except that we check for a class of tree
8892    code, given in CL.  */
8893 
8894 void
tree_class_check_failed(const_tree node,const enum tree_code_class cl,const char * file,int line,const char * function)8895 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8896 			 const char *file, int line, const char *function)
8897 {
8898   internal_error
8899     ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8900      TREE_CODE_CLASS_STRING (cl),
8901      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8902      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8903 }
8904 
8905 /* Similar to tree_check_failed, except that instead of specifying a
8906    dozen codes, use the knowledge that they're all sequential.  */
8907 
8908 void
tree_range_check_failed(const_tree node,const char * file,int line,const char * function,enum tree_code c1,enum tree_code c2)8909 tree_range_check_failed (const_tree node, const char *file, int line,
8910 			 const char *function, enum tree_code c1,
8911 			 enum tree_code c2)
8912 {
8913   char *buffer;
8914   unsigned length = 0;
8915   unsigned int c;
8916 
8917   for (c = c1; c <= c2; ++c)
8918     length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8919 
8920   length += strlen ("expected ");
8921   buffer = (char *) alloca (length);
8922   length = 0;
8923 
8924   for (c = c1; c <= c2; ++c)
8925     {
8926       const char *prefix = length ? " or " : "expected ";
8927 
8928       strcpy (buffer + length, prefix);
8929       length += strlen (prefix);
8930       strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8931       length += strlen (get_tree_code_name ((enum tree_code) c));
8932     }
8933 
8934   internal_error ("tree check: %s, have %s in %s, at %s:%d",
8935 		  buffer, get_tree_code_name (TREE_CODE (node)),
8936 		  function, trim_filename (file), line);
8937 }
8938 
8939 
8940 /* Similar to tree_check_failed, except that we check that a tree does
8941    not have the specified code, given in CL.  */
8942 
8943 void
tree_not_class_check_failed(const_tree node,const enum tree_code_class cl,const char * file,int line,const char * function)8944 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8945 			     const char *file, int line, const char *function)
8946 {
8947   internal_error
8948     ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8949      TREE_CODE_CLASS_STRING (cl),
8950      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8951      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8952 }
8953 
8954 
8955 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */
8956 
8957 void
omp_clause_check_failed(const_tree node,const char * file,int line,const char * function,enum omp_clause_code code)8958 omp_clause_check_failed (const_tree node, const char *file, int line,
8959                          const char *function, enum omp_clause_code code)
8960 {
8961   internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8962 		  "in %s, at %s:%d",
8963 		  omp_clause_code_name[code],
8964 		  get_tree_code_name (TREE_CODE (node)),
8965 		  function, trim_filename (file), line);
8966 }
8967 
8968 
8969 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */
8970 
8971 void
omp_clause_range_check_failed(const_tree node,const char * file,int line,const char * function,enum omp_clause_code c1,enum omp_clause_code c2)8972 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8973 			       const char *function, enum omp_clause_code c1,
8974 			       enum omp_clause_code c2)
8975 {
8976   char *buffer;
8977   unsigned length = 0;
8978   unsigned int c;
8979 
8980   for (c = c1; c <= c2; ++c)
8981     length += 4 + strlen (omp_clause_code_name[c]);
8982 
8983   length += strlen ("expected ");
8984   buffer = (char *) alloca (length);
8985   length = 0;
8986 
8987   for (c = c1; c <= c2; ++c)
8988     {
8989       const char *prefix = length ? " or " : "expected ";
8990 
8991       strcpy (buffer + length, prefix);
8992       length += strlen (prefix);
8993       strcpy (buffer + length, omp_clause_code_name[c]);
8994       length += strlen (omp_clause_code_name[c]);
8995     }
8996 
8997   internal_error ("tree check: %s, have %s in %s, at %s:%d",
8998 		  buffer, omp_clause_code_name[TREE_CODE (node)],
8999 		  function, trim_filename (file), line);
9000 }
9001 
9002 
9003 #undef DEFTREESTRUCT
9004 #define DEFTREESTRUCT(VAL, NAME) NAME,
9005 
9006 static const char *ts_enum_names[] = {
9007 #include "treestruct.def"
9008 };
9009 #undef DEFTREESTRUCT
9010 
9011 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9012 
9013 /* Similar to tree_class_check_failed, except that we check for
9014    whether CODE contains the tree structure identified by EN.  */
9015 
9016 void
tree_contains_struct_check_failed(const_tree node,const enum tree_node_structure_enum en,const char * file,int line,const char * function)9017 tree_contains_struct_check_failed (const_tree node,
9018 				   const enum tree_node_structure_enum en,
9019 				   const char *file, int line,
9020 				   const char *function)
9021 {
9022   internal_error
9023     ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9024      TS_ENUM_NAME (en),
9025      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9026 }
9027 
9028 
9029 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9030    (dynamically sized) vector.  */
9031 
9032 void
tree_int_cst_elt_check_failed(int idx,int len,const char * file,int line,const char * function)9033 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9034 			       const char *function)
9035 {
9036   internal_error
9037     ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9038      "at %s:%d",
9039      idx + 1, len, function, trim_filename (file), line);
9040 }
9041 
9042 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9043    (dynamically sized) vector.  */
9044 
9045 void
tree_vec_elt_check_failed(int idx,int len,const char * file,int line,const char * function)9046 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9047 			   const char *function)
9048 {
9049   internal_error
9050     ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9051      idx + 1, len, function, trim_filename (file), line);
9052 }
9053 
9054 /* Similar to above, except that the check is for the bounds of the operand
9055    vector of an expression node EXP.  */
9056 
9057 void
tree_operand_check_failed(int idx,const_tree exp,const char * file,int line,const char * function)9058 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9059 			   int line, const char *function)
9060 {
9061   enum tree_code code = TREE_CODE (exp);
9062   internal_error
9063     ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9064      idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9065      function, trim_filename (file), line);
9066 }
9067 
9068 /* Similar to above, except that the check is for the number of
9069    operands of an OMP_CLAUSE node.  */
9070 
9071 void
omp_clause_operand_check_failed(int idx,const_tree t,const char * file,int line,const char * function)9072 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9073 			         int line, const char *function)
9074 {
9075   internal_error
9076     ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9077      "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9078      omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9079      trim_filename (file), line);
9080 }
9081 #endif /* ENABLE_TREE_CHECKING */
9082 
9083 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9084    and mapped to the machine mode MODE.  Initialize its fields and build
9085    the information necessary for debugging output.  */
9086 
9087 static tree
make_vector_type(tree innertype,poly_int64 nunits,machine_mode mode)9088 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9089 {
9090   tree t;
9091   tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9092 
9093   t = make_node (VECTOR_TYPE);
9094   TREE_TYPE (t) = mv_innertype;
9095   SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9096   SET_TYPE_MODE (t, mode);
9097 
9098   if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9099     SET_TYPE_STRUCTURAL_EQUALITY (t);
9100   else if ((TYPE_CANONICAL (mv_innertype) != innertype
9101 	    || mode != VOIDmode)
9102 	   && !VECTOR_BOOLEAN_TYPE_P (t))
9103     TYPE_CANONICAL (t)
9104       = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9105 
9106   layout_type (t);
9107 
9108   hashval_t hash = type_hash_canon_hash (t);
9109   t = type_hash_canon (hash, t);
9110 
9111   /* We have built a main variant, based on the main variant of the
9112      inner type. Use it to build the variant we return.  */
9113   if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9114       && TREE_TYPE (t) != innertype)
9115     return build_type_attribute_qual_variant (t,
9116 					      TYPE_ATTRIBUTES (innertype),
9117 					      TYPE_QUALS (innertype));
9118 
9119   return t;
9120 }
9121 
9122 static tree
make_or_reuse_type(unsigned size,int unsignedp)9123 make_or_reuse_type (unsigned size, int unsignedp)
9124 {
9125   int i;
9126 
9127   if (size == INT_TYPE_SIZE)
9128     return unsignedp ? unsigned_type_node : integer_type_node;
9129   if (size == CHAR_TYPE_SIZE)
9130     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9131   if (size == SHORT_TYPE_SIZE)
9132     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9133   if (size == LONG_TYPE_SIZE)
9134     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9135   if (size == LONG_LONG_TYPE_SIZE)
9136     return (unsignedp ? long_long_unsigned_type_node
9137             : long_long_integer_type_node);
9138 
9139   for (i = 0; i < NUM_INT_N_ENTS; i ++)
9140     if (size == int_n_data[i].bitsize
9141 	&& int_n_enabled_p[i])
9142       return (unsignedp ? int_n_trees[i].unsigned_type
9143 	      : int_n_trees[i].signed_type);
9144 
9145   if (unsignedp)
9146     return make_unsigned_type (size);
9147   else
9148     return make_signed_type (size);
9149 }
9150 
9151 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  */
9152 
9153 static tree
make_or_reuse_fract_type(unsigned size,int unsignedp,int satp)9154 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9155 {
9156   if (satp)
9157     {
9158       if (size == SHORT_FRACT_TYPE_SIZE)
9159 	return unsignedp ? sat_unsigned_short_fract_type_node
9160 			 : sat_short_fract_type_node;
9161       if (size == FRACT_TYPE_SIZE)
9162 	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9163       if (size == LONG_FRACT_TYPE_SIZE)
9164 	return unsignedp ? sat_unsigned_long_fract_type_node
9165 			 : sat_long_fract_type_node;
9166       if (size == LONG_LONG_FRACT_TYPE_SIZE)
9167 	return unsignedp ? sat_unsigned_long_long_fract_type_node
9168 			 : sat_long_long_fract_type_node;
9169     }
9170   else
9171     {
9172       if (size == SHORT_FRACT_TYPE_SIZE)
9173 	return unsignedp ? unsigned_short_fract_type_node
9174 			 : short_fract_type_node;
9175       if (size == FRACT_TYPE_SIZE)
9176 	return unsignedp ? unsigned_fract_type_node : fract_type_node;
9177       if (size == LONG_FRACT_TYPE_SIZE)
9178 	return unsignedp ? unsigned_long_fract_type_node
9179 			 : long_fract_type_node;
9180       if (size == LONG_LONG_FRACT_TYPE_SIZE)
9181 	return unsignedp ? unsigned_long_long_fract_type_node
9182 			 : long_long_fract_type_node;
9183     }
9184 
9185   return make_fract_type (size, unsignedp, satp);
9186 }
9187 
9188 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  */
9189 
9190 static tree
make_or_reuse_accum_type(unsigned size,int unsignedp,int satp)9191 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9192 {
9193   if (satp)
9194     {
9195       if (size == SHORT_ACCUM_TYPE_SIZE)
9196 	return unsignedp ? sat_unsigned_short_accum_type_node
9197 			 : sat_short_accum_type_node;
9198       if (size == ACCUM_TYPE_SIZE)
9199 	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9200       if (size == LONG_ACCUM_TYPE_SIZE)
9201 	return unsignedp ? sat_unsigned_long_accum_type_node
9202 			 : sat_long_accum_type_node;
9203       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9204 	return unsignedp ? sat_unsigned_long_long_accum_type_node
9205 			 : sat_long_long_accum_type_node;
9206     }
9207   else
9208     {
9209       if (size == SHORT_ACCUM_TYPE_SIZE)
9210 	return unsignedp ? unsigned_short_accum_type_node
9211 			 : short_accum_type_node;
9212       if (size == ACCUM_TYPE_SIZE)
9213 	return unsignedp ? unsigned_accum_type_node : accum_type_node;
9214       if (size == LONG_ACCUM_TYPE_SIZE)
9215 	return unsignedp ? unsigned_long_accum_type_node
9216 			 : long_accum_type_node;
9217       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9218 	return unsignedp ? unsigned_long_long_accum_type_node
9219 			 : long_long_accum_type_node;
9220     }
9221 
9222   return make_accum_type (size, unsignedp, satp);
9223 }
9224 
9225 
9226 /* Create an atomic variant node for TYPE.  This routine is called
9227    during initialization of data types to create the 5 basic atomic
9228    types. The generic build_variant_type function requires these to
9229    already be set up in order to function properly, so cannot be
9230    called from there.  If ALIGN is non-zero, then ensure alignment is
9231    overridden to this value.  */
9232 
9233 static tree
build_atomic_base(tree type,unsigned int align)9234 build_atomic_base (tree type, unsigned int align)
9235 {
9236   tree t;
9237 
9238   /* Make sure its not already registered.  */
9239   if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9240     return t;
9241 
9242   t = build_variant_type_copy (type);
9243   set_type_quals (t, TYPE_QUAL_ATOMIC);
9244 
9245   if (align)
9246     SET_TYPE_ALIGN (t, align);
9247 
9248   return t;
9249 }
9250 
9251 /* Information about the _FloatN and _FloatNx types.  This must be in
9252    the same order as the corresponding TI_* enum values.  */
9253 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9254   {
9255     { 16, false },
9256     { 32, false },
9257     { 64, false },
9258     { 128, false },
9259     { 32, true },
9260     { 64, true },
9261     { 128, true },
9262   };
9263 
9264 
9265 /* Create nodes for all integer types (and error_mark_node) using the sizes
9266    of C datatypes.  SIGNED_CHAR specifies whether char is signed.  */
9267 
9268 void
build_common_tree_nodes(bool signed_char)9269 build_common_tree_nodes (bool signed_char)
9270 {
9271   int i;
9272 
9273   error_mark_node = make_node (ERROR_MARK);
9274   TREE_TYPE (error_mark_node) = error_mark_node;
9275 
9276   initialize_sizetypes ();
9277 
9278   /* Define both `signed char' and `unsigned char'.  */
9279   signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9280   TYPE_STRING_FLAG (signed_char_type_node) = 1;
9281   unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9282   TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9283 
9284   /* Define `char', which is like either `signed char' or `unsigned char'
9285      but not the same as either.  */
9286   char_type_node
9287     = (signed_char
9288        ? make_signed_type (CHAR_TYPE_SIZE)
9289        : make_unsigned_type (CHAR_TYPE_SIZE));
9290   TYPE_STRING_FLAG (char_type_node) = 1;
9291 
9292   short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9293   short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9294   integer_type_node = make_signed_type (INT_TYPE_SIZE);
9295   unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9296   long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9297   long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9298   long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9299   long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9300 
9301   for (i = 0; i < NUM_INT_N_ENTS; i ++)
9302     {
9303       int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9304       int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9305 
9306       if (int_n_enabled_p[i])
9307 	{
9308 	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9309 	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9310 	}
9311     }
9312 
9313   /* Define a boolean type.  This type only represents boolean values but
9314      may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
9315   boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9316   TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9317   TYPE_PRECISION (boolean_type_node) = 1;
9318   TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9319 
9320   /* Define what type to use for size_t.  */
9321   if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9322     size_type_node = unsigned_type_node;
9323   else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9324     size_type_node = long_unsigned_type_node;
9325   else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9326     size_type_node = long_long_unsigned_type_node;
9327   else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9328     size_type_node = short_unsigned_type_node;
9329   else
9330     {
9331       int i;
9332 
9333       size_type_node = NULL_TREE;
9334       for (i = 0; i < NUM_INT_N_ENTS; i++)
9335 	if (int_n_enabled_p[i])
9336 	  {
9337 	    char name[50], altname[50];
9338 	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9339 	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9340 
9341 	    if (strcmp (name, SIZE_TYPE) == 0
9342 		|| strcmp (altname, SIZE_TYPE) == 0)
9343 	      {
9344 		size_type_node = int_n_trees[i].unsigned_type;
9345 	      }
9346 	  }
9347       if (size_type_node == NULL_TREE)
9348 	gcc_unreachable ();
9349     }
9350 
9351   /* Define what type to use for ptrdiff_t.  */
9352   if (strcmp (PTRDIFF_TYPE, "int") == 0)
9353     ptrdiff_type_node = integer_type_node;
9354   else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9355     ptrdiff_type_node = long_integer_type_node;
9356   else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9357     ptrdiff_type_node = long_long_integer_type_node;
9358   else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9359     ptrdiff_type_node = short_integer_type_node;
9360   else
9361     {
9362       ptrdiff_type_node = NULL_TREE;
9363       for (int i = 0; i < NUM_INT_N_ENTS; i++)
9364 	if (int_n_enabled_p[i])
9365 	  {
9366 	    char name[50], altname[50];
9367 	    sprintf (name, "__int%d", int_n_data[i].bitsize);
9368 	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9369 
9370 	    if (strcmp (name, PTRDIFF_TYPE) == 0
9371 		|| strcmp (altname, PTRDIFF_TYPE) == 0)
9372 	      ptrdiff_type_node = int_n_trees[i].signed_type;
9373 	  }
9374       if (ptrdiff_type_node == NULL_TREE)
9375 	gcc_unreachable ();
9376     }
9377 
9378   /* Fill in the rest of the sized types.  Reuse existing type nodes
9379      when possible.  */
9380   intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9381   intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9382   intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9383   intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9384   intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9385 
9386   unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9387   unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9388   unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9389   unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9390   unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9391 
9392   /* Don't call build_qualified type for atomics.  That routine does
9393      special processing for atomics, and until they are initialized
9394      it's better not to make that call.
9395 
9396      Check to see if there is a target override for atomic types.  */
9397 
9398   atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9399 					targetm.atomic_align_for_mode (QImode));
9400   atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9401 					targetm.atomic_align_for_mode (HImode));
9402   atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9403 					targetm.atomic_align_for_mode (SImode));
9404   atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9405 					targetm.atomic_align_for_mode (DImode));
9406   atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9407 					targetm.atomic_align_for_mode (TImode));
9408 
9409   access_public_node = get_identifier ("public");
9410   access_protected_node = get_identifier ("protected");
9411   access_private_node = get_identifier ("private");
9412 
9413   /* Define these next since types below may used them.  */
9414   integer_zero_node = build_int_cst (integer_type_node, 0);
9415   integer_one_node = build_int_cst (integer_type_node, 1);
9416   integer_three_node = build_int_cst (integer_type_node, 3);
9417   integer_minus_one_node = build_int_cst (integer_type_node, -1);
9418 
9419   size_zero_node = size_int (0);
9420   size_one_node = size_int (1);
9421   bitsize_zero_node = bitsize_int (0);
9422   bitsize_one_node = bitsize_int (1);
9423   bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9424 
9425   boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9426   boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9427 
9428   void_type_node = make_node (VOID_TYPE);
9429   layout_type (void_type_node);
9430 
9431   /* We are not going to have real types in C with less than byte alignment,
9432      so we might as well not have any types that claim to have it.  */
9433   SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9434   TYPE_USER_ALIGN (void_type_node) = 0;
9435 
9436   void_node = make_node (VOID_CST);
9437   TREE_TYPE (void_node) = void_type_node;
9438 
9439   null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9440   layout_type (TREE_TYPE (null_pointer_node));
9441 
9442   ptr_type_node = build_pointer_type (void_type_node);
9443   const_ptr_type_node
9444     = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9445   for (unsigned i = 0;
9446        i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9447        ++i)
9448     builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9449 
9450   pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9451 
9452   float_type_node = make_node (REAL_TYPE);
9453   TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9454   layout_type (float_type_node);
9455 
9456   double_type_node = make_node (REAL_TYPE);
9457   TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9458   layout_type (double_type_node);
9459 
9460   long_double_type_node = make_node (REAL_TYPE);
9461   TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9462   layout_type (long_double_type_node);
9463 
9464   for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9465     {
9466       int n = floatn_nx_types[i].n;
9467       bool extended = floatn_nx_types[i].extended;
9468       scalar_float_mode mode;
9469       if (!targetm.floatn_mode (n, extended).exists (&mode))
9470 	continue;
9471       int precision = GET_MODE_PRECISION (mode);
9472       /* Work around the rs6000 KFmode having precision 113 not
9473 	 128.  */
9474       const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9475       gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9476       int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9477       if (!extended)
9478 	gcc_assert (min_precision == n);
9479       if (precision < min_precision)
9480 	precision = min_precision;
9481       FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9482       TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9483       layout_type (FLOATN_NX_TYPE_NODE (i));
9484       SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9485     }
9486 
9487   float_ptr_type_node = build_pointer_type (float_type_node);
9488   double_ptr_type_node = build_pointer_type (double_type_node);
9489   long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9490   integer_ptr_type_node = build_pointer_type (integer_type_node);
9491 
9492   /* Fixed size integer types.  */
9493   uint16_type_node = make_or_reuse_type (16, 1);
9494   uint32_type_node = make_or_reuse_type (32, 1);
9495   uint64_type_node = make_or_reuse_type (64, 1);
9496   if (targetm.scalar_mode_supported_p (TImode))
9497     uint128_type_node = make_or_reuse_type (128, 1);
9498 
9499   /* Decimal float types. */
9500   if (targetm.decimal_float_supported_p ())
9501     {
9502       dfloat32_type_node = make_node (REAL_TYPE);
9503       TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9504       SET_TYPE_MODE (dfloat32_type_node, SDmode);
9505       layout_type (dfloat32_type_node);
9506 
9507       dfloat64_type_node = make_node (REAL_TYPE);
9508       TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9509       SET_TYPE_MODE (dfloat64_type_node, DDmode);
9510       layout_type (dfloat64_type_node);
9511 
9512       dfloat128_type_node = make_node (REAL_TYPE);
9513       TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9514       SET_TYPE_MODE (dfloat128_type_node, TDmode);
9515       layout_type (dfloat128_type_node);
9516     }
9517 
9518   complex_integer_type_node = build_complex_type (integer_type_node, true);
9519   complex_float_type_node = build_complex_type (float_type_node, true);
9520   complex_double_type_node = build_complex_type (double_type_node, true);
9521   complex_long_double_type_node = build_complex_type (long_double_type_node,
9522 						      true);
9523 
9524   for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9525     {
9526       if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9527 	COMPLEX_FLOATN_NX_TYPE_NODE (i)
9528 	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9529     }
9530 
9531 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
9532 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9533   sat_ ## KIND ## _type_node = \
9534     make_sat_signed_ ## KIND ## _type (SIZE); \
9535   sat_unsigned_ ## KIND ## _type_node = \
9536     make_sat_unsigned_ ## KIND ## _type (SIZE); \
9537   KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9538   unsigned_ ## KIND ## _type_node = \
9539     make_unsigned_ ## KIND ## _type (SIZE);
9540 
9541 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9542   sat_ ## WIDTH ## KIND ## _type_node = \
9543     make_sat_signed_ ## KIND ## _type (SIZE); \
9544   sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9545     make_sat_unsigned_ ## KIND ## _type (SIZE); \
9546   WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9547   unsigned_ ## WIDTH ## KIND ## _type_node = \
9548     make_unsigned_ ## KIND ## _type (SIZE);
9549 
9550 /* Make fixed-point type nodes based on four different widths.  */
9551 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9552   MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9553   MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9554   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9555   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9556 
9557 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
9558 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9559   NAME ## _type_node = \
9560     make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9561   u ## NAME ## _type_node = \
9562     make_or_reuse_unsigned_ ## KIND ## _type \
9563       (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9564   sat_ ## NAME ## _type_node = \
9565     make_or_reuse_sat_signed_ ## KIND ## _type \
9566       (GET_MODE_BITSIZE (MODE ## mode)); \
9567   sat_u ## NAME ## _type_node = \
9568     make_or_reuse_sat_unsigned_ ## KIND ## _type \
9569       (GET_MODE_BITSIZE (U ## MODE ## mode));
9570 
9571   /* Fixed-point type and mode nodes.  */
9572   MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9573   MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9574   MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9575   MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9576   MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9577   MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9578   MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9579   MAKE_FIXED_MODE_NODE (accum, ha, HA)
9580   MAKE_FIXED_MODE_NODE (accum, sa, SA)
9581   MAKE_FIXED_MODE_NODE (accum, da, DA)
9582   MAKE_FIXED_MODE_NODE (accum, ta, TA)
9583 
9584   {
9585     tree t = targetm.build_builtin_va_list ();
9586 
9587     /* Many back-ends define record types without setting TYPE_NAME.
9588        If we copied the record type here, we'd keep the original
9589        record type without a name.  This breaks name mangling.  So,
9590        don't copy record types and let c_common_nodes_and_builtins()
9591        declare the type to be __builtin_va_list.  */
9592     if (TREE_CODE (t) != RECORD_TYPE)
9593       t = build_variant_type_copy (t);
9594 
9595     va_list_type_node = t;
9596   }
9597 
9598   /* SCEV analyzer global shared trees.  */
9599   chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9600   TREE_TYPE (chrec_dont_know) = void_type_node;
9601   chrec_known = make_node (SCEV_KNOWN);
9602   TREE_TYPE (chrec_known) = void_type_node;
9603 }
9604 
9605 /* Modify DECL for given flags.
9606    TM_PURE attribute is set only on types, so the function will modify
9607    DECL's type when ECF_TM_PURE is used.  */
9608 
9609 void
set_call_expr_flags(tree decl,int flags)9610 set_call_expr_flags (tree decl, int flags)
9611 {
9612   if (flags & ECF_NOTHROW)
9613     TREE_NOTHROW (decl) = 1;
9614   if (flags & ECF_CONST)
9615     TREE_READONLY (decl) = 1;
9616   if (flags & ECF_PURE)
9617     DECL_PURE_P (decl) = 1;
9618   if (flags & ECF_LOOPING_CONST_OR_PURE)
9619     DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9620   if (flags & ECF_NOVOPS)
9621     DECL_IS_NOVOPS (decl) = 1;
9622   if (flags & ECF_NORETURN)
9623     TREE_THIS_VOLATILE (decl) = 1;
9624   if (flags & ECF_MALLOC)
9625     DECL_IS_MALLOC (decl) = 1;
9626   if (flags & ECF_RETURNS_TWICE)
9627     DECL_IS_RETURNS_TWICE (decl) = 1;
9628   if (flags & ECF_LEAF)
9629     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9630 					NULL, DECL_ATTRIBUTES (decl));
9631   if (flags & ECF_COLD)
9632     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9633 					NULL, DECL_ATTRIBUTES (decl));
9634   if (flags & ECF_RET1)
9635     DECL_ATTRIBUTES (decl)
9636       = tree_cons (get_identifier ("fn spec"),
9637 		   build_tree_list (NULL_TREE, build_string (2, "1 ")),
9638 		   DECL_ATTRIBUTES (decl));
9639   if ((flags & ECF_TM_PURE) && flag_tm)
9640     apply_tm_attr (decl, get_identifier ("transaction_pure"));
9641   /* Looping const or pure is implied by noreturn.
9642      There is currently no way to declare looping const or looping pure alone.  */
9643   gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9644 	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9645 }
9646 
9647 
9648 /* A subroutine of build_common_builtin_nodes.  Define a builtin function.  */
9649 
9650 static void
local_define_builtin(const char * name,tree type,enum built_in_function code,const char * library_name,int ecf_flags)9651 local_define_builtin (const char *name, tree type, enum built_in_function code,
9652                       const char *library_name, int ecf_flags)
9653 {
9654   tree decl;
9655 
9656   decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9657 			       library_name, NULL_TREE);
9658   set_call_expr_flags (decl, ecf_flags);
9659 
9660   set_builtin_decl (code, decl, true);
9661 }
9662 
9663 /* Call this function after instantiating all builtins that the language
9664    front end cares about.  This will build the rest of the builtins
9665    and internal functions that are relied upon by the tree optimizers and
9666    the middle-end.  */
9667 
9668 void
build_common_builtin_nodes(void)9669 build_common_builtin_nodes (void)
9670 {
9671   tree tmp, ftype;
9672   int ecf_flags;
9673 
9674   if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9675     {
9676       ftype = build_function_type_list (void_type_node,
9677 					ptr_type_node,
9678 					ptr_type_node,
9679 					integer_type_node,
9680 					NULL_TREE);
9681       local_define_builtin ("__builtin_clear_padding", ftype,
9682 			    BUILT_IN_CLEAR_PADDING,
9683 			    "__builtin_clear_padding",
9684 			    ECF_LEAF | ECF_NOTHROW);
9685     }
9686 
9687   if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9688       || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9689     {
9690       ftype = build_function_type (void_type_node, void_list_node);
9691       if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9692 	local_define_builtin ("__builtin_unreachable", ftype,
9693 			      BUILT_IN_UNREACHABLE,
9694 			      "__builtin_unreachable",
9695 			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9696 			      | ECF_CONST | ECF_COLD);
9697       if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9698 	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9699 			      "abort",
9700 			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9701     }
9702 
9703   if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9704       || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9705     {
9706       ftype = build_function_type_list (ptr_type_node,
9707 					ptr_type_node, const_ptr_type_node,
9708 					size_type_node, NULL_TREE);
9709 
9710       if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9711 	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9712 			      "memcpy", ECF_NOTHROW | ECF_LEAF);
9713       if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9714 	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9715 			      "memmove", ECF_NOTHROW | ECF_LEAF);
9716     }
9717 
9718   if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9719     {
9720       ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9721 					const_ptr_type_node, size_type_node,
9722 					NULL_TREE);
9723       local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9724 			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9725     }
9726 
9727   if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9728     {
9729       ftype = build_function_type_list (ptr_type_node,
9730 					ptr_type_node, integer_type_node,
9731 					size_type_node, NULL_TREE);
9732       local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9733 			    "memset", ECF_NOTHROW | ECF_LEAF);
9734     }
9735 
9736   /* If we're checking the stack, `alloca' can throw.  */
9737   const int alloca_flags
9738     = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9739 
9740   if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9741     {
9742       ftype = build_function_type_list (ptr_type_node,
9743 					size_type_node, NULL_TREE);
9744       local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9745 			    "alloca", alloca_flags);
9746     }
9747 
9748   ftype = build_function_type_list (ptr_type_node, size_type_node,
9749 				    size_type_node, NULL_TREE);
9750   local_define_builtin ("__builtin_alloca_with_align", ftype,
9751 			BUILT_IN_ALLOCA_WITH_ALIGN,
9752 			"__builtin_alloca_with_align",
9753 			alloca_flags);
9754 
9755   ftype = build_function_type_list (ptr_type_node, size_type_node,
9756 				    size_type_node, size_type_node, NULL_TREE);
9757   local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9758 			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9759 			"__builtin_alloca_with_align_and_max",
9760 			alloca_flags);
9761 
9762   ftype = build_function_type_list (void_type_node,
9763 				    ptr_type_node, ptr_type_node,
9764 				    ptr_type_node, NULL_TREE);
9765   local_define_builtin ("__builtin_init_trampoline", ftype,
9766 			BUILT_IN_INIT_TRAMPOLINE,
9767 			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9768   local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9769 			BUILT_IN_INIT_HEAP_TRAMPOLINE,
9770 			"__builtin_init_heap_trampoline",
9771 			ECF_NOTHROW | ECF_LEAF);
9772   local_define_builtin ("__builtin_init_descriptor", ftype,
9773 			BUILT_IN_INIT_DESCRIPTOR,
9774 			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9775 
9776   ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9777   local_define_builtin ("__builtin_adjust_trampoline", ftype,
9778 			BUILT_IN_ADJUST_TRAMPOLINE,
9779 			"__builtin_adjust_trampoline",
9780 			ECF_CONST | ECF_NOTHROW);
9781   local_define_builtin ("__builtin_adjust_descriptor", ftype,
9782 			BUILT_IN_ADJUST_DESCRIPTOR,
9783 			"__builtin_adjust_descriptor",
9784 			ECF_CONST | ECF_NOTHROW);
9785 
9786   ftype = build_function_type_list (void_type_node,
9787 				    ptr_type_node, ptr_type_node, NULL_TREE);
9788   if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9789     local_define_builtin ("__builtin___clear_cache", ftype,
9790 			  BUILT_IN_CLEAR_CACHE,
9791 			  "__clear_cache",
9792 			  ECF_NOTHROW);
9793 
9794   local_define_builtin ("__builtin_nonlocal_goto", ftype,
9795 			BUILT_IN_NONLOCAL_GOTO,
9796 			"__builtin_nonlocal_goto",
9797 			ECF_NORETURN | ECF_NOTHROW);
9798 
9799   ftype = build_function_type_list (void_type_node,
9800 				    ptr_type_node, ptr_type_node, NULL_TREE);
9801   local_define_builtin ("__builtin_setjmp_setup", ftype,
9802 			BUILT_IN_SETJMP_SETUP,
9803 			"__builtin_setjmp_setup", ECF_NOTHROW);
9804 
9805   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9806   local_define_builtin ("__builtin_setjmp_receiver", ftype,
9807 			BUILT_IN_SETJMP_RECEIVER,
9808 			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9809 
9810   ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9811   local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9812 			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9813 
9814   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9815   local_define_builtin ("__builtin_stack_restore", ftype,
9816 			BUILT_IN_STACK_RESTORE,
9817 			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9818 
9819   ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9820 				    const_ptr_type_node, size_type_node,
9821 				    NULL_TREE);
9822   local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9823 			"__builtin_memcmp_eq",
9824 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9825 
9826   local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9827 			"__builtin_strncmp_eq",
9828 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9829 
9830   local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9831 			"__builtin_strcmp_eq",
9832 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9833 
9834   /* If there's a possibility that we might use the ARM EABI, build the
9835     alternate __cxa_end_cleanup node used to resume from C++.  */
9836   if (targetm.arm_eabi_unwinder)
9837     {
9838       ftype = build_function_type_list (void_type_node, NULL_TREE);
9839       local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9840 			    BUILT_IN_CXA_END_CLEANUP,
9841 			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9842     }
9843 
9844   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9845   local_define_builtin ("__builtin_unwind_resume", ftype,
9846 			BUILT_IN_UNWIND_RESUME,
9847 			((targetm_common.except_unwind_info (&global_options)
9848 			  == UI_SJLJ)
9849 			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9850 			ECF_NORETURN);
9851 
9852   if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9853     {
9854       ftype = build_function_type_list (ptr_type_node, integer_type_node,
9855 					NULL_TREE);
9856       local_define_builtin ("__builtin_return_address", ftype,
9857 			    BUILT_IN_RETURN_ADDRESS,
9858 			    "__builtin_return_address",
9859 			    ECF_NOTHROW);
9860     }
9861 
9862   if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9863       || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9864     {
9865       ftype = build_function_type_list (void_type_node, ptr_type_node,
9866 					ptr_type_node, NULL_TREE);
9867       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9868 	local_define_builtin ("__cyg_profile_func_enter", ftype,
9869 			      BUILT_IN_PROFILE_FUNC_ENTER,
9870 			      "__cyg_profile_func_enter", 0);
9871       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9872 	local_define_builtin ("__cyg_profile_func_exit", ftype,
9873 			      BUILT_IN_PROFILE_FUNC_EXIT,
9874 			      "__cyg_profile_func_exit", 0);
9875     }
9876 
9877   /* The exception object and filter values from the runtime.  The argument
9878      must be zero before exception lowering, i.e. from the front end.  After
9879      exception lowering, it will be the region number for the exception
9880      landing pad.  These functions are PURE instead of CONST to prevent
9881      them from being hoisted past the exception edge that will initialize
9882      its value in the landing pad.  */
9883   ftype = build_function_type_list (ptr_type_node,
9884 				    integer_type_node, NULL_TREE);
9885   ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9886   /* Only use TM_PURE if we have TM language support.  */
9887   if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9888     ecf_flags |= ECF_TM_PURE;
9889   local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9890 			"__builtin_eh_pointer", ecf_flags);
9891 
9892   tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9893   ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9894   local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9895 			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9896 
9897   ftype = build_function_type_list (void_type_node,
9898 				    integer_type_node, integer_type_node,
9899 				    NULL_TREE);
9900   local_define_builtin ("__builtin_eh_copy_values", ftype,
9901 			BUILT_IN_EH_COPY_VALUES,
9902 			"__builtin_eh_copy_values", ECF_NOTHROW);
9903 
9904   /* Complex multiplication and division.  These are handled as builtins
9905      rather than optabs because emit_library_call_value doesn't support
9906      complex.  Further, we can do slightly better with folding these
9907      beasties if the real and complex parts of the arguments are separate.  */
9908   {
9909     int mode;
9910 
9911     for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9912       {
9913 	char mode_name_buf[4], *q;
9914 	const char *p;
9915 	enum built_in_function mcode, dcode;
9916 	tree type, inner_type;
9917 	const char *prefix = "__";
9918 
9919 	if (targetm.libfunc_gnu_prefix)
9920 	  prefix = "__gnu_";
9921 
9922 	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
9923 	if (type == NULL)
9924 	  continue;
9925 	inner_type = TREE_TYPE (type);
9926 
9927 	ftype = build_function_type_list (type, inner_type, inner_type,
9928 					  inner_type, inner_type, NULL_TREE);
9929 
9930         mcode = ((enum built_in_function)
9931 		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9932         dcode = ((enum built_in_function)
9933 		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9934 
9935         for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
9936 	  *q = TOLOWER (*p);
9937 	*q = '\0';
9938 
9939 	/* For -ftrapping-math these should throw from a former
9940 	   -fnon-call-exception stmt.  */
9941 	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
9942 					NULL);
9943         local_define_builtin (built_in_names[mcode], ftype, mcode,
9944 			      built_in_names[mcode],
9945 			      ECF_CONST | ECF_LEAF);
9946 
9947 	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
9948 					NULL);
9949         local_define_builtin (built_in_names[dcode], ftype, dcode,
9950 			      built_in_names[dcode],
9951 			      ECF_CONST | ECF_LEAF);
9952       }
9953   }
9954 
9955   init_internal_fns ();
9956 }
9957 
9958 /* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
9959    better way.
9960 
9961    If we requested a pointer to a vector, build up the pointers that
9962    we stripped off while looking for the inner type.  Similarly for
9963    return values from functions.
9964 
9965    The argument TYPE is the top of the chain, and BOTTOM is the
9966    new type which we will point to.  */
9967 
9968 tree
reconstruct_complex_type(tree type,tree bottom)9969 reconstruct_complex_type (tree type, tree bottom)
9970 {
9971   tree inner, outer;
9972 
9973   if (TREE_CODE (type) == POINTER_TYPE)
9974     {
9975       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9976       outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9977 					   TYPE_REF_CAN_ALIAS_ALL (type));
9978     }
9979   else if (TREE_CODE (type) == REFERENCE_TYPE)
9980     {
9981       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9982       outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9983 					     TYPE_REF_CAN_ALIAS_ALL (type));
9984     }
9985   else if (TREE_CODE (type) == ARRAY_TYPE)
9986     {
9987       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9988       outer = build_array_type (inner, TYPE_DOMAIN (type));
9989     }
9990   else if (TREE_CODE (type) == FUNCTION_TYPE)
9991     {
9992       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9993       outer = build_function_type (inner, TYPE_ARG_TYPES (type));
9994     }
9995   else if (TREE_CODE (type) == METHOD_TYPE)
9996     {
9997       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9998       /* The build_method_type_directly() routine prepends 'this' to argument list,
9999          so we must compensate by getting rid of it.  */
10000       outer
10001 	= build_method_type_directly
10002 	    (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10003 	     inner,
10004 	     TREE_CHAIN (TYPE_ARG_TYPES (type)));
10005     }
10006   else if (TREE_CODE (type) == OFFSET_TYPE)
10007     {
10008       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10009       outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10010     }
10011   else
10012     return bottom;
10013 
10014   return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10015 					    TYPE_QUALS (type));
10016 }
10017 
10018 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10019    the inner type.  */
10020 tree
build_vector_type_for_mode(tree innertype,machine_mode mode)10021 build_vector_type_for_mode (tree innertype, machine_mode mode)
10022 {
10023   poly_int64 nunits;
10024   unsigned int bitsize;
10025 
10026   switch (GET_MODE_CLASS (mode))
10027     {
10028     case MODE_VECTOR_BOOL:
10029     case MODE_VECTOR_INT:
10030     case MODE_VECTOR_FLOAT:
10031     case MODE_VECTOR_FRACT:
10032     case MODE_VECTOR_UFRACT:
10033     case MODE_VECTOR_ACCUM:
10034     case MODE_VECTOR_UACCUM:
10035       nunits = GET_MODE_NUNITS (mode);
10036       break;
10037 
10038     case MODE_INT:
10039       /* Check that there are no leftover bits.  */
10040       bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10041       gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10042       nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10043       break;
10044 
10045     default:
10046       gcc_unreachable ();
10047     }
10048 
10049   return make_vector_type (innertype, nunits, mode);
10050 }
10051 
10052 /* Similarly, but takes the inner type and number of units, which must be
10053    a power of two.  */
10054 
10055 tree
build_vector_type(tree innertype,poly_int64 nunits)10056 build_vector_type (tree innertype, poly_int64 nunits)
10057 {
10058   return make_vector_type (innertype, nunits, VOIDmode);
10059 }
10060 
10061 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE.  */
10062 
10063 tree
build_truth_vector_type_for_mode(poly_uint64 nunits,machine_mode mask_mode)10064 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10065 {
10066   gcc_assert (mask_mode != BLKmode);
10067 
10068   unsigned HOST_WIDE_INT esize;
10069   if (VECTOR_MODE_P (mask_mode))
10070     {
10071       poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10072       esize = vector_element_size (vsize, nunits);
10073     }
10074   else
10075     esize = 1;
10076 
10077   tree bool_type = build_nonstandard_boolean_type (esize);
10078 
10079   return make_vector_type (bool_type, nunits, mask_mode);
10080 }
10081 
10082 /* Build a vector type that holds one boolean result for each element of
10083    vector type VECTYPE.  The public interface for this operation is
10084    truth_type_for.  */
10085 
10086 static tree
build_truth_vector_type_for(tree vectype)10087 build_truth_vector_type_for (tree vectype)
10088 {
10089   machine_mode vector_mode = TYPE_MODE (vectype);
10090   poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10091 
10092   machine_mode mask_mode;
10093   if (VECTOR_MODE_P (vector_mode)
10094       && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10095     return build_truth_vector_type_for_mode (nunits, mask_mode);
10096 
10097   poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10098   unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10099   tree bool_type = build_nonstandard_boolean_type (esize);
10100 
10101   return make_vector_type (bool_type, nunits, VOIDmode);
10102 }
10103 
10104 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10105    set.  */
10106 
10107 tree
build_opaque_vector_type(tree innertype,poly_int64 nunits)10108 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10109 {
10110   tree t = make_vector_type (innertype, nunits, VOIDmode);
10111   tree cand;
10112   /* We always build the non-opaque variant before the opaque one,
10113      so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
10114   cand = TYPE_NEXT_VARIANT (t);
10115   if (cand
10116       && TYPE_VECTOR_OPAQUE (cand)
10117       && check_qualified_type (cand, t, TYPE_QUALS (t)))
10118     return cand;
10119   /* Othewise build a variant type and make sure to queue it after
10120      the non-opaque type.  */
10121   cand = build_distinct_type_copy (t);
10122   TYPE_VECTOR_OPAQUE (cand) = true;
10123   TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10124   TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10125   TYPE_NEXT_VARIANT (t) = cand;
10126   TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10127   /* Type variants have no alias set defined.  */
10128   TYPE_ALIAS_SET (cand) = -1;
10129   return cand;
10130 }
10131 
10132 /* Return the value of element I of VECTOR_CST T as a wide_int.  */
10133 
10134 static poly_wide_int
vector_cst_int_elt(const_tree t,unsigned int i)10135 vector_cst_int_elt (const_tree t, unsigned int i)
10136 {
10137   /* First handle elements that are directly encoded.  */
10138   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10139   if (i < encoded_nelts)
10140     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10141 
10142   /* Identify the pattern that contains element I and work out the index of
10143      the last encoded element for that pattern.  */
10144   unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10145   unsigned int pattern = i % npatterns;
10146   unsigned int count = i / npatterns;
10147   unsigned int final_i = encoded_nelts - npatterns + pattern;
10148 
10149   /* If there are no steps, the final encoded value is the right one.  */
10150   if (!VECTOR_CST_STEPPED_P (t))
10151     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10152 
10153   /* Otherwise work out the value from the last two encoded elements.  */
10154   tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10155   tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10156   poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10157   return wi::to_poly_wide (v2) + (count - 2) * diff;
10158 }
10159 
10160 /* Return the value of element I of VECTOR_CST T.  */
10161 
10162 tree
vector_cst_elt(const_tree t,unsigned int i)10163 vector_cst_elt (const_tree t, unsigned int i)
10164 {
10165   /* First handle elements that are directly encoded.  */
10166   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10167   if (i < encoded_nelts)
10168     return VECTOR_CST_ENCODED_ELT (t, i);
10169 
10170   /* If there are no steps, the final encoded value is the right one.  */
10171   if (!VECTOR_CST_STEPPED_P (t))
10172     {
10173       /* Identify the pattern that contains element I and work out the index of
10174 	 the last encoded element for that pattern.  */
10175       unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10176       unsigned int pattern = i % npatterns;
10177       unsigned int final_i = encoded_nelts - npatterns + pattern;
10178       return VECTOR_CST_ENCODED_ELT (t, final_i);
10179     }
10180 
10181   /* Otherwise work out the value from the last two encoded elements.  */
10182   return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10183 			   vector_cst_int_elt (t, i));
10184 }
10185 
10186 /* Given an initializer INIT, return TRUE if INIT is zero or some
10187    aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
10188    null, set *NONZERO if and only if INIT is known not to be all
10189    zeros.  The combination of return value of false and *NONZERO
10190    false implies that INIT may but need not be all zeros.  Other
10191    combinations indicate definitive answers.  */
10192 
10193 bool
initializer_zerop(const_tree init,bool * nonzero)10194 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10195 {
10196   bool dummy;
10197   if (!nonzero)
10198     nonzero = &dummy;
10199 
10200   /* Conservatively clear NONZERO and set it only if INIT is definitely
10201      not all zero.  */
10202   *nonzero = false;
10203 
10204   STRIP_NOPS (init);
10205 
10206   unsigned HOST_WIDE_INT off = 0;
10207 
10208   switch (TREE_CODE (init))
10209     {
10210     case INTEGER_CST:
10211       if (integer_zerop (init))
10212 	return true;
10213 
10214       *nonzero = true;
10215       return false;
10216 
10217     case REAL_CST:
10218       /* ??? Note that this is not correct for C4X float formats.  There,
10219 	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10220 	 negative exponent.  */
10221       if (real_zerop (init)
10222 	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10223 	return true;
10224 
10225       *nonzero = true;
10226       return false;
10227 
10228     case FIXED_CST:
10229       if (fixed_zerop (init))
10230 	return true;
10231 
10232       *nonzero = true;
10233       return false;
10234 
10235     case COMPLEX_CST:
10236       if (integer_zerop (init)
10237 	  || (real_zerop (init)
10238 	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10239 	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10240 	return true;
10241 
10242       *nonzero = true;
10243       return false;
10244 
10245     case VECTOR_CST:
10246       if (VECTOR_CST_NPATTERNS (init) == 1
10247 	  && VECTOR_CST_DUPLICATE_P (init)
10248 	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10249 	return true;
10250 
10251       *nonzero = true;
10252       return false;
10253 
10254     case CONSTRUCTOR:
10255       {
10256 	if (TREE_CLOBBER_P (init))
10257 	  return false;
10258 
10259 	unsigned HOST_WIDE_INT idx;
10260 	tree elt;
10261 
10262 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10263 	  if (!initializer_zerop (elt, nonzero))
10264 	    return false;
10265 
10266 	return true;
10267       }
10268 
10269     case MEM_REF:
10270       {
10271 	tree arg = TREE_OPERAND (init, 0);
10272 	if (TREE_CODE (arg) != ADDR_EXPR)
10273 	  return false;
10274 	tree offset = TREE_OPERAND (init, 1);
10275 	if (TREE_CODE (offset) != INTEGER_CST
10276 	    || !tree_fits_uhwi_p (offset))
10277 	  return false;
10278 	off = tree_to_uhwi (offset);
10279 	if (INT_MAX < off)
10280 	  return false;
10281 	arg = TREE_OPERAND (arg, 0);
10282 	if (TREE_CODE (arg) != STRING_CST)
10283 	  return false;
10284 	init = arg;
10285       }
10286       /* Fall through.  */
10287 
10288     case STRING_CST:
10289       {
10290 	gcc_assert (off <= INT_MAX);
10291 
10292 	int i = off;
10293 	int n = TREE_STRING_LENGTH (init);
10294 	if (n <= i)
10295 	  return false;
10296 
10297 	/* We need to loop through all elements to handle cases like
10298 	   "\0" and "\0foobar".  */
10299 	for (i = 0; i < n; ++i)
10300 	  if (TREE_STRING_POINTER (init)[i] != '\0')
10301 	    {
10302 	      *nonzero = true;
10303 	      return false;
10304 	    }
10305 
10306 	return true;
10307       }
10308 
10309     default:
10310       return false;
10311     }
10312 }
10313 
10314 /* Return true if EXPR is an initializer expression in which every element
10315    is a constant that is numerically equal to 0 or 1.  The elements do not
10316    need to be equal to each other.  */
10317 
10318 bool
initializer_each_zero_or_onep(const_tree expr)10319 initializer_each_zero_or_onep (const_tree expr)
10320 {
10321   STRIP_ANY_LOCATION_WRAPPER (expr);
10322 
10323   switch (TREE_CODE (expr))
10324     {
10325     case INTEGER_CST:
10326       return integer_zerop (expr) || integer_onep (expr);
10327 
10328     case REAL_CST:
10329       return real_zerop (expr) || real_onep (expr);
10330 
10331     case VECTOR_CST:
10332       {
10333 	unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10334 	if (VECTOR_CST_STEPPED_P (expr)
10335 	    && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10336 	  return false;
10337 
10338 	for (unsigned int i = 0; i < nelts; ++i)
10339 	  {
10340 	    tree elt = vector_cst_elt (expr, i);
10341 	    if (!initializer_each_zero_or_onep (elt))
10342 	      return false;
10343 	  }
10344 
10345 	return true;
10346       }
10347 
10348     default:
10349       return false;
10350     }
10351 }
10352 
10353 /* Check if vector VEC consists of all the equal elements and
10354    that the number of elements corresponds to the type of VEC.
10355    The function returns first element of the vector
10356    or NULL_TREE if the vector is not uniform.  */
10357 tree
uniform_vector_p(const_tree vec)10358 uniform_vector_p (const_tree vec)
10359 {
10360   tree first, t;
10361   unsigned HOST_WIDE_INT i, nelts;
10362 
10363   if (vec == NULL_TREE)
10364     return NULL_TREE;
10365 
10366   gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10367 
10368   if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10369     return TREE_OPERAND (vec, 0);
10370 
10371   else if (TREE_CODE (vec) == VECTOR_CST)
10372     {
10373       if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10374 	return VECTOR_CST_ENCODED_ELT (vec, 0);
10375       return NULL_TREE;
10376     }
10377 
10378   else if (TREE_CODE (vec) == CONSTRUCTOR
10379 	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10380     {
10381       first = error_mark_node;
10382 
10383       FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10384         {
10385           if (i == 0)
10386             {
10387               first = t;
10388               continue;
10389             }
10390 	  if (!operand_equal_p (first, t, 0))
10391 	    return NULL_TREE;
10392         }
10393       if (i != nelts)
10394 	return NULL_TREE;
10395 
10396       if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10397 	return uniform_vector_p (first);
10398       return first;
10399     }
10400 
10401   return NULL_TREE;
10402 }
10403 
10404 /* If the argument is INTEGER_CST, return it.  If the argument is vector
10405    with all elements the same INTEGER_CST, return that INTEGER_CST.  Otherwise
10406    return NULL_TREE.
10407    Look through location wrappers. */
10408 
10409 tree
uniform_integer_cst_p(tree t)10410 uniform_integer_cst_p (tree t)
10411 {
10412   STRIP_ANY_LOCATION_WRAPPER (t);
10413 
10414   if (TREE_CODE (t) == INTEGER_CST)
10415     return t;
10416 
10417   if (VECTOR_TYPE_P (TREE_TYPE (t)))
10418     {
10419       t = uniform_vector_p (t);
10420       if (t && TREE_CODE (t) == INTEGER_CST)
10421 	return t;
10422     }
10423 
10424   return NULL_TREE;
10425 }
10426 
10427 /* Checks to see if T is a constant or a constant vector and if each element E
10428    adheres to ~E + 1 == pow2 then return ~E otherwise NULL_TREE.  */
10429 
10430 tree
bitmask_inv_cst_vector_p(tree t)10431 bitmask_inv_cst_vector_p (tree t)
10432 {
10433 
10434   tree_code code = TREE_CODE (t);
10435   tree type = TREE_TYPE (t);
10436 
10437   if (!INTEGRAL_TYPE_P (type)
10438       && !VECTOR_INTEGER_TYPE_P (type))
10439     return NULL_TREE;
10440 
10441   unsigned HOST_WIDE_INT nelts = 1;
10442   tree cst;
10443   unsigned int idx = 0;
10444   bool uniform = uniform_integer_cst_p (t);
10445   tree newtype = unsigned_type_for (type);
10446   tree_vector_builder builder;
10447   if (code == INTEGER_CST)
10448     cst = t;
10449   else
10450     {
10451       if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10452 	return NULL_TREE;
10453 
10454       cst = vector_cst_elt (t, 0);
10455       builder.new_vector (newtype, nelts, 1);
10456     }
10457 
10458   tree ty = unsigned_type_for (TREE_TYPE (cst));
10459 
10460   do
10461     {
10462       if (idx > 0)
10463 	cst = vector_cst_elt (t, idx);
10464       wide_int icst = wi::to_wide (cst);
10465       wide_int inv =  wi::bit_not (icst);
10466       icst = wi::add (1, inv);
10467       if (wi::popcount (icst) != 1)
10468 	return NULL_TREE;
10469 
10470       tree newcst = wide_int_to_tree (ty, inv);
10471 
10472       if (uniform)
10473 	return build_uniform_cst (newtype, newcst);
10474 
10475       builder.quick_push (newcst);
10476     }
10477   while (++idx < nelts);
10478 
10479   return builder.build ();
10480 }
10481 
10482 /* If VECTOR_CST T has a single nonzero element, return the index of that
10483    element, otherwise return -1.  */
10484 
10485 int
single_nonzero_element(const_tree t)10486 single_nonzero_element (const_tree t)
10487 {
10488   unsigned HOST_WIDE_INT nelts;
10489   unsigned int repeat_nelts;
10490   if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10491     repeat_nelts = nelts;
10492   else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10493     {
10494       nelts = vector_cst_encoded_nelts (t);
10495       repeat_nelts = VECTOR_CST_NPATTERNS (t);
10496     }
10497   else
10498     return -1;
10499 
10500   int res = -1;
10501   for (unsigned int i = 0; i < nelts; ++i)
10502     {
10503       tree elt = vector_cst_elt (t, i);
10504       if (!integer_zerop (elt) && !real_zerop (elt))
10505 	{
10506 	  if (res >= 0 || i >= repeat_nelts)
10507 	    return -1;
10508 	  res = i;
10509 	}
10510     }
10511   return res;
10512 }
10513 
10514 /* Build an empty statement at location LOC.  */
10515 
10516 tree
build_empty_stmt(location_t loc)10517 build_empty_stmt (location_t loc)
10518 {
10519   tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10520   SET_EXPR_LOCATION (t, loc);
10521   return t;
10522 }
10523 
10524 
10525 /* Build an OMP clause with code CODE.  LOC is the location of the
10526    clause.  */
10527 
10528 tree
build_omp_clause(location_t loc,enum omp_clause_code code)10529 build_omp_clause (location_t loc, enum omp_clause_code code)
10530 {
10531   tree t;
10532   int size, length;
10533 
10534   length = omp_clause_num_ops[code];
10535   size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10536 
10537   record_node_allocation_statistics (OMP_CLAUSE, size);
10538 
10539   t = (tree) ggc_internal_alloc (size);
10540   memset (t, 0, size);
10541   TREE_SET_CODE (t, OMP_CLAUSE);
10542   OMP_CLAUSE_SET_CODE (t, code);
10543   OMP_CLAUSE_LOCATION (t) = loc;
10544 
10545   return t;
10546 }
10547 
10548 /* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
10549    includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10550    Except for the CODE and operand count field, other storage for the
10551    object is initialized to zeros.  */
10552 
10553 tree
build_vl_exp(enum tree_code code,int len MEM_STAT_DECL)10554 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10555 {
10556   tree t;
10557   int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10558 
10559   gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10560   gcc_assert (len >= 1);
10561 
10562   record_node_allocation_statistics (code, length);
10563 
10564   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10565 
10566   TREE_SET_CODE (t, code);
10567 
10568   /* Can't use TREE_OPERAND to store the length because if checking is
10569      enabled, it will try to check the length before we store it.  :-P  */
10570   t->exp.operands[0] = build_int_cst (sizetype, len);
10571 
10572   return t;
10573 }
10574 
10575 /* Helper function for build_call_* functions; build a CALL_EXPR with
10576    indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10577    the argument slots.  */
10578 
10579 static tree
build_call_1(tree return_type,tree fn,int nargs)10580 build_call_1 (tree return_type, tree fn, int nargs)
10581 {
10582   tree t;
10583 
10584   t = build_vl_exp (CALL_EXPR, nargs + 3);
10585   TREE_TYPE (t) = return_type;
10586   CALL_EXPR_FN (t) = fn;
10587   CALL_EXPR_STATIC_CHAIN (t) = NULL;
10588 
10589   return t;
10590 }
10591 
10592 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10593    FN and a null static chain slot.  NARGS is the number of call arguments
10594    which are specified as "..." arguments.  */
10595 
10596 tree
build_call_nary(tree return_type,tree fn,int nargs,...)10597 build_call_nary (tree return_type, tree fn, int nargs, ...)
10598 {
10599   tree ret;
10600   va_list args;
10601   va_start (args, nargs);
10602   ret = build_call_valist (return_type, fn, nargs, args);
10603   va_end (args);
10604   return ret;
10605 }
10606 
10607 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10608    FN and a null static chain slot.  NARGS is the number of call arguments
10609    which are specified as a va_list ARGS.  */
10610 
10611 tree
build_call_valist(tree return_type,tree fn,int nargs,va_list args)10612 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10613 {
10614   tree t;
10615   int i;
10616 
10617   t = build_call_1 (return_type, fn, nargs);
10618   for (i = 0; i < nargs; i++)
10619     CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10620   process_call_operands (t);
10621   return t;
10622 }
10623 
10624 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10625    FN and a null static chain slot.  NARGS is the number of call arguments
10626    which are specified as a tree array ARGS.  */
10627 
10628 tree
build_call_array_loc(location_t loc,tree return_type,tree fn,int nargs,const tree * args)10629 build_call_array_loc (location_t loc, tree return_type, tree fn,
10630 		      int nargs, const tree *args)
10631 {
10632   tree t;
10633   int i;
10634 
10635   t = build_call_1 (return_type, fn, nargs);
10636   for (i = 0; i < nargs; i++)
10637     CALL_EXPR_ARG (t, i) = args[i];
10638   process_call_operands (t);
10639   SET_EXPR_LOCATION (t, loc);
10640   return t;
10641 }
10642 
10643 /* Like build_call_array, but takes a vec.  */
10644 
10645 tree
build_call_vec(tree return_type,tree fn,const vec<tree,va_gc> * args)10646 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10647 {
10648   tree ret, t;
10649   unsigned int ix;
10650 
10651   ret = build_call_1 (return_type, fn, vec_safe_length (args));
10652   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10653     CALL_EXPR_ARG (ret, ix) = t;
10654   process_call_operands (ret);
10655   return ret;
10656 }
10657 
10658 /* Conveniently construct a function call expression.  FNDECL names the
10659    function to be called and N arguments are passed in the array
10660    ARGARRAY.  */
10661 
10662 tree
build_call_expr_loc_array(location_t loc,tree fndecl,int n,tree * argarray)10663 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10664 {
10665   tree fntype = TREE_TYPE (fndecl);
10666   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10667 
10668   return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10669 }
10670 
10671 /* Conveniently construct a function call expression.  FNDECL names the
10672    function to be called and the arguments are passed in the vector
10673    VEC.  */
10674 
10675 tree
build_call_expr_loc_vec(location_t loc,tree fndecl,vec<tree,va_gc> * vec)10676 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10677 {
10678   return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10679 				    vec_safe_address (vec));
10680 }
10681 
10682 
10683 /* Conveniently construct a function call expression.  FNDECL names the
10684    function to be called, N is the number of arguments, and the "..."
10685    parameters are the argument expressions.  */
10686 
10687 tree
build_call_expr_loc(location_t loc,tree fndecl,int n,...)10688 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10689 {
10690   va_list ap;
10691   tree *argarray = XALLOCAVEC (tree, n);
10692   int i;
10693 
10694   va_start (ap, n);
10695   for (i = 0; i < n; i++)
10696     argarray[i] = va_arg (ap, tree);
10697   va_end (ap);
10698   return build_call_expr_loc_array (loc, fndecl, n, argarray);
10699 }
10700 
10701 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
10702    varargs macros aren't supported by all bootstrap compilers.  */
10703 
10704 tree
build_call_expr(tree fndecl,int n,...)10705 build_call_expr (tree fndecl, int n, ...)
10706 {
10707   va_list ap;
10708   tree *argarray = XALLOCAVEC (tree, n);
10709   int i;
10710 
10711   va_start (ap, n);
10712   for (i = 0; i < n; i++)
10713     argarray[i] = va_arg (ap, tree);
10714   va_end (ap);
10715   return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10716 }
10717 
10718 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10719    type TYPE.  This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10720    It will get gimplified later into an ordinary internal function.  */
10721 
10722 tree
build_call_expr_internal_loc_array(location_t loc,internal_fn ifn,tree type,int n,const tree * args)10723 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10724 				    tree type, int n, const tree *args)
10725 {
10726   tree t = build_call_1 (type, NULL_TREE, n);
10727   for (int i = 0; i < n; ++i)
10728     CALL_EXPR_ARG (t, i) = args[i];
10729   SET_EXPR_LOCATION (t, loc);
10730   CALL_EXPR_IFN (t) = ifn;
10731   process_call_operands (t);
10732   return t;
10733 }
10734 
10735 /* Build internal call expression.  This is just like CALL_EXPR, except
10736    its CALL_EXPR_FN is NULL.  It will get gimplified later into ordinary
10737    internal function.  */
10738 
10739 tree
build_call_expr_internal_loc(location_t loc,enum internal_fn ifn,tree type,int n,...)10740 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10741 			      tree type, int n, ...)
10742 {
10743   va_list ap;
10744   tree *argarray = XALLOCAVEC (tree, n);
10745   int i;
10746 
10747   va_start (ap, n);
10748   for (i = 0; i < n; i++)
10749     argarray[i] = va_arg (ap, tree);
10750   va_end (ap);
10751   return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10752 }
10753 
10754 /* Return a function call to FN, if the target is guaranteed to support it,
10755    or null otherwise.
10756 
10757    N is the number of arguments, passed in the "...", and TYPE is the
10758    type of the return value.  */
10759 
10760 tree
maybe_build_call_expr_loc(location_t loc,combined_fn fn,tree type,int n,...)10761 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10762 			   int n, ...)
10763 {
10764   va_list ap;
10765   tree *argarray = XALLOCAVEC (tree, n);
10766   int i;
10767 
10768   va_start (ap, n);
10769   for (i = 0; i < n; i++)
10770     argarray[i] = va_arg (ap, tree);
10771   va_end (ap);
10772   if (internal_fn_p (fn))
10773     {
10774       internal_fn ifn = as_internal_fn (fn);
10775       if (direct_internal_fn_p (ifn))
10776 	{
10777 	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10778 	  if (!direct_internal_fn_supported_p (ifn, types,
10779 					       OPTIMIZE_FOR_BOTH))
10780 	    return NULL_TREE;
10781 	}
10782       return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10783     }
10784   else
10785     {
10786       tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10787       if (!fndecl)
10788 	return NULL_TREE;
10789       return build_call_expr_loc_array (loc, fndecl, n, argarray);
10790     }
10791 }
10792 
10793 /* Return a function call to the appropriate builtin alloca variant.
10794 
10795    SIZE is the size to be allocated.  ALIGN, if non-zero, is the requested
10796    alignment of the allocated area.  MAX_SIZE, if non-negative, is an upper
10797    bound for SIZE in case it is not a fixed value.  */
10798 
10799 tree
build_alloca_call_expr(tree size,unsigned int align,HOST_WIDE_INT max_size)10800 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10801 {
10802   if (max_size >= 0)
10803     {
10804       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10805       return
10806 	build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10807     }
10808   else if (align > 0)
10809     {
10810       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10811       return build_call_expr (t, 2, size, size_int (align));
10812     }
10813   else
10814     {
10815       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10816       return build_call_expr (t, 1, size);
10817     }
10818 }
10819 
10820 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10821    if SIZE == -1) and return a tree node representing char* pointer to
10822    it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
10823    the STRING_CST value is the LEN bytes at STR (the representation
10824    of the string, which may be wide).  Otherwise it's all zeros.  */
10825 
10826 tree
build_string_literal(unsigned len,const char * str,tree eltype,unsigned HOST_WIDE_INT size)10827 build_string_literal (unsigned len, const char *str /* = NULL */,
10828 		      tree eltype /* = char_type_node */,
10829 		      unsigned HOST_WIDE_INT size /* = -1 */)
10830 {
10831   tree t = build_string (len, str);
10832   /* Set the maximum valid index based on the string length or SIZE.  */
10833   unsigned HOST_WIDE_INT maxidx
10834     = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10835 
10836   tree index = build_index_type (size_int (maxidx));
10837   eltype = build_type_variant (eltype, 1, 0);
10838   tree type = build_array_type (eltype, index);
10839   TREE_TYPE (t) = type;
10840   TREE_CONSTANT (t) = 1;
10841   TREE_READONLY (t) = 1;
10842   TREE_STATIC (t) = 1;
10843 
10844   type = build_pointer_type (eltype);
10845   t = build1 (ADDR_EXPR, type,
10846 	      build4 (ARRAY_REF, eltype,
10847 		      t, integer_zero_node, NULL_TREE, NULL_TREE));
10848   return t;
10849 }
10850 
10851 
10852 
10853 /* Return true if T (assumed to be a DECL) must be assigned a memory
10854    location.  */
10855 
10856 bool
needs_to_live_in_memory(const_tree t)10857 needs_to_live_in_memory (const_tree t)
10858 {
10859   return (TREE_ADDRESSABLE (t)
10860 	  || is_global_var (t)
10861 	  || (TREE_CODE (t) == RESULT_DECL
10862 	      && !DECL_BY_REFERENCE (t)
10863 	      && aggregate_value_p (t, current_function_decl)));
10864 }
10865 
10866 /* Return value of a constant X and sign-extend it.  */
10867 
10868 HOST_WIDE_INT
int_cst_value(const_tree x)10869 int_cst_value (const_tree x)
10870 {
10871   unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10872   unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10873 
10874   /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
10875   gcc_assert (cst_and_fits_in_hwi (x));
10876 
10877   if (bits < HOST_BITS_PER_WIDE_INT)
10878     {
10879       bool negative = ((val >> (bits - 1)) & 1) != 0;
10880       if (negative)
10881 	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10882       else
10883 	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10884     }
10885 
10886   return val;
10887 }
10888 
10889 /* If TYPE is an integral or pointer type, return an integer type with
10890    the same precision which is unsigned iff UNSIGNEDP is true, or itself
10891    if TYPE is already an integer type of signedness UNSIGNEDP.
10892    If TYPE is a floating-point type, return an integer type with the same
10893    bitsize and with the signedness given by UNSIGNEDP; this is useful
10894    when doing bit-level operations on a floating-point value.  */
10895 
10896 tree
signed_or_unsigned_type_for(int unsignedp,tree type)10897 signed_or_unsigned_type_for (int unsignedp, tree type)
10898 {
10899   if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10900     return type;
10901 
10902   if (TREE_CODE (type) == VECTOR_TYPE)
10903     {
10904       tree inner = TREE_TYPE (type);
10905       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10906       if (!inner2)
10907 	return NULL_TREE;
10908       if (inner == inner2)
10909 	return type;
10910       return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10911     }
10912 
10913   if (TREE_CODE (type) == COMPLEX_TYPE)
10914     {
10915       tree inner = TREE_TYPE (type);
10916       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10917       if (!inner2)
10918 	return NULL_TREE;
10919       if (inner == inner2)
10920 	return type;
10921       return build_complex_type (inner2);
10922     }
10923 
10924   unsigned int bits;
10925   if (INTEGRAL_TYPE_P (type)
10926       || POINTER_TYPE_P (type)
10927       || TREE_CODE (type) == OFFSET_TYPE)
10928     bits = TYPE_PRECISION (type);
10929   else if (TREE_CODE (type) == REAL_TYPE)
10930     bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
10931   else
10932     return NULL_TREE;
10933 
10934   return build_nonstandard_integer_type (bits, unsignedp);
10935 }
10936 
10937 /* If TYPE is an integral or pointer type, return an integer type with
10938    the same precision which is unsigned, or itself if TYPE is already an
10939    unsigned integer type.  If TYPE is a floating-point type, return an
10940    unsigned integer type with the same bitsize as TYPE.  */
10941 
10942 tree
unsigned_type_for(tree type)10943 unsigned_type_for (tree type)
10944 {
10945   return signed_or_unsigned_type_for (1, type);
10946 }
10947 
10948 /* If TYPE is an integral or pointer type, return an integer type with
10949    the same precision which is signed, or itself if TYPE is already a
10950    signed integer type.  If TYPE is a floating-point type, return a
10951    signed integer type with the same bitsize as TYPE.  */
10952 
10953 tree
signed_type_for(tree type)10954 signed_type_for (tree type)
10955 {
10956   return signed_or_unsigned_type_for (0, type);
10957 }
10958 
10959 /* - For VECTOR_TYPEs:
10960     - The truth type must be a VECTOR_BOOLEAN_TYPE.
10961     - The number of elements must match (known_eq).
10962     - targetm.vectorize.get_mask_mode exists, and exactly
10963       the same mode as the truth type.
10964    - Otherwise, the truth type must be a BOOLEAN_TYPE
10965      or useless_type_conversion_p to BOOLEAN_TYPE.  */
10966 bool
is_truth_type_for(tree type,tree truth_type)10967 is_truth_type_for (tree type, tree truth_type)
10968 {
10969   machine_mode mask_mode = TYPE_MODE (truth_type);
10970   machine_mode vmode = TYPE_MODE (type);
10971   machine_mode tmask_mode;
10972 
10973   if (TREE_CODE (type) == VECTOR_TYPE)
10974     {
10975       if (VECTOR_BOOLEAN_TYPE_P (truth_type)
10976 	  && known_eq (TYPE_VECTOR_SUBPARTS (type),
10977 		       TYPE_VECTOR_SUBPARTS (truth_type))
10978 	  && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
10979 	  && tmask_mode == mask_mode)
10980 	return true;
10981 
10982       return false;
10983     }
10984 
10985   return useless_type_conversion_p (boolean_type_node, truth_type);
10986 }
10987 
10988 /* If TYPE is a vector type, return a signed integer vector type with the
10989    same width and number of subparts. Otherwise return boolean_type_node.  */
10990 
10991 tree
truth_type_for(tree type)10992 truth_type_for (tree type)
10993 {
10994   if (TREE_CODE (type) == VECTOR_TYPE)
10995     {
10996       if (VECTOR_BOOLEAN_TYPE_P (type))
10997 	return type;
10998       return build_truth_vector_type_for (type);
10999     }
11000   else
11001     return boolean_type_node;
11002 }
11003 
11004 /* Returns the largest value obtainable by casting something in INNER type to
11005    OUTER type.  */
11006 
11007 tree
upper_bound_in_type(tree outer,tree inner)11008 upper_bound_in_type (tree outer, tree inner)
11009 {
11010   unsigned int det = 0;
11011   unsigned oprec = TYPE_PRECISION (outer);
11012   unsigned iprec = TYPE_PRECISION (inner);
11013   unsigned prec;
11014 
11015   /* Compute a unique number for every combination.  */
11016   det |= (oprec > iprec) ? 4 : 0;
11017   det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11018   det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11019 
11020   /* Determine the exponent to use.  */
11021   switch (det)
11022     {
11023     case 0:
11024     case 1:
11025       /* oprec <= iprec, outer: signed, inner: don't care.  */
11026       prec = oprec - 1;
11027       break;
11028     case 2:
11029     case 3:
11030       /* oprec <= iprec, outer: unsigned, inner: don't care.  */
11031       prec = oprec;
11032       break;
11033     case 4:
11034       /* oprec > iprec, outer: signed, inner: signed.  */
11035       prec = iprec - 1;
11036       break;
11037     case 5:
11038       /* oprec > iprec, outer: signed, inner: unsigned.  */
11039       prec = iprec;
11040       break;
11041     case 6:
11042       /* oprec > iprec, outer: unsigned, inner: signed.  */
11043       prec = oprec;
11044       break;
11045     case 7:
11046       /* oprec > iprec, outer: unsigned, inner: unsigned.  */
11047       prec = iprec;
11048       break;
11049     default:
11050       gcc_unreachable ();
11051     }
11052 
11053   return wide_int_to_tree (outer,
11054 			   wi::mask (prec, false, TYPE_PRECISION (outer)));
11055 }
11056 
11057 /* Returns the smallest value obtainable by casting something in INNER type to
11058    OUTER type.  */
11059 
11060 tree
lower_bound_in_type(tree outer,tree inner)11061 lower_bound_in_type (tree outer, tree inner)
11062 {
11063   unsigned oprec = TYPE_PRECISION (outer);
11064   unsigned iprec = TYPE_PRECISION (inner);
11065 
11066   /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11067      and obtain 0.  */
11068   if (TYPE_UNSIGNED (outer)
11069       /* If we are widening something of an unsigned type, OUTER type
11070 	 contains all values of INNER type.  In particular, both INNER
11071 	 and OUTER types have zero in common.  */
11072       || (oprec > iprec && TYPE_UNSIGNED (inner)))
11073     return build_int_cst (outer, 0);
11074   else
11075     {
11076       /* If we are widening a signed type to another signed type, we
11077 	 want to obtain -2^^(iprec-1).  If we are keeping the
11078 	 precision or narrowing to a signed type, we want to obtain
11079 	 -2^(oprec-1).  */
11080       unsigned prec = oprec > iprec ? iprec : oprec;
11081       return wide_int_to_tree (outer,
11082 			       wi::mask (prec - 1, true,
11083 					 TYPE_PRECISION (outer)));
11084     }
11085 }
11086 
11087 /* Return nonzero if two operands that are suitable for PHI nodes are
11088    necessarily equal.  Specifically, both ARG0 and ARG1 must be either
11089    SSA_NAME or invariant.  Note that this is strictly an optimization.
11090    That is, callers of this function can directly call operand_equal_p
11091    and get the same result, only slower.  */
11092 
11093 int
operand_equal_for_phi_arg_p(const_tree arg0,const_tree arg1)11094 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11095 {
11096   if (arg0 == arg1)
11097     return 1;
11098   if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11099     return 0;
11100   return operand_equal_p (arg0, arg1, 0);
11101 }
11102 
11103 /* Returns number of zeros at the end of binary representation of X.  */
11104 
11105 tree
num_ending_zeros(const_tree x)11106 num_ending_zeros (const_tree x)
11107 {
11108   return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11109 }
11110 
11111 
11112 #define WALK_SUBTREE(NODE)				\
11113   do							\
11114     {							\
11115       result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
11116       if (result)					\
11117 	return result;					\
11118     }							\
11119   while (0)
11120 
11121 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11122    be walked whenever a type is seen in the tree.  Rest of operands and return
11123    value are as for walk_tree.  */
11124 
11125 static tree
walk_type_fields(tree type,walk_tree_fn func,void * data,hash_set<tree> * pset,walk_tree_lh lh)11126 walk_type_fields (tree type, walk_tree_fn func, void *data,
11127 		  hash_set<tree> *pset, walk_tree_lh lh)
11128 {
11129   tree result = NULL_TREE;
11130 
11131   switch (TREE_CODE (type))
11132     {
11133     case POINTER_TYPE:
11134     case REFERENCE_TYPE:
11135     case VECTOR_TYPE:
11136       /* We have to worry about mutually recursive pointers.  These can't
11137 	 be written in C.  They can in Ada.  It's pathological, but
11138 	 there's an ACATS test (c38102a) that checks it.  Deal with this
11139 	 by checking if we're pointing to another pointer, that one
11140 	 points to another pointer, that one does too, and we have no htab.
11141 	 If so, get a hash table.  We check three levels deep to avoid
11142 	 the cost of the hash table if we don't need one.  */
11143       if (POINTER_TYPE_P (TREE_TYPE (type))
11144 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11145 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11146 	  && !pset)
11147 	{
11148 	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
11149 						 func, data);
11150 	  if (result)
11151 	    return result;
11152 
11153 	  break;
11154 	}
11155 
11156       /* fall through */
11157 
11158     case COMPLEX_TYPE:
11159       WALK_SUBTREE (TREE_TYPE (type));
11160       break;
11161 
11162     case METHOD_TYPE:
11163       WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11164 
11165       /* Fall through.  */
11166 
11167     case FUNCTION_TYPE:
11168       WALK_SUBTREE (TREE_TYPE (type));
11169       {
11170 	tree arg;
11171 
11172 	/* We never want to walk into default arguments.  */
11173 	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11174 	  WALK_SUBTREE (TREE_VALUE (arg));
11175       }
11176       break;
11177 
11178     case ARRAY_TYPE:
11179       /* Don't follow this nodes's type if a pointer for fear that
11180 	 we'll have infinite recursion.  If we have a PSET, then we
11181 	 need not fear.  */
11182       if (pset
11183 	  || (!POINTER_TYPE_P (TREE_TYPE (type))
11184 	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11185 	WALK_SUBTREE (TREE_TYPE (type));
11186       WALK_SUBTREE (TYPE_DOMAIN (type));
11187       break;
11188 
11189     case OFFSET_TYPE:
11190       WALK_SUBTREE (TREE_TYPE (type));
11191       WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11192       break;
11193 
11194     default:
11195       break;
11196     }
11197 
11198   return NULL_TREE;
11199 }
11200 
11201 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
11202    called with the DATA and the address of each sub-tree.  If FUNC returns a
11203    non-NULL value, the traversal is stopped, and the value returned by FUNC
11204    is returned.  If PSET is non-NULL it is used to record the nodes visited,
11205    and to avoid visiting a node more than once.  */
11206 
11207 tree
walk_tree_1(tree * tp,walk_tree_fn func,void * data,hash_set<tree> * pset,walk_tree_lh lh)11208 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11209 	     hash_set<tree> *pset, walk_tree_lh lh)
11210 {
11211   enum tree_code code;
11212   int walk_subtrees;
11213   tree result;
11214 
11215 #define WALK_SUBTREE_TAIL(NODE)				\
11216   do							\
11217     {							\
11218        tp = & (NODE);					\
11219        goto tail_recurse;				\
11220     }							\
11221   while (0)
11222 
11223  tail_recurse:
11224   /* Skip empty subtrees.  */
11225   if (!*tp)
11226     return NULL_TREE;
11227 
11228   /* Don't walk the same tree twice, if the user has requested
11229      that we avoid doing so.  */
11230   if (pset && pset->add (*tp))
11231     return NULL_TREE;
11232 
11233   /* Call the function.  */
11234   walk_subtrees = 1;
11235   result = (*func) (tp, &walk_subtrees, data);
11236 
11237   /* If we found something, return it.  */
11238   if (result)
11239     return result;
11240 
11241   code = TREE_CODE (*tp);
11242 
11243   /* Even if we didn't, FUNC may have decided that there was nothing
11244      interesting below this point in the tree.  */
11245   if (!walk_subtrees)
11246     {
11247       /* But we still need to check our siblings.  */
11248       if (code == TREE_LIST)
11249 	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11250       else if (code == OMP_CLAUSE)
11251 	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11252       else
11253 	return NULL_TREE;
11254     }
11255 
11256   if (lh)
11257     {
11258       result = (*lh) (tp, &walk_subtrees, func, data, pset);
11259       if (result || !walk_subtrees)
11260         return result;
11261     }
11262 
11263   switch (code)
11264     {
11265     case ERROR_MARK:
11266     case IDENTIFIER_NODE:
11267     case INTEGER_CST:
11268     case REAL_CST:
11269     case FIXED_CST:
11270     case STRING_CST:
11271     case BLOCK:
11272     case PLACEHOLDER_EXPR:
11273     case SSA_NAME:
11274     case FIELD_DECL:
11275     case RESULT_DECL:
11276       /* None of these have subtrees other than those already walked
11277 	 above.  */
11278       break;
11279 
11280     case TREE_LIST:
11281       WALK_SUBTREE (TREE_VALUE (*tp));
11282       WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11283 
11284     case TREE_VEC:
11285       {
11286 	int len = TREE_VEC_LENGTH (*tp);
11287 
11288 	if (len == 0)
11289 	  break;
11290 
11291 	/* Walk all elements but the first.  */
11292 	while (--len)
11293 	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11294 
11295 	/* Now walk the first one as a tail call.  */
11296 	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11297       }
11298 
11299     case VECTOR_CST:
11300       {
11301 	unsigned len = vector_cst_encoded_nelts (*tp);
11302 	if (len == 0)
11303 	  break;
11304 	/* Walk all elements but the first.  */
11305 	while (--len)
11306 	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
11307 	/* Now walk the first one as a tail call.  */
11308 	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
11309       }
11310 
11311     case COMPLEX_CST:
11312       WALK_SUBTREE (TREE_REALPART (*tp));
11313       WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11314 
11315     case CONSTRUCTOR:
11316       {
11317 	unsigned HOST_WIDE_INT idx;
11318 	constructor_elt *ce;
11319 
11320 	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11321 	     idx++)
11322 	  WALK_SUBTREE (ce->value);
11323       }
11324       break;
11325 
11326     case SAVE_EXPR:
11327       WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11328 
11329     case BIND_EXPR:
11330       {
11331 	tree decl;
11332 	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11333 	  {
11334 	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
11335 	       into declarations that are just mentioned, rather than
11336 	       declared; they don't really belong to this part of the tree.
11337 	       And, we can see cycles: the initializer for a declaration
11338 	       can refer to the declaration itself.  */
11339 	    WALK_SUBTREE (DECL_INITIAL (decl));
11340 	    WALK_SUBTREE (DECL_SIZE (decl));
11341 	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11342 	  }
11343 	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11344       }
11345 
11346     case STATEMENT_LIST:
11347       {
11348 	tree_stmt_iterator i;
11349 	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11350 	  WALK_SUBTREE (*tsi_stmt_ptr (i));
11351       }
11352       break;
11353 
11354     case OMP_CLAUSE:
11355       {
11356 	int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
11357 	for (int i = 0; i < len; i++)
11358 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11359 	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11360       }
11361 
11362     case TARGET_EXPR:
11363       {
11364 	int i, len;
11365 
11366 	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11367 	   But, we only want to walk once.  */
11368 	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11369 	for (i = 0; i < len; ++i)
11370 	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
11371 	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11372       }
11373 
11374     case DECL_EXPR:
11375       /* If this is a TYPE_DECL, walk into the fields of the type that it's
11376 	 defining.  We only want to walk into these fields of a type in this
11377 	 case and not in the general case of a mere reference to the type.
11378 
11379 	 The criterion is as follows: if the field can be an expression, it
11380 	 must be walked only here.  This should be in keeping with the fields
11381 	 that are directly gimplified in gimplify_type_sizes in order for the
11382 	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11383 	 variable-sized types.
11384 
11385 	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
11386       if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11387 	{
11388 	  /* Call the function for the decl so e.g. copy_tree_body_r can
11389 	     replace it with the remapped one.  */
11390 	  result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
11391 	  if (result || !walk_subtrees)
11392 	    return result;
11393 
11394 	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11395 	  if (TREE_CODE (*type_p) == ERROR_MARK)
11396 	    return NULL_TREE;
11397 
11398 	  /* Call the function for the type.  See if it returns anything or
11399 	     doesn't want us to continue.  If we are to continue, walk both
11400 	     the normal fields and those for the declaration case.  */
11401 	  result = (*func) (type_p, &walk_subtrees, data);
11402 	  if (result || !walk_subtrees)
11403 	    return result;
11404 
11405 	  /* But do not walk a pointed-to type since it may itself need to
11406 	     be walked in the declaration case if it isn't anonymous.  */
11407 	  if (!POINTER_TYPE_P (*type_p))
11408 	    {
11409 	      result = walk_type_fields (*type_p, func, data, pset, lh);
11410 	      if (result)
11411 		return result;
11412 	    }
11413 
11414 	  /* If this is a record type, also walk the fields.  */
11415 	  if (RECORD_OR_UNION_TYPE_P (*type_p))
11416 	    {
11417 	      tree field;
11418 
11419 	      for (field = TYPE_FIELDS (*type_p); field;
11420 		   field = DECL_CHAIN (field))
11421 		{
11422 		  /* We'd like to look at the type of the field, but we can
11423 		     easily get infinite recursion.  So assume it's pointed
11424 		     to elsewhere in the tree.  Also, ignore things that
11425 		     aren't fields.  */
11426 		  if (TREE_CODE (field) != FIELD_DECL)
11427 		    continue;
11428 
11429 		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11430 		  WALK_SUBTREE (DECL_SIZE (field));
11431 		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
11432 		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11433 		    WALK_SUBTREE (DECL_QUALIFIER (field));
11434 		}
11435 	    }
11436 
11437 	  /* Same for scalar types.  */
11438 	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11439 		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
11440 		   || TREE_CODE (*type_p) == INTEGER_TYPE
11441 		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11442 		   || TREE_CODE (*type_p) == REAL_TYPE)
11443 	    {
11444 	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11445 	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11446 	    }
11447 
11448 	  WALK_SUBTREE (TYPE_SIZE (*type_p));
11449 	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11450 	}
11451       /* FALLTHRU */
11452 
11453     default:
11454       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11455 	{
11456 	  int i, len;
11457 
11458 	  /* Walk over all the sub-trees of this operand.  */
11459 	  len = TREE_OPERAND_LENGTH (*tp);
11460 
11461 	  /* Go through the subtrees.  We need to do this in forward order so
11462 	     that the scope of a FOR_EXPR is handled properly.  */
11463 	  if (len)
11464 	    {
11465 	      for (i = 0; i < len - 1; ++i)
11466 		WALK_SUBTREE (TREE_OPERAND (*tp, i));
11467 	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11468 	    }
11469 	}
11470       /* If this is a type, walk the needed fields in the type.  */
11471       else if (TYPE_P (*tp))
11472 	return walk_type_fields (*tp, func, data, pset, lh);
11473       break;
11474     }
11475 
11476   /* We didn't find what we were looking for.  */
11477   return NULL_TREE;
11478 
11479 #undef WALK_SUBTREE_TAIL
11480 }
11481 #undef WALK_SUBTREE
11482 
11483 /* Like walk_tree, but does not walk duplicate nodes more than once.  */
11484 
11485 tree
walk_tree_without_duplicates_1(tree * tp,walk_tree_fn func,void * data,walk_tree_lh lh)11486 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11487 				walk_tree_lh lh)
11488 {
11489   tree result;
11490 
11491   hash_set<tree> pset;
11492   result = walk_tree_1 (tp, func, data, &pset, lh);
11493   return result;
11494 }
11495 
11496 
11497 tree
tree_block(tree t)11498 tree_block (tree t)
11499 {
11500   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11501 
11502   if (IS_EXPR_CODE_CLASS (c))
11503     return LOCATION_BLOCK (t->exp.locus);
11504   gcc_unreachable ();
11505   return NULL;
11506 }
11507 
11508 void
tree_set_block(tree t,tree b)11509 tree_set_block (tree t, tree b)
11510 {
11511   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11512 
11513   if (IS_EXPR_CODE_CLASS (c))
11514     {
11515       t->exp.locus = set_block (t->exp.locus, b);
11516     }
11517   else
11518     gcc_unreachable ();
11519 }
11520 
11521 /* Create a nameless artificial label and put it in the current
11522    function context.  The label has a location of LOC.  Returns the
11523    newly created label.  */
11524 
11525 tree
create_artificial_label(location_t loc)11526 create_artificial_label (location_t loc)
11527 {
11528   tree lab = build_decl (loc,
11529       			 LABEL_DECL, NULL_TREE, void_type_node);
11530 
11531   DECL_ARTIFICIAL (lab) = 1;
11532   DECL_IGNORED_P (lab) = 1;
11533   DECL_CONTEXT (lab) = current_function_decl;
11534   return lab;
11535 }
11536 
11537 /*  Given a tree, try to return a useful variable name that we can use
11538     to prefix a temporary that is being assigned the value of the tree.
11539     I.E. given  <temp> = &A, return A.  */
11540 
11541 const char *
get_name(tree t)11542 get_name (tree t)
11543 {
11544   tree stripped_decl;
11545 
11546   stripped_decl = t;
11547   STRIP_NOPS (stripped_decl);
11548   if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11549     return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11550   else if (TREE_CODE (stripped_decl) == SSA_NAME)
11551     {
11552       tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11553       if (!name)
11554 	return NULL;
11555       return IDENTIFIER_POINTER (name);
11556     }
11557   else
11558     {
11559       switch (TREE_CODE (stripped_decl))
11560 	{
11561 	case ADDR_EXPR:
11562 	  return get_name (TREE_OPERAND (stripped_decl, 0));
11563 	default:
11564 	  return NULL;
11565 	}
11566     }
11567 }
11568 
11569 /* Return true if TYPE has a variable argument list.  */
11570 
11571 bool
stdarg_p(const_tree fntype)11572 stdarg_p (const_tree fntype)
11573 {
11574   function_args_iterator args_iter;
11575   tree n = NULL_TREE, t;
11576 
11577   if (!fntype)
11578     return false;
11579 
11580   FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11581     {
11582       n = t;
11583     }
11584 
11585   return n != NULL_TREE && n != void_type_node;
11586 }
11587 
11588 /* Return true if TYPE has a prototype.  */
11589 
11590 bool
prototype_p(const_tree fntype)11591 prototype_p (const_tree fntype)
11592 {
11593   tree t;
11594 
11595   gcc_assert (fntype != NULL_TREE);
11596 
11597   t = TYPE_ARG_TYPES (fntype);
11598   return (t != NULL_TREE);
11599 }
11600 
11601 /* If BLOCK is inlined from an __attribute__((__artificial__))
11602    routine, return pointer to location from where it has been
11603    called.  */
11604 location_t *
block_nonartificial_location(tree block)11605 block_nonartificial_location (tree block)
11606 {
11607   location_t *ret = NULL;
11608 
11609   while (block && TREE_CODE (block) == BLOCK
11610 	 && BLOCK_ABSTRACT_ORIGIN (block))
11611     {
11612       tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11613       if (TREE_CODE (ao) == FUNCTION_DECL)
11614 	{
11615 	  /* If AO is an artificial inline, point RET to the
11616 	     call site locus at which it has been inlined and continue
11617 	     the loop, in case AO's caller is also an artificial
11618 	     inline.  */
11619 	  if (DECL_DECLARED_INLINE_P (ao)
11620 	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11621 	    ret = &BLOCK_SOURCE_LOCATION (block);
11622 	  else
11623 	    break;
11624 	}
11625       else if (TREE_CODE (ao) != BLOCK)
11626 	break;
11627 
11628       block = BLOCK_SUPERCONTEXT (block);
11629     }
11630   return ret;
11631 }
11632 
11633 
11634 /* If EXP is inlined from an __attribute__((__artificial__))
11635    function, return the location of the original call expression.  */
11636 
11637 location_t
tree_nonartificial_location(tree exp)11638 tree_nonartificial_location (tree exp)
11639 {
11640   location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11641 
11642   if (loc)
11643     return *loc;
11644   else
11645     return EXPR_LOCATION (exp);
11646 }
11647 
11648 /* Return the location into which EXP has been inlined.  Analogous
11649    to tree_nonartificial_location() above but not limited to artificial
11650    functions declared inline.  If SYSTEM_HEADER is true, return
11651    the macro expansion point of the location if it's in a system header */
11652 
11653 location_t
tree_inlined_location(tree exp,bool system_header)11654 tree_inlined_location (tree exp, bool system_header /* = true */)
11655 {
11656   location_t loc = UNKNOWN_LOCATION;
11657 
11658   tree block = TREE_BLOCK (exp);
11659 
11660   while (block && TREE_CODE (block) == BLOCK
11661 	 && BLOCK_ABSTRACT_ORIGIN (block))
11662     {
11663       tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11664       if (TREE_CODE (ao) == FUNCTION_DECL)
11665 	loc = BLOCK_SOURCE_LOCATION (block);
11666       else if (TREE_CODE (ao) != BLOCK)
11667 	break;
11668 
11669       block = BLOCK_SUPERCONTEXT (block);
11670     }
11671 
11672   if (loc == UNKNOWN_LOCATION)
11673     {
11674       loc = EXPR_LOCATION (exp);
11675       if (system_header)
11676 	/* Only consider macro expansion when the block traversal failed
11677 	   to find a location.  Otherwise it's not relevant.  */
11678 	return expansion_point_location_if_in_system_header (loc);
11679     }
11680 
11681   return loc;
11682 }
11683 
11684 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11685    nodes.  */
11686 
11687 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code.  */
11688 
11689 hashval_t
hash(tree x)11690 cl_option_hasher::hash (tree x)
11691 {
11692   const_tree const t = x;
11693 
11694   if (TREE_CODE (t) == OPTIMIZATION_NODE)
11695     return cl_optimization_hash (TREE_OPTIMIZATION (t));
11696   else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11697     return cl_target_option_hash (TREE_TARGET_OPTION (t));
11698   else
11699     gcc_unreachable ();
11700 }
11701 
11702 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11703    TARGET_OPTION tree node) is the same as that given by *Y, which is the
11704    same.  */
11705 
11706 bool
equal(tree x,tree y)11707 cl_option_hasher::equal (tree x, tree y)
11708 {
11709   const_tree const xt = x;
11710   const_tree const yt = y;
11711 
11712   if (TREE_CODE (xt) != TREE_CODE (yt))
11713     return 0;
11714 
11715   if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11716     return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11717 				      TREE_OPTIMIZATION (yt));
11718   else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11719     return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11720 				TREE_TARGET_OPTION (yt));
11721   else
11722     gcc_unreachable ();
11723 }
11724 
11725 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET.  */
11726 
11727 tree
build_optimization_node(struct gcc_options * opts,struct gcc_options * opts_set)11728 build_optimization_node (struct gcc_options *opts,
11729 			 struct gcc_options *opts_set)
11730 {
11731   tree t;
11732 
11733   /* Use the cache of optimization nodes.  */
11734 
11735   cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11736 			opts, opts_set);
11737 
11738   tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11739   t = *slot;
11740   if (!t)
11741     {
11742       /* Insert this one into the hash table.  */
11743       t = cl_optimization_node;
11744       *slot = t;
11745 
11746       /* Make a new node for next time round.  */
11747       cl_optimization_node = make_node (OPTIMIZATION_NODE);
11748     }
11749 
11750   return t;
11751 }
11752 
11753 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET.  */
11754 
11755 tree
build_target_option_node(struct gcc_options * opts,struct gcc_options * opts_set)11756 build_target_option_node (struct gcc_options *opts,
11757 			  struct gcc_options *opts_set)
11758 {
11759   tree t;
11760 
11761   /* Use the cache of optimization nodes.  */
11762 
11763   cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11764 			 opts, opts_set);
11765 
11766   tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11767   t = *slot;
11768   if (!t)
11769     {
11770       /* Insert this one into the hash table.  */
11771       t = cl_target_option_node;
11772       *slot = t;
11773 
11774       /* Make a new node for next time round.  */
11775       cl_target_option_node = make_node (TARGET_OPTION_NODE);
11776     }
11777 
11778   return t;
11779 }
11780 
11781 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11782    so that they aren't saved during PCH writing.  */
11783 
11784 void
prepare_target_option_nodes_for_pch(void)11785 prepare_target_option_nodes_for_pch (void)
11786 {
11787   hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11788   for (; iter != cl_option_hash_table->end (); ++iter)
11789     if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11790       TREE_TARGET_GLOBALS (*iter) = NULL;
11791 }
11792 
11793 /* Determine the "ultimate origin" of a block.  */
11794 
11795 tree
block_ultimate_origin(const_tree block)11796 block_ultimate_origin (const_tree block)
11797 {
11798   tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11799 
11800   if (origin == NULL_TREE)
11801     return NULL_TREE;
11802   else
11803     {
11804       gcc_checking_assert ((DECL_P (origin)
11805 			    && DECL_ORIGIN (origin) == origin)
11806 			   || BLOCK_ORIGIN (origin) == origin);
11807       return origin;
11808     }
11809 }
11810 
11811 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11812    no instruction.  */
11813 
11814 bool
tree_nop_conversion_p(const_tree outer_type,const_tree inner_type)11815 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11816 {
11817   /* Do not strip casts into or out of differing address spaces.  */
11818   if (POINTER_TYPE_P (outer_type)
11819       && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11820     {
11821       if (!POINTER_TYPE_P (inner_type)
11822 	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11823 	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11824 	return false;
11825     }
11826   else if (POINTER_TYPE_P (inner_type)
11827 	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11828     {
11829       /* We already know that outer_type is not a pointer with
11830 	 a non-generic address space.  */
11831       return false;
11832     }
11833 
11834   /* Use precision rather then machine mode when we can, which gives
11835      the correct answer even for submode (bit-field) types.  */
11836   if ((INTEGRAL_TYPE_P (outer_type)
11837        || POINTER_TYPE_P (outer_type)
11838        || TREE_CODE (outer_type) == OFFSET_TYPE)
11839       && (INTEGRAL_TYPE_P (inner_type)
11840 	  || POINTER_TYPE_P (inner_type)
11841 	  || TREE_CODE (inner_type) == OFFSET_TYPE))
11842     return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11843 
11844   /* Otherwise fall back on comparing machine modes (e.g. for
11845      aggregate types, floats).  */
11846   return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11847 }
11848 
11849 /* Return true iff conversion in EXP generates no instruction.  Mark
11850    it inline so that we fully inline into the stripping functions even
11851    though we have two uses of this function.  */
11852 
11853 static inline bool
tree_nop_conversion(const_tree exp)11854 tree_nop_conversion (const_tree exp)
11855 {
11856   tree outer_type, inner_type;
11857 
11858   if (location_wrapper_p (exp))
11859     return true;
11860   if (!CONVERT_EXPR_P (exp)
11861       && TREE_CODE (exp) != NON_LVALUE_EXPR)
11862     return false;
11863 
11864   outer_type = TREE_TYPE (exp);
11865   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11866   if (!inner_type || inner_type == error_mark_node)
11867     return false;
11868 
11869   return tree_nop_conversion_p (outer_type, inner_type);
11870 }
11871 
11872 /* Return true iff conversion in EXP generates no instruction.  Don't
11873    consider conversions changing the signedness.  */
11874 
11875 static bool
tree_sign_nop_conversion(const_tree exp)11876 tree_sign_nop_conversion (const_tree exp)
11877 {
11878   tree outer_type, inner_type;
11879 
11880   if (!tree_nop_conversion (exp))
11881     return false;
11882 
11883   outer_type = TREE_TYPE (exp);
11884   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11885 
11886   return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11887 	  && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11888 }
11889 
11890 /* Strip conversions from EXP according to tree_nop_conversion and
11891    return the resulting expression.  */
11892 
11893 tree
tree_strip_nop_conversions(tree exp)11894 tree_strip_nop_conversions (tree exp)
11895 {
11896   while (tree_nop_conversion (exp))
11897     exp = TREE_OPERAND (exp, 0);
11898   return exp;
11899 }
11900 
11901 /* Strip conversions from EXP according to tree_sign_nop_conversion
11902    and return the resulting expression.  */
11903 
11904 tree
tree_strip_sign_nop_conversions(tree exp)11905 tree_strip_sign_nop_conversions (tree exp)
11906 {
11907   while (tree_sign_nop_conversion (exp))
11908     exp = TREE_OPERAND (exp, 0);
11909   return exp;
11910 }
11911 
11912 /* Avoid any floating point extensions from EXP.  */
11913 tree
strip_float_extensions(tree exp)11914 strip_float_extensions (tree exp)
11915 {
11916   tree sub, expt, subt;
11917 
11918   /*  For floating point constant look up the narrowest type that can hold
11919       it properly and handle it like (type)(narrowest_type)constant.
11920       This way we can optimize for instance a=a*2.0 where "a" is float
11921       but 2.0 is double constant.  */
11922   if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11923     {
11924       REAL_VALUE_TYPE orig;
11925       tree type = NULL;
11926 
11927       orig = TREE_REAL_CST (exp);
11928       if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11929 	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11930 	type = float_type_node;
11931       else if (TYPE_PRECISION (TREE_TYPE (exp))
11932 	       > TYPE_PRECISION (double_type_node)
11933 	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11934 	type = double_type_node;
11935       if (type)
11936 	return build_real_truncate (type, orig);
11937     }
11938 
11939   if (!CONVERT_EXPR_P (exp))
11940     return exp;
11941 
11942   sub = TREE_OPERAND (exp, 0);
11943   subt = TREE_TYPE (sub);
11944   expt = TREE_TYPE (exp);
11945 
11946   if (!FLOAT_TYPE_P (subt))
11947     return exp;
11948 
11949   if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11950     return exp;
11951 
11952   if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11953     return exp;
11954 
11955   return strip_float_extensions (sub);
11956 }
11957 
11958 /* Strip out all handled components that produce invariant
11959    offsets.  */
11960 
11961 const_tree
strip_invariant_refs(const_tree op)11962 strip_invariant_refs (const_tree op)
11963 {
11964   while (handled_component_p (op))
11965     {
11966       switch (TREE_CODE (op))
11967 	{
11968 	case ARRAY_REF:
11969 	case ARRAY_RANGE_REF:
11970 	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
11971 	      || TREE_OPERAND (op, 2) != NULL_TREE
11972 	      || TREE_OPERAND (op, 3) != NULL_TREE)
11973 	    return NULL;
11974 	  break;
11975 
11976 	case COMPONENT_REF:
11977 	  if (TREE_OPERAND (op, 2) != NULL_TREE)
11978 	    return NULL;
11979 	  break;
11980 
11981 	default:;
11982 	}
11983       op = TREE_OPERAND (op, 0);
11984     }
11985 
11986   return op;
11987 }
11988 
11989 /* Strip handled components with zero offset from OP.  */
11990 
11991 tree
strip_zero_offset_components(tree op)11992 strip_zero_offset_components (tree op)
11993 {
11994   while (TREE_CODE (op) == COMPONENT_REF
11995 	 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
11996 	 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
11997     op = TREE_OPERAND (op, 0);
11998   return op;
11999 }
12000 
12001 static GTY(()) tree gcc_eh_personality_decl;
12002 
12003 /* Return the GCC personality function decl.  */
12004 
12005 tree
lhd_gcc_personality(void)12006 lhd_gcc_personality (void)
12007 {
12008   if (!gcc_eh_personality_decl)
12009     gcc_eh_personality_decl = build_personality_function ("gcc");
12010   return gcc_eh_personality_decl;
12011 }
12012 
12013 /* TARGET is a call target of GIMPLE call statement
12014    (obtained by gimple_call_fn).  Return true if it is
12015    OBJ_TYPE_REF representing an virtual call of C++ method.
12016    (As opposed to OBJ_TYPE_REF representing objc calls
12017    through a cast where middle-end devirtualization machinery
12018    can't apply.)  FOR_DUMP_P is true when being called from
12019    the dump routines.  */
12020 
12021 bool
virtual_method_call_p(const_tree target,bool for_dump_p)12022 virtual_method_call_p (const_tree target, bool for_dump_p)
12023 {
12024   if (TREE_CODE (target) != OBJ_TYPE_REF)
12025     return false;
12026   tree t = TREE_TYPE (target);
12027   gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12028   t = TREE_TYPE (t);
12029   if (TREE_CODE (t) == FUNCTION_TYPE)
12030     return false;
12031   gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12032   /* If we do not have BINFO associated, it means that type was built
12033      without devirtualization enabled.  Do not consider this a virtual
12034      call.  */
12035   if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12036     return false;
12037   return true;
12038 }
12039 
12040 /* Lookup sub-BINFO of BINFO of TYPE at offset POS.  */
12041 
12042 static tree
lookup_binfo_at_offset(tree binfo,tree type,HOST_WIDE_INT pos)12043 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12044 {
12045   unsigned int i;
12046   tree base_binfo, b;
12047 
12048   for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12049     if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12050 	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
12051       return base_binfo;
12052     else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12053       return b;
12054   return NULL;
12055 }
12056 
12057 /* Try to find a base info of BINFO that would have its field decl at offset
12058    OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
12059    found, return, otherwise return NULL_TREE.  */
12060 
12061 tree
get_binfo_at_offset(tree binfo,poly_int64 offset,tree expected_type)12062 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12063 {
12064   tree type = BINFO_TYPE (binfo);
12065 
12066   while (true)
12067     {
12068       HOST_WIDE_INT pos, size;
12069       tree fld;
12070       int i;
12071 
12072       if (types_same_for_odr (type, expected_type))
12073 	  return binfo;
12074       if (maybe_lt (offset, 0))
12075 	return NULL_TREE;
12076 
12077       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12078 	{
12079 	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12080 	    continue;
12081 
12082 	  pos = int_bit_position (fld);
12083 	  size = tree_to_uhwi (DECL_SIZE (fld));
12084 	  if (known_in_range_p (offset, pos, size))
12085 	    break;
12086 	}
12087       if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12088 	return NULL_TREE;
12089 
12090       /* Offset 0 indicates the primary base, whose vtable contents are
12091 	 represented in the binfo for the derived class.  */
12092       else if (maybe_ne (offset, 0))
12093 	{
12094 	  tree found_binfo = NULL, base_binfo;
12095 	  /* Offsets in BINFO are in bytes relative to the whole structure
12096 	     while POS is in bits relative to the containing field.  */
12097 	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12098 			     / BITS_PER_UNIT);
12099 
12100 	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12101 	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12102 		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12103 	      {
12104 		found_binfo = base_binfo;
12105 		break;
12106 	      }
12107 	  if (found_binfo)
12108 	    binfo = found_binfo;
12109 	  else
12110 	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12111 					    binfo_offset);
12112 	 }
12113 
12114       type = TREE_TYPE (fld);
12115       offset -= pos;
12116     }
12117 }
12118 
12119 /* Returns true if X is a typedef decl.  */
12120 
12121 bool
is_typedef_decl(const_tree x)12122 is_typedef_decl (const_tree x)
12123 {
12124   return (x && TREE_CODE (x) == TYPE_DECL
12125           && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12126 }
12127 
12128 /* Returns true iff TYPE is a type variant created for a typedef. */
12129 
12130 bool
typedef_variant_p(const_tree type)12131 typedef_variant_p (const_tree type)
12132 {
12133   return is_typedef_decl (TYPE_NAME (type));
12134 }
12135 
12136 /* PR 84195: Replace control characters in "unescaped" with their
12137    escaped equivalents.  Allow newlines if -fmessage-length has
12138    been set to a non-zero value.  This is done here, rather than
12139    where the attribute is recorded as the message length can
12140    change between these two locations.  */
12141 
12142 void
escape(const char * unescaped)12143 escaped_string::escape (const char *unescaped)
12144 {
12145   char *escaped;
12146   size_t i, new_i, len;
12147 
12148   if (m_owned)
12149     free (m_str);
12150 
12151   m_str = const_cast<char *> (unescaped);
12152   m_owned = false;
12153 
12154   if (unescaped == NULL || *unescaped == 0)
12155     return;
12156 
12157   len = strlen (unescaped);
12158   escaped = NULL;
12159   new_i = 0;
12160 
12161   for (i = 0; i < len; i++)
12162     {
12163       char c = unescaped[i];
12164 
12165       if (!ISCNTRL (c))
12166 	{
12167 	  if (escaped)
12168 	    escaped[new_i++] = c;
12169 	  continue;
12170 	}
12171 
12172       if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12173 	{
12174 	  if (escaped == NULL)
12175 	    {
12176 	      /* We only allocate space for a new string if we
12177 		 actually encounter a control character that
12178 		 needs replacing.  */
12179 	      escaped = (char *) xmalloc (len * 2 + 1);
12180 	      strncpy (escaped, unescaped, i);
12181 	      new_i = i;
12182 	    }
12183 
12184 	  escaped[new_i++] = '\\';
12185 
12186 	  switch (c)
12187 	    {
12188 	    case '\a': escaped[new_i++] = 'a'; break;
12189 	    case '\b': escaped[new_i++] = 'b'; break;
12190 	    case '\f': escaped[new_i++] = 'f'; break;
12191 	    case '\n': escaped[new_i++] = 'n'; break;
12192 	    case '\r': escaped[new_i++] = 'r'; break;
12193 	    case '\t': escaped[new_i++] = 't'; break;
12194 	    case '\v': escaped[new_i++] = 'v'; break;
12195 	    default:   escaped[new_i++] = '?'; break;
12196 	    }
12197 	}
12198       else if (escaped)
12199 	escaped[new_i++] = c;
12200     }
12201 
12202   if (escaped)
12203     {
12204       escaped[new_i] = 0;
12205       m_str = escaped;
12206       m_owned = true;
12207     }
12208 }
12209 
12210 /* Warn about a use of an identifier which was marked deprecated.  Returns
12211    whether a warning was given.  */
12212 
12213 bool
warn_deprecated_use(tree node,tree attr)12214 warn_deprecated_use (tree node, tree attr)
12215 {
12216   escaped_string msg;
12217 
12218   if (node == 0 || !warn_deprecated_decl)
12219     return false;
12220 
12221   if (!attr)
12222     {
12223       if (DECL_P (node))
12224 	attr = DECL_ATTRIBUTES (node);
12225       else if (TYPE_P (node))
12226 	{
12227 	  tree decl = TYPE_STUB_DECL (node);
12228 	  if (decl)
12229 	    attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12230 	  else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12231 		   != NULL_TREE)
12232 	    {
12233 	      node = TREE_TYPE (decl);
12234 	      attr = TYPE_ATTRIBUTES (node);
12235 	    }
12236 	}
12237     }
12238 
12239   if (attr)
12240     attr = lookup_attribute ("deprecated", attr);
12241 
12242   if (attr)
12243     msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12244 
12245   bool w = false;
12246   if (DECL_P (node))
12247     {
12248       auto_diagnostic_group d;
12249       if (msg)
12250 	w = warning (OPT_Wdeprecated_declarations,
12251 		     "%qD is deprecated: %s", node, (const char *) msg);
12252       else
12253 	w = warning (OPT_Wdeprecated_declarations,
12254 		     "%qD is deprecated", node);
12255       if (w)
12256 	inform (DECL_SOURCE_LOCATION (node), "declared here");
12257     }
12258   else if (TYPE_P (node))
12259     {
12260       tree what = NULL_TREE;
12261       tree decl = TYPE_STUB_DECL (node);
12262 
12263       if (TYPE_NAME (node))
12264 	{
12265 	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12266 	    what = TYPE_NAME (node);
12267 	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12268 		   && DECL_NAME (TYPE_NAME (node)))
12269 	    what = DECL_NAME (TYPE_NAME (node));
12270 	}
12271 
12272       auto_diagnostic_group d;
12273       if (what)
12274 	{
12275 	  if (msg)
12276 	    w = warning (OPT_Wdeprecated_declarations,
12277 			 "%qE is deprecated: %s", what, (const char *) msg);
12278 	  else
12279 	    w = warning (OPT_Wdeprecated_declarations,
12280 			 "%qE is deprecated", what);
12281 	}
12282       else
12283 	{
12284 	  if (msg)
12285 	    w = warning (OPT_Wdeprecated_declarations,
12286 			 "type is deprecated: %s", (const char *) msg);
12287 	  else
12288 	    w = warning (OPT_Wdeprecated_declarations,
12289 			 "type is deprecated");
12290 	}
12291 
12292       if (w && decl)
12293 	inform (DECL_SOURCE_LOCATION (decl), "declared here");
12294     }
12295 
12296   return w;
12297 }
12298 
12299 /* Error out with an identifier which was marked 'unavailable'. */
12300 void
error_unavailable_use(tree node,tree attr)12301 error_unavailable_use (tree node, tree attr)
12302 {
12303   escaped_string msg;
12304 
12305   if (node == 0)
12306     return;
12307 
12308   if (!attr)
12309     {
12310       if (DECL_P (node))
12311 	attr = DECL_ATTRIBUTES (node);
12312       else if (TYPE_P (node))
12313 	{
12314 	  tree decl = TYPE_STUB_DECL (node);
12315 	  if (decl)
12316 	    attr = lookup_attribute ("unavailable",
12317 				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12318 	}
12319     }
12320 
12321   if (attr)
12322     attr = lookup_attribute ("unavailable", attr);
12323 
12324   if (attr)
12325     msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12326 
12327   if (DECL_P (node))
12328     {
12329       auto_diagnostic_group d;
12330       if (msg)
12331 	error ("%qD is unavailable: %s", node, (const char *) msg);
12332       else
12333 	error ("%qD is unavailable", node);
12334       inform (DECL_SOURCE_LOCATION (node), "declared here");
12335     }
12336   else if (TYPE_P (node))
12337     {
12338       tree what = NULL_TREE;
12339       tree decl = TYPE_STUB_DECL (node);
12340 
12341       if (TYPE_NAME (node))
12342 	{
12343 	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12344 	    what = TYPE_NAME (node);
12345 	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12346 		   && DECL_NAME (TYPE_NAME (node)))
12347 	    what = DECL_NAME (TYPE_NAME (node));
12348 	}
12349 
12350       auto_diagnostic_group d;
12351       if (what)
12352 	{
12353 	  if (msg)
12354 	    error ("%qE is unavailable: %s", what, (const char *) msg);
12355 	  else
12356 	    error ("%qE is unavailable", what);
12357 	}
12358       else
12359 	{
12360 	  if (msg)
12361 	    error ("type is unavailable: %s", (const char *) msg);
12362 	  else
12363 	    error ("type is unavailable");
12364 	}
12365 
12366       if (decl)
12367 	inform (DECL_SOURCE_LOCATION (decl), "declared here");
12368     }
12369 }
12370 
12371 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12372    somewhere in it.  */
12373 
12374 bool
contains_bitfld_component_ref_p(const_tree ref)12375 contains_bitfld_component_ref_p (const_tree ref)
12376 {
12377   while (handled_component_p (ref))
12378     {
12379       if (TREE_CODE (ref) == COMPONENT_REF
12380           && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12381         return true;
12382       ref = TREE_OPERAND (ref, 0);
12383     }
12384 
12385   return false;
12386 }
12387 
12388 /* Try to determine whether a TRY_CATCH expression can fall through.
12389    This is a subroutine of block_may_fallthru.  */
12390 
12391 static bool
try_catch_may_fallthru(const_tree stmt)12392 try_catch_may_fallthru (const_tree stmt)
12393 {
12394   tree_stmt_iterator i;
12395 
12396   /* If the TRY block can fall through, the whole TRY_CATCH can
12397      fall through.  */
12398   if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12399     return true;
12400 
12401   i = tsi_start (TREE_OPERAND (stmt, 1));
12402   switch (TREE_CODE (tsi_stmt (i)))
12403     {
12404     case CATCH_EXPR:
12405       /* We expect to see a sequence of CATCH_EXPR trees, each with a
12406 	 catch expression and a body.  The whole TRY_CATCH may fall
12407 	 through iff any of the catch bodies falls through.  */
12408       for (; !tsi_end_p (i); tsi_next (&i))
12409 	{
12410 	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12411 	    return true;
12412 	}
12413       return false;
12414 
12415     case EH_FILTER_EXPR:
12416       /* The exception filter expression only matters if there is an
12417 	 exception.  If the exception does not match EH_FILTER_TYPES,
12418 	 we will execute EH_FILTER_FAILURE, and we will fall through
12419 	 if that falls through.  If the exception does match
12420 	 EH_FILTER_TYPES, the stack unwinder will continue up the
12421 	 stack, so we will not fall through.  We don't know whether we
12422 	 will throw an exception which matches EH_FILTER_TYPES or not,
12423 	 so we just ignore EH_FILTER_TYPES and assume that we might
12424 	 throw an exception which doesn't match.  */
12425       return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12426 
12427     default:
12428       /* This case represents statements to be executed when an
12429 	 exception occurs.  Those statements are implicitly followed
12430 	 by a RESX statement to resume execution after the exception.
12431 	 So in this case the TRY_CATCH never falls through.  */
12432       return false;
12433     }
12434 }
12435 
12436 /* Try to determine if we can fall out of the bottom of BLOCK.  This guess
12437    need not be 100% accurate; simply be conservative and return true if we
12438    don't know.  This is used only to avoid stupidly generating extra code.
12439    If we're wrong, we'll just delete the extra code later.  */
12440 
12441 bool
block_may_fallthru(const_tree block)12442 block_may_fallthru (const_tree block)
12443 {
12444   /* This CONST_CAST is okay because expr_last returns its argument
12445      unmodified and we assign it to a const_tree.  */
12446   const_tree stmt = expr_last (CONST_CAST_TREE (block));
12447 
12448   switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12449     {
12450     case GOTO_EXPR:
12451     case RETURN_EXPR:
12452       /* Easy cases.  If the last statement of the block implies
12453 	 control transfer, then we can't fall through.  */
12454       return false;
12455 
12456     case SWITCH_EXPR:
12457       /* If there is a default: label or case labels cover all possible
12458 	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12459 	 to some case label in all cases and all we care is whether the
12460 	 SWITCH_BODY falls through.  */
12461       if (SWITCH_ALL_CASES_P (stmt))
12462 	return block_may_fallthru (SWITCH_BODY (stmt));
12463       return true;
12464 
12465     case COND_EXPR:
12466       if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12467 	return true;
12468       return block_may_fallthru (COND_EXPR_ELSE (stmt));
12469 
12470     case BIND_EXPR:
12471       return block_may_fallthru (BIND_EXPR_BODY (stmt));
12472 
12473     case TRY_CATCH_EXPR:
12474       return try_catch_may_fallthru (stmt);
12475 
12476     case TRY_FINALLY_EXPR:
12477       /* The finally clause is always executed after the try clause,
12478 	 so if it does not fall through, then the try-finally will not
12479 	 fall through.  Otherwise, if the try clause does not fall
12480 	 through, then when the finally clause falls through it will
12481 	 resume execution wherever the try clause was going.  So the
12482 	 whole try-finally will only fall through if both the try
12483 	 clause and the finally clause fall through.  */
12484       return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12485 	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12486 
12487     case EH_ELSE_EXPR:
12488       return block_may_fallthru (TREE_OPERAND (stmt, 0));
12489 
12490     case MODIFY_EXPR:
12491       if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12492 	stmt = TREE_OPERAND (stmt, 1);
12493       else
12494 	return true;
12495       /* FALLTHRU */
12496 
12497     case CALL_EXPR:
12498       /* Functions that do not return do not fall through.  */
12499       return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12500 
12501     case CLEANUP_POINT_EXPR:
12502       return block_may_fallthru (TREE_OPERAND (stmt, 0));
12503 
12504     case TARGET_EXPR:
12505       return block_may_fallthru (TREE_OPERAND (stmt, 1));
12506 
12507     case ERROR_MARK:
12508       return true;
12509 
12510     default:
12511       return lang_hooks.block_may_fallthru (stmt);
12512     }
12513 }
12514 
12515 /* True if we are using EH to handle cleanups.  */
12516 static bool using_eh_for_cleanups_flag = false;
12517 
12518 /* This routine is called from front ends to indicate eh should be used for
12519    cleanups.  */
12520 void
using_eh_for_cleanups(void)12521 using_eh_for_cleanups (void)
12522 {
12523   using_eh_for_cleanups_flag = true;
12524 }
12525 
12526 /* Query whether EH is used for cleanups.  */
12527 bool
using_eh_for_cleanups_p(void)12528 using_eh_for_cleanups_p (void)
12529 {
12530   return using_eh_for_cleanups_flag;
12531 }
12532 
12533 /* Wrapper for tree_code_name to ensure that tree code is valid */
12534 const char *
get_tree_code_name(enum tree_code code)12535 get_tree_code_name (enum tree_code code)
12536 {
12537   const char *invalid = "<invalid tree code>";
12538 
12539   /* The tree_code enum promotes to signed, but we could be getting
12540      invalid values, so force an unsigned comparison.  */
12541   if (unsigned (code) >= MAX_TREE_CODES)
12542     {
12543       if ((unsigned)code == 0xa5a5)
12544 	return "ggc_freed";
12545       return invalid;
12546     }
12547 
12548   return tree_code_name[code];
12549 }
12550 
12551 /* Drops the TREE_OVERFLOW flag from T.  */
12552 
12553 tree
drop_tree_overflow(tree t)12554 drop_tree_overflow (tree t)
12555 {
12556   gcc_checking_assert (TREE_OVERFLOW (t));
12557 
12558   /* For tree codes with a sharing machinery re-build the result.  */
12559   if (poly_int_tree_p (t))
12560     return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12561 
12562   /* For VECTOR_CST, remove the overflow bits from the encoded elements
12563      and canonicalize the result.  */
12564   if (TREE_CODE (t) == VECTOR_CST)
12565     {
12566       tree_vector_builder builder;
12567       builder.new_unary_operation (TREE_TYPE (t), t, true);
12568       unsigned int count = builder.encoded_nelts ();
12569       for (unsigned int i = 0; i < count; ++i)
12570 	{
12571 	  tree elt = VECTOR_CST_ELT (t, i);
12572 	  if (TREE_OVERFLOW (elt))
12573 	    elt = drop_tree_overflow (elt);
12574 	  builder.quick_push (elt);
12575 	}
12576       return builder.build ();
12577     }
12578 
12579   /* Otherwise, as all tcc_constants are possibly shared, copy the node
12580      and drop the flag.  */
12581   t = copy_node (t);
12582   TREE_OVERFLOW (t) = 0;
12583 
12584   /* For constants that contain nested constants, drop the flag
12585      from those as well.  */
12586   if (TREE_CODE (t) == COMPLEX_CST)
12587     {
12588       if (TREE_OVERFLOW (TREE_REALPART (t)))
12589 	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12590       if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12591 	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12592     }
12593 
12594   return t;
12595 }
12596 
12597 /* Given a memory reference expression T, return its base address.
12598    The base address of a memory reference expression is the main
12599    object being referenced.  For instance, the base address for
12600    'array[i].fld[j]' is 'array'.  You can think of this as stripping
12601    away the offset part from a memory address.
12602 
12603    This function calls handled_component_p to strip away all the inner
12604    parts of the memory reference until it reaches the base object.  */
12605 
12606 tree
get_base_address(tree t)12607 get_base_address (tree t)
12608 {
12609   if (TREE_CODE (t) == WITH_SIZE_EXPR)
12610     t = TREE_OPERAND (t, 0);
12611   while (handled_component_p (t))
12612     t = TREE_OPERAND (t, 0);
12613 
12614   if ((TREE_CODE (t) == MEM_REF
12615        || TREE_CODE (t) == TARGET_MEM_REF)
12616       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12617     t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12618 
12619   return t;
12620 }
12621 
12622 /* Return a tree of sizetype representing the size, in bytes, of the element
12623    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
12624 
12625 tree
array_ref_element_size(tree exp)12626 array_ref_element_size (tree exp)
12627 {
12628   tree aligned_size = TREE_OPERAND (exp, 3);
12629   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12630   location_t loc = EXPR_LOCATION (exp);
12631 
12632   /* If a size was specified in the ARRAY_REF, it's the size measured
12633      in alignment units of the element type.  So multiply by that value.  */
12634   if (aligned_size)
12635     {
12636       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12637 	 sizetype from another type of the same width and signedness.  */
12638       if (TREE_TYPE (aligned_size) != sizetype)
12639 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12640       return size_binop_loc (loc, MULT_EXPR, aligned_size,
12641 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
12642     }
12643 
12644   /* Otherwise, take the size from that of the element type.  Substitute
12645      any PLACEHOLDER_EXPR that we have.  */
12646   else
12647     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12648 }
12649 
12650 /* Return a tree representing the lower bound of the array mentioned in
12651    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
12652 
12653 tree
array_ref_low_bound(tree exp)12654 array_ref_low_bound (tree exp)
12655 {
12656   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12657 
12658   /* If a lower bound is specified in EXP, use it.  */
12659   if (TREE_OPERAND (exp, 2))
12660     return TREE_OPERAND (exp, 2);
12661 
12662   /* Otherwise, if there is a domain type and it has a lower bound, use it,
12663      substituting for a PLACEHOLDER_EXPR as needed.  */
12664   if (domain_type && TYPE_MIN_VALUE (domain_type))
12665     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12666 
12667   /* Otherwise, return a zero of the appropriate type.  */
12668   tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12669   return (idxtype == error_mark_node
12670 	  ? integer_zero_node : build_int_cst (idxtype, 0));
12671 }
12672 
12673 /* Return a tree representing the upper bound of the array mentioned in
12674    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
12675 
12676 tree
array_ref_up_bound(tree exp)12677 array_ref_up_bound (tree exp)
12678 {
12679   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12680 
12681   /* If there is a domain type and it has an upper bound, use it, substituting
12682      for a PLACEHOLDER_EXPR as needed.  */
12683   if (domain_type && TYPE_MAX_VALUE (domain_type))
12684     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12685 
12686   /* Otherwise fail.  */
12687   return NULL_TREE;
12688 }
12689 
12690 /* Returns true if REF is an array reference, component reference,
12691    or memory reference to an array at the end of a structure.
12692    If this is the case, the array may be allocated larger
12693    than its upper bound implies.  */
12694 
12695 bool
array_at_struct_end_p(tree ref)12696 array_at_struct_end_p (tree ref)
12697 {
12698   tree atype;
12699 
12700   if (TREE_CODE (ref) == ARRAY_REF
12701       || TREE_CODE (ref) == ARRAY_RANGE_REF)
12702     {
12703       atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12704       ref = TREE_OPERAND (ref, 0);
12705     }
12706   else if (TREE_CODE (ref) == COMPONENT_REF
12707 	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12708     atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12709   else if (TREE_CODE (ref) == MEM_REF)
12710     {
12711       tree arg = TREE_OPERAND (ref, 0);
12712       if (TREE_CODE (arg) == ADDR_EXPR)
12713 	arg = TREE_OPERAND (arg, 0);
12714       tree argtype = TREE_TYPE (arg);
12715       if (TREE_CODE (argtype) == RECORD_TYPE)
12716 	{
12717 	  if (tree fld = last_field (argtype))
12718 	    {
12719 	      atype = TREE_TYPE (fld);
12720 	      if (TREE_CODE (atype) != ARRAY_TYPE)
12721 		return false;
12722 	      if (VAR_P (arg) && DECL_SIZE (fld))
12723 		return false;
12724 	    }
12725 	  else
12726 	    return false;
12727 	}
12728       else
12729 	return false;
12730     }
12731   else
12732     return false;
12733 
12734   if (TREE_CODE (ref) == STRING_CST)
12735     return false;
12736 
12737   tree ref_to_array = ref;
12738   while (handled_component_p (ref))
12739     {
12740       /* If the reference chain contains a component reference to a
12741          non-union type and there follows another field the reference
12742 	 is not at the end of a structure.  */
12743       if (TREE_CODE (ref) == COMPONENT_REF)
12744 	{
12745 	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12746 	    {
12747 	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12748 	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12749 		nextf = DECL_CHAIN (nextf);
12750 	      if (nextf)
12751 		return false;
12752 	    }
12753 	}
12754       /* If we have a multi-dimensional array we do not consider
12755          a non-innermost dimension as flex array if the whole
12756 	 multi-dimensional array is at struct end.
12757 	 Same for an array of aggregates with a trailing array
12758 	 member.  */
12759       else if (TREE_CODE (ref) == ARRAY_REF)
12760 	return false;
12761       else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12762 	;
12763       /* If we view an underlying object as sth else then what we
12764          gathered up to now is what we have to rely on.  */
12765       else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12766 	break;
12767       else
12768 	gcc_unreachable ();
12769 
12770       ref = TREE_OPERAND (ref, 0);
12771     }
12772 
12773   /* The array now is at struct end.  Treat flexible arrays as
12774      always subject to extend, even into just padding constrained by
12775      an underlying decl.  */
12776   if (! TYPE_SIZE (atype)
12777       || ! TYPE_DOMAIN (atype)
12778       || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12779     return true;
12780 
12781   /* If the reference is based on a declared entity, the size of the array
12782      is constrained by its given domain.  (Do not trust commons PR/69368).  */
12783   ref = get_base_address (ref);
12784   if (ref
12785       && DECL_P (ref)
12786       && !(flag_unconstrained_commons
12787 	   && VAR_P (ref) && DECL_COMMON (ref))
12788       && DECL_SIZE_UNIT (ref)
12789       && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12790     {
12791       /* Check whether the array domain covers all of the available
12792          padding.  */
12793       poly_int64 offset;
12794       if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12795 	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12796           || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12797 	return true;
12798       if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12799 	return true;
12800 
12801       /* If at least one extra element fits it is a flexarray.  */
12802       if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12803 		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12804 		     + 2)
12805 		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12806 		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12807 	return true;
12808 
12809       return false;
12810     }
12811 
12812   return true;
12813 }
12814 
12815 /* Return a tree representing the offset, in bytes, of the field referenced
12816    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
12817 
12818 tree
component_ref_field_offset(tree exp)12819 component_ref_field_offset (tree exp)
12820 {
12821   tree aligned_offset = TREE_OPERAND (exp, 2);
12822   tree field = TREE_OPERAND (exp, 1);
12823   location_t loc = EXPR_LOCATION (exp);
12824 
12825   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12826      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
12827      value.  */
12828   if (aligned_offset)
12829     {
12830       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12831 	 sizetype from another type of the same width and signedness.  */
12832       if (TREE_TYPE (aligned_offset) != sizetype)
12833 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12834       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12835 			     size_int (DECL_OFFSET_ALIGN (field)
12836 				       / BITS_PER_UNIT));
12837     }
12838 
12839   /* Otherwise, take the offset from that of the field.  Substitute
12840      any PLACEHOLDER_EXPR that we have.  */
12841   else
12842     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12843 }
12844 
12845 /* Given the initializer INIT, return the initializer for the field
12846    DECL if it exists, otherwise null.  Used to obtain the initializer
12847    for a flexible array member and determine its size.  */
12848 
12849 static tree
get_initializer_for(tree init,tree decl)12850 get_initializer_for (tree init, tree decl)
12851 {
12852   STRIP_NOPS (init);
12853 
12854   tree fld, fld_init;
12855   unsigned HOST_WIDE_INT i;
12856   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12857     {
12858       if (decl == fld)
12859 	return fld_init;
12860 
12861       if (TREE_CODE (fld) == CONSTRUCTOR)
12862 	{
12863 	  fld_init = get_initializer_for (fld_init, decl);
12864 	  if (fld_init)
12865 	    return fld_init;
12866 	}
12867     }
12868 
12869   return NULL_TREE;
12870 }
12871 
12872 /* Determines the size of the member referenced by the COMPONENT_REF
12873    REF, using its initializer expression if necessary in order to
12874    determine the size of an initialized flexible array member.
12875    If non-null, set *ARK when REF refers to an interior zero-length
12876    array or a trailing one-element array.
12877    Returns the size as sizetype (which might be zero for an object
12878    with an uninitialized flexible array member) or null if the size
12879    cannot be determined.  */
12880 
12881 tree
component_ref_size(tree ref,special_array_member * sam)12882 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
12883 {
12884   gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
12885 
12886   special_array_member sambuf;
12887   if (!sam)
12888     sam = &sambuf;
12889   *sam = special_array_member::none;
12890 
12891   /* The object/argument referenced by the COMPONENT_REF and its type.  */
12892   tree arg = TREE_OPERAND (ref, 0);
12893   tree argtype = TREE_TYPE (arg);
12894   /* The referenced member.  */
12895   tree member = TREE_OPERAND (ref, 1);
12896 
12897   tree memsize = DECL_SIZE_UNIT (member);
12898   if (memsize)
12899     {
12900       tree memtype = TREE_TYPE (member);
12901       if (TREE_CODE (memtype) != ARRAY_TYPE)
12902 	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
12903 	   to the type of a class with a virtual base which doesn't
12904 	   reflect the size of the virtual's members (see pr97595).
12905 	   If that's the case fail for now and implement something
12906 	   more robust in the future.  */
12907 	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
12908 		? memsize : NULL_TREE);
12909 
12910       bool trailing = array_at_struct_end_p (ref);
12911       bool zero_length = integer_zerop (memsize);
12912       if (!trailing && !zero_length)
12913 	/* MEMBER is either an interior array or is an array with
12914 	   more than one element.  */
12915 	return memsize;
12916 
12917       if (zero_length)
12918 	{
12919 	  if (trailing)
12920 	    *sam = special_array_member::trail_0;
12921 	  else
12922 	    {
12923 	      *sam = special_array_member::int_0;
12924 	      memsize = NULL_TREE;
12925 	    }
12926 	}
12927 
12928       if (!zero_length)
12929 	if (tree dom = TYPE_DOMAIN (memtype))
12930 	  if (tree min = TYPE_MIN_VALUE (dom))
12931 	    if (tree max = TYPE_MAX_VALUE (dom))
12932 	      if (TREE_CODE (min) == INTEGER_CST
12933 		  && TREE_CODE (max) == INTEGER_CST)
12934 		{
12935 		  offset_int minidx = wi::to_offset (min);
12936 		  offset_int maxidx = wi::to_offset (max);
12937 		  offset_int neltsm1 = maxidx - minidx;
12938 		  if (neltsm1 > 0)
12939 		    /* MEMBER is an array with more than one element.  */
12940 		    return memsize;
12941 
12942 		  if (neltsm1 == 0)
12943 		    *sam = special_array_member::trail_1;
12944 		}
12945 
12946       /* For a reference to a zero- or one-element array member of a union
12947 	 use the size of the union instead of the size of the member.  */
12948       if (TREE_CODE (argtype) == UNION_TYPE)
12949 	memsize = TYPE_SIZE_UNIT (argtype);
12950     }
12951 
12952   /* MEMBER is either a bona fide flexible array member, or a zero-length
12953      array member, or an array of length one treated as such.  */
12954 
12955   /* If the reference is to a declared object and the member a true
12956      flexible array, try to determine its size from its initializer.  */
12957   poly_int64 baseoff = 0;
12958   tree base = get_addr_base_and_unit_offset (ref, &baseoff);
12959   if (!base || !VAR_P (base))
12960     {
12961       if (*sam != special_array_member::int_0)
12962 	return NULL_TREE;
12963 
12964       if (TREE_CODE (arg) != COMPONENT_REF)
12965 	return NULL_TREE;
12966 
12967       base = arg;
12968       while (TREE_CODE (base) == COMPONENT_REF)
12969 	base = TREE_OPERAND (base, 0);
12970       baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
12971     }
12972 
12973   /* BASE is the declared object of which MEMBER is either a member
12974      or that is cast to ARGTYPE (e.g., a char buffer used to store
12975      an ARGTYPE object).  */
12976   tree basetype = TREE_TYPE (base);
12977 
12978   /* Determine the base type of the referenced object.  If it's
12979      the same as ARGTYPE and MEMBER has a known size, return it.  */
12980   tree bt = basetype;
12981   if (*sam != special_array_member::int_0)
12982     while (TREE_CODE (bt) == ARRAY_TYPE)
12983       bt = TREE_TYPE (bt);
12984   bool typematch = useless_type_conversion_p (argtype, bt);
12985   if (memsize && typematch)
12986     return memsize;
12987 
12988   memsize = NULL_TREE;
12989 
12990   if (typematch)
12991     /* MEMBER is a true flexible array member.  Compute its size from
12992        the initializer of the BASE object if it has one.  */
12993     if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
12994       if (init != error_mark_node)
12995 	{
12996 	  init = get_initializer_for (init, member);
12997 	  if (init)
12998 	    {
12999 	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13000 	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
13001 		{
13002 		  /* Use the larger of the initializer size and the tail
13003 		     padding in the enclosing struct.  */
13004 		  poly_int64 rsz = tree_to_poly_int64 (refsize);
13005 		  rsz -= baseoff;
13006 		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
13007 		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13008 		}
13009 
13010 	      baseoff = 0;
13011 	    }
13012 	}
13013 
13014   if (!memsize)
13015     {
13016       if (typematch)
13017 	{
13018 	  if (DECL_P (base)
13019 	      && DECL_EXTERNAL (base)
13020 	      && bt == basetype
13021 	      && *sam != special_array_member::int_0)
13022 	    /* The size of a flexible array member of an extern struct
13023 	       with no initializer cannot be determined (it's defined
13024 	       in another translation unit and can have an initializer
13025 	       with an arbitrary number of elements).  */
13026 	    return NULL_TREE;
13027 
13028 	  /* Use the size of the base struct or, for interior zero-length
13029 	     arrays, the size of the enclosing type.  */
13030 	  memsize = TYPE_SIZE_UNIT (bt);
13031 	}
13032       else if (DECL_P (base))
13033 	/* Use the size of the BASE object (possibly an array of some
13034 	   other type such as char used to store the struct).  */
13035 	memsize = DECL_SIZE_UNIT (base);
13036       else
13037 	return NULL_TREE;
13038     }
13039 
13040   /* If the flexible array member has a known size use the greater
13041      of it and the tail padding in the enclosing struct.
13042      Otherwise, when the size of the flexible array member is unknown
13043      and the referenced object is not a struct, use the size of its
13044      type when known.  This detects sizes of array buffers when cast
13045      to struct types with flexible array members.  */
13046   if (memsize)
13047     {
13048       if (!tree_fits_poly_int64_p (memsize))
13049 	return NULL_TREE;
13050       poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13051       if (known_lt (baseoff, memsz64))
13052 	{
13053 	  memsz64 -= baseoff;
13054 	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13055 	}
13056       return size_zero_node;
13057     }
13058 
13059   /* Return "don't know" for an external non-array object since its
13060      flexible array member can be initialized to have any number of
13061      elements.  Otherwise, return zero because the flexible array
13062      member has no elements.  */
13063   return (DECL_P (base)
13064 	  && DECL_EXTERNAL (base)
13065 	  && (!typematch
13066 	      || TREE_CODE (basetype) != ARRAY_TYPE)
13067 	  ? NULL_TREE : size_zero_node);
13068 }
13069 
13070 /* Return the machine mode of T.  For vectors, returns the mode of the
13071    inner type.  The main use case is to feed the result to HONOR_NANS,
13072    avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */
13073 
13074 machine_mode
element_mode(const_tree t)13075 element_mode (const_tree t)
13076 {
13077   if (!TYPE_P (t))
13078     t = TREE_TYPE (t);
13079   if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13080     t = TREE_TYPE (t);
13081   return TYPE_MODE (t);
13082 }
13083 
13084 /* Vector types need to re-check the target flags each time we report
13085    the machine mode.  We need to do this because attribute target can
13086    change the result of vector_mode_supported_p and have_regs_of_mode
13087    on a per-function basis.  Thus the TYPE_MODE of a VECTOR_TYPE can
13088    change on a per-function basis.  */
13089 /* ??? Possibly a better solution is to run through all the types
13090    referenced by a function and re-compute the TYPE_MODE once, rather
13091    than make the TYPE_MODE macro call a function.  */
13092 
13093 machine_mode
vector_type_mode(const_tree t)13094 vector_type_mode (const_tree t)
13095 {
13096   machine_mode mode;
13097 
13098   gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13099 
13100   mode = t->type_common.mode;
13101   if (VECTOR_MODE_P (mode)
13102       && (!targetm.vector_mode_supported_p (mode)
13103 	  || !have_regs_of_mode[mode]))
13104     {
13105       scalar_int_mode innermode;
13106 
13107       /* For integers, try mapping it to a same-sized scalar mode.  */
13108       if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13109 	{
13110 	  poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13111 			     * GET_MODE_BITSIZE (innermode));
13112 	  scalar_int_mode mode;
13113 	  if (int_mode_for_size (size, 0).exists (&mode)
13114 	      && have_regs_of_mode[mode])
13115 	    return mode;
13116 	}
13117 
13118       return BLKmode;
13119     }
13120 
13121   return mode;
13122 }
13123 
13124 /* Return the size in bits of each element of vector type TYPE.  */
13125 
13126 unsigned int
vector_element_bits(const_tree type)13127 vector_element_bits (const_tree type)
13128 {
13129   gcc_checking_assert (VECTOR_TYPE_P (type));
13130   if (VECTOR_BOOLEAN_TYPE_P (type))
13131     return TYPE_PRECISION (TREE_TYPE (type));
13132   return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13133 }
13134 
13135 /* Calculate the size in bits of each element of vector type TYPE
13136    and return the result as a tree of type bitsizetype.  */
13137 
13138 tree
vector_element_bits_tree(const_tree type)13139 vector_element_bits_tree (const_tree type)
13140 {
13141   gcc_checking_assert (VECTOR_TYPE_P (type));
13142   if (VECTOR_BOOLEAN_TYPE_P (type))
13143     return bitsize_int (vector_element_bits (type));
13144   return TYPE_SIZE (TREE_TYPE (type));
13145 }
13146 
13147 /* Verify that basic properties of T match TV and thus T can be a variant of
13148    TV.  TV should be the more specified variant (i.e. the main variant).  */
13149 
13150 static bool
verify_type_variant(const_tree t,tree tv)13151 verify_type_variant (const_tree t, tree tv)
13152 {
13153   /* Type variant can differ by:
13154 
13155      - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13156                    ENCODE_QUAL_ADDR_SPACE.
13157      - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13158        in this case some values may not be set in the variant types
13159        (see TYPE_COMPLETE_P checks).
13160      - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
13161      - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
13162      - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13163      - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13164      - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13165        this is necessary to make it possible to merge types form different TUs
13166      - arrays, pointers and references may have TREE_TYPE that is a variant
13167        of TREE_TYPE of their main variants.
13168      - aggregates may have new TYPE_FIELDS list that list variants of
13169        the main variant TYPE_FIELDS.
13170      - vector types may differ by TYPE_VECTOR_OPAQUE
13171    */
13172 
13173   /* Convenience macro for matching individual fields.  */
13174 #define verify_variant_match(flag)					    \
13175   do {									    \
13176     if (flag (tv) != flag (t))						    \
13177       {									    \
13178 	error ("type variant differs by %s", #flag);			    \
13179 	debug_tree (tv);						    \
13180 	return false;							    \
13181       }									    \
13182   } while (false)
13183 
13184   /* tree_base checks.  */
13185 
13186   verify_variant_match (TREE_CODE);
13187   /* FIXME: Ada builds non-artificial variants of artificial types.  */
13188 #if 0
13189   if (TYPE_ARTIFICIAL (tv))
13190     verify_variant_match (TYPE_ARTIFICIAL);
13191 #endif
13192   if (POINTER_TYPE_P (tv))
13193     verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13194   /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build.  */
13195   verify_variant_match (TYPE_UNSIGNED);
13196   verify_variant_match (TYPE_PACKED);
13197   if (TREE_CODE (t) == REFERENCE_TYPE)
13198     verify_variant_match (TYPE_REF_IS_RVALUE);
13199   if (AGGREGATE_TYPE_P (t))
13200     verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13201   else
13202     verify_variant_match (TYPE_SATURATING);
13203   /* FIXME: This check trigger during libstdc++ build.  */
13204 #if 0
13205   if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13206     verify_variant_match (TYPE_FINAL_P);
13207 #endif
13208 
13209   /* tree_type_common checks.  */
13210 
13211   if (COMPLETE_TYPE_P (t))
13212     {
13213       verify_variant_match (TYPE_MODE);
13214       if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13215 	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13216 	verify_variant_match (TYPE_SIZE);
13217       if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13218 	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13219 	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13220 	{
13221 	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13222 					TYPE_SIZE_UNIT (tv), 0));
13223 	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
13224 	  debug_tree (tv);
13225 	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13226 	  debug_tree (TYPE_SIZE_UNIT (tv));
13227 	  error ("type%'s %<TYPE_SIZE_UNIT%>");
13228 	  debug_tree (TYPE_SIZE_UNIT (t));
13229 	  return false;
13230 	}
13231       verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13232     }
13233   verify_variant_match (TYPE_PRECISION);
13234   if (RECORD_OR_UNION_TYPE_P (t))
13235     verify_variant_match (TYPE_TRANSPARENT_AGGR);
13236   else if (TREE_CODE (t) == ARRAY_TYPE)
13237     verify_variant_match (TYPE_NONALIASED_COMPONENT);
13238   /* During LTO we merge variant lists from diferent translation units
13239      that may differ BY TYPE_CONTEXT that in turn may point
13240      to TRANSLATION_UNIT_DECL.
13241      Ada also builds variants of types with different TYPE_CONTEXT.   */
13242 #if 0
13243   if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13244     verify_variant_match (TYPE_CONTEXT);
13245 #endif
13246   if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13247     verify_variant_match (TYPE_STRING_FLAG);
13248   if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13249     verify_variant_match (TYPE_CXX_ODR_P);
13250   if (TYPE_ALIAS_SET_KNOWN_P (t))
13251     {
13252       error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13253       debug_tree (tv);
13254       return false;
13255     }
13256 
13257   /* tree_type_non_common checks.  */
13258 
13259   /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13260      and dangle the pointer from time to time.  */
13261   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13262       && (in_lto_p || !TYPE_VFIELD (tv)
13263 	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13264     {
13265       error ("type variant has different %<TYPE_VFIELD%>");
13266       debug_tree (tv);
13267       return false;
13268     }
13269   if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13270        || TREE_CODE (t) == INTEGER_TYPE
13271        || TREE_CODE (t) == BOOLEAN_TYPE
13272        || TREE_CODE (t) == REAL_TYPE
13273        || TREE_CODE (t) == FIXED_POINT_TYPE)
13274     {
13275       verify_variant_match (TYPE_MAX_VALUE);
13276       verify_variant_match (TYPE_MIN_VALUE);
13277     }
13278   if (TREE_CODE (t) == METHOD_TYPE)
13279     verify_variant_match (TYPE_METHOD_BASETYPE);
13280   if (TREE_CODE (t) == OFFSET_TYPE)
13281     verify_variant_match (TYPE_OFFSET_BASETYPE);
13282   if (TREE_CODE (t) == ARRAY_TYPE)
13283     verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13284   /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13285      or even type's main variant.  This is needed to make bootstrap pass
13286      and the bug seems new in GCC 5.
13287      C++ FE should be updated to make this consistent and we should check
13288      that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13289      is a match with main variant.
13290 
13291      Also disable the check for Java for now because of parser hack that builds
13292      first an dummy BINFO and then sometimes replace it by real BINFO in some
13293      of the copies.  */
13294   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13295       && TYPE_BINFO (t) != TYPE_BINFO (tv)
13296       /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13297 	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13298 	 at LTO time only.  */
13299       && (in_lto_p && odr_type_p (t)))
13300     {
13301       error ("type variant has different %<TYPE_BINFO%>");
13302       debug_tree (tv);
13303       error ("type variant%'s %<TYPE_BINFO%>");
13304       debug_tree (TYPE_BINFO (tv));
13305       error ("type%'s %<TYPE_BINFO%>");
13306       debug_tree (TYPE_BINFO (t));
13307       return false;
13308     }
13309 
13310   /* Check various uses of TYPE_VALUES_RAW.  */
13311   if (TREE_CODE (t) == ENUMERAL_TYPE
13312       && TYPE_VALUES (t))
13313     verify_variant_match (TYPE_VALUES);
13314   else if (TREE_CODE (t) == ARRAY_TYPE)
13315     verify_variant_match (TYPE_DOMAIN);
13316   /* Permit incomplete variants of complete type.  While FEs may complete
13317      all variants, this does not happen for C++ templates in all cases.  */
13318   else if (RECORD_OR_UNION_TYPE_P (t)
13319 	   && COMPLETE_TYPE_P (t)
13320 	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13321     {
13322       tree f1, f2;
13323 
13324       /* Fortran builds qualified variants as new records with items of
13325 	 qualified type. Verify that they looks same.  */
13326       for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13327 	   f1 && f2;
13328 	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13329 	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13330 	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13331 		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13332 		/* FIXME: gfc_nonrestricted_type builds all types as variants
13333 		   with exception of pointer types.  It deeply copies the type
13334 		   which means that we may end up with a variant type
13335 		   referring non-variant pointer.  We may change it to
13336 		   produce types as variants, too, like
13337 		   objc_get_protocol_qualified_type does.  */
13338 		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
13339 	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13340 	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13341 	  break;
13342       if (f1 || f2)
13343 	{
13344 	  error ("type variant has different %<TYPE_FIELDS%>");
13345 	  debug_tree (tv);
13346 	  error ("first mismatch is field");
13347 	  debug_tree (f1);
13348 	  error ("and field");
13349 	  debug_tree (f2);
13350           return false;
13351 	}
13352     }
13353   else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13354     verify_variant_match (TYPE_ARG_TYPES);
13355   /* For C++ the qualified variant of array type is really an array type
13356      of qualified TREE_TYPE.
13357      objc builds variants of pointer where pointer to type is a variant, too
13358      in objc_get_protocol_qualified_type.  */
13359   if (TREE_TYPE (t) != TREE_TYPE (tv)
13360       && ((TREE_CODE (t) != ARRAY_TYPE
13361 	   && !POINTER_TYPE_P (t))
13362 	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13363 	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13364     {
13365       error ("type variant has different %<TREE_TYPE%>");
13366       debug_tree (tv);
13367       error ("type variant%'s %<TREE_TYPE%>");
13368       debug_tree (TREE_TYPE (tv));
13369       error ("type%'s %<TREE_TYPE%>");
13370       debug_tree (TREE_TYPE (t));
13371       return false;
13372     }
13373   if (type_with_alias_set_p (t)
13374       && !gimple_canonical_types_compatible_p (t, tv, false))
13375     {
13376       error ("type is not compatible with its variant");
13377       debug_tree (tv);
13378       error ("type variant%'s %<TREE_TYPE%>");
13379       debug_tree (TREE_TYPE (tv));
13380       error ("type%'s %<TREE_TYPE%>");
13381       debug_tree (TREE_TYPE (t));
13382       return false;
13383     }
13384   return true;
13385 #undef verify_variant_match
13386 }
13387 
13388 
13389 /* The TYPE_CANONICAL merging machinery.  It should closely resemble
13390    the middle-end types_compatible_p function.  It needs to avoid
13391    claiming types are different for types that should be treated
13392    the same with respect to TBAA.  Canonical types are also used
13393    for IL consistency checks via the useless_type_conversion_p
13394    predicate which does not handle all type kinds itself but falls
13395    back to pointer-comparison of TYPE_CANONICAL for aggregates
13396    for example.  */
13397 
13398 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13399    type calculation because we need to allow inter-operability between signed
13400    and unsigned variants.  */
13401 
13402 bool
type_with_interoperable_signedness(const_tree type)13403 type_with_interoperable_signedness (const_tree type)
13404 {
13405   /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13406      signed char and unsigned char.  Similarly fortran FE builds
13407      C_SIZE_T as signed type, while C defines it unsigned.  */
13408 
13409   return tree_code_for_canonical_type_merging (TREE_CODE (type))
13410 	   == INTEGER_TYPE
13411          && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13412 	     || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13413 }
13414 
13415 /* Return true iff T1 and T2 are structurally identical for what
13416    TBAA is concerned.
13417    This function is used both by lto.cc canonical type merging and by the
13418    verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
13419    that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
13420    only for LTO because only in these cases TYPE_CANONICAL equivalence
13421    correspond to one defined by gimple_canonical_types_compatible_p.  */
13422 
13423 bool
gimple_canonical_types_compatible_p(const_tree t1,const_tree t2,bool trust_type_canonical)13424 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13425 				     bool trust_type_canonical)
13426 {
13427   /* Type variants should be same as the main variant.  When not doing sanity
13428      checking to verify this fact, go to main variants and save some work.  */
13429   if (trust_type_canonical)
13430     {
13431       t1 = TYPE_MAIN_VARIANT (t1);
13432       t2 = TYPE_MAIN_VARIANT (t2);
13433     }
13434 
13435   /* Check first for the obvious case of pointer identity.  */
13436   if (t1 == t2)
13437     return true;
13438 
13439   /* Check that we have two types to compare.  */
13440   if (t1 == NULL_TREE || t2 == NULL_TREE)
13441     return false;
13442 
13443   /* We consider complete types always compatible with incomplete type.
13444      This does not make sense for canonical type calculation and thus we
13445      need to ensure that we are never called on it.
13446 
13447      FIXME: For more correctness the function probably should have three modes
13448 	1) mode assuming that types are complete mathcing their structure
13449 	2) mode allowing incomplete types but producing equivalence classes
13450 	   and thus ignoring all info from complete types
13451 	3) mode allowing incomplete types to match complete but checking
13452 	   compatibility between complete types.
13453 
13454      1 and 2 can be used for canonical type calculation. 3 is the real
13455      definition of type compatibility that can be used i.e. for warnings during
13456      declaration merging.  */
13457 
13458   gcc_assert (!trust_type_canonical
13459 	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13460 
13461   /* If the types have been previously registered and found equal
13462      they still are.  */
13463 
13464   if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13465       && trust_type_canonical)
13466     {
13467       /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
13468 	 they are always NULL, but they are set to non-NULL for types
13469 	 constructed by build_pointer_type and variants.  In this case the
13470 	 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13471 	 all pointers are considered equal.  Be sure to not return false
13472 	 negatives.  */
13473       gcc_checking_assert (canonical_type_used_p (t1)
13474 			   && canonical_type_used_p (t2));
13475       return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13476     }
13477 
13478   /* For types where we do ODR based TBAA the canonical type is always
13479      set correctly, so we know that types are different if their
13480      canonical types does not match.  */
13481   if (trust_type_canonical
13482       && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13483 	  != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13484     return false;
13485 
13486   /* Can't be the same type if the types don't have the same code.  */
13487   enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13488   if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13489     return false;
13490 
13491   /* Qualifiers do not matter for canonical type comparison purposes.  */
13492 
13493   /* Void types and nullptr types are always the same.  */
13494   if (TREE_CODE (t1) == VOID_TYPE
13495       || TREE_CODE (t1) == NULLPTR_TYPE)
13496     return true;
13497 
13498   /* Can't be the same type if they have different mode.  */
13499   if (TYPE_MODE (t1) != TYPE_MODE (t2))
13500     return false;
13501 
13502   /* Non-aggregate types can be handled cheaply.  */
13503   if (INTEGRAL_TYPE_P (t1)
13504       || SCALAR_FLOAT_TYPE_P (t1)
13505       || FIXED_POINT_TYPE_P (t1)
13506       || TREE_CODE (t1) == VECTOR_TYPE
13507       || TREE_CODE (t1) == COMPLEX_TYPE
13508       || TREE_CODE (t1) == OFFSET_TYPE
13509       || POINTER_TYPE_P (t1))
13510     {
13511       /* Can't be the same type if they have different recision.  */
13512       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13513 	return false;
13514 
13515       /* In some cases the signed and unsigned types are required to be
13516 	 inter-operable.  */
13517       if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13518 	  && !type_with_interoperable_signedness (t1))
13519 	return false;
13520 
13521       /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13522 	 interoperable with "signed char".  Unless all frontends are revisited
13523 	 to agree on these types, we must ignore the flag completely.  */
13524 
13525       /* Fortran standard define C_PTR type that is compatible with every
13526  	 C pointer.  For this reason we need to glob all pointers into one.
13527 	 Still pointers in different address spaces are not compatible.  */
13528       if (POINTER_TYPE_P (t1))
13529 	{
13530 	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13531 	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13532 	    return false;
13533 	}
13534 
13535       /* Tail-recurse to components.  */
13536       if (TREE_CODE (t1) == VECTOR_TYPE
13537 	  || TREE_CODE (t1) == COMPLEX_TYPE)
13538 	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13539 						    TREE_TYPE (t2),
13540 						    trust_type_canonical);
13541 
13542       return true;
13543     }
13544 
13545   /* Do type-specific comparisons.  */
13546   switch (TREE_CODE (t1))
13547     {
13548     case ARRAY_TYPE:
13549       /* Array types are the same if the element types are the same and
13550 	 the number of elements are the same.  */
13551       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13552 						trust_type_canonical)
13553 	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13554 	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13555 	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13556 	return false;
13557       else
13558 	{
13559 	  tree i1 = TYPE_DOMAIN (t1);
13560 	  tree i2 = TYPE_DOMAIN (t2);
13561 
13562 	  /* For an incomplete external array, the type domain can be
13563  	     NULL_TREE.  Check this condition also.  */
13564 	  if (i1 == NULL_TREE && i2 == NULL_TREE)
13565 	    return true;
13566 	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
13567 	    return false;
13568 	  else
13569 	    {
13570 	      tree min1 = TYPE_MIN_VALUE (i1);
13571 	      tree min2 = TYPE_MIN_VALUE (i2);
13572 	      tree max1 = TYPE_MAX_VALUE (i1);
13573 	      tree max2 = TYPE_MAX_VALUE (i2);
13574 
13575 	      /* The minimum/maximum values have to be the same.  */
13576 	      if ((min1 == min2
13577 		   || (min1 && min2
13578 		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13579 			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13580 		           || operand_equal_p (min1, min2, 0))))
13581 		  && (max1 == max2
13582 		      || (max1 && max2
13583 			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13584 			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13585 			      || operand_equal_p (max1, max2, 0)))))
13586 		return true;
13587 	      else
13588 		return false;
13589 	    }
13590 	}
13591 
13592     case METHOD_TYPE:
13593     case FUNCTION_TYPE:
13594       /* Function types are the same if the return type and arguments types
13595 	 are the same.  */
13596       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13597 						trust_type_canonical))
13598 	return false;
13599 
13600       if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13601 	return true;
13602       else
13603 	{
13604 	  tree parms1, parms2;
13605 
13606 	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13607 	       parms1 && parms2;
13608 	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13609 	    {
13610 	      if (!gimple_canonical_types_compatible_p
13611 		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
13612 		      trust_type_canonical))
13613 		return false;
13614 	    }
13615 
13616 	  if (parms1 || parms2)
13617 	    return false;
13618 
13619 	  return true;
13620 	}
13621 
13622     case RECORD_TYPE:
13623     case UNION_TYPE:
13624     case QUAL_UNION_TYPE:
13625       {
13626 	tree f1, f2;
13627 
13628 	/* Don't try to compare variants of an incomplete type, before
13629 	   TYPE_FIELDS has been copied around.  */
13630 	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13631 	  return true;
13632 
13633 
13634 	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13635 	  return false;
13636 
13637 	/* For aggregate types, all the fields must be the same.  */
13638 	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13639 	     f1 || f2;
13640 	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13641 	  {
13642 	    /* Skip non-fields and zero-sized fields.  */
13643 	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
13644 			  || (DECL_SIZE (f1)
13645 			      && integer_zerop (DECL_SIZE (f1)))))
13646 	      f1 = TREE_CHAIN (f1);
13647 	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
13648 			  || (DECL_SIZE (f2)
13649 			      && integer_zerop (DECL_SIZE (f2)))))
13650 	      f2 = TREE_CHAIN (f2);
13651 	    if (!f1 || !f2)
13652 	      break;
13653 	    /* The fields must have the same name, offset and type.  */
13654 	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13655 		|| !gimple_compare_field_offset (f1, f2)
13656 		|| !gimple_canonical_types_compatible_p
13657 		      (TREE_TYPE (f1), TREE_TYPE (f2),
13658 		       trust_type_canonical))
13659 	      return false;
13660 	  }
13661 
13662 	/* If one aggregate has more fields than the other, they
13663 	   are not the same.  */
13664 	if (f1 || f2)
13665 	  return false;
13666 
13667 	return true;
13668       }
13669 
13670     default:
13671       /* Consider all types with language specific trees in them mutually
13672 	 compatible.  This is executed only from verify_type and false
13673          positives can be tolerated.  */
13674       gcc_assert (!in_lto_p);
13675       return true;
13676     }
13677 }
13678 
13679 /* Verify type T.  */
13680 
13681 void
verify_type(const_tree t)13682 verify_type (const_tree t)
13683 {
13684   bool error_found = false;
13685   tree mv = TYPE_MAIN_VARIANT (t);
13686   if (!mv)
13687     {
13688       error ("main variant is not defined");
13689       error_found = true;
13690     }
13691   else if (mv != TYPE_MAIN_VARIANT (mv))
13692     {
13693       error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13694       debug_tree (mv);
13695       error_found = true;
13696     }
13697   else if (t != mv && !verify_type_variant (t, mv))
13698     error_found = true;
13699 
13700   tree ct = TYPE_CANONICAL (t);
13701   if (!ct)
13702     ;
13703   else if (TYPE_CANONICAL (ct) != ct)
13704     {
13705       error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13706       debug_tree (ct);
13707       error_found = true;
13708     }
13709   /* Method and function types cannot be used to address memory and thus
13710      TYPE_CANONICAL really matters only for determining useless conversions.
13711 
13712      FIXME: C++ FE produce declarations of builtin functions that are not
13713      compatible with main variants.  */
13714   else if (TREE_CODE (t) == FUNCTION_TYPE)
13715     ;
13716   else if (t != ct
13717 	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13718 	      with variably sized arrays because their sizes possibly
13719 	      gimplified to different variables.  */
13720 	   && !variably_modified_type_p (ct, NULL)
13721 	   && !gimple_canonical_types_compatible_p (t, ct, false)
13722 	   && COMPLETE_TYPE_P (t))
13723     {
13724       error ("%<TYPE_CANONICAL%> is not compatible");
13725       debug_tree (ct);
13726       error_found = true;
13727     }
13728 
13729   if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13730       && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13731     {
13732       error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13733       debug_tree (ct);
13734       error_found = true;
13735     }
13736   if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13737    {
13738       error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13739       debug_tree (ct);
13740       debug_tree (TYPE_MAIN_VARIANT (ct));
13741       error_found = true;
13742    }
13743 
13744 
13745   /* Check various uses of TYPE_MIN_VALUE_RAW.  */
13746   if (RECORD_OR_UNION_TYPE_P (t))
13747     {
13748       /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13749 	 and danagle the pointer from time to time.  */
13750       if (TYPE_VFIELD (t)
13751 	  && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13752 	  && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13753 	{
13754 	  error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13755 	  debug_tree (TYPE_VFIELD (t));
13756 	  error_found = true;
13757 	}
13758     }
13759   else if (TREE_CODE (t) == POINTER_TYPE)
13760     {
13761       if (TYPE_NEXT_PTR_TO (t)
13762 	  && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13763 	{
13764 	  error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13765 	  debug_tree (TYPE_NEXT_PTR_TO (t));
13766 	  error_found = true;
13767 	}
13768     }
13769   else if (TREE_CODE (t) == REFERENCE_TYPE)
13770     {
13771       if (TYPE_NEXT_REF_TO (t)
13772 	  && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13773 	{
13774 	  error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13775 	  debug_tree (TYPE_NEXT_REF_TO (t));
13776 	  error_found = true;
13777 	}
13778     }
13779   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13780 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
13781     {
13782       /* FIXME: The following check should pass:
13783 	  useless_type_conversion_p (const_cast <tree> (t),
13784 				     TREE_TYPE (TYPE_MIN_VALUE (t))
13785 	 but does not for C sizetypes in LTO.  */
13786     }
13787 
13788   /* Check various uses of TYPE_MAXVAL_RAW.  */
13789   if (RECORD_OR_UNION_TYPE_P (t))
13790     {
13791       if (!TYPE_BINFO (t))
13792 	;
13793       else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13794 	{
13795 	  error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13796 	  debug_tree (TYPE_BINFO (t));
13797 	  error_found = true;
13798 	}
13799       else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13800 	{
13801 	  error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13802 	  debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13803 	  error_found = true;
13804 	}
13805     }
13806   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13807     {
13808       if (TYPE_METHOD_BASETYPE (t)
13809 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13810 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13811 	{
13812 	  error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13813 	  debug_tree (TYPE_METHOD_BASETYPE (t));
13814 	  error_found = true;
13815 	}
13816     }
13817   else if (TREE_CODE (t) == OFFSET_TYPE)
13818     {
13819       if (TYPE_OFFSET_BASETYPE (t)
13820 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13821 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13822 	{
13823 	  error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13824 	  debug_tree (TYPE_OFFSET_BASETYPE (t));
13825 	  error_found = true;
13826 	}
13827     }
13828   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13829 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
13830     {
13831       /* FIXME: The following check should pass:
13832 	  useless_type_conversion_p (const_cast <tree> (t),
13833 				     TREE_TYPE (TYPE_MAX_VALUE (t))
13834 	 but does not for C sizetypes in LTO.  */
13835     }
13836   else if (TREE_CODE (t) == ARRAY_TYPE)
13837     {
13838       if (TYPE_ARRAY_MAX_SIZE (t)
13839 	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13840         {
13841 	  error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13842 	  debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13843 	  error_found = true;
13844         }
13845     }
13846   else if (TYPE_MAX_VALUE_RAW (t))
13847     {
13848       error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13849       debug_tree (TYPE_MAX_VALUE_RAW (t));
13850       error_found = true;
13851     }
13852 
13853   if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13854     {
13855       error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13856       debug_tree (TYPE_LANG_SLOT_1 (t));
13857       error_found = true;
13858     }
13859 
13860   /* Check various uses of TYPE_VALUES_RAW.  */
13861   if (TREE_CODE (t) == ENUMERAL_TYPE)
13862     for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13863       {
13864 	tree value = TREE_VALUE (l);
13865 	tree name = TREE_PURPOSE (l);
13866 
13867 	/* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13868  	   CONST_DECL of ENUMERAL TYPE.  */
13869 	if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13870 	  {
13871 	    error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13872 	    debug_tree (value);
13873 	    debug_tree (name);
13874 	    error_found = true;
13875 	  }
13876 	if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13877 	    && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
13878 	    && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13879 	  {
13880 	    error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13881 		   "to the enum");
13882 	    debug_tree (value);
13883 	    debug_tree (name);
13884 	    error_found = true;
13885 	  }
13886 	if (TREE_CODE (name) != IDENTIFIER_NODE)
13887 	  {
13888 	    error ("enum value name is not %<IDENTIFIER_NODE%>");
13889 	    debug_tree (value);
13890 	    debug_tree (name);
13891 	    error_found = true;
13892 	  }
13893       }
13894   else if (TREE_CODE (t) == ARRAY_TYPE)
13895     {
13896       if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13897 	{
13898 	  error ("array %<TYPE_DOMAIN%> is not integer type");
13899 	  debug_tree (TYPE_DOMAIN (t));
13900 	  error_found = true;
13901 	}
13902     }
13903   else if (RECORD_OR_UNION_TYPE_P (t))
13904     {
13905       if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13906 	{
13907 	  error ("%<TYPE_FIELDS%> defined in incomplete type");
13908 	  error_found = true;
13909 	}
13910       for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13911 	{
13912 	  /* TODO: verify properties of decls.  */
13913 	  if (TREE_CODE (fld) == FIELD_DECL)
13914 	    ;
13915 	  else if (TREE_CODE (fld) == TYPE_DECL)
13916 	    ;
13917 	  else if (TREE_CODE (fld) == CONST_DECL)
13918 	    ;
13919 	  else if (VAR_P (fld))
13920 	    ;
13921 	  else if (TREE_CODE (fld) == TEMPLATE_DECL)
13922 	    ;
13923 	  else if (TREE_CODE (fld) == USING_DECL)
13924 	    ;
13925 	  else if (TREE_CODE (fld) == FUNCTION_DECL)
13926 	    ;
13927 	  else
13928 	    {
13929 	      error ("wrong tree in %<TYPE_FIELDS%> list");
13930 	      debug_tree (fld);
13931 	      error_found = true;
13932 	    }
13933 	}
13934     }
13935   else if (TREE_CODE (t) == INTEGER_TYPE
13936 	   || TREE_CODE (t) == BOOLEAN_TYPE
13937 	   || TREE_CODE (t) == OFFSET_TYPE
13938 	   || TREE_CODE (t) == REFERENCE_TYPE
13939 	   || TREE_CODE (t) == NULLPTR_TYPE
13940 	   || TREE_CODE (t) == POINTER_TYPE)
13941     {
13942       if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13943 	{
13944 	  error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13945 		 "is %p",
13946 		 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13947 	  error_found = true;
13948 	}
13949       else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13950 	{
13951 	  error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13952 	  debug_tree (TYPE_CACHED_VALUES (t));
13953 	  error_found = true;
13954 	}
13955       /* Verify just enough of cache to ensure that no one copied it to new type.
13956  	 All copying should go by copy_node that should clear it.  */
13957       else if (TYPE_CACHED_VALUES_P (t))
13958 	{
13959 	  int i;
13960 	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13961 	    if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13962 		&& TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13963 	      {
13964 		error ("wrong %<TYPE_CACHED_VALUES%> entry");
13965 		debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13966 		error_found = true;
13967 		break;
13968 	      }
13969 	}
13970     }
13971   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13972     for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13973       {
13974 	/* C++ FE uses TREE_PURPOSE to store initial values.  */
13975 	if (TREE_PURPOSE (l) && in_lto_p)
13976 	  {
13977 	    error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13978 	    debug_tree (l);
13979 	    error_found = true;
13980 	  }
13981 	if (!TYPE_P (TREE_VALUE (l)))
13982 	  {
13983 	    error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13984 	    debug_tree (l);
13985 	    error_found = true;
13986 	  }
13987       }
13988   else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13989     {
13990       error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13991       debug_tree (TYPE_VALUES_RAW (t));
13992       error_found = true;
13993     }
13994   if (TREE_CODE (t) != INTEGER_TYPE
13995       && TREE_CODE (t) != BOOLEAN_TYPE
13996       && TREE_CODE (t) != OFFSET_TYPE
13997       && TREE_CODE (t) != REFERENCE_TYPE
13998       && TREE_CODE (t) != NULLPTR_TYPE
13999       && TREE_CODE (t) != POINTER_TYPE
14000       && TYPE_CACHED_VALUES_P (t))
14001     {
14002       error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14003       error_found = true;
14004     }
14005 
14006   /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14007      TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
14008      of a type. */
14009   if (TREE_CODE (t) == METHOD_TYPE
14010       && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14011     {
14012 	error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14013 	error_found = true;
14014     }
14015 
14016   if (error_found)
14017     {
14018       debug_tree (const_cast <tree> (t));
14019       internal_error ("%qs failed", __func__);
14020     }
14021 }
14022 
14023 
14024 /* Return 1 if ARG interpreted as signed in its precision is known to be
14025    always positive or 2 if ARG is known to be always negative, or 3 if
14026    ARG may be positive or negative.  */
14027 
14028 int
get_range_pos_neg(tree arg)14029 get_range_pos_neg (tree arg)
14030 {
14031   if (arg == error_mark_node)
14032     return 3;
14033 
14034   int prec = TYPE_PRECISION (TREE_TYPE (arg));
14035   int cnt = 0;
14036   if (TREE_CODE (arg) == INTEGER_CST)
14037     {
14038       wide_int w = wi::sext (wi::to_wide (arg), prec);
14039       if (wi::neg_p (w))
14040 	return 2;
14041       else
14042 	return 1;
14043     }
14044   while (CONVERT_EXPR_P (arg)
14045 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14046 	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14047     {
14048       arg = TREE_OPERAND (arg, 0);
14049       /* Narrower value zero extended into wider type
14050 	 will always result in positive values.  */
14051       if (TYPE_UNSIGNED (TREE_TYPE (arg))
14052 	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14053 	return 1;
14054       prec = TYPE_PRECISION (TREE_TYPE (arg));
14055       if (++cnt > 30)
14056 	return 3;
14057     }
14058 
14059   if (TREE_CODE (arg) != SSA_NAME)
14060     return 3;
14061   value_range r;
14062   while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
14063     {
14064       gimple *g = SSA_NAME_DEF_STMT (arg);
14065       if (is_gimple_assign (g)
14066 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14067 	{
14068 	  tree t = gimple_assign_rhs1 (g);
14069 	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14070 	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14071 	    {
14072 	      if (TYPE_UNSIGNED (TREE_TYPE (t))
14073 		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14074 		return 1;
14075 	      prec = TYPE_PRECISION (TREE_TYPE (t));
14076 	      arg = t;
14077 	      if (++cnt > 30)
14078 		return 3;
14079 	      continue;
14080 	    }
14081 	}
14082       return 3;
14083     }
14084   if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14085     {
14086       /* For unsigned values, the "positive" range comes
14087 	 below the "negative" range.  */
14088       if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14089 	return 1;
14090       if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14091 	return 2;
14092     }
14093   else
14094     {
14095       if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14096 	return 1;
14097       if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14098 	return 2;
14099     }
14100   return 3;
14101 }
14102 
14103 
14104 
14105 
14106 /* Return true if ARG is marked with the nonnull attribute in the
14107    current function signature.  */
14108 
14109 bool
nonnull_arg_p(const_tree arg)14110 nonnull_arg_p (const_tree arg)
14111 {
14112   tree t, attrs, fntype;
14113   unsigned HOST_WIDE_INT arg_num;
14114 
14115   gcc_assert (TREE_CODE (arg) == PARM_DECL
14116 	      && (POINTER_TYPE_P (TREE_TYPE (arg))
14117 		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14118 
14119   /* The static chain decl is always non null.  */
14120   if (arg == cfun->static_chain_decl)
14121     return true;
14122 
14123   /* THIS argument of method is always non-NULL.  */
14124   if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14125       && arg == DECL_ARGUMENTS (cfun->decl)
14126       && flag_delete_null_pointer_checks)
14127     return true;
14128 
14129   /* Values passed by reference are always non-NULL.  */
14130   if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14131       && flag_delete_null_pointer_checks)
14132     return true;
14133 
14134   fntype = TREE_TYPE (cfun->decl);
14135   for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14136     {
14137       attrs = lookup_attribute ("nonnull", attrs);
14138 
14139       /* If "nonnull" wasn't specified, we know nothing about the argument.  */
14140       if (attrs == NULL_TREE)
14141 	return false;
14142 
14143       /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
14144       if (TREE_VALUE (attrs) == NULL_TREE)
14145 	return true;
14146 
14147       /* Get the position number for ARG in the function signature.  */
14148       for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14149 	   t;
14150 	   t = DECL_CHAIN (t), arg_num++)
14151 	{
14152 	  if (t == arg)
14153 	    break;
14154 	}
14155 
14156       gcc_assert (t == arg);
14157 
14158       /* Now see if ARG_NUM is mentioned in the nonnull list.  */
14159       for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14160 	{
14161 	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14162 	    return true;
14163 	}
14164     }
14165 
14166   return false;
14167 }
14168 
14169 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14170    information.  */
14171 
14172 location_t
set_block(location_t loc,tree block)14173 set_block (location_t loc, tree block)
14174 {
14175   location_t pure_loc = get_pure_location (loc);
14176   source_range src_range = get_range_from_loc (line_table, loc);
14177   return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14178 }
14179 
14180 location_t
set_source_range(tree expr,location_t start,location_t finish)14181 set_source_range (tree expr, location_t start, location_t finish)
14182 {
14183   source_range src_range;
14184   src_range.m_start = start;
14185   src_range.m_finish = finish;
14186   return set_source_range (expr, src_range);
14187 }
14188 
14189 location_t
set_source_range(tree expr,source_range src_range)14190 set_source_range (tree expr, source_range src_range)
14191 {
14192   if (!EXPR_P (expr))
14193     return UNKNOWN_LOCATION;
14194 
14195   location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14196   location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14197 					    pure_loc,
14198 					    src_range,
14199 					    NULL);
14200   SET_EXPR_LOCATION (expr, adhoc);
14201   return adhoc;
14202 }
14203 
14204 /* Return EXPR, potentially wrapped with a node expression LOC,
14205    if !CAN_HAVE_LOCATION_P (expr).
14206 
14207    NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14208    VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14209 
14210    Wrapper nodes can be identified using location_wrapper_p.  */
14211 
14212 tree
maybe_wrap_with_location(tree expr,location_t loc)14213 maybe_wrap_with_location (tree expr, location_t loc)
14214 {
14215   if (expr == NULL)
14216     return NULL;
14217   if (loc == UNKNOWN_LOCATION)
14218     return expr;
14219   if (CAN_HAVE_LOCATION_P (expr))
14220     return expr;
14221   /* We should only be adding wrappers for constants and for decls,
14222      or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
14223   gcc_assert (CONSTANT_CLASS_P (expr)
14224 	      || DECL_P (expr)
14225 	      || EXCEPTIONAL_CLASS_P (expr));
14226 
14227   /* For now, don't add wrappers to exceptional tree nodes, to minimize
14228      any impact of the wrapper nodes.  */
14229   if (EXCEPTIONAL_CLASS_P (expr))
14230     return expr;
14231 
14232   /* Compiler-generated temporary variables don't need a wrapper.  */
14233   if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14234     return expr;
14235 
14236   /* If any auto_suppress_location_wrappers are active, don't create
14237      wrappers.  */
14238   if (suppress_location_wrappers > 0)
14239     return expr;
14240 
14241   tree_code code
14242     = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14243 	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14244        ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14245   tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14246   /* Mark this node as being a wrapper.  */
14247   EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14248   return wrapper;
14249 }
14250 
14251 int suppress_location_wrappers;
14252 
14253 /* Return the name of combined function FN, for debugging purposes.  */
14254 
14255 const char *
combined_fn_name(combined_fn fn)14256 combined_fn_name (combined_fn fn)
14257 {
14258   if (builtin_fn_p (fn))
14259     {
14260       tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14261       return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14262     }
14263   else
14264     return internal_fn_name (as_internal_fn (fn));
14265 }
14266 
14267 /* Return a bitmap with a bit set corresponding to each argument in
14268    a function call type FNTYPE declared with attribute nonnull,
14269    or null if none of the function's argument are nonnull.  The caller
14270    must free the bitmap.  */
14271 
14272 bitmap
get_nonnull_args(const_tree fntype)14273 get_nonnull_args (const_tree fntype)
14274 {
14275   if (fntype == NULL_TREE)
14276     return NULL;
14277 
14278   bitmap argmap = NULL;
14279   if (TREE_CODE (fntype) == METHOD_TYPE)
14280     {
14281       /* The this pointer in C++ non-static member functions is
14282 	 implicitly nonnull whether or not it's declared as such.  */
14283       argmap = BITMAP_ALLOC (NULL);
14284       bitmap_set_bit (argmap, 0);
14285     }
14286 
14287   tree attrs = TYPE_ATTRIBUTES (fntype);
14288   if (!attrs)
14289     return argmap;
14290 
14291   /* A function declaration can specify multiple attribute nonnull,
14292      each with zero or more arguments.  The loop below creates a bitmap
14293      representing a union of all the arguments.  An empty (but non-null)
14294      bitmap means that all arguments have been declaraed nonnull.  */
14295   for ( ; attrs; attrs = TREE_CHAIN (attrs))
14296     {
14297       attrs = lookup_attribute ("nonnull", attrs);
14298       if (!attrs)
14299 	break;
14300 
14301       if (!argmap)
14302 	argmap = BITMAP_ALLOC (NULL);
14303 
14304       if (!TREE_VALUE (attrs))
14305 	{
14306 	  /* Clear the bitmap in case a previous attribute nonnull
14307 	     set it and this one overrides it for all arguments.  */
14308 	  bitmap_clear (argmap);
14309 	  return argmap;
14310 	}
14311 
14312       /* Iterate over the indices of the format arguments declared nonnull
14313 	 and set a bit for each.  */
14314       for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14315 	{
14316 	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14317 	  bitmap_set_bit (argmap, val);
14318 	}
14319     }
14320 
14321   return argmap;
14322 }
14323 
14324 /* Returns true if TYPE is a type where it and all of its subobjects
14325    (recursively) are of structure, union, or array type.  */
14326 
14327 bool
is_empty_type(const_tree type)14328 is_empty_type (const_tree type)
14329 {
14330   if (RECORD_OR_UNION_TYPE_P (type))
14331     {
14332       for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14333 	if (TREE_CODE (field) == FIELD_DECL
14334 	    && !DECL_PADDING_P (field)
14335 	    && !is_empty_type (TREE_TYPE (field)))
14336 	  return false;
14337       return true;
14338     }
14339   else if (TREE_CODE (type) == ARRAY_TYPE)
14340     return (integer_minus_onep (array_type_nelts (type))
14341 	    || TYPE_DOMAIN (type) == NULL_TREE
14342 	    || is_empty_type (TREE_TYPE (type)));
14343   return false;
14344 }
14345 
14346 /* Implement TARGET_EMPTY_RECORD_P.  Return true if TYPE is an empty type
14347    that shouldn't be passed via stack.  */
14348 
14349 bool
default_is_empty_record(const_tree type)14350 default_is_empty_record (const_tree type)
14351 {
14352   if (!abi_version_at_least (12))
14353     return false;
14354 
14355   if (type == error_mark_node)
14356     return false;
14357 
14358   if (TREE_ADDRESSABLE (type))
14359     return false;
14360 
14361   return is_empty_type (TYPE_MAIN_VARIANT (type));
14362 }
14363 
14364 /* Determine whether TYPE is a structure with a flexible array member,
14365    or a union containing such a structure (possibly recursively).  */
14366 
14367 bool
flexible_array_type_p(const_tree type)14368 flexible_array_type_p (const_tree type)
14369 {
14370   tree x, last;
14371   switch (TREE_CODE (type))
14372     {
14373     case RECORD_TYPE:
14374       last = NULL_TREE;
14375       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14376 	if (TREE_CODE (x) == FIELD_DECL)
14377 	  last = x;
14378       if (last == NULL_TREE)
14379 	return false;
14380       if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14381 	  && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14382 	  && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14383 	  && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14384 	return true;
14385       return false;
14386     case UNION_TYPE:
14387       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14388 	{
14389 	  if (TREE_CODE (x) == FIELD_DECL
14390 	      && flexible_array_type_p (TREE_TYPE (x)))
14391 	    return true;
14392 	}
14393       return false;
14394     default:
14395       return false;
14396   }
14397 }
14398 
14399 /* Like int_size_in_bytes, but handle empty records specially.  */
14400 
14401 HOST_WIDE_INT
arg_int_size_in_bytes(const_tree type)14402 arg_int_size_in_bytes (const_tree type)
14403 {
14404   return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14405 }
14406 
14407 /* Like size_in_bytes, but handle empty records specially.  */
14408 
14409 tree
arg_size_in_bytes(const_tree type)14410 arg_size_in_bytes (const_tree type)
14411 {
14412   return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14413 }
14414 
14415 /* Return true if an expression with CODE has to have the same result type as
14416    its first operand.  */
14417 
14418 bool
expr_type_first_operand_type_p(tree_code code)14419 expr_type_first_operand_type_p (tree_code code)
14420 {
14421   switch (code)
14422     {
14423     case NEGATE_EXPR:
14424     case ABS_EXPR:
14425     case BIT_NOT_EXPR:
14426     case PAREN_EXPR:
14427     case CONJ_EXPR:
14428 
14429     case PLUS_EXPR:
14430     case MINUS_EXPR:
14431     case MULT_EXPR:
14432     case TRUNC_DIV_EXPR:
14433     case CEIL_DIV_EXPR:
14434     case FLOOR_DIV_EXPR:
14435     case ROUND_DIV_EXPR:
14436     case TRUNC_MOD_EXPR:
14437     case CEIL_MOD_EXPR:
14438     case FLOOR_MOD_EXPR:
14439     case ROUND_MOD_EXPR:
14440     case RDIV_EXPR:
14441     case EXACT_DIV_EXPR:
14442     case MIN_EXPR:
14443     case MAX_EXPR:
14444     case BIT_IOR_EXPR:
14445     case BIT_XOR_EXPR:
14446     case BIT_AND_EXPR:
14447 
14448     case LSHIFT_EXPR:
14449     case RSHIFT_EXPR:
14450     case LROTATE_EXPR:
14451     case RROTATE_EXPR:
14452       return true;
14453 
14454     default:
14455       return false;
14456     }
14457 }
14458 
14459 /* Return a typenode for the "standard" C type with a given name.  */
14460 tree
get_typenode_from_name(const char * name)14461 get_typenode_from_name (const char *name)
14462 {
14463   if (name == NULL || *name == '\0')
14464     return NULL_TREE;
14465 
14466   if (strcmp (name, "char") == 0)
14467     return char_type_node;
14468   if (strcmp (name, "unsigned char") == 0)
14469     return unsigned_char_type_node;
14470   if (strcmp (name, "signed char") == 0)
14471     return signed_char_type_node;
14472 
14473   if (strcmp (name, "short int") == 0)
14474     return short_integer_type_node;
14475   if (strcmp (name, "short unsigned int") == 0)
14476     return short_unsigned_type_node;
14477 
14478   if (strcmp (name, "int") == 0)
14479     return integer_type_node;
14480   if (strcmp (name, "unsigned int") == 0)
14481     return unsigned_type_node;
14482 
14483   if (strcmp (name, "long int") == 0)
14484     return long_integer_type_node;
14485   if (strcmp (name, "long unsigned int") == 0)
14486     return long_unsigned_type_node;
14487 
14488   if (strcmp (name, "long long int") == 0)
14489     return long_long_integer_type_node;
14490   if (strcmp (name, "long long unsigned int") == 0)
14491     return long_long_unsigned_type_node;
14492 
14493   gcc_unreachable ();
14494 }
14495 
14496 /* List of pointer types used to declare builtins before we have seen their
14497    real declaration.
14498 
14499    Keep the size up to date in tree.h !  */
14500 const builtin_structptr_type builtin_structptr_types[6] =
14501 {
14502   { fileptr_type_node, ptr_type_node, "FILE" },
14503   { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14504   { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14505   { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14506   { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14507   { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14508 };
14509 
14510 /* Return the maximum object size.  */
14511 
14512 tree
max_object_size(void)14513 max_object_size (void)
14514 {
14515   /* To do: Make this a configurable parameter.  */
14516   return TYPE_MAX_VALUE (ptrdiff_type_node);
14517 }
14518 
14519 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14520    parameter default to false and that weeds out error_mark_node.  */
14521 
14522 bool
verify_type_context(location_t loc,type_context_kind context,const_tree type,bool silent_p)14523 verify_type_context (location_t loc, type_context_kind context,
14524 		     const_tree type, bool silent_p)
14525 {
14526   if (type == error_mark_node)
14527     return true;
14528 
14529   gcc_assert (TYPE_P (type));
14530   return (!targetm.verify_type_context
14531 	  || targetm.verify_type_context (loc, context, type, silent_p));
14532 }
14533 
14534 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14535    delete operators.  Return false if they may or may not name such
14536    a pair and, when nonnull, set *PCERTAIN to true if they certainly
14537    do not.  */
14538 
14539 bool
valid_new_delete_pair_p(tree new_asm,tree delete_asm,bool * pcertain)14540 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14541 			 bool *pcertain /* = NULL */)
14542 {
14543   bool certain;
14544   if (!pcertain)
14545     pcertain = &certain;
14546 
14547   const char *new_name = IDENTIFIER_POINTER (new_asm);
14548   const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14549   unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14550   unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14551 
14552   /* The following failures are due to invalid names so they're not
14553      considered certain mismatches.  */
14554   *pcertain = false;
14555 
14556   if (new_len < 5 || delete_len < 6)
14557     return false;
14558   if (new_name[0] == '_')
14559     ++new_name, --new_len;
14560   if (new_name[0] == '_')
14561     ++new_name, --new_len;
14562   if (delete_name[0] == '_')
14563     ++delete_name, --delete_len;
14564   if (delete_name[0] == '_')
14565     ++delete_name, --delete_len;
14566   if (new_len < 4 || delete_len < 5)
14567     return false;
14568 
14569   /* The following failures are due to names of user-defined operators
14570      so they're also not considered certain mismatches.  */
14571 
14572   /* *_len is now just the length after initial underscores.  */
14573   if (new_name[0] != 'Z' || new_name[1] != 'n')
14574     return false;
14575   if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14576     return false;
14577 
14578   /* The following failures are certain mismatches.  */
14579   *pcertain = true;
14580 
14581   /* _Znw must match _Zdl, _Zna must match _Zda.  */
14582   if ((new_name[2] != 'w' || delete_name[2] != 'l')
14583       && (new_name[2] != 'a' || delete_name[2] != 'a'))
14584     return false;
14585   /* 'j', 'm' and 'y' correspond to size_t.  */
14586   if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14587     return false;
14588   if (delete_name[3] != 'P' || delete_name[4] != 'v')
14589     return false;
14590   if (new_len == 4
14591       || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14592     {
14593       /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14594 	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
14595       if (delete_len == 5)
14596 	return true;
14597       if (delete_len == 6 && delete_name[5] == new_name[3])
14598 	return true;
14599       if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14600 	return true;
14601     }
14602   else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14603 	   || (new_len == 33
14604 	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14605     {
14606       /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14607 	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or  or
14608 	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
14609       if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14610 	return true;
14611       if (delete_len == 21
14612 	  && delete_name[5] == new_name[3]
14613 	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
14614 	return true;
14615       if (delete_len == 34
14616 	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14617 	return true;
14618     }
14619 
14620   /* The negative result is conservative.  */
14621   *pcertain = false;
14622   return false;
14623 }
14624 
14625 /* Return the zero-based number corresponding to the argument being
14626    deallocated if FNDECL is a deallocation function or an out-of-bounds
14627    value if it isn't.  */
14628 
14629 unsigned
fndecl_dealloc_argno(tree fndecl)14630 fndecl_dealloc_argno (tree fndecl)
14631 {
14632   /* A call to operator delete isn't recognized as one to a built-in.  */
14633   if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14634     {
14635       if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14636 	return 0;
14637 
14638       /* Avoid placement delete that's not been inlined.  */
14639       tree fname = DECL_ASSEMBLER_NAME (fndecl);
14640       if (id_equal (fname, "_ZdlPvS_")       // ordinary form
14641 	  || id_equal (fname, "_ZdaPvS_"))   // array form
14642 	return UINT_MAX;
14643       return 0;
14644     }
14645 
14646   /* TODO: Handle user-defined functions with attribute malloc?  Handle
14647      known non-built-ins like fopen?  */
14648   if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14649     {
14650       switch (DECL_FUNCTION_CODE (fndecl))
14651 	{
14652 	case BUILT_IN_FREE:
14653 	case BUILT_IN_REALLOC:
14654 	  return 0;
14655 	default:
14656 	  break;
14657 	}
14658       return UINT_MAX;
14659     }
14660 
14661   tree attrs = DECL_ATTRIBUTES (fndecl);
14662   if (!attrs)
14663     return UINT_MAX;
14664 
14665   for (tree atfree = attrs;
14666        (atfree = lookup_attribute ("*dealloc", atfree));
14667        atfree = TREE_CHAIN (atfree))
14668     {
14669       tree alloc = TREE_VALUE (atfree);
14670       if (!alloc)
14671 	continue;
14672 
14673       tree pos = TREE_CHAIN (alloc);
14674       if (!pos)
14675 	return 0;
14676 
14677       pos = TREE_VALUE (pos);
14678       return TREE_INT_CST_LOW (pos) - 1;
14679     }
14680 
14681   return UINT_MAX;
14682 }
14683 
14684 /* If EXPR refers to a character array or pointer declared attribute
14685    nonstring, return a decl for that array or pointer and set *REF
14686    to the referenced enclosing object or pointer.  Otherwise return
14687    null.  */
14688 
14689 tree
get_attr_nonstring_decl(tree expr,tree * ref)14690 get_attr_nonstring_decl (tree expr, tree *ref)
14691 {
14692   tree decl = expr;
14693   tree var = NULL_TREE;
14694   if (TREE_CODE (decl) == SSA_NAME)
14695     {
14696       gimple *def = SSA_NAME_DEF_STMT (decl);
14697 
14698       if (is_gimple_assign (def))
14699 	{
14700 	  tree_code code = gimple_assign_rhs_code (def);
14701 	  if (code == ADDR_EXPR
14702 	      || code == COMPONENT_REF
14703 	      || code == VAR_DECL)
14704 	    decl = gimple_assign_rhs1 (def);
14705 	}
14706       else
14707 	var = SSA_NAME_VAR (decl);
14708     }
14709 
14710   if (TREE_CODE (decl) == ADDR_EXPR)
14711     decl = TREE_OPERAND (decl, 0);
14712 
14713   /* To simplify calling code, store the referenced DECL regardless of
14714      the attribute determined below, but avoid storing the SSA_NAME_VAR
14715      obtained above (it's not useful for dataflow purposes).  */
14716   if (ref)
14717     *ref = decl;
14718 
14719   /* Use the SSA_NAME_VAR that was determined above to see if it's
14720      declared nonstring.  Otherwise drill down into the referenced
14721      DECL.  */
14722   if (var)
14723     decl = var;
14724   else if (TREE_CODE (decl) == ARRAY_REF)
14725     decl = TREE_OPERAND (decl, 0);
14726   else if (TREE_CODE (decl) == COMPONENT_REF)
14727     decl = TREE_OPERAND (decl, 1);
14728   else if (TREE_CODE (decl) == MEM_REF)
14729     return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
14730 
14731   if (DECL_P (decl)
14732       && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
14733     return decl;
14734 
14735   return NULL_TREE;
14736 }
14737 
14738 /* Return length of attribute names string,
14739    if arglist chain > 1, -1 otherwise.  */
14740 
14741 int
get_target_clone_attr_len(tree arglist)14742 get_target_clone_attr_len (tree arglist)
14743 {
14744   tree arg;
14745   int str_len_sum = 0;
14746   int argnum = 0;
14747 
14748   for (arg = arglist; arg; arg = TREE_CHAIN (arg))
14749     {
14750       const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
14751       size_t len = strlen (str);
14752       str_len_sum += len + 1;
14753       for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
14754 	argnum++;
14755       argnum++;
14756     }
14757   if (argnum <= 1)
14758     return -1;
14759   return str_len_sum;
14760 }
14761 
14762 void
tree_cc_finalize(void)14763 tree_cc_finalize (void)
14764 {
14765   clear_nonstandard_integer_type_cache ();
14766 }
14767 
14768 #if CHECKING_P
14769 
14770 namespace selftest {
14771 
14772 /* Selftests for tree.  */
14773 
14774 /* Verify that integer constants are sane.  */
14775 
14776 static void
test_integer_constants()14777 test_integer_constants ()
14778 {
14779   ASSERT_TRUE (integer_type_node != NULL);
14780   ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14781 
14782   tree type = integer_type_node;
14783 
14784   tree zero = build_zero_cst (type);
14785   ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14786   ASSERT_EQ (type, TREE_TYPE (zero));
14787 
14788   tree one = build_int_cst (type, 1);
14789   ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14790   ASSERT_EQ (type, TREE_TYPE (zero));
14791 }
14792 
14793 /* Verify identifiers.  */
14794 
14795 static void
test_identifiers()14796 test_identifiers ()
14797 {
14798   tree identifier = get_identifier ("foo");
14799   ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14800   ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14801 }
14802 
14803 /* Verify LABEL_DECL.  */
14804 
14805 static void
test_labels()14806 test_labels ()
14807 {
14808   tree identifier = get_identifier ("err");
14809   tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14810 				identifier, void_type_node);
14811   ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14812   ASSERT_FALSE (FORCED_LABEL (label_decl));
14813 }
14814 
14815 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14816    are given by VALS.  */
14817 
14818 static tree
build_vector(tree type,const vec<tree> & vals MEM_STAT_DECL)14819 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
14820 {
14821   gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14822   tree_vector_builder builder (type, vals.length (), 1);
14823   builder.splice (vals);
14824   return builder.build ();
14825 }
14826 
14827 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED.  */
14828 
14829 static void
check_vector_cst(const vec<tree> & expected,tree actual)14830 check_vector_cst (const vec<tree> &expected, tree actual)
14831 {
14832   ASSERT_KNOWN_EQ (expected.length (),
14833 		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14834   for (unsigned int i = 0; i < expected.length (); ++i)
14835     ASSERT_EQ (wi::to_wide (expected[i]),
14836 	       wi::to_wide (vector_cst_elt (actual, i)));
14837 }
14838 
14839 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14840    and that its elements match EXPECTED.  */
14841 
14842 static void
check_vector_cst_duplicate(const vec<tree> & expected,tree actual,unsigned int npatterns)14843 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
14844 			    unsigned int npatterns)
14845 {
14846   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14847   ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14848   ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14849   ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14850   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14851   check_vector_cst (expected, actual);
14852 }
14853 
14854 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14855    and NPATTERNS background elements, and that its elements match
14856    EXPECTED.  */
14857 
14858 static void
check_vector_cst_fill(const vec<tree> & expected,tree actual,unsigned int npatterns)14859 check_vector_cst_fill (const vec<tree> &expected, tree actual,
14860 		       unsigned int npatterns)
14861 {
14862   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14863   ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14864   ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14865   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14866   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14867   check_vector_cst (expected, actual);
14868 }
14869 
14870 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14871    and that its elements match EXPECTED.  */
14872 
14873 static void
check_vector_cst_stepped(const vec<tree> & expected,tree actual,unsigned int npatterns)14874 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
14875 			  unsigned int npatterns)
14876 {
14877   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14878   ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14879   ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14880   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14881   ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14882   check_vector_cst (expected, actual);
14883 }
14884 
14885 /* Test the creation of VECTOR_CSTs.  */
14886 
14887 static void
test_vector_cst_patterns(ALONE_CXX_MEM_STAT_INFO)14888 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
14889 {
14890   auto_vec<tree, 8> elements (8);
14891   elements.quick_grow (8);
14892   tree element_type = build_nonstandard_integer_type (16, true);
14893   tree vector_type = build_vector_type (element_type, 8);
14894 
14895   /* Test a simple linear series with a base of 0 and a step of 1:
14896      { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
14897   for (unsigned int i = 0; i < 8; ++i)
14898     elements[i] = build_int_cst (element_type, i);
14899   tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
14900   check_vector_cst_stepped (elements, vector, 1);
14901 
14902   /* Try the same with the first element replaced by 100:
14903      { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
14904   elements[0] = build_int_cst (element_type, 100);
14905   vector = build_vector (vector_type, elements PASS_MEM_STAT);
14906   check_vector_cst_stepped (elements, vector, 1);
14907 
14908   /* Try a series that wraps around.
14909      { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
14910   for (unsigned int i = 1; i < 8; ++i)
14911     elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14912   vector = build_vector (vector_type, elements PASS_MEM_STAT);
14913   check_vector_cst_stepped (elements, vector, 1);
14914 
14915   /* Try a downward series:
14916      { 100, 79, 78, 77, 76, 75, 75, 73 }.  */
14917   for (unsigned int i = 1; i < 8; ++i)
14918     elements[i] = build_int_cst (element_type, 80 - i);
14919   vector = build_vector (vector_type, elements PASS_MEM_STAT);
14920   check_vector_cst_stepped (elements, vector, 1);
14921 
14922   /* Try two interleaved series with different bases and steps:
14923      { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
14924   elements[1] = build_int_cst (element_type, 53);
14925   for (unsigned int i = 2; i < 8; i += 2)
14926     {
14927       elements[i] = build_int_cst (element_type, 70 - i * 2);
14928       elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14929     }
14930   vector = build_vector (vector_type, elements PASS_MEM_STAT);
14931   check_vector_cst_stepped (elements, vector, 2);
14932 
14933   /* Try a duplicated value:
14934      { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
14935   for (unsigned int i = 1; i < 8; ++i)
14936     elements[i] = elements[0];
14937   vector = build_vector (vector_type, elements PASS_MEM_STAT);
14938   check_vector_cst_duplicate (elements, vector, 1);
14939 
14940   /* Try an interleaved duplicated value:
14941      { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
14942   elements[1] = build_int_cst (element_type, 55);
14943   for (unsigned int i = 2; i < 8; ++i)
14944     elements[i] = elements[i - 2];
14945   vector = build_vector (vector_type, elements PASS_MEM_STAT);
14946   check_vector_cst_duplicate (elements, vector, 2);
14947 
14948   /* Try a duplicated value with 2 exceptions
14949      { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
14950   elements[0] = build_int_cst (element_type, 41);
14951   elements[1] = build_int_cst (element_type, 97);
14952   vector = build_vector (vector_type, elements PASS_MEM_STAT);
14953   check_vector_cst_fill (elements, vector, 2);
14954 
14955   /* Try with and without a step
14956      { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
14957   for (unsigned int i = 3; i < 8; i += 2)
14958     elements[i] = build_int_cst (element_type, i * 7);
14959   vector = build_vector (vector_type, elements PASS_MEM_STAT);
14960   check_vector_cst_stepped (elements, vector, 2);
14961 
14962   /* Try a fully-general constant:
14963      { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
14964   elements[5] = build_int_cst (element_type, 9990);
14965   vector = build_vector (vector_type, elements PASS_MEM_STAT);
14966   check_vector_cst_fill (elements, vector, 4);
14967 }
14968 
14969 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14970    Helper function for test_location_wrappers, to deal with STRIP_NOPS
14971    modifying its argument in-place.  */
14972 
14973 static void
check_strip_nops(tree node,tree expected)14974 check_strip_nops (tree node, tree expected)
14975 {
14976   STRIP_NOPS (node);
14977   ASSERT_EQ (expected, node);
14978 }
14979 
14980 /* Verify location wrappers.  */
14981 
14982 static void
test_location_wrappers()14983 test_location_wrappers ()
14984 {
14985   location_t loc = BUILTINS_LOCATION;
14986 
14987   ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
14988 
14989   /* Wrapping a constant.  */
14990   tree int_cst = build_int_cst (integer_type_node, 42);
14991   ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
14992   ASSERT_FALSE (location_wrapper_p (int_cst));
14993 
14994   tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
14995   ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
14996   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
14997   ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
14998 
14999   /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
15000   ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15001 
15002   /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
15003   tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15004   ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15005   ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15006 
15007   /* Wrapping a STRING_CST.  */
15008   tree string_cst = build_string (4, "foo");
15009   ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15010   ASSERT_FALSE (location_wrapper_p (string_cst));
15011 
15012   tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15013   ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15014   ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15015   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15016   ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15017 
15018 
15019   /* Wrapping a variable.  */
15020   tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15021 			     get_identifier ("some_int_var"),
15022 			     integer_type_node);
15023   ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15024   ASSERT_FALSE (location_wrapper_p (int_var));
15025 
15026   tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15027   ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15028   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15029   ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15030 
15031   /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15032      wrapper.  */
15033   tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15034   ASSERT_FALSE (location_wrapper_p (r_cast));
15035   ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15036 
15037   /* Verify that STRIP_NOPS removes wrappers.  */
15038   check_strip_nops (wrapped_int_cst, int_cst);
15039   check_strip_nops (wrapped_string_cst, string_cst);
15040   check_strip_nops (wrapped_int_var, int_var);
15041 }
15042 
15043 /* Test various tree predicates.  Verify that location wrappers don't
15044    affect the results.  */
15045 
15046 static void
test_predicates()15047 test_predicates ()
15048 {
15049   /* Build various constants and wrappers around them.  */
15050 
15051   location_t loc = BUILTINS_LOCATION;
15052 
15053   tree i_0 = build_int_cst (integer_type_node, 0);
15054   tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15055 
15056   tree i_1 = build_int_cst (integer_type_node, 1);
15057   tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15058 
15059   tree i_m1 = build_int_cst (integer_type_node, -1);
15060   tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15061 
15062   tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15063   tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15064   tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15065   tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15066   tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15067   tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15068 
15069   tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15070   tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15071   tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15072 
15073   tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15074   tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15075   tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15076 
15077   /* TODO: vector constants.  */
15078 
15079   /* Test integer_onep.  */
15080   ASSERT_FALSE (integer_onep (i_0));
15081   ASSERT_FALSE (integer_onep (wr_i_0));
15082   ASSERT_TRUE (integer_onep (i_1));
15083   ASSERT_TRUE (integer_onep (wr_i_1));
15084   ASSERT_FALSE (integer_onep (i_m1));
15085   ASSERT_FALSE (integer_onep (wr_i_m1));
15086   ASSERT_FALSE (integer_onep (f_0));
15087   ASSERT_FALSE (integer_onep (wr_f_0));
15088   ASSERT_FALSE (integer_onep (f_1));
15089   ASSERT_FALSE (integer_onep (wr_f_1));
15090   ASSERT_FALSE (integer_onep (f_m1));
15091   ASSERT_FALSE (integer_onep (wr_f_m1));
15092   ASSERT_FALSE (integer_onep (c_i_0));
15093   ASSERT_TRUE (integer_onep (c_i_1));
15094   ASSERT_FALSE (integer_onep (c_i_m1));
15095   ASSERT_FALSE (integer_onep (c_f_0));
15096   ASSERT_FALSE (integer_onep (c_f_1));
15097   ASSERT_FALSE (integer_onep (c_f_m1));
15098 
15099   /* Test integer_zerop.  */
15100   ASSERT_TRUE (integer_zerop (i_0));
15101   ASSERT_TRUE (integer_zerop (wr_i_0));
15102   ASSERT_FALSE (integer_zerop (i_1));
15103   ASSERT_FALSE (integer_zerop (wr_i_1));
15104   ASSERT_FALSE (integer_zerop (i_m1));
15105   ASSERT_FALSE (integer_zerop (wr_i_m1));
15106   ASSERT_FALSE (integer_zerop (f_0));
15107   ASSERT_FALSE (integer_zerop (wr_f_0));
15108   ASSERT_FALSE (integer_zerop (f_1));
15109   ASSERT_FALSE (integer_zerop (wr_f_1));
15110   ASSERT_FALSE (integer_zerop (f_m1));
15111   ASSERT_FALSE (integer_zerop (wr_f_m1));
15112   ASSERT_TRUE (integer_zerop (c_i_0));
15113   ASSERT_FALSE (integer_zerop (c_i_1));
15114   ASSERT_FALSE (integer_zerop (c_i_m1));
15115   ASSERT_FALSE (integer_zerop (c_f_0));
15116   ASSERT_FALSE (integer_zerop (c_f_1));
15117   ASSERT_FALSE (integer_zerop (c_f_m1));
15118 
15119   /* Test integer_all_onesp.  */
15120   ASSERT_FALSE (integer_all_onesp (i_0));
15121   ASSERT_FALSE (integer_all_onesp (wr_i_0));
15122   ASSERT_FALSE (integer_all_onesp (i_1));
15123   ASSERT_FALSE (integer_all_onesp (wr_i_1));
15124   ASSERT_TRUE (integer_all_onesp (i_m1));
15125   ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15126   ASSERT_FALSE (integer_all_onesp (f_0));
15127   ASSERT_FALSE (integer_all_onesp (wr_f_0));
15128   ASSERT_FALSE (integer_all_onesp (f_1));
15129   ASSERT_FALSE (integer_all_onesp (wr_f_1));
15130   ASSERT_FALSE (integer_all_onesp (f_m1));
15131   ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15132   ASSERT_FALSE (integer_all_onesp (c_i_0));
15133   ASSERT_FALSE (integer_all_onesp (c_i_1));
15134   ASSERT_FALSE (integer_all_onesp (c_i_m1));
15135   ASSERT_FALSE (integer_all_onesp (c_f_0));
15136   ASSERT_FALSE (integer_all_onesp (c_f_1));
15137   ASSERT_FALSE (integer_all_onesp (c_f_m1));
15138 
15139   /* Test integer_minus_onep.  */
15140   ASSERT_FALSE (integer_minus_onep (i_0));
15141   ASSERT_FALSE (integer_minus_onep (wr_i_0));
15142   ASSERT_FALSE (integer_minus_onep (i_1));
15143   ASSERT_FALSE (integer_minus_onep (wr_i_1));
15144   ASSERT_TRUE (integer_minus_onep (i_m1));
15145   ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15146   ASSERT_FALSE (integer_minus_onep (f_0));
15147   ASSERT_FALSE (integer_minus_onep (wr_f_0));
15148   ASSERT_FALSE (integer_minus_onep (f_1));
15149   ASSERT_FALSE (integer_minus_onep (wr_f_1));
15150   ASSERT_FALSE (integer_minus_onep (f_m1));
15151   ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15152   ASSERT_FALSE (integer_minus_onep (c_i_0));
15153   ASSERT_FALSE (integer_minus_onep (c_i_1));
15154   ASSERT_TRUE (integer_minus_onep (c_i_m1));
15155   ASSERT_FALSE (integer_minus_onep (c_f_0));
15156   ASSERT_FALSE (integer_minus_onep (c_f_1));
15157   ASSERT_FALSE (integer_minus_onep (c_f_m1));
15158 
15159   /* Test integer_each_onep.  */
15160   ASSERT_FALSE (integer_each_onep (i_0));
15161   ASSERT_FALSE (integer_each_onep (wr_i_0));
15162   ASSERT_TRUE (integer_each_onep (i_1));
15163   ASSERT_TRUE (integer_each_onep (wr_i_1));
15164   ASSERT_FALSE (integer_each_onep (i_m1));
15165   ASSERT_FALSE (integer_each_onep (wr_i_m1));
15166   ASSERT_FALSE (integer_each_onep (f_0));
15167   ASSERT_FALSE (integer_each_onep (wr_f_0));
15168   ASSERT_FALSE (integer_each_onep (f_1));
15169   ASSERT_FALSE (integer_each_onep (wr_f_1));
15170   ASSERT_FALSE (integer_each_onep (f_m1));
15171   ASSERT_FALSE (integer_each_onep (wr_f_m1));
15172   ASSERT_FALSE (integer_each_onep (c_i_0));
15173   ASSERT_FALSE (integer_each_onep (c_i_1));
15174   ASSERT_FALSE (integer_each_onep (c_i_m1));
15175   ASSERT_FALSE (integer_each_onep (c_f_0));
15176   ASSERT_FALSE (integer_each_onep (c_f_1));
15177   ASSERT_FALSE (integer_each_onep (c_f_m1));
15178 
15179   /* Test integer_truep.  */
15180   ASSERT_FALSE (integer_truep (i_0));
15181   ASSERT_FALSE (integer_truep (wr_i_0));
15182   ASSERT_TRUE (integer_truep (i_1));
15183   ASSERT_TRUE (integer_truep (wr_i_1));
15184   ASSERT_FALSE (integer_truep (i_m1));
15185   ASSERT_FALSE (integer_truep (wr_i_m1));
15186   ASSERT_FALSE (integer_truep (f_0));
15187   ASSERT_FALSE (integer_truep (wr_f_0));
15188   ASSERT_FALSE (integer_truep (f_1));
15189   ASSERT_FALSE (integer_truep (wr_f_1));
15190   ASSERT_FALSE (integer_truep (f_m1));
15191   ASSERT_FALSE (integer_truep (wr_f_m1));
15192   ASSERT_FALSE (integer_truep (c_i_0));
15193   ASSERT_TRUE (integer_truep (c_i_1));
15194   ASSERT_FALSE (integer_truep (c_i_m1));
15195   ASSERT_FALSE (integer_truep (c_f_0));
15196   ASSERT_FALSE (integer_truep (c_f_1));
15197   ASSERT_FALSE (integer_truep (c_f_m1));
15198 
15199   /* Test integer_nonzerop.  */
15200   ASSERT_FALSE (integer_nonzerop (i_0));
15201   ASSERT_FALSE (integer_nonzerop (wr_i_0));
15202   ASSERT_TRUE (integer_nonzerop (i_1));
15203   ASSERT_TRUE (integer_nonzerop (wr_i_1));
15204   ASSERT_TRUE (integer_nonzerop (i_m1));
15205   ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15206   ASSERT_FALSE (integer_nonzerop (f_0));
15207   ASSERT_FALSE (integer_nonzerop (wr_f_0));
15208   ASSERT_FALSE (integer_nonzerop (f_1));
15209   ASSERT_FALSE (integer_nonzerop (wr_f_1));
15210   ASSERT_FALSE (integer_nonzerop (f_m1));
15211   ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15212   ASSERT_FALSE (integer_nonzerop (c_i_0));
15213   ASSERT_TRUE (integer_nonzerop (c_i_1));
15214   ASSERT_TRUE (integer_nonzerop (c_i_m1));
15215   ASSERT_FALSE (integer_nonzerop (c_f_0));
15216   ASSERT_FALSE (integer_nonzerop (c_f_1));
15217   ASSERT_FALSE (integer_nonzerop (c_f_m1));
15218 
15219   /* Test real_zerop.  */
15220   ASSERT_FALSE (real_zerop (i_0));
15221   ASSERT_FALSE (real_zerop (wr_i_0));
15222   ASSERT_FALSE (real_zerop (i_1));
15223   ASSERT_FALSE (real_zerop (wr_i_1));
15224   ASSERT_FALSE (real_zerop (i_m1));
15225   ASSERT_FALSE (real_zerop (wr_i_m1));
15226   ASSERT_TRUE (real_zerop (f_0));
15227   ASSERT_TRUE (real_zerop (wr_f_0));
15228   ASSERT_FALSE (real_zerop (f_1));
15229   ASSERT_FALSE (real_zerop (wr_f_1));
15230   ASSERT_FALSE (real_zerop (f_m1));
15231   ASSERT_FALSE (real_zerop (wr_f_m1));
15232   ASSERT_FALSE (real_zerop (c_i_0));
15233   ASSERT_FALSE (real_zerop (c_i_1));
15234   ASSERT_FALSE (real_zerop (c_i_m1));
15235   ASSERT_TRUE (real_zerop (c_f_0));
15236   ASSERT_FALSE (real_zerop (c_f_1));
15237   ASSERT_FALSE (real_zerop (c_f_m1));
15238 
15239   /* Test real_onep.  */
15240   ASSERT_FALSE (real_onep (i_0));
15241   ASSERT_FALSE (real_onep (wr_i_0));
15242   ASSERT_FALSE (real_onep (i_1));
15243   ASSERT_FALSE (real_onep (wr_i_1));
15244   ASSERT_FALSE (real_onep (i_m1));
15245   ASSERT_FALSE (real_onep (wr_i_m1));
15246   ASSERT_FALSE (real_onep (f_0));
15247   ASSERT_FALSE (real_onep (wr_f_0));
15248   ASSERT_TRUE (real_onep (f_1));
15249   ASSERT_TRUE (real_onep (wr_f_1));
15250   ASSERT_FALSE (real_onep (f_m1));
15251   ASSERT_FALSE (real_onep (wr_f_m1));
15252   ASSERT_FALSE (real_onep (c_i_0));
15253   ASSERT_FALSE (real_onep (c_i_1));
15254   ASSERT_FALSE (real_onep (c_i_m1));
15255   ASSERT_FALSE (real_onep (c_f_0));
15256   ASSERT_TRUE (real_onep (c_f_1));
15257   ASSERT_FALSE (real_onep (c_f_m1));
15258 
15259   /* Test real_minus_onep.  */
15260   ASSERT_FALSE (real_minus_onep (i_0));
15261   ASSERT_FALSE (real_minus_onep (wr_i_0));
15262   ASSERT_FALSE (real_minus_onep (i_1));
15263   ASSERT_FALSE (real_minus_onep (wr_i_1));
15264   ASSERT_FALSE (real_minus_onep (i_m1));
15265   ASSERT_FALSE (real_minus_onep (wr_i_m1));
15266   ASSERT_FALSE (real_minus_onep (f_0));
15267   ASSERT_FALSE (real_minus_onep (wr_f_0));
15268   ASSERT_FALSE (real_minus_onep (f_1));
15269   ASSERT_FALSE (real_minus_onep (wr_f_1));
15270   ASSERT_TRUE (real_minus_onep (f_m1));
15271   ASSERT_TRUE (real_minus_onep (wr_f_m1));
15272   ASSERT_FALSE (real_minus_onep (c_i_0));
15273   ASSERT_FALSE (real_minus_onep (c_i_1));
15274   ASSERT_FALSE (real_minus_onep (c_i_m1));
15275   ASSERT_FALSE (real_minus_onep (c_f_0));
15276   ASSERT_FALSE (real_minus_onep (c_f_1));
15277   ASSERT_TRUE (real_minus_onep (c_f_m1));
15278 
15279   /* Test zerop.  */
15280   ASSERT_TRUE (zerop (i_0));
15281   ASSERT_TRUE (zerop (wr_i_0));
15282   ASSERT_FALSE (zerop (i_1));
15283   ASSERT_FALSE (zerop (wr_i_1));
15284   ASSERT_FALSE (zerop (i_m1));
15285   ASSERT_FALSE (zerop (wr_i_m1));
15286   ASSERT_TRUE (zerop (f_0));
15287   ASSERT_TRUE (zerop (wr_f_0));
15288   ASSERT_FALSE (zerop (f_1));
15289   ASSERT_FALSE (zerop (wr_f_1));
15290   ASSERT_FALSE (zerop (f_m1));
15291   ASSERT_FALSE (zerop (wr_f_m1));
15292   ASSERT_TRUE (zerop (c_i_0));
15293   ASSERT_FALSE (zerop (c_i_1));
15294   ASSERT_FALSE (zerop (c_i_m1));
15295   ASSERT_TRUE (zerop (c_f_0));
15296   ASSERT_FALSE (zerop (c_f_1));
15297   ASSERT_FALSE (zerop (c_f_m1));
15298 
15299   /* Test tree_expr_nonnegative_p.  */
15300   ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15301   ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15302   ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15303   ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15304   ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15305   ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15306   ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15307   ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15308   ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15309   ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15310   ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15311   ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15312   ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15313   ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15314   ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15315   ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15316   ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15317   ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15318 
15319   /* Test tree_expr_nonzero_p.  */
15320   ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15321   ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15322   ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15323   ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15324   ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15325   ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15326 
15327   /* Test integer_valued_real_p.  */
15328   ASSERT_FALSE (integer_valued_real_p (i_0));
15329   ASSERT_TRUE (integer_valued_real_p (f_0));
15330   ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15331   ASSERT_TRUE (integer_valued_real_p (f_1));
15332   ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15333 
15334   /* Test integer_pow2p.  */
15335   ASSERT_FALSE (integer_pow2p (i_0));
15336   ASSERT_TRUE (integer_pow2p (i_1));
15337   ASSERT_TRUE (integer_pow2p (wr_i_1));
15338 
15339   /* Test uniform_integer_cst_p.  */
15340   ASSERT_TRUE (uniform_integer_cst_p (i_0));
15341   ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15342   ASSERT_TRUE (uniform_integer_cst_p (i_1));
15343   ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15344   ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15345   ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15346   ASSERT_FALSE (uniform_integer_cst_p (f_0));
15347   ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15348   ASSERT_FALSE (uniform_integer_cst_p (f_1));
15349   ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15350   ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15351   ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15352   ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15353   ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15354   ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15355   ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15356   ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15357   ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15358 }
15359 
15360 /* Check that string escaping works correctly.  */
15361 
15362 static void
test_escaped_strings(void)15363 test_escaped_strings (void)
15364 {
15365   int saved_cutoff;
15366   escaped_string msg;
15367 
15368   msg.escape (NULL);
15369   /* ASSERT_STREQ does not accept NULL as a valid test
15370      result, so we have to use ASSERT_EQ instead.  */
15371   ASSERT_EQ (NULL, (const char *) msg);
15372 
15373   msg.escape ("");
15374   ASSERT_STREQ ("", (const char *) msg);
15375 
15376   msg.escape ("foobar");
15377   ASSERT_STREQ ("foobar", (const char *) msg);
15378 
15379   /* Ensure that we have -fmessage-length set to 0.  */
15380   saved_cutoff = pp_line_cutoff (global_dc->printer);
15381   pp_line_cutoff (global_dc->printer) = 0;
15382 
15383   msg.escape ("foo\nbar");
15384   ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15385 
15386   msg.escape ("\a\b\f\n\r\t\v");
15387   ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15388 
15389   /* Now repeat the tests with -fmessage-length set to 5.  */
15390   pp_line_cutoff (global_dc->printer) = 5;
15391 
15392   /* Note that the newline is not translated into an escape.  */
15393   msg.escape ("foo\nbar");
15394   ASSERT_STREQ ("foo\nbar", (const char *) msg);
15395 
15396   msg.escape ("\a\b\f\n\r\t\v");
15397   ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15398 
15399   /* Restore the original message length setting.  */
15400   pp_line_cutoff (global_dc->printer) = saved_cutoff;
15401 }
15402 
15403 /* Run all of the selftests within this file.  */
15404 
15405 void
tree_cc_tests()15406 tree_cc_tests ()
15407 {
15408   test_integer_constants ();
15409   test_identifiers ();
15410   test_labels ();
15411   test_vector_cst_patterns ();
15412   test_location_wrappers ();
15413   test_predicates ();
15414   test_escaped_strings ();
15415 }
15416 
15417 } // namespace selftest
15418 
15419 #endif /* CHECKING_P */
15420 
15421 #include "gt-tree.h"
15422