/* Language-independent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but can occasionally
   call language-dependent routines.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "attribs.h"
#include "toplev.h" /* get_random_seed */
#include "output.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "params.h"
#include "langhooks-def.h"
#include "tree-diagnostic.h"
#include "except.h"
#include "builtins.h"
#include "print-tree.h"
#include "ipa-utils.h"
#include "selftest.h"

/* Tree code classes.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};

/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  */

static int tree_code_counts[MAX_TREE_CODES];
int tree_node_counts[(int) all_kinds];
int tree_node_sizes[(int) all_kinds];

/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};

/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) int next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;

/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;

/* General tree->tree mapping structure for use in hash tables.  */


static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;

static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
static void type_hash_list (const_tree, inchash::hash &);
static void attribute_hash_list (const_tree, inchash::hash &);

tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

unsigned char tree_contains_struct[MAX_TREE_CODES][64];

/* Number of operands for each OpenMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  1, /* OMP_CLAUSE__CILK_FOR_COUNT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  2, /* OMP_CLAUSE__GRIDDIM_  */
};

const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "uniform",
  "to",
  "link",
  "from",
  "to",
  "map",
  "use_device_ptr",
  "is_device_ptr",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "_simduid_",
  "_simt_",
  "_Cilk_for_count_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "_griddim_"
};


/* Return the tree node structure used by tree code CODE.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      {
        switch (code)
          {
          case FIELD_DECL:
            return TS_FIELD_DECL;
          case PARM_DECL:
            return TS_PARM_DECL;
          case VAR_DECL:
            return TS_VAR_DECL;
          case LABEL_DECL:
            return TS_LABEL_DECL;
          case RESULT_DECL:
            return TS_RESULT_DECL;
          case DEBUG_EXPR_DECL:
            return TS_DECL_WRTL;
          case CONST_DECL:
            return TS_CONST_DECL;
          case TYPE_DECL:
            return TS_TYPE_DECL;
          case FUNCTION_DECL:
            return TS_FUNCTION_DECL;
          case TRANSLATION_UNIT_DECL:
            return TS_TRANSLATION_UNIT_DECL;
          default:
            return TS_DECL_NON_COMMON;
          }
      }
    case tcc_type:
      return TS_TYPE_NON_COMMON;
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
    case tcc_statement:
    case tcc_vl_exp:
      return TS_EXP;
    default:  /* tcc_constant and tcc_exceptional */
      break;
    }
  switch (code)
    {
      /* tcc_constant cases.  */
    case VOID_CST: return TS_TYPED;
    case INTEGER_CST: return TS_INT_CST;
    case REAL_CST: return TS_REAL_CST;
    case FIXED_CST: return TS_FIXED_CST;
    case COMPLEX_CST: return TS_COMPLEX;
    case VECTOR_CST: return TS_VECTOR;
    case STRING_CST: return TS_STRING;
      /* tcc_exceptional cases.  */
    case ERROR_MARK: return TS_COMMON;
    case IDENTIFIER_NODE: return TS_IDENTIFIER;
    case TREE_LIST: return TS_LIST;
    case TREE_VEC: return TS_VEC;
    case SSA_NAME: return TS_SSA_NAME;
    case PLACEHOLDER_EXPR: return TS_COMMON;
    case STATEMENT_LIST: return TS_STATEMENT_LIST;
    case BLOCK: return TS_BLOCK;
    case CONSTRUCTOR: return TS_CONSTRUCTOR;
    case TREE_BINFO: return TS_BINFO;
    case OMP_CLAUSE: return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
    case TARGET_OPTION_NODE: return TS_TARGET_OPTION;

    default:
      gcc_unreachable ();
    }
}


/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
        {
        case TS_TYPED:
        case TS_BLOCK:
        case TS_OPTIMIZATION:
        case TS_TARGET_OPTION:
          MARK_TS_BASE (code);
          break;

        case TS_COMMON:
        case TS_INT_CST:
        case TS_REAL_CST:
        case TS_FIXED_CST:
        case TS_VECTOR:
        case TS_STRING:
        case TS_COMPLEX:
        case TS_SSA_NAME:
        case TS_CONSTRUCTOR:
        case TS_EXP:
        case TS_STATEMENT_LIST:
          MARK_TS_TYPED (code);
          break;

        case TS_IDENTIFIER:
        case TS_DECL_MINIMAL:
        case TS_TYPE_COMMON:
        case TS_LIST:
        case TS_VEC:
        case TS_BINFO:
        case TS_OMP_CLAUSE:
          MARK_TS_COMMON (code);
          break;

        case TS_TYPE_WITH_LANG_SPECIFIC:
          MARK_TS_TYPE_COMMON (code);
          break;

        case TS_TYPE_NON_COMMON:
          MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
          break;

        case TS_DECL_COMMON:
          MARK_TS_DECL_MINIMAL (code);
          break;

        case TS_DECL_WRTL:
        case TS_CONST_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_DECL_NON_COMMON:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_DECL_WITH_VIS:
        case TS_PARM_DECL:
        case TS_LABEL_DECL:
        case TS_RESULT_DECL:
          MARK_TS_DECL_WRTL (code);
          break;

        case TS_FIELD_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_VAR_DECL:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_TYPE_DECL:
        case TS_FUNCTION_DECL:
          MARK_TS_DECL_NON_COMMON (code);
          break;

        case TS_TRANSLATION_UNIT_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}


/* Init tree.c.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}


/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.  */
tree
decl_assembler_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    lang_hooks.set_decl_assembler_name (decl);
  return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
}

/* When the target supports COMDAT groups, this indicates which group the
   DECL is associated with.  This can be either an IDENTIFIER_NODE or a
   decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */
tree
decl_comdat_group (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group ();
}

/* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */
tree
decl_comdat_group_id (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group_id ();
}

/* When the target supports named sections, return the section name of
   NODE as a string, or NULL if it is in no section.  */
const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}

/* Set the section name of NODE to VALUE (which is expected to be a
   NUL-terminated string, or NULL to clear the section).  */
void
set_decl_section_name (tree node, const char *value)
{
  struct symtab_node *snode;

  if (value == NULL)
    {
      snode = symtab_node::get (node);
      if (!snode)
        return;
    }
  else if (VAR_P (node))
    snode = varpool_node::get_create (node);
  else
    snode = cgraph_node::get_create (node);
  snode->set_section (value);
}

/* Return the TLS model of the variable NODE.  */
enum tls_model
decl_tls_model (const_tree node)
{
  struct varpool_node *snode = varpool_node::get (node);
  if (!snode)
    return TLS_MODEL_NONE;
  return snode->tls_model;
}

/* Set the TLS model of variable NODE to MODEL.  */
void
set_decl_tls_model (tree node, enum tls_model model)
{
  struct varpool_node *vnode;

  if (model == TLS_MODEL_NONE)
    {
      vnode = varpool_node::get (node);
      if (!vnode)
        return;
    }
  else
    vnode = varpool_node::get_create (node);
  vnode->tls_model = model;
}
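
/* Illustrative sketch (not part of GCC; a hypothetical helper): the TLS
   accessors above round-trip through the varpool node.  DECL is assumed
   to be a thread-local VAR_DECL.  Disabled with #if 0; for exposition
   only.  */
#if 0
static void
example_tls_model (tree decl)
{
  /* Creates the varpool node if needed and records the model there.  */
  set_decl_tls_model (decl, TLS_MODEL_INITIAL_EXEC);
  gcc_checking_assert (decl_tls_model (decl) == TLS_MODEL_INITIAL_EXEC);
}
#endif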

/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      {
        switch (code)
          {
          case FIELD_DECL:
            return sizeof (struct tree_field_decl);
          case PARM_DECL:
            return sizeof (struct tree_parm_decl);
          case VAR_DECL:
            return sizeof (struct tree_var_decl);
          case LABEL_DECL:
            return sizeof (struct tree_label_decl);
          case RESULT_DECL:
            return sizeof (struct tree_result_decl);
          case CONST_DECL:
            return sizeof (struct tree_const_decl);
          case TYPE_DECL:
            return sizeof (struct tree_type_decl);
          case FUNCTION_DECL:
            return sizeof (struct tree_function_decl);
          case DEBUG_EXPR_DECL:
            return sizeof (struct tree_decl_with_rtl);
          case TRANSLATION_UNIT_DECL:
            return sizeof (struct tree_translation_unit_decl);
          case NAMESPACE_DECL:
          case IMPORTED_DECL:
          case NAMELIST_DECL:
            return sizeof (struct tree_decl_non_common);
          default:
            return lang_hooks.tree_size (code);
          }
      }

    case tcc_type:  /* a type node */
      return sizeof (struct tree_type_non_common);

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      return (sizeof (struct tree_exp)
              + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
        {
        case VOID_CST: return sizeof (struct tree_typed);
        case INTEGER_CST: gcc_unreachable ();
        case REAL_CST: return sizeof (struct tree_real_cst);
        case FIXED_CST: return sizeof (struct tree_fixed_cst);
        case COMPLEX_CST: return sizeof (struct tree_complex);
        case VECTOR_CST: return sizeof (struct tree_vector);
        case STRING_CST: gcc_unreachable ();
        default:
          return lang_hooks.tree_size (code);
        }

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
        {
        case IDENTIFIER_NODE: return lang_hooks.identifier_size;
        case TREE_LIST: return sizeof (struct tree_list);

        case ERROR_MARK:
        case PLACEHOLDER_EXPR: return sizeof (struct tree_common);

        case TREE_VEC:
        case OMP_CLAUSE: gcc_unreachable ();

        case SSA_NAME: return sizeof (struct tree_ssa_name);

        case STATEMENT_LIST: return sizeof (struct tree_statement_list);
        case BLOCK: return sizeof (struct tree_block);
        case CONSTRUCTOR: return sizeof (struct tree_constructor);
        case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
        case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);

        default:
          return lang_hooks.tree_size (code);
        }

    default:
      gcc_unreachable ();
    }
}

/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      return (sizeof (struct tree_int_cst)
              + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
              + vec<tree, va_gc>
                  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
              + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
              + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));

    case STRING_CST:
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
              + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
                * sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
        return (sizeof (struct tree_exp)
                + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
        return tree_code_size (code);
    }
}

/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.  */

static void
record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
                                   size_t length ATTRIBUTE_UNUSED)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);
  tree_node_kind kind;

  if (!GATHER_STATISTICS)
    return;

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      kind = d_kind;
      break;

    case tcc_type:  /* a type node */
      kind = t_kind;
      break;

    case tcc_statement:  /* an expression with side effects */
      kind = s_kind;
      break;

    case tcc_reference:  /* a reference */
      kind = r_kind;
      break;

    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      kind = e_kind;
      break;

    case tcc_constant:  /* a constant */
      kind = c_kind;
      break;

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
        {
        case IDENTIFIER_NODE:
          kind = id_kind;
          break;

        case TREE_VEC:
          kind = vec_kind;
          break;

        case TREE_BINFO:
          kind = binfo_kind;
          break;

        case SSA_NAME:
          kind = ssa_name_kind;
          break;

        case BLOCK:
          kind = b_kind;
          break;

        case CONSTRUCTOR:
          kind = constr_kind;
          break;

        case OMP_CLAUSE:
          kind = omp_clause_kind;
          break;

        default:
          kind = x_kind;
          break;
        }
      break;

    case tcc_vl_exp:
      kind = e_kind;
      break;

    default:
      gcc_unreachable ();
    }

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
}

/* Allocate and return a new UID from the DECL_UID namespace.  */

int
allocate_decl_uid (void)
{
  return next_decl_uid++;
}
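
/* Illustrative sketch (not part of GCC; a hypothetical helper):
   tree_code_size gives the fixed size for a code, while tree_size also
   accounts for variable-length nodes such as TREE_VEC.  Disabled with
   #if 0; for exposition only.  */
#if 0
static void
example_tree_sizes (void)
{
  tree v = make_tree_vec (4);
  /* A four-element TREE_VEC carries three embedded pointers beyond the
     one counted in struct tree_vec itself.  */
  gcc_checking_assert (tree_size (v)
                       == sizeof (struct tree_vec) + 3 * sizeof (tree));
  gcc_checking_assert (tree_code_size (VAR_DECL)
                       == sizeof (struct tree_var_decl));
}
#endif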

/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node_stat (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
          if (code == FUNCTION_DECL)
            {
              SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
              SET_DECL_MODE (t, FUNCTION_MODE);
            }
          else
            SET_DECL_ALIGN (t, 1);
        }
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          SET_DECL_PT_UID (t, -1);
        }
      if (TREE_CODE (t) == LABEL_DECL)
        LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          TREE_SIDE_EFFECTS (t) = 1;
          break;

        default:
          break;
        }
      break;

    case tcc_exceptional:
      switch (code)
        {
        case TARGET_OPTION_NODE:
          TREE_TARGET_OPTION (t)
            = ggc_cleared_alloc<struct cl_target_option> ();
          break;

        case OPTIMIZATION_NODE:
          TREE_OPTIMIZATION (t)
            = ggc_cleared_alloc<struct cl_optimization> ();
          break;

        default:
          break;
        }
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}

/* Free tree node.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) t_kind]--;
      tree_node_sizes[(int) t_kind] -= tree_size (node);
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  ggc_free (node);
}
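
/* Illustrative sketch (not part of GCC; a hypothetical helper): make_node
   zero-initializes the node and fills in a few class-specific defaults;
   for instance, a fresh type is its own main variant and canonical type
   and has no alias set computed yet.  Disabled with #if 0; for exposition
   only.  */
#if 0
static void
example_make_and_free (void)
{
  tree t = make_node (INTEGER_TYPE);
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t
                       && TYPE_CANONICAL (t) == t
                       && TYPE_ALIAS_SET (t) == -1);
  free_node (t);
}
#endif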

/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node_stat (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      if (code == DEBUG_EXPR_DECL)
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          if (DECL_PT_UID_SET_P (node))
            SET_DECL_PT_UID (t, DECL_PT_UID (node));
        }
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
          && DECL_HAS_VALUE_EXPR_P (node))
        {
          SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
          DECL_HAS_VALUE_EXPR_P (t) = 1;
        }
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
        {
          DECL_HAS_DEBUG_EXPR_P (t) = 0;
          t->decl_with_vis.symtab_node = NULL;
        }
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
        {
          SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
          DECL_HAS_INIT_PRIORITY_P (t) = 1;
        }
      if (TREE_CODE (node) == FUNCTION_DECL)
        {
          DECL_STRUCT_FUNCTION (t) = NULL;
          t->decl_with_vis.symtab_node = NULL;
        }
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
         the copy is different from the original type.
         The two statements usually duplicate each other
         (because they clear fields of the same union),
         but the optimizer should catch that.  */
      TYPE_SYMTAB_POINTER (t) = 0;
      TYPE_SYMTAB_ADDRESS (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
        {
          TYPE_CACHED_VALUES_P (t) = 0;
          TYPE_CACHED_VALUES (t) = NULL_TREE;
        }
    }
  else if (code == TARGET_OPTION_NODE)
    {
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option> ();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
              sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization> ();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
              sizeof (struct cl_optimization));
    }

  return t;
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  */

tree
copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = copy_node (next);
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}
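
/* Illustrative sketch (not part of GCC; a hypothetical helper): copy_list
   duplicates every TREE_LIST node in the chain while sharing the
   TREE_PURPOSE/TREE_VALUE operands.  LIST is assumed to be a non-empty
   TREE_LIST chain.  Disabled with #if 0; for exposition only.  */
#if 0
static void
example_copy_list (tree list)
{
  tree copy = copy_list (list);
  /* Fresh chain nodes, shared values.  */
  gcc_checking_assert (copy != list
                       && TREE_VALUE (copy) == TREE_VALUE (list));
}
#endif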

/* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   INTEGER_CST with value CST and type TYPE.  */

static unsigned int
get_int_cst_ext_nunits (tree type, const wide_int &cst)
{
  gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
  /* We need extra HWIs if CST is an unsigned integer with its
     upper bit set.  */
  if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
    return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
  return cst.get_len ();
}

/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
        = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
        TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
           && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      len--;
      TREE_INT_CST_ELT (nt, len)
        = zext_hwi (cst.elt (len),
                    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}

/* Create an INT_CST node whose value is LOW, sign-extended to TYPE.  */

tree
build_int_cst (tree type, HOST_WIDE_INT low)
{
  /* Support legacy code.  */
  if (!type)
    type = integer_type_node;

  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}

tree
build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}

/* Create an INT_CST node whose value is LOW, sign-extended to TYPE.  */

tree
build_int_cst_type (tree type, HOST_WIDE_INT low)
{
  gcc_assert (type);
  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}

/* Constructs a tree of type TYPE with the value given by CST.  The
   signedness of CST is assumed to be the same as the signedness of TYPE.  */

tree
double_int_to_tree (tree type, double_int cst)
{
  return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
}
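
/* Illustrative sketch (not part of GCC; a hypothetical helper):
   build_int_cst sign-extends LOW to the precision of TYPE, so -1 built
   in an unsigned type is that type's all-ones value.  Disabled with
   #if 0; for exposition only.  */
#if 0
static void
example_build_int_cst (void)
{
  tree m1 = build_int_cst (unsigned_type_node, -1);
  gcc_checking_assert (integer_all_onesp (m1));
}
#endif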

/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value: when > 0 we are only interested in signed
   overflow, when < 0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force the value
   to be within range of the type (by setting to 0 or 1 all the bits
   outside the type's range).  We set TREE_OVERFLOW on the result if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is > 0 and signed overflow occurs,
     or OVERFLOWABLE is < 0 and any overflow occurs.
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type (tree type, const wide_int_ref &cst,
                int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign == SIGNED))
        {
          wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
          tree t = build_new_int_cst (type, tmp);
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

hashval_t
int_cst_hasher::hash (tree x)
{
  const_tree const t = x;
  hashval_t code = TYPE_UID (TREE_TYPE (t));
  int i;

  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
    code = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), code);

  return code;
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST node.  */

bool
int_cst_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  if (TREE_TYPE (xt) != TREE_TYPE (yt)
      || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
      || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
    return false;

  for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
    if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
      return false;

  return true;
}

/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

tree
wide_int_to_tree (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
        gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
        gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
        hwi = cst.to_uhwi ();
      else
        hwi = cst.to_shwi ();

      switch (TREE_CODE (type))
        {
        case NULLPTR_TYPE:
          gcc_assert (hwi == 0);
          /* Fallthru.  */

        case POINTER_TYPE:
        case REFERENCE_TYPE:
        case POINTER_BOUNDS_TYPE:
          /* Cache NULL pointer and zero bounds.  */
          if (hwi == 0)
            {
              limit = 1;
              ix = 0;
            }
          break;

        case BOOLEAN_TYPE:
          /* Cache false or true.  */
          limit = 2;
          if (IN_RANGE (hwi, 0, 1))
            ix = hwi;
          break;

        case INTEGER_TYPE:
        case OFFSET_TYPE:
          if (TYPE_SIGN (type) == UNSIGNED)
            {
              /* Cache [0, N).  */
              limit = INTEGER_SHARE_LIMIT;
              if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
                ix = hwi;
            }
          else
            {
              /* Cache [-1, N).  */
              limit = INTEGER_SHARE_LIMIT + 1;
              if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
                ix = hwi + 1;
            }
          break;

        case ENUMERAL_TYPE:
          break;

        default:
          gcc_unreachable ();
        }

      if (ix >= 0)
        {
          /* Look for it in the type's vector of small shared ints.  */
          if (!TYPE_CACHED_VALUES_P (type))
            {
              TYPE_CACHED_VALUES_P (type) = 1;
              TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
            }

          t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
          if (t)
            /* Make sure no one is clobbering the shared constant.  */
            gcc_checking_assert (TREE_TYPE (t) == type
                                 && TREE_INT_CST_NUNITS (t) == 1
                                 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
                                 && TREE_INT_CST_EXT_NUNITS (t) == 1
                                 && TREE_INT_CST_ELT (t, 0) == hwi);
          else
            {
              /* Create a new shared int.  */
              t = build_new_int_cst (type, cst);
              TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
            }
        }
      else
        {
          /* Use the cache of larger shared ints, using int_cst_node as
             a temporary.  */

          TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
          TREE_TYPE (int_cst_node) = type;

          tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
          t = *slot;
          if (!t)
            {
              /* Insert this one into the hash table.  */
              t = int_cst_node;
              *slot = t;
              /* Make a new node for next time round.  */
              int_cst_node = make_int_cst (1, 1);
            }
        }
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
         for the gc to take care of.  There will not be enough of them
         to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
        {
          /* Insert this one into the hash table.  */
          t = nt;
          *slot = t;
        }
    }

  return t;
}

void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
        {
          limit = 1;
          ix = 0;
        }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (t, 2))
        ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
        {
          /* Cache 0..N.  */
          limit = INTEGER_SHARE_LIMIT;

          /* This is a little hokey, but if the prec is smaller than
             what is necessary to hold INTEGER_SHARE_LIMIT, then the
             obvious test will not get the correct answer.  */
          if (prec < HOST_BITS_PER_WIDE_INT)
            {
              if (tree_to_uhwi (t)
                  < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
                ix = tree_to_uhwi (t);
            }
          else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
            ix = tree_to_uhwi (t);
        }
      else
        {
          /* Cache -1..N.  */
          limit = INTEGER_SHARE_LIMIT + 1;

          if (integer_minus_onep (t))
            ix = 0;
          else if (!wi::neg_p (t))
            {
              if (prec < HOST_BITS_PER_WIDE_INT)
                {
                  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
                    ix = tree_to_shwi (t) + 1;
                }
              else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
                ix = tree_to_shwi (t) + 1;
            }
        }
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
        {
          TYPE_CACHED_VALUES_P (type) = 1;
          TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
        }

      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
         same.  */
      if (*slot)
        gcc_assert (wi::eq_p (tree (*slot), t));
      else
        /* Otherwise insert this one into the hash table.  */
        *slot = t;
    }
}


/* Builds an integer constant in TYPE such that the lowest BITS bits are ones
   and the rest are zeros.  */

tree
build_low_bits_mask (tree type, unsigned bits)
{
  gcc_assert (bits <= TYPE_PRECISION (type));

  return wide_int_to_tree (type, wi::mask (bits, false,
                                           TYPE_PRECISION (type)));
}

/* Checks that X is an integer constant that can be expressed in (unsigned)
   HOST_WIDE_INT without loss of precision.  */

bool
cst_and_fits_in_hwi (const_tree x)
{
  return (TREE_CODE (x) == INTEGER_CST
          && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
}

/* Build a newly constructed VECTOR_CST node of length LEN.  */

tree
make_vector_stat (unsigned len MEM_STAT_DECL)
{
  tree t;
  unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);

  record_node_allocation_statistics (VECTOR_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, VECTOR_CST);
  TREE_CONSTANT (t) = 1;

  return t;
}

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are in a list pointed to by VALS.  */

tree
build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
{
  int over = 0;
  unsigned cnt = 0;
  tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
  TREE_TYPE (v) = type;

  /* Iterate through elements and check for overflow.  */
  for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
    {
      tree value = vals[cnt];

      VECTOR_CST_ELT (v, cnt) = value;

      /* Don't crash if we get an address constant.  */
      if (!CONSTANT_CLASS_P (value))
        continue;

      over |= TREE_OVERFLOW (value);
    }

  TREE_OVERFLOW (v) = over;
  return v;
}

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
  unsigned HOST_WIDE_INT idx, pos = 0;
  tree value;

  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
        for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
          vec[pos++] = VECTOR_CST_ELT (value, i);
      else
        vec[pos++] = value;
    }
  while (pos < TYPE_VECTOR_SUBPARTS (type))
    vec[pos++] = build_zero_cst (TREE_TYPE (type));

  return build_vector (type, vec);
}

/* Build a vector of type VECTYPE where all the elements are SCs.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
                                           TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      tree *v = XALLOCAVEC (tree, nunits);
      for (i = 0; i < nunits; ++i)
        v[i] = sc;
      return build_vector (vectype, v);
    }
  else
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}

/* Something has messed with the elements of CONSTRUCTOR C after it was built;
   calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */

void
recompute_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = true;
  bool side_effects_p = false;
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      /* Mostly ctors will have elts that don't have side-effects, so
         the usual case is to scan all the elements.  Hence a single
         loop for both const and side effects, rather than one loop
         each (with early outs).  */
      if (!TREE_CONSTANT (val))
        constant_p = false;
      if (TREE_SIDE_EFFECTS (val))
        side_effects_p = true;
    }

  TREE_SIDE_EFFECTS (c) = side_effects_p;
  TREE_CONSTANT (c) = constant_p;
}

/* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
   CONSTRUCTOR C.  */

void
verify_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = TREE_CONSTANT (c);
  bool side_effects_p = TREE_SIDE_EFFECTS (c);
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      if (constant_p && !TREE_CONSTANT (val))
        internal_error ("non-constant element in constant CONSTRUCTOR");
      if (!side_effects_p && TREE_SIDE_EFFECTS (val))
        internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
    }
}
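
/* Illustrative sketch (not part of GCC; a hypothetical helper):
   build_vector_from_val above returns a VECTOR_CST when the replicated
   element is a constant, and falls back to a CONSTRUCTOR otherwise.
   VECTYPE is assumed to be a valid vector type.  Disabled with #if 0;
   for exposition only.  */
#if 0
static void
example_vector_splat (tree vectype)
{
  tree one = build_one_cst (TREE_TYPE (vectype));
  tree splat = build_vector_from_val (vectype, one);
  gcc_checking_assert (TREE_CODE (splat) == VECTOR_CST);
}
#endif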

/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in the vec pointed to by VALS.  */
tree
build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
{
  tree c = make_node (CONSTRUCTOR);

  TREE_TYPE (c) = type;
  CONSTRUCTOR_ELTS (c) = vals;

  recompute_constructor_flags (c);

  return c;
}

/* Build a CONSTRUCTOR node made of a single initializer, with the specified
   INDEX and VALUE.  */
tree
build_constructor_single (tree type, tree index, tree value)
{
  vec<constructor_elt, va_gc> *v;
  constructor_elt elt = {index, value};

  vec_alloc (v, 1);
  v->quick_push (elt);

  return build_constructor (type, v);
}


/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a list pointed to by VALS.  */
tree
build_constructor_from_list (tree type, tree vals)
{
  tree t;
  vec<constructor_elt, va_gc> *v = NULL;

  if (vals)
    {
      vec_alloc (v, list_length (vals));
      for (t = vals; t; t = TREE_CHAIN (t))
        CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
    }

  return build_constructor (type, v);
}

/* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
   of elements, provided as index/value pairs.  */

tree
build_constructor_va (tree type, int nelts, ...)
{
  vec<constructor_elt, va_gc> *v = NULL;
  va_list p;

  va_start (p, nelts);
  vec_alloc (v, nelts);
  while (nelts--)
    {
      tree index = va_arg (p, tree);
      tree value = va_arg (p, tree);
      CONSTRUCTOR_APPEND_ELT (v, index, value);
    }
  va_end (p);
  return build_constructor (type, v);
}

/* Return a new FIXED_CST node whose type is TYPE and value is F.  */

tree
build_fixed (tree type, FIXED_VALUE_TYPE f)
{
  tree v;
  FIXED_VALUE_TYPE *fp;

  v = make_node (FIXED_CST);
  fp = ggc_alloc<fixed_value> ();
  memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_FIXED_CST_PTR (v) = fp;
  return v;
}

/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  REAL_VALUE_TYPE *dp;
  int overflow = 0;

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  dp = ggc_alloc<real_value> ();
  memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_REAL_CST_PTR (v) = dp;
  TREE_OVERFLOW (v) = overflow;
  return v;
}

/* Like build_real, but first truncate D to the type.  */

tree
build_real_truncate (tree type, REAL_VALUE_TYPE d)
{
  return build_real (type, real_value_truncate (TYPE_MODE (type), d));
}

/* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
   node I, converted for the floating-point type TYPE.  */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
                     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}

/* Given a tree representing an integer constant I, return a tree
   representing the same value as a floating-point constant of type TYPE.  */

tree
build_real_from_int_cst (tree type, const_tree i)
{
  tree v;
  int overflow = TREE_OVERFLOW (i);

  v = build_real (type, real_value_from_int_cst (type, i));

  TREE_OVERFLOW (v) |= overflow;
  return v;
}

/* Return a newly constructed STRING_CST node whose value is
   the LEN characters at STR.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (int len, const char *str)
{
  tree s;
  size_t length;

  /* Do not waste bytes provided by padding of struct tree_string.  */
  length = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, length);

  s = (tree) ggc_internal_alloc (length);

  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  memcpy (s->string.str, str, len);
  s->string.str[len] = '\0';

  return s;
}

/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}

/* Build a complex (inf +- 0i), such as for the result of cproj.
   TYPE is the complex tree type of the result.  If NEG is true, the
   imaginary zero is negative.  */

tree
build_complex_inf (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
                        build_real (TREE_TYPE (type), rzero));
}

/* Return the constant 1 in type TYPE.  If TYPE has several elements, each
   element is set to 1.  In particular, this is 1 + i for complex types.  */

tree
build_each_one_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_one_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_one_cst (type);
}

/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
*/ 2079 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type))); 2080 return build_fixed (type, FCONST1 (TYPE_MODE (type))); 2081 2082 case VECTOR_TYPE: 2083 { 2084 tree scalar = build_one_cst (TREE_TYPE (type)); 2085 2086 return build_vector_from_val (type, scalar); 2087 } 2088 2089 case COMPLEX_TYPE: 2090 return build_complex (type, 2091 build_one_cst (TREE_TYPE (type)), 2092 build_zero_cst (TREE_TYPE (type))); 2093 2094 default: 2095 gcc_unreachable (); 2096 } 2097 } 2098 2099 /* Return an integer of type TYPE containing all 1's in as much precision as 2100 it contains, or a complex or vector whose subparts are such integers. */ 2101 2102 tree 2103 build_all_ones_cst (tree type) 2104 { 2105 if (TREE_CODE (type) == COMPLEX_TYPE) 2106 { 2107 tree scalar = build_all_ones_cst (TREE_TYPE (type)); 2108 return build_complex (type, scalar, scalar); 2109 } 2110 else 2111 return build_minus_one_cst (type); 2112 } 2113 2114 /* Return a constant of arithmetic type TYPE which is the 2115 opposite of the multiplicative identity of the set TYPE. */ 2116 2117 tree 2118 build_minus_one_cst (tree type) 2119 { 2120 switch (TREE_CODE (type)) 2121 { 2122 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2123 case POINTER_TYPE: case REFERENCE_TYPE: 2124 case OFFSET_TYPE: 2125 return build_int_cst (type, -1); 2126 2127 case REAL_TYPE: 2128 return build_real (type, dconstm1); 2129 2130 case FIXED_POINT_TYPE: 2131 /* We can only generate 1 for accum types. */ 2132 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type))); 2133 return build_fixed (type, fixed_from_double_int (double_int_minus_one, 2134 TYPE_MODE (type))); 2135 2136 case VECTOR_TYPE: 2137 { 2138 tree scalar = build_minus_one_cst (TREE_TYPE (type)); 2139 2140 return build_vector_from_val (type, scalar); 2141 } 2142 2143 case COMPLEX_TYPE: 2144 return build_complex (type, 2145 build_minus_one_cst (TREE_TYPE (type)), 2146 build_zero_cst (TREE_TYPE (type))); 2147 2148 default: 2149 gcc_unreachable (); 2150 } 2151 } 2152 2153 /* Build 0 constant of type TYPE. This is used by constructor folding 2154 and thus the constant should be represented in memory by 2155 zero(es). */ 2156 2157 tree 2158 build_zero_cst (tree type) 2159 { 2160 switch (TREE_CODE (type)) 2161 { 2162 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2163 case POINTER_TYPE: case REFERENCE_TYPE: 2164 case OFFSET_TYPE: case NULLPTR_TYPE: 2165 return build_int_cst (type, 0); 2166 2167 case REAL_TYPE: 2168 return build_real (type, dconst0); 2169 2170 case FIXED_POINT_TYPE: 2171 return build_fixed (type, FCONST0 (TYPE_MODE (type))); 2172 2173 case VECTOR_TYPE: 2174 { 2175 tree scalar = build_zero_cst (TREE_TYPE (type)); 2176 2177 return build_vector_from_val (type, scalar); 2178 } 2179 2180 case COMPLEX_TYPE: 2181 { 2182 tree zero = build_zero_cst (TREE_TYPE (type)); 2183 2184 return build_complex (type, zero, zero); 2185 } 2186 2187 default: 2188 if (!AGGREGATE_TYPE_P (type)) 2189 return fold_convert (type, integer_zero_node); 2190 return build_constructor (type, NULL); 2191 } 2192 } 2193 2194 2195 /* Build a BINFO with LEN language slots. 
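
   As a rough illustration (not code from this file), a front end recording
   a single base class for a derived type might use the result like so;
   DERIVED_TYPE and BASE_BINFO below are hypothetical placeholders:

     tree binfo = make_tree_binfo (1);           // room for one base BINFO
     TREE_TYPE (binfo) = derived_type;           // i.e. BINFO_TYPE (binfo)
     BINFO_OFFSET (binfo) = size_zero_node;
     BINFO_BASE_APPEND (binfo, base_binfo);      // fills one of the LEN slots
     gcc_assert (BINFO_N_BASE_BINFOS (binfo) == 1);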
*/ 2196 2197 tree 2198 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL) 2199 { 2200 tree t; 2201 size_t length = (offsetof (struct tree_binfo, base_binfos) 2202 + vec<tree, va_gc>::embedded_size (base_binfos)); 2203 2204 record_node_allocation_statistics (TREE_BINFO, length); 2205 2206 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT); 2207 2208 memset (t, 0, offsetof (struct tree_binfo, base_binfos)); 2209 2210 TREE_SET_CODE (t, TREE_BINFO); 2211 2212 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos); 2213 2214 return t; 2215 } 2216 2217 /* Create a CASE_LABEL_EXPR tree node and return it. */ 2218 2219 tree 2220 build_case_label (tree low_value, tree high_value, tree label_decl) 2221 { 2222 tree t = make_node (CASE_LABEL_EXPR); 2223 2224 TREE_TYPE (t) = void_type_node; 2225 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl)); 2226 2227 CASE_LOW (t) = low_value; 2228 CASE_HIGH (t) = high_value; 2229 CASE_LABEL (t) = label_decl; 2230 CASE_CHAIN (t) = NULL_TREE; 2231 2232 return t; 2233 } 2234 2235 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the 2236 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively. 2237 The latter determines the length of the HOST_WIDE_INT vector. */ 2238 2239 tree 2240 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL) 2241 { 2242 tree t; 2243 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT) 2244 + sizeof (struct tree_int_cst)); 2245 2246 gcc_assert (len); 2247 record_node_allocation_statistics (INTEGER_CST, length); 2248 2249 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); 2250 2251 TREE_SET_CODE (t, INTEGER_CST); 2252 TREE_INT_CST_NUNITS (t) = len; 2253 TREE_INT_CST_EXT_NUNITS (t) = ext_len; 2254 /* to_offset can only be applied to trees that are offset_int-sized 2255 or smaller. EXT_LEN is correct if it fits, otherwise the constant 2256 must be exactly the precision of offset_int and so LEN is correct. */ 2257 if (ext_len <= OFFSET_INT_ELTS) 2258 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len; 2259 else 2260 TREE_INT_CST_OFFSET_NUNITS (t) = len; 2261 2262 TREE_CONSTANT (t) = 1; 2263 2264 return t; 2265 } 2266 2267 /* Build a newly constructed TREE_VEC node of length LEN. */ 2268 2269 tree 2270 make_tree_vec_stat (int len MEM_STAT_DECL) 2271 { 2272 tree t; 2273 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec); 2274 2275 record_node_allocation_statistics (TREE_VEC, length); 2276 2277 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); 2278 2279 TREE_SET_CODE (t, TREE_VEC); 2280 TREE_VEC_LENGTH (t) = len; 2281 2282 return t; 2283 } 2284 2285 /* Grow a TREE_VEC node to new length LEN. */ 2286 2287 tree 2288 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL) 2289 { 2290 gcc_assert (TREE_CODE (v) == TREE_VEC); 2291 2292 int oldlen = TREE_VEC_LENGTH (v); 2293 gcc_assert (len > oldlen); 2294 2295 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec); 2296 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec); 2297 2298 record_node_allocation_statistics (TREE_VEC, length - oldlength); 2299 2300 v = (tree) ggc_realloc (v, length PASS_MEM_STAT); 2301 2302 TREE_VEC_LENGTH (v) = len; 2303 2304 return v; 2305 } 2306 2307 /* Return 1 if EXPR is the constant zero, whether it is integral, float or 2308 fixed, and scalar, complex or vector. 
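
   For instance (an illustrative sketch only, assuming the usual global type
   nodes from tree.h have been set up):

     tree i0 = build_zero_cst (integer_type_node);      // INTEGER_CST 0
     tree d0 = build_real (double_type_node, dconst0);  // REAL_CST 0.0
     tree c0 = build_zero_cst (complex_double_type_node);
     gcc_assert (zerop (i0) && zerop (d0) && zerop (c0));
     // Nonzero constants and non-constant expressions yield 0.
     gcc_assert (!zerop (build_int_cst (integer_type_node, 42)));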
*/ 2309 2310 int 2311 zerop (const_tree expr) 2312 { 2313 return (integer_zerop (expr) 2314 || real_zerop (expr) 2315 || fixed_zerop (expr)); 2316 } 2317 2318 /* Return 1 if EXPR is the integer constant zero or a complex constant 2319 of zero. */ 2320 2321 int 2322 integer_zerop (const_tree expr) 2323 { 2324 switch (TREE_CODE (expr)) 2325 { 2326 case INTEGER_CST: 2327 return wi::eq_p (expr, 0); 2328 case COMPLEX_CST: 2329 return (integer_zerop (TREE_REALPART (expr)) 2330 && integer_zerop (TREE_IMAGPART (expr))); 2331 case VECTOR_CST: 2332 { 2333 unsigned i; 2334 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2335 if (!integer_zerop (VECTOR_CST_ELT (expr, i))) 2336 return false; 2337 return true; 2338 } 2339 default: 2340 return false; 2341 } 2342 } 2343 2344 /* Return 1 if EXPR is the integer constant one or the corresponding 2345 complex constant. */ 2346 2347 int 2348 integer_onep (const_tree expr) 2349 { 2350 switch (TREE_CODE (expr)) 2351 { 2352 case INTEGER_CST: 2353 return wi::eq_p (wi::to_widest (expr), 1); 2354 case COMPLEX_CST: 2355 return (integer_onep (TREE_REALPART (expr)) 2356 && integer_zerop (TREE_IMAGPART (expr))); 2357 case VECTOR_CST: 2358 { 2359 unsigned i; 2360 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2361 if (!integer_onep (VECTOR_CST_ELT (expr, i))) 2362 return false; 2363 return true; 2364 } 2365 default: 2366 return false; 2367 } 2368 } 2369 2370 /* Return 1 if EXPR is the integer constant one. For complex and vector, 2371 return 1 if every piece is the integer constant one. */ 2372 2373 int 2374 integer_each_onep (const_tree expr) 2375 { 2376 if (TREE_CODE (expr) == COMPLEX_CST) 2377 return (integer_onep (TREE_REALPART (expr)) 2378 && integer_onep (TREE_IMAGPART (expr))); 2379 else 2380 return integer_onep (expr); 2381 } 2382 2383 /* Return 1 if EXPR is an integer containing all 1's in as much precision as 2384 it contains, or a complex or vector whose subparts are such integers. */ 2385 2386 int 2387 integer_all_onesp (const_tree expr) 2388 { 2389 if (TREE_CODE (expr) == COMPLEX_CST 2390 && integer_all_onesp (TREE_REALPART (expr)) 2391 && integer_all_onesp (TREE_IMAGPART (expr))) 2392 return 1; 2393 2394 else if (TREE_CODE (expr) == VECTOR_CST) 2395 { 2396 unsigned i; 2397 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2398 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i))) 2399 return 0; 2400 return 1; 2401 } 2402 2403 else if (TREE_CODE (expr) != INTEGER_CST) 2404 return 0; 2405 2406 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr; 2407 } 2408 2409 /* Return 1 if EXPR is the integer constant minus one. */ 2410 2411 int 2412 integer_minus_onep (const_tree expr) 2413 { 2414 if (TREE_CODE (expr) == COMPLEX_CST) 2415 return (integer_all_onesp (TREE_REALPART (expr)) 2416 && integer_zerop (TREE_IMAGPART (expr))); 2417 else 2418 return integer_all_onesp (expr); 2419 } 2420 2421 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only 2422 one bit on). */ 2423 2424 int 2425 integer_pow2p (const_tree expr) 2426 { 2427 if (TREE_CODE (expr) == COMPLEX_CST 2428 && integer_pow2p (TREE_REALPART (expr)) 2429 && integer_zerop (TREE_IMAGPART (expr))) 2430 return 1; 2431 2432 if (TREE_CODE (expr) != INTEGER_CST) 2433 return 0; 2434 2435 return wi::popcount (expr) == 1; 2436 } 2437 2438 /* Return 1 if EXPR is an integer constant other than zero or a 2439 complex constant other than zero. 
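
   A short sketch of how these integer predicates relate (illustrative only):

     tree t = build_int_cst (unsigned_type_node, 8);
     gcc_assert (integer_nonzerop (t));      // 8 != 0
     gcc_assert (integer_pow2p (t));         // exactly one bit set
     gcc_assert (tree_log2 (t) == 3);        // 2**3 == 8
     tree m1 = build_minus_one_cst (integer_type_node);
     gcc_assert (integer_minus_onep (m1) && integer_all_onesp (m1));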
*/
2440
2441 int
2442 integer_nonzerop (const_tree expr)
2443 {
2444   return ((TREE_CODE (expr) == INTEGER_CST
2445            && !wi::eq_p (expr, 0))
2446           || (TREE_CODE (expr) == COMPLEX_CST
2447               && (integer_nonzerop (TREE_REALPART (expr))
2448                   || integer_nonzerop (TREE_IMAGPART (expr)))));
2449 }
2450
2451 /* Return 1 if EXPR is the integer constant one.  For vector,
2452    return 1 if every piece is the integer constant minus one
2453    (representing the value TRUE).  */
2454
2455 int
2456 integer_truep (const_tree expr)
2457 {
2458   if (TREE_CODE (expr) == VECTOR_CST)
2459     return integer_all_onesp (expr);
2460   return integer_onep (expr);
2461 }
2462
2463 /* Return 1 if EXPR is the fixed-point constant zero.  */
2464
2465 int
2466 fixed_zerop (const_tree expr)
2467 {
2468   return (TREE_CODE (expr) == FIXED_CST
2469           && TREE_FIXED_CST (expr).data.is_zero ());
2470 }
2471
2472 /* Return the power of two represented by a tree node known to be a
2473    power of two.  */
2474
2475 int
2476 tree_log2 (const_tree expr)
2477 {
2478   if (TREE_CODE (expr) == COMPLEX_CST)
2479     return tree_log2 (TREE_REALPART (expr));
2480
2481   return wi::exact_log2 (expr);
2482 }
2483
2484 /* Similar, but return the largest integer Y such that 2 ** Y is less
2485    than or equal to EXPR.  */
2486
2487 int
2488 tree_floor_log2 (const_tree expr)
2489 {
2490   if (TREE_CODE (expr) == COMPLEX_CST)
2491     return tree_log2 (TREE_REALPART (expr));
2492
2493   return wi::floor_log2 (expr);
2494 }
2495
2496 /* Return number of known trailing zero bits in EXPR, or, if the value of
2497    EXPR is known to be zero, the precision of its type.  */
2498
2499 unsigned int
2500 tree_ctz (const_tree expr)
2501 {
2502   if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2503       && !POINTER_TYPE_P (TREE_TYPE (expr)))
2504     return 0;
2505
2506   unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2507   switch (TREE_CODE (expr))
2508     {
2509     case INTEGER_CST:
2510       ret1 = wi::ctz (expr);
2511       return MIN (ret1, prec);
2512     case SSA_NAME:
2513       ret1 = wi::ctz (get_nonzero_bits (expr));
2514       return MIN (ret1, prec);
2515     case PLUS_EXPR:
2516     case MINUS_EXPR:
2517     case BIT_IOR_EXPR:
2518     case BIT_XOR_EXPR:
2519     case MIN_EXPR:
2520     case MAX_EXPR:
2521       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2522       if (ret1 == 0)
2523         return ret1;
2524       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2525       return MIN (ret1, ret2);
2526     case POINTER_PLUS_EXPR:
2527       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2528       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2529       /* Second operand is sizetype, which could be in theory
2530          wider than pointer's precision.  Make sure we never
2531          return more than prec.
*/ 2532 ret2 = MIN (ret2, prec); 2533 return MIN (ret1, ret2); 2534 case BIT_AND_EXPR: 2535 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2536 ret2 = tree_ctz (TREE_OPERAND (expr, 1)); 2537 return MAX (ret1, ret2); 2538 case MULT_EXPR: 2539 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2540 ret2 = tree_ctz (TREE_OPERAND (expr, 1)); 2541 return MIN (ret1 + ret2, prec); 2542 case LSHIFT_EXPR: 2543 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2544 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1)) 2545 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec)) 2546 { 2547 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1)); 2548 return MIN (ret1 + ret2, prec); 2549 } 2550 return ret1; 2551 case RSHIFT_EXPR: 2552 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1)) 2553 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec)) 2554 { 2555 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2556 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1)); 2557 if (ret1 > ret2) 2558 return ret1 - ret2; 2559 } 2560 return 0; 2561 case TRUNC_DIV_EXPR: 2562 case CEIL_DIV_EXPR: 2563 case FLOOR_DIV_EXPR: 2564 case ROUND_DIV_EXPR: 2565 case EXACT_DIV_EXPR: 2566 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST 2567 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1) 2568 { 2569 int l = tree_log2 (TREE_OPERAND (expr, 1)); 2570 if (l >= 0) 2571 { 2572 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2573 ret2 = l; 2574 if (ret1 > ret2) 2575 return ret1 - ret2; 2576 } 2577 } 2578 return 0; 2579 CASE_CONVERT: 2580 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2581 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0)))) 2582 ret1 = prec; 2583 return MIN (ret1, prec); 2584 case SAVE_EXPR: 2585 return tree_ctz (TREE_OPERAND (expr, 0)); 2586 case COND_EXPR: 2587 ret1 = tree_ctz (TREE_OPERAND (expr, 1)); 2588 if (ret1 == 0) 2589 return 0; 2590 ret2 = tree_ctz (TREE_OPERAND (expr, 2)); 2591 return MIN (ret1, ret2); 2592 case COMPOUND_EXPR: 2593 return tree_ctz (TREE_OPERAND (expr, 1)); 2594 case ADDR_EXPR: 2595 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr)); 2596 if (ret1 > BITS_PER_UNIT) 2597 { 2598 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT); 2599 return MIN (ret1, prec); 2600 } 2601 return 0; 2602 default: 2603 return 0; 2604 } 2605 } 2606 2607 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for 2608 decimal float constants, so don't return 1 for them. */ 2609 2610 int 2611 real_zerop (const_tree expr) 2612 { 2613 switch (TREE_CODE (expr)) 2614 { 2615 case REAL_CST: 2616 return real_equal (&TREE_REAL_CST (expr), &dconst0) 2617 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))); 2618 case COMPLEX_CST: 2619 return real_zerop (TREE_REALPART (expr)) 2620 && real_zerop (TREE_IMAGPART (expr)); 2621 case VECTOR_CST: 2622 { 2623 unsigned i; 2624 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2625 if (!real_zerop (VECTOR_CST_ELT (expr, i))) 2626 return false; 2627 return true; 2628 } 2629 default: 2630 return false; 2631 } 2632 } 2633 2634 /* Return 1 if EXPR is the real constant one in real or complex form. 2635 Trailing zeroes matter for decimal float constants, so don't return 2636 1 for them. 
*/ 2637 2638 int 2639 real_onep (const_tree expr) 2640 { 2641 switch (TREE_CODE (expr)) 2642 { 2643 case REAL_CST: 2644 return real_equal (&TREE_REAL_CST (expr), &dconst1) 2645 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))); 2646 case COMPLEX_CST: 2647 return real_onep (TREE_REALPART (expr)) 2648 && real_zerop (TREE_IMAGPART (expr)); 2649 case VECTOR_CST: 2650 { 2651 unsigned i; 2652 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2653 if (!real_onep (VECTOR_CST_ELT (expr, i))) 2654 return false; 2655 return true; 2656 } 2657 default: 2658 return false; 2659 } 2660 } 2661 2662 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes 2663 matter for decimal float constants, so don't return 1 for them. */ 2664 2665 int 2666 real_minus_onep (const_tree expr) 2667 { 2668 switch (TREE_CODE (expr)) 2669 { 2670 case REAL_CST: 2671 return real_equal (&TREE_REAL_CST (expr), &dconstm1) 2672 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))); 2673 case COMPLEX_CST: 2674 return real_minus_onep (TREE_REALPART (expr)) 2675 && real_zerop (TREE_IMAGPART (expr)); 2676 case VECTOR_CST: 2677 { 2678 unsigned i; 2679 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2680 if (!real_minus_onep (VECTOR_CST_ELT (expr, i))) 2681 return false; 2682 return true; 2683 } 2684 default: 2685 return false; 2686 } 2687 } 2688 2689 /* Nonzero if EXP is a constant or a cast of a constant. */ 2690 2691 int 2692 really_constant_p (const_tree exp) 2693 { 2694 /* This is not quite the same as STRIP_NOPS. It does more. */ 2695 while (CONVERT_EXPR_P (exp) 2696 || TREE_CODE (exp) == NON_LVALUE_EXPR) 2697 exp = TREE_OPERAND (exp, 0); 2698 return TREE_CONSTANT (exp); 2699 } 2700 2701 /* Return first list element whose TREE_VALUE is ELEM. 2702 Return 0 if ELEM is not in LIST. */ 2703 2704 tree 2705 value_member (tree elem, tree list) 2706 { 2707 while (list) 2708 { 2709 if (elem == TREE_VALUE (list)) 2710 return list; 2711 list = TREE_CHAIN (list); 2712 } 2713 return NULL_TREE; 2714 } 2715 2716 /* Return first list element whose TREE_PURPOSE is ELEM. 2717 Return 0 if ELEM is not in LIST. */ 2718 2719 tree 2720 purpose_member (const_tree elem, tree list) 2721 { 2722 while (list) 2723 { 2724 if (elem == TREE_PURPOSE (list)) 2725 return list; 2726 list = TREE_CHAIN (list); 2727 } 2728 return NULL_TREE; 2729 } 2730 2731 /* Return true if ELEM is in V. */ 2732 2733 bool 2734 vec_member (const_tree elem, vec<tree, va_gc> *v) 2735 { 2736 unsigned ix; 2737 tree t; 2738 FOR_EACH_VEC_SAFE_ELT (v, ix, t) 2739 if (elem == t) 2740 return true; 2741 return false; 2742 } 2743 2744 /* Returns element number IDX (zero-origin) of chain CHAIN, or 2745 NULL_TREE. */ 2746 2747 tree 2748 chain_index (int idx, tree chain) 2749 { 2750 for (; chain && idx > 0; --idx) 2751 chain = TREE_CHAIN (chain); 2752 return chain; 2753 } 2754 2755 /* Return nonzero if ELEM is part of the chain CHAIN. */ 2756 2757 int 2758 chain_member (const_tree elem, const_tree chain) 2759 { 2760 while (chain) 2761 { 2762 if (elem == chain) 2763 return 1; 2764 chain = DECL_CHAIN (chain); 2765 } 2766 2767 return 0; 2768 } 2769 2770 /* Return the length of a chain of nodes chained through TREE_CHAIN. 2771 We expect a null pointer to mark the end of the chain. 2772 This is the Lisp primitive `length'. 
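
   For instance (a minimal sketch, not code from this file):

     // Build the chain ("a") -> ("b") -> ("c") front to back.
     tree chain = build_tree_list (NULL_TREE, get_identifier ("a"));
     chain = chainon (chain, build_tree_list (NULL_TREE, get_identifier ("b")));
     chain = chainon (chain, build_tree_list (NULL_TREE, get_identifier ("c")));
     gcc_assert (list_length (chain) == 3);
     // Identifiers are interned, so the last TREE_VALUE is the same node.
     gcc_assert (TREE_VALUE (tree_last (chain)) == get_identifier ("c"));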
*/ 2773 2774 int 2775 list_length (const_tree t) 2776 { 2777 const_tree p = t; 2778 #ifdef ENABLE_TREE_CHECKING 2779 const_tree q = t; 2780 #endif 2781 int len = 0; 2782 2783 while (p) 2784 { 2785 p = TREE_CHAIN (p); 2786 #ifdef ENABLE_TREE_CHECKING 2787 if (len % 2) 2788 q = TREE_CHAIN (q); 2789 gcc_assert (p != q); 2790 #endif 2791 len++; 2792 } 2793 2794 return len; 2795 } 2796 2797 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or 2798 UNION_TYPE TYPE, or NULL_TREE if none. */ 2799 2800 tree 2801 first_field (const_tree type) 2802 { 2803 tree t = TYPE_FIELDS (type); 2804 while (t && TREE_CODE (t) != FIELD_DECL) 2805 t = TREE_CHAIN (t); 2806 return t; 2807 } 2808 2809 /* Concatenate two chains of nodes (chained through TREE_CHAIN) 2810 by modifying the last node in chain 1 to point to chain 2. 2811 This is the Lisp primitive `nconc'. */ 2812 2813 tree 2814 chainon (tree op1, tree op2) 2815 { 2816 tree t1; 2817 2818 if (!op1) 2819 return op2; 2820 if (!op2) 2821 return op1; 2822 2823 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1)) 2824 continue; 2825 TREE_CHAIN (t1) = op2; 2826 2827 #ifdef ENABLE_TREE_CHECKING 2828 { 2829 tree t2; 2830 for (t2 = op2; t2; t2 = TREE_CHAIN (t2)) 2831 gcc_assert (t2 != t1); 2832 } 2833 #endif 2834 2835 return op1; 2836 } 2837 2838 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */ 2839 2840 tree 2841 tree_last (tree chain) 2842 { 2843 tree next; 2844 if (chain) 2845 while ((next = TREE_CHAIN (chain))) 2846 chain = next; 2847 return chain; 2848 } 2849 2850 /* Reverse the order of elements in the chain T, 2851 and return the new head of the chain (old last element). */ 2852 2853 tree 2854 nreverse (tree t) 2855 { 2856 tree prev = 0, decl, next; 2857 for (decl = t; decl; decl = next) 2858 { 2859 /* We shouldn't be using this function to reverse BLOCK chains; we 2860 have blocks_nreverse for that. */ 2861 gcc_checking_assert (TREE_CODE (decl) != BLOCK); 2862 next = TREE_CHAIN (decl); 2863 TREE_CHAIN (decl) = prev; 2864 prev = decl; 2865 } 2866 return prev; 2867 } 2868 2869 /* Return a newly created TREE_LIST node whose 2870 purpose and value fields are PARM and VALUE. */ 2871 2872 tree 2873 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL) 2874 { 2875 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT); 2876 TREE_PURPOSE (t) = parm; 2877 TREE_VALUE (t) = value; 2878 return t; 2879 } 2880 2881 /* Build a chain of TREE_LIST nodes from a vector. */ 2882 2883 tree 2884 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL) 2885 { 2886 tree ret = NULL_TREE; 2887 tree *pp = &ret; 2888 unsigned int i; 2889 tree t; 2890 FOR_EACH_VEC_SAFE_ELT (vec, i, t) 2891 { 2892 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT); 2893 pp = &TREE_CHAIN (*pp); 2894 } 2895 return ret; 2896 } 2897 2898 /* Return a newly created TREE_LIST node whose 2899 purpose and value fields are PURPOSE and VALUE 2900 and whose TREE_CHAIN is CHAIN. 
*/ 2901 2902 tree 2903 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL) 2904 { 2905 tree node; 2906 2907 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT); 2908 memset (node, 0, sizeof (struct tree_common)); 2909 2910 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list)); 2911 2912 TREE_SET_CODE (node, TREE_LIST); 2913 TREE_CHAIN (node) = chain; 2914 TREE_PURPOSE (node) = purpose; 2915 TREE_VALUE (node) = value; 2916 return node; 2917 } 2918 2919 /* Return the values of the elements of a CONSTRUCTOR as a vector of 2920 trees. */ 2921 2922 vec<tree, va_gc> * 2923 ctor_to_vec (tree ctor) 2924 { 2925 vec<tree, va_gc> *vec; 2926 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor)); 2927 unsigned int ix; 2928 tree val; 2929 2930 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val) 2931 vec->quick_push (val); 2932 2933 return vec; 2934 } 2935 2936 /* Return the size nominally occupied by an object of type TYPE 2937 when it resides in memory. The value is measured in units of bytes, 2938 and its data type is that normally used for type sizes 2939 (which is the first type created by make_signed_type or 2940 make_unsigned_type). */ 2941 2942 tree 2943 size_in_bytes_loc (location_t loc, const_tree type) 2944 { 2945 tree t; 2946 2947 if (type == error_mark_node) 2948 return integer_zero_node; 2949 2950 type = TYPE_MAIN_VARIANT (type); 2951 t = TYPE_SIZE_UNIT (type); 2952 2953 if (t == 0) 2954 { 2955 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type); 2956 return size_zero_node; 2957 } 2958 2959 return t; 2960 } 2961 2962 /* Return the size of TYPE (in bytes) as a wide integer 2963 or return -1 if the size can vary or is larger than an integer. */ 2964 2965 HOST_WIDE_INT 2966 int_size_in_bytes (const_tree type) 2967 { 2968 tree t; 2969 2970 if (type == error_mark_node) 2971 return 0; 2972 2973 type = TYPE_MAIN_VARIANT (type); 2974 t = TYPE_SIZE_UNIT (type); 2975 2976 if (t && tree_fits_uhwi_p (t)) 2977 return TREE_INT_CST_LOW (t); 2978 else 2979 return -1; 2980 } 2981 2982 /* Return the maximum size of TYPE (in bytes) as a wide integer 2983 or return -1 if the size can vary or is larger than an integer. */ 2984 2985 HOST_WIDE_INT 2986 max_int_size_in_bytes (const_tree type) 2987 { 2988 HOST_WIDE_INT size = -1; 2989 tree size_tree; 2990 2991 /* If this is an array type, check for a possible MAX_SIZE attached. */ 2992 2993 if (TREE_CODE (type) == ARRAY_TYPE) 2994 { 2995 size_tree = TYPE_ARRAY_MAX_SIZE (type); 2996 2997 if (size_tree && tree_fits_uhwi_p (size_tree)) 2998 size = tree_to_uhwi (size_tree); 2999 } 3000 3001 /* If we still haven't been able to get a size, see if the language 3002 can compute a maximum size. */ 3003 3004 if (size == -1) 3005 { 3006 size_tree = lang_hooks.types.max_size (type); 3007 3008 if (size_tree && tree_fits_uhwi_p (size_tree)) 3009 size = tree_to_uhwi (size_tree); 3010 } 3011 3012 return size; 3013 } 3014 3015 /* Return the bit position of FIELD, in bits from the start of the record. 3016 This is a tree of type bitsizetype. */ 3017 3018 tree 3019 bit_position (const_tree field) 3020 { 3021 return bit_from_pos (DECL_FIELD_OFFSET (field), 3022 DECL_FIELD_BIT_OFFSET (field)); 3023 } 3024 3025 /* Return the byte position of FIELD, in bytes from the start of the record. 3026 This is a tree of type sizetype. 
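
   As an illustration, a hypothetical debugging helper (not part of GCC)
   could walk a laid-out RECORD_TYPE and report where each field lives:

     static void
     dump_field_offsets (tree record)   // RECORD is a laid-out RECORD_TYPE
     {
       for (tree f = first_field (record); f; f = DECL_CHAIN (f))
         if (TREE_CODE (f) == FIELD_DECL && DECL_NAME (f))
           fprintf (stderr, "%s: byte " HOST_WIDE_INT_PRINT_DEC
                    ", size " HOST_WIDE_INT_PRINT_DEC "\n",
                    IDENTIFIER_POINTER (DECL_NAME (f)),
                    int_byte_position (f),
                    int_size_in_bytes (TREE_TYPE (f)));
     }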
*/ 3027 3028 tree 3029 byte_position (const_tree field) 3030 { 3031 return byte_from_pos (DECL_FIELD_OFFSET (field), 3032 DECL_FIELD_BIT_OFFSET (field)); 3033 } 3034 3035 /* Likewise, but return as an integer. It must be representable in 3036 that way (since it could be a signed value, we don't have the 3037 option of returning -1 like int_size_in_byte can. */ 3038 3039 HOST_WIDE_INT 3040 int_byte_position (const_tree field) 3041 { 3042 return tree_to_shwi (byte_position (field)); 3043 } 3044 3045 /* Return the strictest alignment, in bits, that T is known to have. */ 3046 3047 unsigned int 3048 expr_align (const_tree t) 3049 { 3050 unsigned int align0, align1; 3051 3052 switch (TREE_CODE (t)) 3053 { 3054 CASE_CONVERT: case NON_LVALUE_EXPR: 3055 /* If we have conversions, we know that the alignment of the 3056 object must meet each of the alignments of the types. */ 3057 align0 = expr_align (TREE_OPERAND (t, 0)); 3058 align1 = TYPE_ALIGN (TREE_TYPE (t)); 3059 return MAX (align0, align1); 3060 3061 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR: 3062 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR: 3063 case CLEANUP_POINT_EXPR: 3064 /* These don't change the alignment of an object. */ 3065 return expr_align (TREE_OPERAND (t, 0)); 3066 3067 case COND_EXPR: 3068 /* The best we can do is say that the alignment is the least aligned 3069 of the two arms. */ 3070 align0 = expr_align (TREE_OPERAND (t, 1)); 3071 align1 = expr_align (TREE_OPERAND (t, 2)); 3072 return MIN (align0, align1); 3073 3074 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set 3075 meaningfully, it's always 1. */ 3076 case LABEL_DECL: case CONST_DECL: 3077 case VAR_DECL: case PARM_DECL: case RESULT_DECL: 3078 case FUNCTION_DECL: 3079 gcc_assert (DECL_ALIGN (t) != 0); 3080 return DECL_ALIGN (t); 3081 3082 default: 3083 break; 3084 } 3085 3086 /* Otherwise take the alignment from that of the type. */ 3087 return TYPE_ALIGN (TREE_TYPE (t)); 3088 } 3089 3090 /* Return, as a tree node, the number of elements for TYPE (which is an 3091 ARRAY_TYPE) minus one. This counts only elements of the top array. */ 3092 3093 tree 3094 array_type_nelts (const_tree type) 3095 { 3096 tree index_type, min, max; 3097 3098 /* If they did it with unspecified bounds, then we should have already 3099 given an error about it before we got here. */ 3100 if (! TYPE_DOMAIN (type)) 3101 return error_mark_node; 3102 3103 index_type = TYPE_DOMAIN (type); 3104 min = TYPE_MIN_VALUE (index_type); 3105 max = TYPE_MAX_VALUE (index_type); 3106 3107 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */ 3108 if (!max) 3109 return error_mark_node; 3110 3111 return (integer_zerop (min) 3112 ? max 3113 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min)); 3114 } 3115 3116 /* If arg is static -- a reference to an object in static storage -- then 3117 return the object. This is not the same as the C meaning of `static'. 3118 If arg isn't static, return NULL. */ 3119 3120 tree 3121 staticp (tree arg) 3122 { 3123 switch (TREE_CODE (arg)) 3124 { 3125 case FUNCTION_DECL: 3126 /* Nested functions are static, even though taking their address will 3127 involve a trampoline as we unnest the nested function and create 3128 the trampoline on the tree level. */ 3129 return arg; 3130 3131 case VAR_DECL: 3132 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg)) 3133 && ! DECL_THREAD_LOCAL_P (arg) 3134 && ! DECL_DLLIMPORT_P (arg) 3135 ? arg : NULL); 3136 3137 case CONST_DECL: 3138 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg)) 3139 ? 
arg : NULL); 3140 3141 case CONSTRUCTOR: 3142 return TREE_STATIC (arg) ? arg : NULL; 3143 3144 case LABEL_DECL: 3145 case STRING_CST: 3146 return arg; 3147 3148 case COMPONENT_REF: 3149 /* If the thing being referenced is not a field, then it is 3150 something language specific. */ 3151 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL); 3152 3153 /* If we are referencing a bitfield, we can't evaluate an 3154 ADDR_EXPR at compile time and so it isn't a constant. */ 3155 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1))) 3156 return NULL; 3157 3158 return staticp (TREE_OPERAND (arg, 0)); 3159 3160 case BIT_FIELD_REF: 3161 return NULL; 3162 3163 case INDIRECT_REF: 3164 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL; 3165 3166 case ARRAY_REF: 3167 case ARRAY_RANGE_REF: 3168 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST 3169 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST) 3170 return staticp (TREE_OPERAND (arg, 0)); 3171 else 3172 return NULL; 3173 3174 case COMPOUND_LITERAL_EXPR: 3175 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL; 3176 3177 default: 3178 return NULL; 3179 } 3180 } 3181 3182 3183 3184 3185 /* Return whether OP is a DECL whose address is function-invariant. */ 3186 3187 bool 3188 decl_address_invariant_p (const_tree op) 3189 { 3190 /* The conditions below are slightly less strict than the one in 3191 staticp. */ 3192 3193 switch (TREE_CODE (op)) 3194 { 3195 case PARM_DECL: 3196 case RESULT_DECL: 3197 case LABEL_DECL: 3198 case FUNCTION_DECL: 3199 return true; 3200 3201 case VAR_DECL: 3202 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)) 3203 || DECL_THREAD_LOCAL_P (op) 3204 || DECL_CONTEXT (op) == current_function_decl 3205 || decl_function_context (op) == current_function_decl) 3206 return true; 3207 break; 3208 3209 case CONST_DECL: 3210 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)) 3211 || decl_function_context (op) == current_function_decl) 3212 return true; 3213 break; 3214 3215 default: 3216 break; 3217 } 3218 3219 return false; 3220 } 3221 3222 /* Return whether OP is a DECL whose address is interprocedural-invariant. */ 3223 3224 bool 3225 decl_address_ip_invariant_p (const_tree op) 3226 { 3227 /* The conditions below are slightly less strict than the one in 3228 staticp. */ 3229 3230 switch (TREE_CODE (op)) 3231 { 3232 case LABEL_DECL: 3233 case FUNCTION_DECL: 3234 case STRING_CST: 3235 return true; 3236 3237 case VAR_DECL: 3238 if (((TREE_STATIC (op) || DECL_EXTERNAL (op)) 3239 && !DECL_DLLIMPORT_P (op)) 3240 || DECL_THREAD_LOCAL_P (op)) 3241 return true; 3242 break; 3243 3244 case CONST_DECL: 3245 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))) 3246 return true; 3247 break; 3248 3249 default: 3250 break; 3251 } 3252 3253 return false; 3254 } 3255 3256 3257 /* Return true if T is function-invariant (internal function, does 3258 not handle arithmetic; that's handled in skip_simple_arithmetic and 3259 tree_invariant_p). 
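
   From a caller's point of view (an illustrative sketch, where CALL stands
   for some hypothetical CALL_EXPR built elsewhere): invariants come back
   from save_expr untouched, everything else is wrapped exactly once:

     tree cst = build_int_cst (integer_type_node, 7);
     gcc_assert (save_expr (cst) == cst);        // constants are invariant
     tree once = save_expr (call);               // call has side effects
     gcc_assert (TREE_CODE (once) == SAVE_EXPR
                 && save_expr (once) == once);   // idempotent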
*/ 3260 3261 static bool 3262 tree_invariant_p_1 (tree t) 3263 { 3264 tree op; 3265 3266 if (TREE_CONSTANT (t) 3267 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t))) 3268 return true; 3269 3270 switch (TREE_CODE (t)) 3271 { 3272 case SAVE_EXPR: 3273 return true; 3274 3275 case ADDR_EXPR: 3276 op = TREE_OPERAND (t, 0); 3277 while (handled_component_p (op)) 3278 { 3279 switch (TREE_CODE (op)) 3280 { 3281 case ARRAY_REF: 3282 case ARRAY_RANGE_REF: 3283 if (!tree_invariant_p (TREE_OPERAND (op, 1)) 3284 || TREE_OPERAND (op, 2) != NULL_TREE 3285 || TREE_OPERAND (op, 3) != NULL_TREE) 3286 return false; 3287 break; 3288 3289 case COMPONENT_REF: 3290 if (TREE_OPERAND (op, 2) != NULL_TREE) 3291 return false; 3292 break; 3293 3294 default:; 3295 } 3296 op = TREE_OPERAND (op, 0); 3297 } 3298 3299 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op); 3300 3301 default: 3302 break; 3303 } 3304 3305 return false; 3306 } 3307 3308 /* Return true if T is function-invariant. */ 3309 3310 bool 3311 tree_invariant_p (tree t) 3312 { 3313 tree inner = skip_simple_arithmetic (t); 3314 return tree_invariant_p_1 (inner); 3315 } 3316 3317 /* Wrap a SAVE_EXPR around EXPR, if appropriate. 3318 Do this to any expression which may be used in more than one place, 3319 but must be evaluated only once. 3320 3321 Normally, expand_expr would reevaluate the expression each time. 3322 Calling save_expr produces something that is evaluated and recorded 3323 the first time expand_expr is called on it. Subsequent calls to 3324 expand_expr just reuse the recorded value. 3325 3326 The call to expand_expr that generates code that actually computes 3327 the value is the first call *at compile time*. Subsequent calls 3328 *at compile time* generate code to use the saved value. 3329 This produces correct result provided that *at run time* control 3330 always flows through the insns made by the first expand_expr 3331 before reaching the other places where the save_expr was evaluated. 3332 You, the caller of save_expr, must make sure this is so. 3333 3334 Constants, and certain read-only nodes, are returned with no 3335 SAVE_EXPR because that is safe. Expressions containing placeholders 3336 are not touched; see tree.def for an explanation of what these 3337 are used for. */ 3338 3339 tree 3340 save_expr (tree expr) 3341 { 3342 tree t = fold (expr); 3343 tree inner; 3344 3345 /* If the tree evaluates to a constant, then we don't want to hide that 3346 fact (i.e. this allows further folding, and direct checks for constants). 3347 However, a read-only object that has side effects cannot be bypassed. 3348 Since it is no problem to reevaluate literals, we just return the 3349 literal node. */ 3350 inner = skip_simple_arithmetic (t); 3351 if (TREE_CODE (inner) == ERROR_MARK) 3352 return inner; 3353 3354 if (tree_invariant_p_1 (inner)) 3355 return t; 3356 3357 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since 3358 it means that the size or offset of some field of an object depends on 3359 the value within another field. 3360 3361 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR 3362 and some variable since it would then need to be both evaluated once and 3363 evaluated more than once. Front-ends must assure this case cannot 3364 happen by surrounding any such subexpressions in their own SAVE_EXPR 3365 and forcing evaluation at the proper time. 
*/ 3366 if (contains_placeholder_p (inner)) 3367 return t; 3368 3369 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t); 3370 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr)); 3371 3372 /* This expression might be placed ahead of a jump to ensure that the 3373 value was computed on both sides of the jump. So make sure it isn't 3374 eliminated as dead. */ 3375 TREE_SIDE_EFFECTS (t) = 1; 3376 return t; 3377 } 3378 3379 /* Look inside EXPR into any simple arithmetic operations. Return the 3380 outermost non-arithmetic or non-invariant node. */ 3381 3382 tree 3383 skip_simple_arithmetic (tree expr) 3384 { 3385 /* We don't care about whether this can be used as an lvalue in this 3386 context. */ 3387 while (TREE_CODE (expr) == NON_LVALUE_EXPR) 3388 expr = TREE_OPERAND (expr, 0); 3389 3390 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and 3391 a constant, it will be more efficient to not make another SAVE_EXPR since 3392 it will allow better simplification and GCSE will be able to merge the 3393 computations if they actually occur. */ 3394 while (true) 3395 { 3396 if (UNARY_CLASS_P (expr)) 3397 expr = TREE_OPERAND (expr, 0); 3398 else if (BINARY_CLASS_P (expr)) 3399 { 3400 if (tree_invariant_p (TREE_OPERAND (expr, 1))) 3401 expr = TREE_OPERAND (expr, 0); 3402 else if (tree_invariant_p (TREE_OPERAND (expr, 0))) 3403 expr = TREE_OPERAND (expr, 1); 3404 else 3405 break; 3406 } 3407 else 3408 break; 3409 } 3410 3411 return expr; 3412 } 3413 3414 /* Look inside EXPR into simple arithmetic operations involving constants. 3415 Return the outermost non-arithmetic or non-constant node. */ 3416 3417 tree 3418 skip_simple_constant_arithmetic (tree expr) 3419 { 3420 while (TREE_CODE (expr) == NON_LVALUE_EXPR) 3421 expr = TREE_OPERAND (expr, 0); 3422 3423 while (true) 3424 { 3425 if (UNARY_CLASS_P (expr)) 3426 expr = TREE_OPERAND (expr, 0); 3427 else if (BINARY_CLASS_P (expr)) 3428 { 3429 if (TREE_CONSTANT (TREE_OPERAND (expr, 1))) 3430 expr = TREE_OPERAND (expr, 0); 3431 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0))) 3432 expr = TREE_OPERAND (expr, 1); 3433 else 3434 break; 3435 } 3436 else 3437 break; 3438 } 3439 3440 return expr; 3441 } 3442 3443 /* Return which tree structure is used by T. */ 3444 3445 enum tree_node_structure_enum 3446 tree_node_structure (const_tree t) 3447 { 3448 const enum tree_code code = TREE_CODE (t); 3449 return tree_node_structure_for_code (code); 3450 } 3451 3452 /* Set various status flags when building a CALL_EXPR object T. */ 3453 3454 static void 3455 process_call_operands (tree t) 3456 { 3457 bool side_effects = TREE_SIDE_EFFECTS (t); 3458 bool read_only = false; 3459 int i = call_expr_flags (t); 3460 3461 /* Calls have side-effects, except those to const or pure functions. */ 3462 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE))) 3463 side_effects = true; 3464 /* Propagate TREE_READONLY of arguments for const functions. */ 3465 if (i & ECF_CONST) 3466 read_only = true; 3467 3468 if (!side_effects || read_only) 3469 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++) 3470 { 3471 tree op = TREE_OPERAND (t, i); 3472 if (op && TREE_SIDE_EFFECTS (op)) 3473 side_effects = true; 3474 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op)) 3475 read_only = false; 3476 } 3477 3478 TREE_SIDE_EFFECTS (t) = side_effects; 3479 TREE_READONLY (t) = read_only; 3480 } 3481 3482 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a 3483 size or offset that depends on a field within a record. 
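
   A sketch of the kind of tree this matters for (RECORD_TYPE and LEN_FIELD
   are hypothetical; the Ada front end builds such self-referential sizes
   for discriminated records):

     // The expression <PLACEHOLDER_EXPR>.len * 4, i.e. a size that reads a
     // field of the "current" object of the record type.
     tree placeholder = build0 (PLACEHOLDER_EXPR, record_type);
     tree len_ref = build3 (COMPONENT_REF, TREE_TYPE (len_field),
                            placeholder, len_field, NULL_TREE);
     tree size = build2 (MULT_EXPR, TREE_TYPE (len_field), len_ref,
                         build_int_cst (TREE_TYPE (len_field), 4));
     gcc_assert (CONTAINS_PLACEHOLDER_P (size));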
*/ 3484 3485 bool 3486 contains_placeholder_p (const_tree exp) 3487 { 3488 enum tree_code code; 3489 3490 if (!exp) 3491 return 0; 3492 3493 code = TREE_CODE (exp); 3494 if (code == PLACEHOLDER_EXPR) 3495 return 1; 3496 3497 switch (TREE_CODE_CLASS (code)) 3498 { 3499 case tcc_reference: 3500 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit 3501 position computations since they will be converted into a 3502 WITH_RECORD_EXPR involving the reference, which will assume 3503 here will be valid. */ 3504 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)); 3505 3506 case tcc_exceptional: 3507 if (code == TREE_LIST) 3508 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp)) 3509 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp))); 3510 break; 3511 3512 case tcc_unary: 3513 case tcc_binary: 3514 case tcc_comparison: 3515 case tcc_expression: 3516 switch (code) 3517 { 3518 case COMPOUND_EXPR: 3519 /* Ignoring the first operand isn't quite right, but works best. */ 3520 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)); 3521 3522 case COND_EXPR: 3523 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)) 3524 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)) 3525 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2))); 3526 3527 case SAVE_EXPR: 3528 /* The save_expr function never wraps anything containing 3529 a PLACEHOLDER_EXPR. */ 3530 return 0; 3531 3532 default: 3533 break; 3534 } 3535 3536 switch (TREE_CODE_LENGTH (code)) 3537 { 3538 case 1: 3539 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)); 3540 case 2: 3541 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)) 3542 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))); 3543 default: 3544 return 0; 3545 } 3546 3547 case tcc_vl_exp: 3548 switch (code) 3549 { 3550 case CALL_EXPR: 3551 { 3552 const_tree arg; 3553 const_call_expr_arg_iterator iter; 3554 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp) 3555 if (CONTAINS_PLACEHOLDER_P (arg)) 3556 return 1; 3557 return 0; 3558 } 3559 default: 3560 return 0; 3561 } 3562 3563 default: 3564 return 0; 3565 } 3566 return 0; 3567 } 3568 3569 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR 3570 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and 3571 field positions. */ 3572 3573 static bool 3574 type_contains_placeholder_1 (const_tree type) 3575 { 3576 /* If the size contains a placeholder or the parent type (component type in 3577 the case of arrays) type involves a placeholder, this type does. */ 3578 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)) 3579 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type)) 3580 || (!POINTER_TYPE_P (type) 3581 && TREE_TYPE (type) 3582 && type_contains_placeholder_p (TREE_TYPE (type)))) 3583 return true; 3584 3585 /* Now do type-specific checks. Note that the last part of the check above 3586 greatly limits what we have to do below. */ 3587 switch (TREE_CODE (type)) 3588 { 3589 case VOID_TYPE: 3590 case POINTER_BOUNDS_TYPE: 3591 case COMPLEX_TYPE: 3592 case ENUMERAL_TYPE: 3593 case BOOLEAN_TYPE: 3594 case POINTER_TYPE: 3595 case OFFSET_TYPE: 3596 case REFERENCE_TYPE: 3597 case METHOD_TYPE: 3598 case FUNCTION_TYPE: 3599 case VECTOR_TYPE: 3600 case NULLPTR_TYPE: 3601 return false; 3602 3603 case INTEGER_TYPE: 3604 case REAL_TYPE: 3605 case FIXED_POINT_TYPE: 3606 /* Here we just check the bounds. 
*/ 3607 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type)) 3608 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type))); 3609 3610 case ARRAY_TYPE: 3611 /* We have already checked the component type above, so just check 3612 the domain type. Flexible array members have a null domain. */ 3613 return TYPE_DOMAIN (type) ? 3614 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false; 3615 3616 case RECORD_TYPE: 3617 case UNION_TYPE: 3618 case QUAL_UNION_TYPE: 3619 { 3620 tree field; 3621 3622 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) 3623 if (TREE_CODE (field) == FIELD_DECL 3624 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field)) 3625 || (TREE_CODE (type) == QUAL_UNION_TYPE 3626 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field))) 3627 || type_contains_placeholder_p (TREE_TYPE (field)))) 3628 return true; 3629 3630 return false; 3631 } 3632 3633 default: 3634 gcc_unreachable (); 3635 } 3636 } 3637 3638 /* Wrapper around above function used to cache its result. */ 3639 3640 bool 3641 type_contains_placeholder_p (tree type) 3642 { 3643 bool result; 3644 3645 /* If the contains_placeholder_bits field has been initialized, 3646 then we know the answer. */ 3647 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0) 3648 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1; 3649 3650 /* Indicate that we've seen this type node, and the answer is false. 3651 This is what we want to return if we run into recursion via fields. */ 3652 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1; 3653 3654 /* Compute the real value. */ 3655 result = type_contains_placeholder_1 (type); 3656 3657 /* Store the real value. */ 3658 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1; 3659 3660 return result; 3661 } 3662 3663 /* Push tree EXP onto vector QUEUE if it is not already present. */ 3664 3665 static void 3666 push_without_duplicates (tree exp, vec<tree> *queue) 3667 { 3668 unsigned int i; 3669 tree iter; 3670 3671 FOR_EACH_VEC_ELT (*queue, i, iter) 3672 if (simple_cst_equal (iter, exp) == 1) 3673 break; 3674 3675 if (!iter) 3676 queue->safe_push (exp); 3677 } 3678 3679 /* Given a tree EXP, find all occurrences of references to fields 3680 in a PLACEHOLDER_EXPR and place them in vector REFS without 3681 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that 3682 we assume here that EXP contains only arithmetic expressions 3683 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their 3684 argument list. */ 3685 3686 void 3687 find_placeholder_in_expr (tree exp, vec<tree> *refs) 3688 { 3689 enum tree_code code = TREE_CODE (exp); 3690 tree inner; 3691 int i; 3692 3693 /* We handle TREE_LIST and COMPONENT_REF separately. */ 3694 if (code == TREE_LIST) 3695 { 3696 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs); 3697 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs); 3698 } 3699 else if (code == COMPONENT_REF) 3700 { 3701 for (inner = TREE_OPERAND (exp, 0); 3702 REFERENCE_CLASS_P (inner); 3703 inner = TREE_OPERAND (inner, 0)) 3704 ; 3705 3706 if (TREE_CODE (inner) == PLACEHOLDER_EXPR) 3707 push_without_duplicates (exp, refs); 3708 else 3709 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs); 3710 } 3711 else 3712 switch (TREE_CODE_CLASS (code)) 3713 { 3714 case tcc_constant: 3715 break; 3716 3717 case tcc_declaration: 3718 /* Variables allocated to static storage can stay. */ 3719 if (!TREE_STATIC (exp)) 3720 push_without_duplicates (exp, refs); 3721 break; 3722 3723 case tcc_expression: 3724 /* This is the pattern built in ada/make_aligning_type. 
*/
3725       if (code == ADDR_EXPR
3726           && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3727         {
3728           push_without_duplicates (exp, refs);
3729           break;
3730         }
3731
3732       /* Fall through.  */
3733
3734     case tcc_exceptional:
3735     case tcc_unary:
3736     case tcc_binary:
3737     case tcc_comparison:
3738     case tcc_reference:
3739       for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3740         FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3741       break;
3742
3743     case tcc_vl_exp:
3744       for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3745         FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3746       break;
3747
3748     default:
3749       gcc_unreachable ();
3750     }
3751 }
3752
3753 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3754    return a tree with all occurrences of references to F in a
3755    PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
3756    CONST_DECLs.  Note that we assume here that EXP contains only
3757    arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3758    occurring only in their argument list.  */
3759
3760 tree
3761 substitute_in_expr (tree exp, tree f, tree r)
3762 {
3763   enum tree_code code = TREE_CODE (exp);
3764   tree op0, op1, op2, op3;
3765   tree new_tree;
3766
3767   /* We handle TREE_LIST and COMPONENT_REF separately.  */
3768   if (code == TREE_LIST)
3769     {
3770       op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3771       op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3772       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3773         return exp;
3774
3775       return tree_cons (TREE_PURPOSE (exp), op1, op0);
3776     }
3777   else if (code == COMPONENT_REF)
3778     {
3779       tree inner;
3780
3781       /* If this expression is getting a value from a PLACEHOLDER_EXPR
3782          and it is the right field, replace it with R.  */
3783       for (inner = TREE_OPERAND (exp, 0);
3784            REFERENCE_CLASS_P (inner);
3785            inner = TREE_OPERAND (inner, 0))
3786         ;
3787
3788       /* The field.  */
3789       op1 = TREE_OPERAND (exp, 1);
3790
3791       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3792         return r;
3793
3794       /* If this expression hasn't been completed yet, leave it alone.  */
3795       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3796         return exp;
3797
3798       op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3799       if (op0 == TREE_OPERAND (exp, 0))
3800         return exp;
3801
3802       new_tree
3803         = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3804     }
3805   else
3806     switch (TREE_CODE_CLASS (code))
3807       {
3808       case tcc_constant:
3809         return exp;
3810
3811       case tcc_declaration:
3812         if (exp == f)
3813           return r;
3814         else
3815           return exp;
3816
3817       case tcc_expression:
3818         if (exp == f)
3819           return r;
3820
3821         /* Fall through.
*/ 3822 3823 case tcc_exceptional: 3824 case tcc_unary: 3825 case tcc_binary: 3826 case tcc_comparison: 3827 case tcc_reference: 3828 switch (TREE_CODE_LENGTH (code)) 3829 { 3830 case 0: 3831 return exp; 3832 3833 case 1: 3834 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); 3835 if (op0 == TREE_OPERAND (exp, 0)) 3836 return exp; 3837 3838 new_tree = fold_build1 (code, TREE_TYPE (exp), op0); 3839 break; 3840 3841 case 2: 3842 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); 3843 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r); 3844 3845 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)) 3846 return exp; 3847 3848 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1); 3849 break; 3850 3851 case 3: 3852 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); 3853 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r); 3854 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r); 3855 3856 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) 3857 && op2 == TREE_OPERAND (exp, 2)) 3858 return exp; 3859 3860 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2); 3861 break; 3862 3863 case 4: 3864 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); 3865 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r); 3866 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r); 3867 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r); 3868 3869 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) 3870 && op2 == TREE_OPERAND (exp, 2) 3871 && op3 == TREE_OPERAND (exp, 3)) 3872 return exp; 3873 3874 new_tree 3875 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3)); 3876 break; 3877 3878 default: 3879 gcc_unreachable (); 3880 } 3881 break; 3882 3883 case tcc_vl_exp: 3884 { 3885 int i; 3886 3887 new_tree = NULL_TREE; 3888 3889 /* If we are trying to replace F with a constant, inline back 3890 functions which do nothing else than computing a value from 3891 the arguments they are passed. This makes it possible to 3892 fold partially or entirely the replacement expression. */ 3893 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR) 3894 { 3895 tree t = maybe_inline_call_in_expr (exp); 3896 if (t) 3897 return SUBSTITUTE_IN_EXPR (t, f, r); 3898 } 3899 3900 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++) 3901 { 3902 tree op = TREE_OPERAND (exp, i); 3903 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r); 3904 if (new_op != op) 3905 { 3906 if (!new_tree) 3907 new_tree = copy_node (exp); 3908 TREE_OPERAND (new_tree, i) = new_op; 3909 } 3910 } 3911 3912 if (new_tree) 3913 { 3914 new_tree = fold (new_tree); 3915 if (TREE_CODE (new_tree) == CALL_EXPR) 3916 process_call_operands (new_tree); 3917 } 3918 else 3919 return exp; 3920 } 3921 break; 3922 3923 default: 3924 gcc_unreachable (); 3925 } 3926 3927 TREE_READONLY (new_tree) |= TREE_READONLY (exp); 3928 3929 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF) 3930 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp); 3931 3932 return new_tree; 3933 } 3934 3935 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement 3936 for it within OBJ, a tree that is an object or a chain of references. */ 3937 3938 tree 3939 substitute_placeholder_in_expr (tree exp, tree obj) 3940 { 3941 enum tree_code code = TREE_CODE (exp); 3942 tree op0, op1, op2, op3; 3943 tree new_tree; 3944 3945 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type 3946 in the chain of OBJ. 
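
   For instance (an illustrative sketch; FIELD and V are hypothetical): if
   FIELD's DECL_SIZE_UNIT is the self-referential tree <PLACEHOLDER_EXPR>.len
   * 4 and V is a VAR_DECL of that record type, then

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_SIZE_UNIT (field), v);

   yields the tree V.len * 4: the PLACEHOLDER_EXPR whose type is the record
   type is replaced by V, found by walking OBJ as described below.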
*/ 3947 if (code == PLACEHOLDER_EXPR) 3948 { 3949 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp)); 3950 tree elt; 3951 3952 for (elt = obj; elt != 0; 3953 elt = ((TREE_CODE (elt) == COMPOUND_EXPR 3954 || TREE_CODE (elt) == COND_EXPR) 3955 ? TREE_OPERAND (elt, 1) 3956 : (REFERENCE_CLASS_P (elt) 3957 || UNARY_CLASS_P (elt) 3958 || BINARY_CLASS_P (elt) 3959 || VL_EXP_CLASS_P (elt) 3960 || EXPRESSION_CLASS_P (elt)) 3961 ? TREE_OPERAND (elt, 0) : 0)) 3962 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type) 3963 return elt; 3964 3965 for (elt = obj; elt != 0; 3966 elt = ((TREE_CODE (elt) == COMPOUND_EXPR 3967 || TREE_CODE (elt) == COND_EXPR) 3968 ? TREE_OPERAND (elt, 1) 3969 : (REFERENCE_CLASS_P (elt) 3970 || UNARY_CLASS_P (elt) 3971 || BINARY_CLASS_P (elt) 3972 || VL_EXP_CLASS_P (elt) 3973 || EXPRESSION_CLASS_P (elt)) 3974 ? TREE_OPERAND (elt, 0) : 0)) 3975 if (POINTER_TYPE_P (TREE_TYPE (elt)) 3976 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt))) 3977 == need_type)) 3978 return fold_build1 (INDIRECT_REF, need_type, elt); 3979 3980 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it 3981 survives until RTL generation, there will be an error. */ 3982 return exp; 3983 } 3984 3985 /* TREE_LIST is special because we need to look at TREE_VALUE 3986 and TREE_CHAIN, not TREE_OPERANDS. */ 3987 else if (code == TREE_LIST) 3988 { 3989 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj); 3990 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj); 3991 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp)) 3992 return exp; 3993 3994 return tree_cons (TREE_PURPOSE (exp), op1, op0); 3995 } 3996 else 3997 switch (TREE_CODE_CLASS (code)) 3998 { 3999 case tcc_constant: 4000 case tcc_declaration: 4001 return exp; 4002 4003 case tcc_exceptional: 4004 case tcc_unary: 4005 case tcc_binary: 4006 case tcc_comparison: 4007 case tcc_expression: 4008 case tcc_reference: 4009 case tcc_statement: 4010 switch (TREE_CODE_LENGTH (code)) 4011 { 4012 case 0: 4013 return exp; 4014 4015 case 1: 4016 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); 4017 if (op0 == TREE_OPERAND (exp, 0)) 4018 return exp; 4019 4020 new_tree = fold_build1 (code, TREE_TYPE (exp), op0); 4021 break; 4022 4023 case 2: 4024 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); 4025 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj); 4026 4027 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)) 4028 return exp; 4029 4030 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1); 4031 break; 4032 4033 case 3: 4034 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); 4035 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj); 4036 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj); 4037 4038 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) 4039 && op2 == TREE_OPERAND (exp, 2)) 4040 return exp; 4041 4042 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2); 4043 break; 4044 4045 case 4: 4046 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); 4047 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj); 4048 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj); 4049 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj); 4050 4051 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) 4052 && op2 == TREE_OPERAND (exp, 2) 4053 && op3 == TREE_OPERAND (exp, 3)) 4054 return exp; 4055 4056 new_tree 4057 = fold (build4 (code, TREE_TYPE 
(exp), op0, op1, op2, op3)); 4058 break; 4059 4060 default: 4061 gcc_unreachable (); 4062 } 4063 break; 4064 4065 case tcc_vl_exp: 4066 { 4067 int i; 4068 4069 new_tree = NULL_TREE; 4070 4071 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++) 4072 { 4073 tree op = TREE_OPERAND (exp, i); 4074 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj); 4075 if (new_op != op) 4076 { 4077 if (!new_tree) 4078 new_tree = copy_node (exp); 4079 TREE_OPERAND (new_tree, i) = new_op; 4080 } 4081 } 4082 4083 if (new_tree) 4084 { 4085 new_tree = fold (new_tree); 4086 if (TREE_CODE (new_tree) == CALL_EXPR) 4087 process_call_operands (new_tree); 4088 } 4089 else 4090 return exp; 4091 } 4092 break; 4093 4094 default: 4095 gcc_unreachable (); 4096 } 4097 4098 TREE_READONLY (new_tree) |= TREE_READONLY (exp); 4099 4100 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF) 4101 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp); 4102 4103 return new_tree; 4104 } 4105 4106 4107 /* Subroutine of stabilize_reference; this is called for subtrees of 4108 references. Any expression with side-effects must be put in a SAVE_EXPR 4109 to ensure that it is only evaluated once. 4110 4111 We don't put SAVE_EXPR nodes around everything, because assigning very 4112 simple expressions to temporaries causes us to miss good opportunities 4113 for optimizations. Among other things, the opportunity to fold in the 4114 addition of a constant into an addressing mode often gets lost, e.g. 4115 "y[i+1] += x;". In general, we take the approach that we should not make 4116 an assignment unless we are forced into it - i.e., that any non-side effect 4117 operator should be allowed, and that cse should take care of coalescing 4118 multiple utterances of the same expression should that prove fruitful. */ 4119 4120 static tree 4121 stabilize_reference_1 (tree e) 4122 { 4123 tree result; 4124 enum tree_code code = TREE_CODE (e); 4125 4126 /* We cannot ignore const expressions because it might be a reference 4127 to a const array but whose index contains side-effects. But we can 4128 ignore things that are actual constant or that already have been 4129 handled by this function. */ 4130 4131 if (tree_invariant_p (e)) 4132 return e; 4133 4134 switch (TREE_CODE_CLASS (code)) 4135 { 4136 case tcc_exceptional: 4137 case tcc_type: 4138 case tcc_declaration: 4139 case tcc_comparison: 4140 case tcc_statement: 4141 case tcc_expression: 4142 case tcc_reference: 4143 case tcc_vl_exp: 4144 /* If the expression has side-effects, then encase it in a SAVE_EXPR 4145 so that it will only be evaluated once. */ 4146 /* The reference (r) and comparison (<) classes could be handled as 4147 below, but it is generally faster to only evaluate them once. */ 4148 if (TREE_SIDE_EFFECTS (e)) 4149 return save_expr (e); 4150 return e; 4151 4152 case tcc_constant: 4153 /* Constants need no processing. In fact, we should never reach 4154 here. */ 4155 return e; 4156 4157 case tcc_binary: 4158 /* Division is slow and tends to be compiled with jumps, 4159 especially the division by powers of 2 that is often 4160 found inside of an array reference. So do it just once. */ 4161 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR 4162 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR 4163 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR 4164 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR) 4165 return save_expr (e); 4166 /* Recursively stabilize each operand. 
*/ 4167 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)), 4168 stabilize_reference_1 (TREE_OPERAND (e, 1))); 4169 break; 4170 4171 case tcc_unary: 4172 /* Recursively stabilize each operand. */ 4173 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0))); 4174 break; 4175 4176 default: 4177 gcc_unreachable (); 4178 } 4179 4180 TREE_TYPE (result) = TREE_TYPE (e); 4181 TREE_READONLY (result) = TREE_READONLY (e); 4182 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e); 4183 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e); 4184 4185 return result; 4186 } 4187 4188 /* Stabilize a reference so that we can use it any number of times 4189 without causing its operands to be evaluated more than once. 4190 Returns the stabilized reference. This works by means of save_expr, 4191 so see the caveats in the comments about save_expr. 4192 4193 Also allows conversion expressions whose operands are references. 4194 Any other kind of expression is returned unchanged. */ 4195 4196 tree 4197 stabilize_reference (tree ref) 4198 { 4199 tree result; 4200 enum tree_code code = TREE_CODE (ref); 4201 4202 switch (code) 4203 { 4204 case VAR_DECL: 4205 case PARM_DECL: 4206 case RESULT_DECL: 4207 /* No action is needed in this case. */ 4208 return ref; 4209 4210 CASE_CONVERT: 4211 case FLOAT_EXPR: 4212 case FIX_TRUNC_EXPR: 4213 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0))); 4214 break; 4215 4216 case INDIRECT_REF: 4217 result = build_nt (INDIRECT_REF, 4218 stabilize_reference_1 (TREE_OPERAND (ref, 0))); 4219 break; 4220 4221 case COMPONENT_REF: 4222 result = build_nt (COMPONENT_REF, 4223 stabilize_reference (TREE_OPERAND (ref, 0)), 4224 TREE_OPERAND (ref, 1), NULL_TREE); 4225 break; 4226 4227 case BIT_FIELD_REF: 4228 result = build_nt (BIT_FIELD_REF, 4229 stabilize_reference (TREE_OPERAND (ref, 0)), 4230 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2)); 4231 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref); 4232 break; 4233 4234 case ARRAY_REF: 4235 result = build_nt (ARRAY_REF, 4236 stabilize_reference (TREE_OPERAND (ref, 0)), 4237 stabilize_reference_1 (TREE_OPERAND (ref, 1)), 4238 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3)); 4239 break; 4240 4241 case ARRAY_RANGE_REF: 4242 result = build_nt (ARRAY_RANGE_REF, 4243 stabilize_reference (TREE_OPERAND (ref, 0)), 4244 stabilize_reference_1 (TREE_OPERAND (ref, 1)), 4245 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3)); 4246 break; 4247 4248 case COMPOUND_EXPR: 4249 /* We cannot wrap the first expression in a SAVE_EXPR, as then 4250 it wouldn't be ignored. This matters when dealing with 4251 volatiles. */ 4252 return stabilize_reference_1 (ref); 4253 4254 /* If arg isn't a kind of lvalue we recognize, make no change. 4255 Caller should recognize the error for an invalid lvalue. */ 4256 default: 4257 return ref; 4258 4259 case ERROR_MARK: 4260 return error_mark_node; 4261 } 4262 4263 TREE_TYPE (result) = TREE_TYPE (ref); 4264 TREE_READONLY (result) = TREE_READONLY (ref); 4265 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref); 4266 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref); 4267 4268 return result; 4269 } 4270 4271 /* Low-level constructors for expressions. */ 4272 4273 /* A helper function for build1 and constant folders. Set TREE_CONSTANT, 4274 and TREE_SIDE_EFFECTS for an ADDR_EXPR. 
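   As an illustrative sketch only (`ptr_type' and `ref' stand for whatever
   pointer type and lvalue a hypothetical caller has at hand; they are not
   defined in this file), a caller that builds an ADDR_EXPR by hand is
   expected to follow it with a call to this function so the flags stay
   consistent:

     tree addr = build1 (ADDR_EXPR, ptr_type, ref);
     recompute_tree_invariant_for_addr_expr (addr);

   build_invariant_address below uses exactly this pattern, and build1
   itself calls this function when given a non-NULL ADDR_EXPR operand.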
*/ 4275 4276 void 4277 recompute_tree_invariant_for_addr_expr (tree t) 4278 { 4279 tree node; 4280 bool tc = true, se = false; 4281 4282 gcc_assert (TREE_CODE (t) == ADDR_EXPR); 4283 4284 /* We started out assuming this address is both invariant and constant, but 4285 does not have side effects. Now go down any handled components and see if 4286 any of them involve offsets that are either non-constant or non-invariant. 4287 Also check for side-effects. 4288 4289 ??? Note that this code makes no attempt to deal with the case where 4290 taking the address of something causes a copy due to misalignment. */ 4291 4292 #define UPDATE_FLAGS(NODE) \ 4293 do { tree _node = (NODE); \ 4294 if (_node && !TREE_CONSTANT (_node)) tc = false; \ 4295 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0) 4296 4297 for (node = TREE_OPERAND (t, 0); handled_component_p (node); 4298 node = TREE_OPERAND (node, 0)) 4299 { 4300 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus 4301 array reference (probably made temporarily by the G++ front end), 4302 so ignore all the operands. */ 4303 if ((TREE_CODE (node) == ARRAY_REF 4304 || TREE_CODE (node) == ARRAY_RANGE_REF) 4305 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE) 4306 { 4307 UPDATE_FLAGS (TREE_OPERAND (node, 1)); 4308 if (TREE_OPERAND (node, 2)) 4309 UPDATE_FLAGS (TREE_OPERAND (node, 2)); 4310 if (TREE_OPERAND (node, 3)) 4311 UPDATE_FLAGS (TREE_OPERAND (node, 3)); 4312 } 4313 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a 4314 FIELD_DECL, apparently. The G++ front end can put something else 4315 there, at least temporarily. */ 4316 else if (TREE_CODE (node) == COMPONENT_REF 4317 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL) 4318 { 4319 if (TREE_OPERAND (node, 2)) 4320 UPDATE_FLAGS (TREE_OPERAND (node, 2)); 4321 } 4322 } 4323 4324 node = lang_hooks.expr_to_decl (node, &tc, &se); 4325 4326 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from 4327 the address, since &(*a)->b is a form of addition. If it's a constant, the 4328 address is constant too. If it's a decl, its address is constant if the 4329 decl is static. Everything else is not constant and, furthermore, 4330 taking the address of a volatile variable is not volatile. */ 4331 if (TREE_CODE (node) == INDIRECT_REF 4332 || TREE_CODE (node) == MEM_REF) 4333 UPDATE_FLAGS (TREE_OPERAND (node, 0)); 4334 else if (CONSTANT_CLASS_P (node)) 4335 ; 4336 else if (DECL_P (node)) 4337 tc &= (staticp (node) != NULL_TREE); 4338 else 4339 { 4340 tc = false; 4341 se |= TREE_SIDE_EFFECTS (node); 4342 } 4343 4344 4345 TREE_CONSTANT (t) = tc; 4346 TREE_SIDE_EFFECTS (t) = se; 4347 #undef UPDATE_FLAGS 4348 } 4349 4350 /* Build an expression of code CODE, data type TYPE, and operands as 4351 specified. Expressions and reference nodes can be created this way. 4352 Constants, decls, types and misc nodes cannot be. 4353 4354 We define 5 non-variadic functions, from 0 to 4 arguments. This is 4355 enough for all extant tree codes. 
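   (A five-operand build5, used below for TARGET_MEM_REF, follows the same
   pattern.)

   Purely as an illustrative sketch (`a' and `b' are hypothetical trees of
   type sizetype owned by the caller, not anything defined here):

     tree sum = build2 (PLUS_EXPR, sizetype, a, b);

   builds the expression a + b without any simplification; callers that
   want folding use the fold_buildN wrappers from fold-const.c instead.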
*/ 4356 4357 tree 4358 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL) 4359 { 4360 tree t; 4361 4362 gcc_assert (TREE_CODE_LENGTH (code) == 0); 4363 4364 t = make_node_stat (code PASS_MEM_STAT); 4365 TREE_TYPE (t) = tt; 4366 4367 return t; 4368 } 4369 4370 tree 4371 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL) 4372 { 4373 int length = sizeof (struct tree_exp); 4374 tree t; 4375 4376 record_node_allocation_statistics (code, length); 4377 4378 gcc_assert (TREE_CODE_LENGTH (code) == 1); 4379 4380 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT); 4381 4382 memset (t, 0, sizeof (struct tree_common)); 4383 4384 TREE_SET_CODE (t, code); 4385 4386 TREE_TYPE (t) = type; 4387 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION); 4388 TREE_OPERAND (t, 0) = node; 4389 if (node && !TYPE_P (node)) 4390 { 4391 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node); 4392 TREE_READONLY (t) = TREE_READONLY (node); 4393 } 4394 4395 if (TREE_CODE_CLASS (code) == tcc_statement) 4396 TREE_SIDE_EFFECTS (t) = 1; 4397 else switch (code) 4398 { 4399 case VA_ARG_EXPR: 4400 /* All of these have side-effects, no matter what their 4401 operands are. */ 4402 TREE_SIDE_EFFECTS (t) = 1; 4403 TREE_READONLY (t) = 0; 4404 break; 4405 4406 case INDIRECT_REF: 4407 /* Whether a dereference is readonly has nothing to do with whether 4408 its operand is readonly. */ 4409 TREE_READONLY (t) = 0; 4410 break; 4411 4412 case ADDR_EXPR: 4413 if (node) 4414 recompute_tree_invariant_for_addr_expr (t); 4415 break; 4416 4417 default: 4418 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR) 4419 && node && !TYPE_P (node) 4420 && TREE_CONSTANT (node)) 4421 TREE_CONSTANT (t) = 1; 4422 if (TREE_CODE_CLASS (code) == tcc_reference 4423 && node && TREE_THIS_VOLATILE (node)) 4424 TREE_THIS_VOLATILE (t) = 1; 4425 break; 4426 } 4427 4428 return t; 4429 } 4430 4431 #define PROCESS_ARG(N) \ 4432 do { \ 4433 TREE_OPERAND (t, N) = arg##N; \ 4434 if (arg##N &&!TYPE_P (arg##N)) \ 4435 { \ 4436 if (TREE_SIDE_EFFECTS (arg##N)) \ 4437 side_effects = 1; \ 4438 if (!TREE_READONLY (arg##N) \ 4439 && !CONSTANT_CLASS_P (arg##N)) \ 4440 (void) (read_only = 0); \ 4441 if (!TREE_CONSTANT (arg##N)) \ 4442 (void) (constant = 0); \ 4443 } \ 4444 } while (0) 4445 4446 tree 4447 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL) 4448 { 4449 bool constant, read_only, side_effects; 4450 tree t; 4451 4452 gcc_assert (TREE_CODE_LENGTH (code) == 2); 4453 4454 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR) 4455 && arg0 && arg1 && tt && POINTER_TYPE_P (tt) 4456 /* When sizetype precision doesn't match that of pointers 4457 we need to be able to build explicit extensions or truncations 4458 of the offset argument. */ 4459 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt)) 4460 gcc_assert (TREE_CODE (arg0) == INTEGER_CST 4461 && TREE_CODE (arg1) == INTEGER_CST); 4462 4463 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt) 4464 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0)) 4465 && ptrofftype_p (TREE_TYPE (arg1))); 4466 4467 t = make_node_stat (code PASS_MEM_STAT); 4468 TREE_TYPE (t) = tt; 4469 4470 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the 4471 result based on those same flags for the arguments. But if the 4472 arguments aren't really even `tree' expressions, we shouldn't be trying 4473 to do this. */ 4474 4475 /* Expressions without side effects may be constant if their 4476 arguments are as well. 
*/ 4477 constant = (TREE_CODE_CLASS (code) == tcc_comparison 4478 || TREE_CODE_CLASS (code) == tcc_binary); 4479 read_only = 1; 4480 side_effects = TREE_SIDE_EFFECTS (t); 4481 4482 PROCESS_ARG (0); 4483 PROCESS_ARG (1); 4484 4485 TREE_SIDE_EFFECTS (t) = side_effects; 4486 if (code == MEM_REF) 4487 { 4488 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR) 4489 { 4490 tree o = TREE_OPERAND (arg0, 0); 4491 TREE_READONLY (t) = TREE_READONLY (o); 4492 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o); 4493 } 4494 } 4495 else 4496 { 4497 TREE_READONLY (t) = read_only; 4498 TREE_CONSTANT (t) = constant; 4499 TREE_THIS_VOLATILE (t) 4500 = (TREE_CODE_CLASS (code) == tcc_reference 4501 && arg0 && TREE_THIS_VOLATILE (arg0)); 4502 } 4503 4504 return t; 4505 } 4506 4507 4508 tree 4509 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1, 4510 tree arg2 MEM_STAT_DECL) 4511 { 4512 bool constant, read_only, side_effects; 4513 tree t; 4514 4515 gcc_assert (TREE_CODE_LENGTH (code) == 3); 4516 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); 4517 4518 t = make_node_stat (code PASS_MEM_STAT); 4519 TREE_TYPE (t) = tt; 4520 4521 read_only = 1; 4522 4523 /* As a special exception, if COND_EXPR has NULL branches, we 4524 assume that it is a gimple statement and always consider 4525 it to have side effects. */ 4526 if (code == COND_EXPR 4527 && tt == void_type_node 4528 && arg1 == NULL_TREE 4529 && arg2 == NULL_TREE) 4530 side_effects = true; 4531 else 4532 side_effects = TREE_SIDE_EFFECTS (t); 4533 4534 PROCESS_ARG (0); 4535 PROCESS_ARG (1); 4536 PROCESS_ARG (2); 4537 4538 if (code == COND_EXPR) 4539 TREE_READONLY (t) = read_only; 4540 4541 TREE_SIDE_EFFECTS (t) = side_effects; 4542 TREE_THIS_VOLATILE (t) 4543 = (TREE_CODE_CLASS (code) == tcc_reference 4544 && arg0 && TREE_THIS_VOLATILE (arg0)); 4545 4546 return t; 4547 } 4548 4549 tree 4550 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1, 4551 tree arg2, tree arg3 MEM_STAT_DECL) 4552 { 4553 bool constant, read_only, side_effects; 4554 tree t; 4555 4556 gcc_assert (TREE_CODE_LENGTH (code) == 4); 4557 4558 t = make_node_stat (code PASS_MEM_STAT); 4559 TREE_TYPE (t) = tt; 4560 4561 side_effects = TREE_SIDE_EFFECTS (t); 4562 4563 PROCESS_ARG (0); 4564 PROCESS_ARG (1); 4565 PROCESS_ARG (2); 4566 PROCESS_ARG (3); 4567 4568 TREE_SIDE_EFFECTS (t) = side_effects; 4569 TREE_THIS_VOLATILE (t) 4570 = (TREE_CODE_CLASS (code) == tcc_reference 4571 && arg0 && TREE_THIS_VOLATILE (arg0)); 4572 4573 return t; 4574 } 4575 4576 tree 4577 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1, 4578 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL) 4579 { 4580 bool constant, read_only, side_effects; 4581 tree t; 4582 4583 gcc_assert (TREE_CODE_LENGTH (code) == 5); 4584 4585 t = make_node_stat (code PASS_MEM_STAT); 4586 TREE_TYPE (t) = tt; 4587 4588 side_effects = TREE_SIDE_EFFECTS (t); 4589 4590 PROCESS_ARG (0); 4591 PROCESS_ARG (1); 4592 PROCESS_ARG (2); 4593 PROCESS_ARG (3); 4594 PROCESS_ARG (4); 4595 4596 TREE_SIDE_EFFECTS (t) = side_effects; 4597 if (code == TARGET_MEM_REF) 4598 { 4599 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR) 4600 { 4601 tree o = TREE_OPERAND (arg0, 0); 4602 TREE_READONLY (t) = TREE_READONLY (o); 4603 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o); 4604 } 4605 } 4606 else 4607 TREE_THIS_VOLATILE (t) 4608 = (TREE_CODE_CLASS (code) == tcc_reference 4609 && arg0 && TREE_THIS_VOLATILE (arg0)); 4610 4611 return t; 4612 } 4613 4614 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF 4615 on the pointer PTR. 
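   As an illustrative sketch (`loc' and `p' are hypothetical values supplied
   by a caller, not anything defined here), given a pointer P of type T *,

     tree deref = build_simple_mem_ref_loc (loc, p);

   yields MEM_REF <p, 0> whose type is T.  As a convenience, the code below
   also accepts a PTR that is an ADDR_EXPR collapsing to a simple base plus
   constant offset, in which case that offset is folded into the MEM_REF.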
*/ 4616 4617 tree 4618 build_simple_mem_ref_loc (location_t loc, tree ptr) 4619 { 4620 HOST_WIDE_INT offset = 0; 4621 tree ptype = TREE_TYPE (ptr); 4622 tree tem; 4623 /* For convenience allow addresses that collapse to a simple base 4624 and offset. */ 4625 if (TREE_CODE (ptr) == ADDR_EXPR 4626 && (handled_component_p (TREE_OPERAND (ptr, 0)) 4627 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF)) 4628 { 4629 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset); 4630 gcc_assert (ptr); 4631 ptr = build_fold_addr_expr (ptr); 4632 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr)); 4633 } 4634 tem = build2 (MEM_REF, TREE_TYPE (ptype), 4635 ptr, build_int_cst (ptype, offset)); 4636 SET_EXPR_LOCATION (tem, loc); 4637 return tem; 4638 } 4639 4640 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */ 4641 4642 offset_int 4643 mem_ref_offset (const_tree t) 4644 { 4645 return offset_int::from (TREE_OPERAND (t, 1), SIGNED); 4646 } 4647 4648 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE 4649 offsetted by OFFSET units. */ 4650 4651 tree 4652 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset) 4653 { 4654 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type), 4655 build_fold_addr_expr (base), 4656 build_int_cst (ptr_type_node, offset)); 4657 tree addr = build1 (ADDR_EXPR, type, ref); 4658 recompute_tree_invariant_for_addr_expr (addr); 4659 return addr; 4660 } 4661 4662 /* Similar except don't specify the TREE_TYPE 4663 and leave the TREE_SIDE_EFFECTS as 0. 4664 It is permissible for arguments to be null, 4665 or even garbage if their values do not matter. */ 4666 4667 tree 4668 build_nt (enum tree_code code, ...) 4669 { 4670 tree t; 4671 int length; 4672 int i; 4673 va_list p; 4674 4675 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); 4676 4677 va_start (p, code); 4678 4679 t = make_node (code); 4680 length = TREE_CODE_LENGTH (code); 4681 4682 for (i = 0; i < length; i++) 4683 TREE_OPERAND (t, i) = va_arg (p, tree); 4684 4685 va_end (p); 4686 return t; 4687 } 4688 4689 /* Similar to build_nt, but for creating a CALL_EXPR object with a 4690 tree vec. */ 4691 4692 tree 4693 build_nt_call_vec (tree fn, vec<tree, va_gc> *args) 4694 { 4695 tree ret, t; 4696 unsigned int ix; 4697 4698 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3); 4699 CALL_EXPR_FN (ret) = fn; 4700 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE; 4701 FOR_EACH_VEC_SAFE_ELT (args, ix, t) 4702 CALL_EXPR_ARG (ret, ix) = t; 4703 return ret; 4704 } 4705 4706 /* Create a DECL_... node of code CODE, name NAME and data type TYPE. 4707 We do NOT enter this node in any sort of symbol table. 4708 4709 LOC is the location of the decl. 4710 4711 layout_decl is used to set up the decl's storage layout. 4712 Other slots are initialized to 0 or null pointers. */ 4713 4714 tree 4715 build_decl_stat (location_t loc, enum tree_code code, tree name, 4716 tree type MEM_STAT_DECL) 4717 { 4718 tree t; 4719 4720 t = make_node_stat (code PASS_MEM_STAT); 4721 DECL_SOURCE_LOCATION (t) = loc; 4722 4723 /* if (type == error_mark_node) 4724 type = integer_type_node; */ 4725 /* That is not done, deliberately, so that having error_mark_node 4726 as the type can suppress useless errors in the use of this variable. */ 4727 4728 DECL_NAME (t) = name; 4729 TREE_TYPE (t) = type; 4730 4731 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL) 4732 layout_decl (t, 0); 4733 4734 return t; 4735 } 4736 4737 /* Builds and returns function declaration with NAME and TYPE. 
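   For illustration only (the name "__frobnicate" and its signature are made
   up for this sketch, not something GCC defines), a caller could declare an
   external runtime routine taking no arguments like so:

     tree ftype = build_function_type_list (void_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("__frobnicate", ftype);

   The resulting FUNCTION_DECL is marked DECL_EXTERNAL, TREE_PUBLIC,
   DECL_ARTIFICIAL and TREE_NOTHROW, as set below.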
*/ 4738 4739 tree 4740 build_fn_decl (const char *name, tree type) 4741 { 4742 tree id = get_identifier (name); 4743 tree decl = build_decl (input_location, FUNCTION_DECL, id, type); 4744 4745 DECL_EXTERNAL (decl) = 1; 4746 TREE_PUBLIC (decl) = 1; 4747 DECL_ARTIFICIAL (decl) = 1; 4748 TREE_NOTHROW (decl) = 1; 4749 4750 return decl; 4751 } 4752 4753 vec<tree, va_gc> *all_translation_units; 4754 4755 /* Builds a new translation-unit decl with name NAME, queues it in the 4756 global list of translation-unit decls and returns it. */ 4757 4758 tree 4759 build_translation_unit_decl (tree name) 4760 { 4761 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL, 4762 name, NULL_TREE); 4763 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name; 4764 vec_safe_push (all_translation_units, tu); 4765 return tu; 4766 } 4767 4768 4769 /* BLOCK nodes are used to represent the structure of binding contours 4770 and declarations, once those contours have been exited and their contents 4771 compiled. This information is used for outputting debugging info. */ 4772 4773 tree 4774 build_block (tree vars, tree subblocks, tree supercontext, tree chain) 4775 { 4776 tree block = make_node (BLOCK); 4777 4778 BLOCK_VARS (block) = vars; 4779 BLOCK_SUBBLOCKS (block) = subblocks; 4780 BLOCK_SUPERCONTEXT (block) = supercontext; 4781 BLOCK_CHAIN (block) = chain; 4782 return block; 4783 } 4784 4785 4786 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location. 4787 4788 LOC is the location to use in tree T. */ 4789 4790 void 4791 protected_set_expr_location (tree t, location_t loc) 4792 { 4793 if (CAN_HAVE_LOCATION_P (t)) 4794 SET_EXPR_LOCATION (t, loc); 4795 } 4796 4797 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES 4798 is ATTRIBUTE. */ 4799 4800 tree 4801 build_decl_attribute_variant (tree ddecl, tree attribute) 4802 { 4803 DECL_ATTRIBUTES (ddecl) = attribute; 4804 return ddecl; 4805 } 4806 4807 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE 4808 is ATTRIBUTE and its qualifiers are QUALS. 4809 4810 Record such modified types already made so we don't make duplicates. */ 4811 4812 tree 4813 build_type_attribute_qual_variant (tree otype, tree attribute, int quals) 4814 { 4815 tree ttype = otype; 4816 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute)) 4817 { 4818 inchash::hash hstate; 4819 tree ntype; 4820 int i; 4821 tree t; 4822 enum tree_code code = TREE_CODE (ttype); 4823 4824 /* Building a distinct copy of a tagged type is inappropriate; it 4825 causes breakage in code that expects there to be a one-to-one 4826 relationship between a struct and its fields. 4827 build_duplicate_type is another solution (as used in 4828 handle_transparent_union_attribute), but that doesn't play well 4829 with the stronger C++ type identity model. 
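   A typical way to reach this case (an illustrative source-level example,
   not an exhaustive description) is an attribute applied to a tag type only
   after its definition, e.g.

     struct S { int i; };
     typedef struct S __attribute__ ((may_alias)) T;

   Rather than creating a distinct copy of struct S, the code below warns
   that the attribute is ignored and only applies the requested qualifiers.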
*/ 4830 if (TREE_CODE (ttype) == RECORD_TYPE 4831 || TREE_CODE (ttype) == UNION_TYPE 4832 || TREE_CODE (ttype) == QUAL_UNION_TYPE 4833 || TREE_CODE (ttype) == ENUMERAL_TYPE) 4834 { 4835 warning (OPT_Wattributes, 4836 "ignoring attributes applied to %qT after definition", 4837 TYPE_MAIN_VARIANT (ttype)); 4838 return build_qualified_type (ttype, quals); 4839 } 4840 4841 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED); 4842 if (lang_hooks.types.copy_lang_qualifiers 4843 && otype != TYPE_MAIN_VARIANT (otype)) 4844 ttype = (lang_hooks.types.copy_lang_qualifiers 4845 (ttype, TYPE_MAIN_VARIANT (otype))); 4846 4847 ntype = build_distinct_type_copy (ttype); 4848 4849 TYPE_ATTRIBUTES (ntype) = attribute; 4850 4851 hstate.add_int (code); 4852 if (TREE_TYPE (ntype)) 4853 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype))); 4854 attribute_hash_list (attribute, hstate); 4855 4856 switch (TREE_CODE (ntype)) 4857 { 4858 case FUNCTION_TYPE: 4859 type_hash_list (TYPE_ARG_TYPES (ntype), hstate); 4860 break; 4861 case ARRAY_TYPE: 4862 if (TYPE_DOMAIN (ntype)) 4863 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype))); 4864 break; 4865 case INTEGER_TYPE: 4866 t = TYPE_MAX_VALUE (ntype); 4867 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++) 4868 hstate.add_object (TREE_INT_CST_ELT (t, i)); 4869 break; 4870 case REAL_TYPE: 4871 case FIXED_POINT_TYPE: 4872 { 4873 unsigned int precision = TYPE_PRECISION (ntype); 4874 hstate.add_object (precision); 4875 } 4876 break; 4877 default: 4878 break; 4879 } 4880 4881 ntype = type_hash_canon (hstate.end(), ntype); 4882 4883 /* If the target-dependent attributes make NTYPE different from 4884 its canonical type, we will need to use structural equality 4885 checks for this type. */ 4886 if (TYPE_STRUCTURAL_EQUALITY_P (ttype) 4887 || !comp_type_attributes (ntype, ttype)) 4888 SET_TYPE_STRUCTURAL_EQUALITY (ntype); 4889 else if (TYPE_CANONICAL (ntype) == ntype) 4890 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype); 4891 4892 ttype = build_qualified_type (ntype, quals); 4893 if (lang_hooks.types.copy_lang_qualifiers 4894 && otype != TYPE_MAIN_VARIANT (otype)) 4895 ttype = lang_hooks.types.copy_lang_qualifiers (ttype, otype); 4896 } 4897 else if (TYPE_QUALS (ttype) != quals) 4898 ttype = build_qualified_type (ttype, quals); 4899 4900 return ttype; 4901 } 4902 4903 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are 4904 the same. */ 4905 4906 static bool 4907 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2) 4908 { 4909 tree cl1, cl2; 4910 for (cl1 = clauses1, cl2 = clauses2; 4911 cl1 && cl2; 4912 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2)) 4913 { 4914 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2)) 4915 return false; 4916 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN) 4917 { 4918 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1), 4919 OMP_CLAUSE_DECL (cl2)) != 1) 4920 return false; 4921 } 4922 switch (OMP_CLAUSE_CODE (cl1)) 4923 { 4924 case OMP_CLAUSE_ALIGNED: 4925 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1), 4926 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1) 4927 return false; 4928 break; 4929 case OMP_CLAUSE_LINEAR: 4930 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1), 4931 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1) 4932 return false; 4933 break; 4934 case OMP_CLAUSE_SIMDLEN: 4935 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1), 4936 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1) 4937 return false; 4938 default: 4939 break; 4940 } 4941 } 4942 return true; 4943 } 4944 4945 /* Compare two constructor-element-type constants. 
Return true if the lists 4946 are known to be equal; otherwise return false. */ 4947 4948 static bool 4949 simple_cst_list_equal (const_tree l1, const_tree l2) 4950 { 4951 while (l1 != NULL_TREE && l2 != NULL_TREE) 4952 { 4953 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1) 4954 return false; 4955 4956 l1 = TREE_CHAIN (l1); 4957 l2 = TREE_CHAIN (l2); 4958 } 4959 4960 return l1 == l2; 4961 } 4962 4963 /* Compare two identifier nodes representing attributes. Either one may 4964 be in the underscore-wrapped '__text__' form. Return true if they are 4965 the same, false otherwise. */ 4966 4967 static bool 4968 cmp_attrib_identifiers (const_tree attr1, const_tree attr2) 4969 { 4970 /* Make sure we're dealing with IDENTIFIER_NODEs. */ 4971 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE 4972 && TREE_CODE (attr2) == IDENTIFIER_NODE); 4973 4974 /* Identifiers can be compared directly for equality. */ 4975 if (attr1 == attr2) 4976 return true; 4977 4978 /* If they are not equal, one may still be in the form 4979 'text' while the other is in the form '__text__'. TODO: 4980 If we were storing attributes in normalized 'text' form, then 4981 this could all go away and we could take full advantage of 4982 the fact that we're comparing identifiers. :-) */ 4983 const size_t attr1_len = IDENTIFIER_LENGTH (attr1); 4984 const size_t attr2_len = IDENTIFIER_LENGTH (attr2); 4985 4986 if (attr2_len == attr1_len + 4) 4987 { 4988 const char *p = IDENTIFIER_POINTER (attr2); 4989 const char *q = IDENTIFIER_POINTER (attr1); 4990 if (p[0] == '_' && p[1] == '_' 4991 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_' 4992 && strncmp (q, p + 2, attr1_len) == 0) 4993 return true; 4994 } 4995 else if (attr2_len + 4 == attr1_len) 4996 { 4997 const char *p = IDENTIFIER_POINTER (attr2); 4998 const char *q = IDENTIFIER_POINTER (attr1); 4999 if (q[0] == '_' && q[1] == '_' 5000 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_' 5001 && strncmp (q + 2, p, attr2_len) == 0) 5002 return true; 5003 } 5004 5005 return false; 5006 } 5007 5008 /* Compare two attributes for their value identity. Return true if the 5009 attribute values are known to be equal; otherwise return false. */ 5010 5011 bool 5012 attribute_value_equal (const_tree attr1, const_tree attr2) 5013 { 5014 if (TREE_VALUE (attr1) == TREE_VALUE (attr2)) 5015 return true; 5016 5017 if (TREE_VALUE (attr1) != NULL_TREE 5018 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST 5019 && TREE_VALUE (attr2) != NULL_TREE 5020 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST) 5021 { 5022 /* Handle attribute format. */ 5023 if (is_attribute_p ("format", get_attribute_name (attr1))) 5024 { 5025 attr1 = TREE_VALUE (attr1); 5026 attr2 = TREE_VALUE (attr2); 5027 /* Compare the archetypes (printf/scanf/strftime/...). */ 5028 if (!cmp_attrib_identifiers (TREE_VALUE (attr1), 5029 TREE_VALUE (attr2))) 5030 return false; 5031 /* Archetypes are the same. Compare the rest.
*/ 5032 return (simple_cst_list_equal (TREE_CHAIN (attr1), 5033 TREE_CHAIN (attr2)) == 1); 5034 } 5035 return (simple_cst_list_equal (TREE_VALUE (attr1), 5036 TREE_VALUE (attr2)) == 1); 5037 } 5038 5039 if ((flag_openmp || flag_openmp_simd) 5040 && TREE_VALUE (attr1) && TREE_VALUE (attr2) 5041 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE 5042 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE) 5043 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1), 5044 TREE_VALUE (attr2)); 5045 5046 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1); 5047 } 5048 5049 /* Return 0 if the attributes for two types are incompatible, 1 if they 5050 are compatible, and 2 if they are nearly compatible (which causes a 5051 warning to be generated). */ 5052 int 5053 comp_type_attributes (const_tree type1, const_tree type2) 5054 { 5055 const_tree a1 = TYPE_ATTRIBUTES (type1); 5056 const_tree a2 = TYPE_ATTRIBUTES (type2); 5057 const_tree a; 5058 5059 if (a1 == a2) 5060 return 1; 5061 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a)) 5062 { 5063 const struct attribute_spec *as; 5064 const_tree attr; 5065 5066 as = lookup_attribute_spec (get_attribute_name (a)); 5067 if (!as || as->affects_type_identity == false) 5068 continue; 5069 5070 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2)); 5071 if (!attr || !attribute_value_equal (a, attr)) 5072 break; 5073 } 5074 if (!a) 5075 { 5076 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a)) 5077 { 5078 const struct attribute_spec *as; 5079 5080 as = lookup_attribute_spec (get_attribute_name (a)); 5081 if (!as || as->affects_type_identity == false) 5082 continue; 5083 5084 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1))) 5085 break; 5086 /* We don't need to compare trees again, as we did this 5087 already in first loop. */ 5088 } 5089 /* All types - affecting identity - are equal, so 5090 there is no need to call target hook for comparison. */ 5091 if (!a) 5092 return 1; 5093 } 5094 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a))) 5095 return 0; 5096 /* As some type combinations - like default calling-convention - might 5097 be compatible, we have to call the target hook to get the final result. */ 5098 return targetm.comp_type_attributes (type1, type2); 5099 } 5100 5101 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE 5102 is ATTRIBUTE. 5103 5104 Record such modified types already made so we don't make duplicates. */ 5105 5106 tree 5107 build_type_attribute_variant (tree ttype, tree attribute) 5108 { 5109 return build_type_attribute_qual_variant (ttype, attribute, 5110 TYPE_QUALS (ttype)); 5111 } 5112 5113 5114 /* Reset the expression *EXPR_P, a size or position. 5115 5116 ??? We could reset all non-constant sizes or positions. But it's cheap 5117 enough to not do so and refrain from adding workarounds to dwarf2out.c. 5118 5119 We need to reset self-referential sizes or positions because they cannot 5120 be gimplified and thus can contain a CALL_EXPR after the gimplification 5121 is finished, which will run afoul of LTO streaming. And they need to be 5122 reset to something essentially dummy but not constant, so as to preserve 5123 the properties of the object they are attached to. */ 5124 5125 static inline void 5126 free_lang_data_in_one_sizepos (tree *expr_p) 5127 { 5128 tree expr = *expr_p; 5129 if (CONTAINS_PLACEHOLDER_P (expr)) 5130 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr)); 5131 } 5132 5133 5134 /* Reset all the fields in a binfo node BINFO. 
We only keep 5135 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */ 5136 5137 static void 5138 free_lang_data_in_binfo (tree binfo) 5139 { 5140 unsigned i; 5141 tree t; 5142 5143 gcc_assert (TREE_CODE (binfo) == TREE_BINFO); 5144 5145 BINFO_VIRTUALS (binfo) = NULL_TREE; 5146 BINFO_BASE_ACCESSES (binfo) = NULL; 5147 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE; 5148 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE; 5149 5150 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t) 5151 free_lang_data_in_binfo (t); 5152 } 5153 5154 5155 /* Reset all language specific information still present in TYPE. */ 5156 5157 static void 5158 free_lang_data_in_type (tree type) 5159 { 5160 gcc_assert (TYPE_P (type)); 5161 5162 /* Give the FE a chance to remove its own data first. */ 5163 lang_hooks.free_lang_data (type); 5164 5165 TREE_LANG_FLAG_0 (type) = 0; 5166 TREE_LANG_FLAG_1 (type) = 0; 5167 TREE_LANG_FLAG_2 (type) = 0; 5168 TREE_LANG_FLAG_3 (type) = 0; 5169 TREE_LANG_FLAG_4 (type) = 0; 5170 TREE_LANG_FLAG_5 (type) = 0; 5171 TREE_LANG_FLAG_6 (type) = 0; 5172 5173 if (TREE_CODE (type) == FUNCTION_TYPE) 5174 { 5175 /* Remove the const and volatile qualifiers from arguments. The 5176 C++ front end removes them, but the C front end does not, 5177 leading to false ODR violation errors when merging two 5178 instances of the same function signature compiled by 5179 different front ends. */ 5180 tree p; 5181 5182 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p)) 5183 { 5184 tree arg_type = TREE_VALUE (p); 5185 5186 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type)) 5187 { 5188 int quals = TYPE_QUALS (arg_type) 5189 & ~TYPE_QUAL_CONST 5190 & ~TYPE_QUAL_VOLATILE; 5191 TREE_VALUE (p) = build_qualified_type (arg_type, quals); 5192 free_lang_data_in_type (TREE_VALUE (p)); 5193 } 5194 /* C++ FE uses TREE_PURPOSE to store initial values. */ 5195 TREE_PURPOSE (p) = NULL; 5196 } 5197 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */ 5198 TYPE_MINVAL (type) = NULL; 5199 } 5200 if (TREE_CODE (type) == METHOD_TYPE) 5201 { 5202 tree p; 5203 5204 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p)) 5205 { 5206 /* C++ FE uses TREE_PURPOSE to store initial values. */ 5207 TREE_PURPOSE (p) = NULL; 5208 } 5209 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */ 5210 TYPE_MINVAL (type) = NULL; 5211 } 5212 5213 /* Remove members that are not actually FIELD_DECLs from the field 5214 list of an aggregate. These occur in C++. */ 5215 if (RECORD_OR_UNION_TYPE_P (type)) 5216 { 5217 tree prev, member; 5218 5219 /* Note that TYPE_FIELDS can be shared across distinct 5220 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is 5221 to be removed, we cannot set its TREE_CHAIN to NULL. 5222 Otherwise, we would not be able to find all the other fields 5223 in the other instances of this TREE_TYPE. 5224 5225 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. 
*/ 5226 prev = NULL_TREE; 5227 member = TYPE_FIELDS (type); 5228 while (member) 5229 { 5230 if (TREE_CODE (member) == FIELD_DECL 5231 || (TREE_CODE (member) == TYPE_DECL 5232 && !DECL_IGNORED_P (member) 5233 && debug_info_level > DINFO_LEVEL_TERSE 5234 && !is_redundant_typedef (member))) 5235 { 5236 if (prev) 5237 TREE_CHAIN (prev) = member; 5238 else 5239 TYPE_FIELDS (type) = member; 5240 prev = member; 5241 } 5242 5243 member = TREE_CHAIN (member); 5244 } 5245 5246 if (prev) 5247 TREE_CHAIN (prev) = NULL_TREE; 5248 else 5249 TYPE_FIELDS (type) = NULL_TREE; 5250 5251 /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS 5252 and dangles the pointer from time to time. */ 5253 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL) 5254 TYPE_VFIELD (type) = NULL_TREE; 5255 5256 /* Remove the TYPE_METHODS list. While it would be nice to keep it 5257 to enable ODR warnings about differing method lists, doing so 5258 would impractically increase the size of the streamed LTO data. 5259 Keep a note of whether TYPE_METHODS was non-NULL; this is used 5260 by function.c and the pretty printers. */ 5261 if (TYPE_METHODS (type)) 5262 TYPE_METHODS (type) = error_mark_node; 5263 if (TYPE_BINFO (type)) 5264 { 5265 free_lang_data_in_binfo (TYPE_BINFO (type)); 5266 /* We need to preserve the link to bases and virtual tables for all 5267 polymorphic types to keep the devirtualization machinery working. 5268 Debug output cares only about bases, but we also output the 5269 virtual table pointers so that merging -fdevirtualize and 5270 -fno-devirtualize units is easier. */ 5271 if ((!BINFO_VTABLE (TYPE_BINFO (type)) 5272 || !flag_devirtualize) 5273 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type)) 5274 && !BINFO_VTABLE (TYPE_BINFO (type))) 5275 || debug_info_level != DINFO_LEVEL_NONE)) 5276 TYPE_BINFO (type) = NULL; 5277 } 5278 } 5279 else 5280 { 5281 /* For non-aggregate types, clear out the language slot (which 5282 overloads TYPE_BINFO). */ 5283 TYPE_LANG_SLOT_1 (type) = NULL_TREE; 5284 5285 if (INTEGRAL_TYPE_P (type) 5286 || SCALAR_FLOAT_TYPE_P (type) 5287 || FIXED_POINT_TYPE_P (type)) 5288 { 5289 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type)); 5290 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type)); 5291 } 5292 } 5293 5294 free_lang_data_in_one_sizepos (&TYPE_SIZE (type)); 5295 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type)); 5296 5297 if (TYPE_CONTEXT (type) 5298 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK) 5299 { 5300 tree ctx = TYPE_CONTEXT (type); 5301 do 5302 { 5303 ctx = BLOCK_SUPERCONTEXT (ctx); 5304 } 5305 while (ctx && TREE_CODE (ctx) == BLOCK); 5306 TYPE_CONTEXT (type) = ctx; 5307 } 5308 } 5309 5310 5311 /* Return true if DECL may need an assembler name to be set. */ 5312 5313 static inline bool 5314 need_assembler_name_p (tree decl) 5315 { 5316 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition 5317 Rule merging. This makes type_odr_p return true on those types during 5318 LTO, and by comparing the mangled names we can tell which types are 5319 intended to be equivalent across compilation units. 5320 5321 We do not store names of type_in_anonymous_namespace_p. 5322 5323 Record, union and enumeration types have linkage that allows us 5324 to check type_in_anonymous_namespace_p. We do not mangle compound types 5325 that can always be compared structurally. 5326 5327 Similarly for builtin types, we compare properties of their main variant. 5328 A special case is integer types, where mangling does make a difference 5329 between char/signed char/unsigned char etc.
Storing name for these makes 5330 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well. 5331 See cp/mangle.c:write_builtin_type for details. */ 5332 5333 if (flag_lto_odr_type_mering 5334 && TREE_CODE (decl) == TYPE_DECL 5335 && DECL_NAME (decl) 5336 && decl == TYPE_NAME (TREE_TYPE (decl)) 5337 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl) 5338 && !TYPE_ARTIFICIAL (TREE_TYPE (decl)) 5339 && (type_with_linkage_p (TREE_TYPE (decl)) 5340 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE) 5341 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)) 5342 return !DECL_ASSEMBLER_NAME_SET_P (decl); 5343 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */ 5344 if (!VAR_OR_FUNCTION_DECL_P (decl)) 5345 return false; 5346 5347 /* If DECL already has its assembler name set, it does not need a 5348 new one. */ 5349 if (!HAS_DECL_ASSEMBLER_NAME_P (decl) 5350 || DECL_ASSEMBLER_NAME_SET_P (decl)) 5351 return false; 5352 5353 /* Abstract decls do not need an assembler name. */ 5354 if (DECL_ABSTRACT_P (decl)) 5355 return false; 5356 5357 /* For VAR_DECLs, only static, public and external symbols need an 5358 assembler name. */ 5359 if (VAR_P (decl) 5360 && !TREE_STATIC (decl) 5361 && !TREE_PUBLIC (decl) 5362 && !DECL_EXTERNAL (decl)) 5363 return false; 5364 5365 if (TREE_CODE (decl) == FUNCTION_DECL) 5366 { 5367 /* Do not set assembler name on builtins. Allow RTL expansion to 5368 decide whether to expand inline or via a regular call. */ 5369 if (DECL_BUILT_IN (decl) 5370 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND) 5371 return false; 5372 5373 /* Functions represented in the callgraph need an assembler name. */ 5374 if (cgraph_node::get (decl) != NULL) 5375 return true; 5376 5377 /* Unused and not public functions don't need an assembler name. */ 5378 if (!TREE_USED (decl) && !TREE_PUBLIC (decl)) 5379 return false; 5380 } 5381 5382 return true; 5383 } 5384 5385 5386 /* Reset all language specific information still present in symbol 5387 DECL. */ 5388 5389 static void 5390 free_lang_data_in_decl (tree decl) 5391 { 5392 gcc_assert (DECL_P (decl)); 5393 5394 /* Give the FE a chance to remove its own data first. */ 5395 lang_hooks.free_lang_data (decl); 5396 5397 TREE_LANG_FLAG_0 (decl) = 0; 5398 TREE_LANG_FLAG_1 (decl) = 0; 5399 TREE_LANG_FLAG_2 (decl) = 0; 5400 TREE_LANG_FLAG_3 (decl) = 0; 5401 TREE_LANG_FLAG_4 (decl) = 0; 5402 TREE_LANG_FLAG_5 (decl) = 0; 5403 TREE_LANG_FLAG_6 (decl) = 0; 5404 5405 free_lang_data_in_one_sizepos (&DECL_SIZE (decl)); 5406 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl)); 5407 if (TREE_CODE (decl) == FIELD_DECL) 5408 { 5409 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl)); 5410 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE) 5411 DECL_QUALIFIER (decl) = NULL_TREE; 5412 } 5413 5414 if (TREE_CODE (decl) == FUNCTION_DECL) 5415 { 5416 struct cgraph_node *node; 5417 if (!(node = cgraph_node::get (decl)) 5418 || (!node->definition && !node->clones)) 5419 { 5420 if (node) 5421 node->release_body (); 5422 else 5423 { 5424 release_function_body (decl); 5425 DECL_ARGUMENTS (decl) = NULL; 5426 DECL_RESULT (decl) = NULL; 5427 DECL_INITIAL (decl) = error_mark_node; 5428 } 5429 } 5430 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p)) 5431 { 5432 tree t; 5433 5434 /* If DECL has a gimple body, then the context for its 5435 arguments must be DECL. Otherwise, it doesn't really 5436 matter, as we will not be emitting any code for DECL. 
In 5437 general, there may be other instances of DECL created by 5438 the front end and since PARM_DECLs are generally shared, 5439 their DECL_CONTEXT changes as the replicas of DECL are 5440 created. The only time where DECL_CONTEXT is important 5441 is for the FUNCTION_DECLs that have a gimple body (since 5442 the PARM_DECL will be used in the function's body). */ 5443 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t)) 5444 DECL_CONTEXT (t) = decl; 5445 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl)) 5446 DECL_FUNCTION_SPECIFIC_TARGET (decl) 5447 = target_option_default_node; 5448 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)) 5449 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) 5450 = optimization_default_node; 5451 } 5452 5453 /* DECL_SAVED_TREE holds the GENERIC representation for DECL. 5454 At this point, it is not needed anymore. */ 5455 DECL_SAVED_TREE (decl) = NULL_TREE; 5456 5457 /* Clear the abstract origin if it refers to a method. 5458 Otherwise dwarf2out.c will ICE as we splice functions out of 5459 TYPE_FIELDS and thus the origin will not be output 5460 correctly. */ 5461 if (DECL_ABSTRACT_ORIGIN (decl) 5462 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl)) 5463 && RECORD_OR_UNION_TYPE_P 5464 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl)))) 5465 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE; 5466 5467 /* Sometimes the C++ frontend doesn't manage to transform a temporary 5468 DECL_VINDEX referring to itself into a vtable slot number as it 5469 should. Happens with functions that are copied and then forgotten 5470 about. Just clear it, it won't matter anymore. */ 5471 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl))) 5472 DECL_VINDEX (decl) = NULL_TREE; 5473 } 5474 else if (VAR_P (decl)) 5475 { 5476 if ((DECL_EXTERNAL (decl) 5477 && (!TREE_STATIC (decl) || !TREE_READONLY (decl))) 5478 || (decl_function_context (decl) && !TREE_STATIC (decl))) 5479 DECL_INITIAL (decl) = NULL_TREE; 5480 } 5481 else if (TREE_CODE (decl) == TYPE_DECL) 5482 { 5483 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT; 5484 DECL_VISIBILITY_SPECIFIED (decl) = 0; 5485 DECL_INITIAL (decl) = NULL_TREE; 5486 } 5487 else if (TREE_CODE (decl) == FIELD_DECL) 5488 DECL_INITIAL (decl) = NULL_TREE; 5489 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL 5490 && DECL_INITIAL (decl) 5491 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK) 5492 { 5493 /* Strip builtins from the translation-unit BLOCK. We still have targets 5494 without builtin_decl_explicit support and also builtins are shared 5495 nodes and thus we can't use TREE_CHAIN in multiple lists. */ 5496 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl)); 5497 while (*nextp) 5498 { 5499 tree var = *nextp; 5500 if (TREE_CODE (var) == FUNCTION_DECL 5501 && DECL_BUILT_IN (var)) 5502 *nextp = TREE_CHAIN (var); 5503 else 5504 nextp = &TREE_CHAIN (var); 5505 } 5506 } 5507 } 5508 5509 5510 /* Data used when collecting DECLs and TYPEs for language data removal. */ 5511 5512 struct free_lang_data_d 5513 { 5514 free_lang_data_d () : decls (100), types (100) {} 5515 5516 /* Worklist to avoid excessive recursion. */ 5517 auto_vec<tree> worklist; 5518 5519 /* Set of traversed objects. Used to avoid duplicate visits. */ 5520 hash_set<tree> pset; 5521 5522 /* Array of symbols to process with free_lang_data_in_decl. */ 5523 auto_vec<tree> decls; 5524 5525 /* Array of types to process with free_lang_data_in_type. */ 5526 auto_vec<tree> types; 5527 }; 5528 5529 5530 /* Save all language fields needed to generate proper debug information 5531 for DECL. 
This saves most fields cleared out by free_lang_data_in_decl. */ 5532 5533 static void 5534 save_debug_info_for_decl (tree t) 5535 { 5536 /*struct saved_debug_info_d *sdi;*/ 5537 5538 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t)); 5539 5540 /* FIXME. Partial implementation for saving debug info removed. */ 5541 } 5542 5543 5544 /* Save all language fields needed to generate proper debug information 5545 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */ 5546 5547 static void 5548 save_debug_info_for_type (tree t) 5549 { 5550 /*struct saved_debug_info_d *sdi;*/ 5551 5552 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t)); 5553 5554 /* FIXME. Partial implementation for saving debug info removed. */ 5555 } 5556 5557 5558 /* Add type or decl T to one of the list of tree nodes that need their 5559 language data removed. The lists are held inside FLD. */ 5560 5561 static void 5562 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld) 5563 { 5564 if (DECL_P (t)) 5565 { 5566 fld->decls.safe_push (t); 5567 if (debug_info_level > DINFO_LEVEL_TERSE) 5568 save_debug_info_for_decl (t); 5569 } 5570 else if (TYPE_P (t)) 5571 { 5572 fld->types.safe_push (t); 5573 if (debug_info_level > DINFO_LEVEL_TERSE) 5574 save_debug_info_for_type (t); 5575 } 5576 else 5577 gcc_unreachable (); 5578 } 5579 5580 /* Push tree node T into FLD->WORKLIST. */ 5581 5582 static inline void 5583 fld_worklist_push (tree t, struct free_lang_data_d *fld) 5584 { 5585 if (t && !is_lang_specific (t) && !fld->pset.contains (t)) 5586 fld->worklist.safe_push ((t)); 5587 } 5588 5589 5590 /* Operand callback helper for free_lang_data_in_node. *TP is the 5591 subtree operand being considered. */ 5592 5593 static tree 5594 find_decls_types_r (tree *tp, int *ws, void *data) 5595 { 5596 tree t = *tp; 5597 struct free_lang_data_d *fld = (struct free_lang_data_d *) data; 5598 5599 if (TREE_CODE (t) == TREE_LIST) 5600 return NULL_TREE; 5601 5602 /* Language specific nodes will be removed, so there is no need 5603 to gather anything under them. */ 5604 if (is_lang_specific (t)) 5605 { 5606 *ws = 0; 5607 return NULL_TREE; 5608 } 5609 5610 if (DECL_P (t)) 5611 { 5612 /* Note that walk_tree does not traverse every possible field in 5613 decls, so we have to do our own traversals here. */ 5614 add_tree_to_fld_list (t, fld); 5615 5616 fld_worklist_push (DECL_NAME (t), fld); 5617 fld_worklist_push (DECL_CONTEXT (t), fld); 5618 fld_worklist_push (DECL_SIZE (t), fld); 5619 fld_worklist_push (DECL_SIZE_UNIT (t), fld); 5620 5621 /* We are going to remove everything under DECL_INITIAL for 5622 TYPE_DECLs. No point walking them. 
*/ 5623 if (TREE_CODE (t) != TYPE_DECL) 5624 fld_worklist_push (DECL_INITIAL (t), fld); 5625 5626 fld_worklist_push (DECL_ATTRIBUTES (t), fld); 5627 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld); 5628 5629 if (TREE_CODE (t) == FUNCTION_DECL) 5630 { 5631 fld_worklist_push (DECL_ARGUMENTS (t), fld); 5632 fld_worklist_push (DECL_RESULT (t), fld); 5633 } 5634 else if (TREE_CODE (t) == TYPE_DECL) 5635 { 5636 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld); 5637 } 5638 else if (TREE_CODE (t) == FIELD_DECL) 5639 { 5640 fld_worklist_push (DECL_FIELD_OFFSET (t), fld); 5641 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld); 5642 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld); 5643 fld_worklist_push (DECL_FCONTEXT (t), fld); 5644 } 5645 5646 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL) 5647 && DECL_HAS_VALUE_EXPR_P (t)) 5648 fld_worklist_push (DECL_VALUE_EXPR (t), fld); 5649 5650 if (TREE_CODE (t) != FIELD_DECL 5651 && TREE_CODE (t) != TYPE_DECL) 5652 fld_worklist_push (TREE_CHAIN (t), fld); 5653 *ws = 0; 5654 } 5655 else if (TYPE_P (t)) 5656 { 5657 /* Note that walk_tree does not traverse every possible field in 5658 types, so we have to do our own traversals here. */ 5659 add_tree_to_fld_list (t, fld); 5660 5661 if (!RECORD_OR_UNION_TYPE_P (t)) 5662 fld_worklist_push (TYPE_CACHED_VALUES (t), fld); 5663 fld_worklist_push (TYPE_SIZE (t), fld); 5664 fld_worklist_push (TYPE_SIZE_UNIT (t), fld); 5665 fld_worklist_push (TYPE_ATTRIBUTES (t), fld); 5666 fld_worklist_push (TYPE_POINTER_TO (t), fld); 5667 fld_worklist_push (TYPE_REFERENCE_TO (t), fld); 5668 fld_worklist_push (TYPE_NAME (t), fld); 5669 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream 5670 them and thus do not and want not to reach unused pointer types 5671 this way. */ 5672 if (!POINTER_TYPE_P (t)) 5673 fld_worklist_push (TYPE_MINVAL (t), fld); 5674 if (!RECORD_OR_UNION_TYPE_P (t)) 5675 fld_worklist_push (TYPE_MAXVAL (t), fld); 5676 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld); 5677 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus 5678 do not and want not to reach unused variants this way. */ 5679 if (TYPE_CONTEXT (t)) 5680 { 5681 tree ctx = TYPE_CONTEXT (t); 5682 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one. 5683 So push that instead. */ 5684 while (ctx && TREE_CODE (ctx) == BLOCK) 5685 ctx = BLOCK_SUPERCONTEXT (ctx); 5686 fld_worklist_push (ctx, fld); 5687 } 5688 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not 5689 and want not to reach unused types this way. */ 5690 5691 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t)) 5692 { 5693 unsigned i; 5694 tree tem; 5695 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem) 5696 fld_worklist_push (TREE_TYPE (tem), fld); 5697 tem = BINFO_VIRTUALS (TYPE_BINFO (t)); 5698 if (tem 5699 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */ 5700 && TREE_CODE (tem) == TREE_LIST) 5701 do 5702 { 5703 fld_worklist_push (TREE_VALUE (tem), fld); 5704 tem = TREE_CHAIN (tem); 5705 } 5706 while (tem); 5707 } 5708 if (RECORD_OR_UNION_TYPE_P (t)) 5709 { 5710 tree tem; 5711 /* Push all TYPE_FIELDS - there can be interleaving interesting 5712 and non-interesting things. 
*/ 5713 tem = TYPE_FIELDS (t); 5714 while (tem) 5715 { 5716 if (TREE_CODE (tem) == FIELD_DECL 5717 || (TREE_CODE (tem) == TYPE_DECL 5718 && !DECL_IGNORED_P (tem) 5719 && debug_info_level > DINFO_LEVEL_TERSE 5720 && !is_redundant_typedef (tem))) 5721 fld_worklist_push (tem, fld); 5722 tem = TREE_CHAIN (tem); 5723 } 5724 } 5725 5726 fld_worklist_push (TYPE_STUB_DECL (t), fld); 5727 *ws = 0; 5728 } 5729 else if (TREE_CODE (t) == BLOCK) 5730 { 5731 tree tem; 5732 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem)) 5733 fld_worklist_push (tem, fld); 5734 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem)) 5735 fld_worklist_push (tem, fld); 5736 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld); 5737 } 5738 5739 if (TREE_CODE (t) != IDENTIFIER_NODE 5740 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED)) 5741 fld_worklist_push (TREE_TYPE (t), fld); 5742 5743 return NULL_TREE; 5744 } 5745 5746 5747 /* Find decls and types in T. */ 5748 5749 static void 5750 find_decls_types (tree t, struct free_lang_data_d *fld) 5751 { 5752 while (1) 5753 { 5754 if (!fld->pset.contains (t)) 5755 walk_tree (&t, find_decls_types_r, fld, &fld->pset); 5756 if (fld->worklist.is_empty ()) 5757 break; 5758 t = fld->worklist.pop (); 5759 } 5760 } 5761 5762 /* Translate all the types in LIST with the corresponding runtime 5763 types. */ 5764 5765 static tree 5766 get_eh_types_for_runtime (tree list) 5767 { 5768 tree head, prev; 5769 5770 if (list == NULL_TREE) 5771 return NULL_TREE; 5772 5773 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list))); 5774 prev = head; 5775 list = TREE_CHAIN (list); 5776 while (list) 5777 { 5778 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list))); 5779 TREE_CHAIN (prev) = n; 5780 prev = TREE_CHAIN (prev); 5781 list = TREE_CHAIN (list); 5782 } 5783 5784 return head; 5785 } 5786 5787 5788 /* Find decls and types referenced in EH region R and store them in 5789 FLD->DECLS and FLD->TYPES. */ 5790 5791 static void 5792 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld) 5793 { 5794 switch (r->type) 5795 { 5796 case ERT_CLEANUP: 5797 break; 5798 5799 case ERT_TRY: 5800 { 5801 eh_catch c; 5802 5803 /* The types referenced in each catch must first be changed to the 5804 EH types used at runtime. This removes references to FE types 5805 in the region. */ 5806 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) 5807 { 5808 c->type_list = get_eh_types_for_runtime (c->type_list); 5809 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset); 5810 } 5811 } 5812 break; 5813 5814 case ERT_ALLOWED_EXCEPTIONS: 5815 r->u.allowed.type_list 5816 = get_eh_types_for_runtime (r->u.allowed.type_list); 5817 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset); 5818 break; 5819 5820 case ERT_MUST_NOT_THROW: 5821 walk_tree (&r->u.must_not_throw.failure_decl, 5822 find_decls_types_r, fld, &fld->pset); 5823 break; 5824 } 5825 } 5826 5827 5828 /* Find decls and types referenced in cgraph node N and store them in 5829 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will 5830 look for *every* kind of DECL and TYPE node reachable from N, 5831 including those embedded inside types and decls (i.e,, TYPE_DECLs, 5832 NAMESPACE_DECLs, etc). 
*/ 5833 5834 static void 5835 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld) 5836 { 5837 basic_block bb; 5838 struct function *fn; 5839 unsigned ix; 5840 tree t; 5841 5842 find_decls_types (n->decl, fld); 5843 5844 if (!gimple_has_body_p (n->decl)) 5845 return; 5846 5847 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL); 5848 5849 fn = DECL_STRUCT_FUNCTION (n->decl); 5850 5851 /* Traverse locals. */ 5852 FOR_EACH_LOCAL_DECL (fn, ix, t) 5853 find_decls_types (t, fld); 5854 5855 /* Traverse EH regions in FN. */ 5856 { 5857 eh_region r; 5858 FOR_ALL_EH_REGION_FN (r, fn) 5859 find_decls_types_in_eh_region (r, fld); 5860 } 5861 5862 /* Traverse every statement in FN. */ 5863 FOR_EACH_BB_FN (bb, fn) 5864 { 5865 gphi_iterator psi; 5866 gimple_stmt_iterator si; 5867 unsigned i; 5868 5869 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi)) 5870 { 5871 gphi *phi = psi.phi (); 5872 5873 for (i = 0; i < gimple_phi_num_args (phi); i++) 5874 { 5875 tree *arg_p = gimple_phi_arg_def_ptr (phi, i); 5876 find_decls_types (*arg_p, fld); 5877 } 5878 } 5879 5880 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si)) 5881 { 5882 gimple *stmt = gsi_stmt (si); 5883 5884 if (is_gimple_call (stmt)) 5885 find_decls_types (gimple_call_fntype (stmt), fld); 5886 5887 for (i = 0; i < gimple_num_ops (stmt); i++) 5888 { 5889 tree arg = gimple_op (stmt, i); 5890 find_decls_types (arg, fld); 5891 } 5892 } 5893 } 5894 } 5895 5896 5897 /* Find decls and types referenced in varpool node N and store them in 5898 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will 5899 look for *every* kind of DECL and TYPE node reachable from N, 5900 including those embedded inside types and decls (i.e,, TYPE_DECLs, 5901 NAMESPACE_DECLs, etc). */ 5902 5903 static void 5904 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld) 5905 { 5906 find_decls_types (v->decl, fld); 5907 } 5908 5909 /* If T needs an assembler name, have one created for it. */ 5910 5911 void 5912 assign_assembler_name_if_needed (tree t) 5913 { 5914 if (need_assembler_name_p (t)) 5915 { 5916 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit 5917 diagnostics that use input_location to show locus 5918 information. The problem here is that, at this point, 5919 input_location is generally anchored to the end of the file 5920 (since the parser is long gone), so we don't have a good 5921 position to pin it to. 5922 5923 To alleviate this problem, this uses the location of T's 5924 declaration. Examples of this are 5925 testsuite/g++.dg/template/cond2.C and 5926 testsuite/g++.dg/template/pr35240.C. */ 5927 location_t saved_location = input_location; 5928 input_location = DECL_SOURCE_LOCATION (t); 5929 5930 decl_assembler_name (t); 5931 5932 input_location = saved_location; 5933 } 5934 } 5935 5936 5937 /* Free language specific information for every operand and expression 5938 in every node of the call graph. This process operates in three stages: 5939 5940 1- Every callgraph node and varpool node is traversed looking for 5941 decls and types embedded in them. This is a more exhaustive 5942 search than that done by find_referenced_vars, because it will 5943 also collect individual fields, decls embedded in types, etc. 5944 5945 2- All the decls found are sent to free_lang_data_in_decl. 5946 5947 3- All the types found are sent to free_lang_data_in_type. 
5948 5949 The ordering between decls and types is important because 5950 free_lang_data_in_decl sets assembler names, which includes 5951 mangling. So types cannot be freed up until assembler names have 5952 been set up. */ 5953 5954 static void 5955 free_lang_data_in_cgraph (void) 5956 { 5957 struct cgraph_node *n; 5958 varpool_node *v; 5959 struct free_lang_data_d fld; 5960 tree t; 5961 unsigned i; 5962 alias_pair *p; 5963 5964 /* Find decls and types in the body of every function in the callgraph. */ 5965 FOR_EACH_FUNCTION (n) 5966 find_decls_types_in_node (n, &fld); 5967 5968 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p) 5969 find_decls_types (p->decl, &fld); 5970 5971 /* Find decls and types in every varpool symbol. */ 5972 FOR_EACH_VARIABLE (v) 5973 find_decls_types_in_var (v, &fld); 5974 5975 /* Set the assembler name on every decl found. We need to do this 5976 now because free_lang_data_in_decl will invalidate data needed 5977 for mangling. This breaks mangling on interdependent decls. */ 5978 FOR_EACH_VEC_ELT (fld.decls, i, t) 5979 assign_assembler_name_if_needed (t); 5980 5981 /* Traverse every decl found freeing its language data. */ 5982 FOR_EACH_VEC_ELT (fld.decls, i, t) 5983 free_lang_data_in_decl (t); 5984 5985 /* Traverse every type found freeing its language data. */ 5986 FOR_EACH_VEC_ELT (fld.types, i, t) 5987 free_lang_data_in_type (t); 5988 if (flag_checking) 5989 { 5990 FOR_EACH_VEC_ELT (fld.types, i, t) 5991 verify_type (t); 5992 } 5993 } 5994 5995 5996 /* Free resources that are used by FE but are not needed once they are done. */ 5997 5998 static unsigned 5999 free_lang_data (void) 6000 { 6001 unsigned i; 6002 6003 /* If we are the LTO frontend we have freed lang-specific data already. */ 6004 if (in_lto_p 6005 || (!flag_generate_lto && !flag_generate_offload)) 6006 return 0; 6007 6008 /* Allocate and assign alias sets to the standard integer types 6009 while the slots are still in the way the frontends generated them. */ 6010 for (i = 0; i < itk_none; ++i) 6011 if (integer_types[i]) 6012 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]); 6013 6014 /* Traverse the IL resetting language specific information for 6015 operands, expressions, etc. */ 6016 free_lang_data_in_cgraph (); 6017 6018 /* Create gimple variants for common types. */ 6019 fileptr_type_node = ptr_type_node; 6020 const_tm_ptr_type_node = const_ptr_type_node; 6021 6022 /* Reset some langhooks. Do not reset types_compatible_p, it may 6023 still be used indirectly via the get_alias_set langhook. */ 6024 lang_hooks.dwarf_name = lhd_dwarf_name; 6025 lang_hooks.decl_printable_name = gimple_decl_printable_name; 6026 lang_hooks.gimplify_expr = lhd_gimplify_expr; 6027 6028 /* We do not want the default decl_assembler_name implementation, 6029 rather if we have fixed everything we want a wrapper around it 6030 asserting that all non-local symbols already got their assembler 6031 name and only produce assembler names for local symbols. Or rather 6032 make sure we never call decl_assembler_name on local symbols and 6033 devise a separate, middle-end private scheme for it. */ 6034 6035 /* Reset diagnostic machinery. 
*/ 6036 tree_diagnostics_defaults (global_dc); 6037 6038 return 0; 6039 } 6040 6041 6042 namespace { 6043 6044 const pass_data pass_data_ipa_free_lang_data = 6045 { 6046 SIMPLE_IPA_PASS, /* type */ 6047 "*free_lang_data", /* name */ 6048 OPTGROUP_NONE, /* optinfo_flags */ 6049 TV_IPA_FREE_LANG_DATA, /* tv_id */ 6050 0, /* properties_required */ 6051 0, /* properties_provided */ 6052 0, /* properties_destroyed */ 6053 0, /* todo_flags_start */ 6054 0, /* todo_flags_finish */ 6055 }; 6056 6057 class pass_ipa_free_lang_data : public simple_ipa_opt_pass 6058 { 6059 public: 6060 pass_ipa_free_lang_data (gcc::context *ctxt) 6061 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt) 6062 {} 6063 6064 /* opt_pass methods: */ 6065 virtual unsigned int execute (function *) { return free_lang_data (); } 6066 6067 }; // class pass_ipa_free_lang_data 6068 6069 } // anon namespace 6070 6071 simple_ipa_opt_pass * 6072 make_pass_ipa_free_lang_data (gcc::context *ctxt) 6073 { 6074 return new pass_ipa_free_lang_data (ctxt); 6075 } 6076 6077 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of 6078 ATTR_NAME. Also used internally by remove_attribute(). */ 6079 bool 6080 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident) 6081 { 6082 size_t ident_len = IDENTIFIER_LENGTH (ident); 6083 6084 if (ident_len == attr_len) 6085 { 6086 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0) 6087 return true; 6088 } 6089 else if (ident_len == attr_len + 4) 6090 { 6091 /* There is the possibility that ATTR is 'text' and IDENT is 6092 '__text__'. */ 6093 const char *p = IDENTIFIER_POINTER (ident); 6094 if (p[0] == '_' && p[1] == '_' 6095 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_' 6096 && strncmp (attr_name, p + 2, attr_len) == 0) 6097 return true; 6098 } 6099 6100 return false; 6101 } 6102 6103 /* The backbone of lookup_attribute(). ATTR_LEN is the string length 6104 of ATTR_NAME, and LIST is not NULL_TREE. */ 6105 tree 6106 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list) 6107 { 6108 while (list) 6109 { 6110 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list)); 6111 6112 if (ident_len == attr_len) 6113 { 6114 if (!strcmp (attr_name, 6115 IDENTIFIER_POINTER (get_attribute_name (list)))) 6116 break; 6117 } 6118 /* TODO: If we made sure that attributes were stored in the 6119 canonical form without '__...__' (ie, as in 'text' as opposed 6120 to '__text__') then we could avoid the following case. */ 6121 else if (ident_len == attr_len + 4) 6122 { 6123 const char *p = IDENTIFIER_POINTER (get_attribute_name (list)); 6124 if (p[0] == '_' && p[1] == '_' 6125 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_' 6126 && strncmp (attr_name, p + 2, attr_len) == 0) 6127 break; 6128 } 6129 list = TREE_CHAIN (list); 6130 } 6131 6132 return list; 6133 } 6134 6135 /* Given an attribute name ATTR_NAME and a list of attributes LIST, 6136 return a pointer to the attribute's list first element if the attribute 6137 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not 6138 '__text__'). 
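   For illustration only, assuming an attribute list built by hand:

	 tree attrs = tree_cons (get_identifier ("omp declare simd"),
				 NULL_TREE, NULL_TREE);
	 tree hit = private_lookup_attribute_by_prefix ("omp ", 4, attrs);

   HIT points at the "omp declare simd" TREE_LIST node; the same prefix
   would also match the "__omp declare simd__" spelling.  Callers
   normally reach this routine through the lookup_attribute_by_prefix
   wrapper, which supplies ATTR_LEN.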
*/ 6139 6140 tree 6141 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len, 6142 tree list) 6143 { 6144 while (list) 6145 { 6146 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list)); 6147 6148 if (attr_len > ident_len) 6149 { 6150 list = TREE_CHAIN (list); 6151 continue; 6152 } 6153 6154 const char *p = IDENTIFIER_POINTER (get_attribute_name (list)); 6155 6156 if (strncmp (attr_name, p, attr_len) == 0) 6157 break; 6158 6159 /* TODO: If we made sure that attributes were stored in the 6160 canonical form without '__...__' (ie, as in 'text' as opposed 6161 to '__text__') then we could avoid the following case. */ 6162 if (p[0] == '_' && p[1] == '_' && 6163 strncmp (attr_name, p + 2, attr_len) == 0) 6164 break; 6165 6166 list = TREE_CHAIN (list); 6167 } 6168 6169 return list; 6170 } 6171 6172 6173 /* A variant of lookup_attribute() that can be used with an identifier 6174 as the first argument, and where the identifier can be either 6175 'text' or '__text__'. 6176 6177 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST, 6178 return a pointer to the attribute's list element if the attribute 6179 is part of the list, or NULL_TREE if not found. If the attribute 6180 appears more than once, this only returns the first occurrence; the 6181 TREE_CHAIN of the return value should be passed back in if further 6182 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but 6183 can be in the form 'text' or '__text__'. */ 6184 static tree 6185 lookup_ident_attribute (tree attr_identifier, tree list) 6186 { 6187 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE); 6188 6189 while (list) 6190 { 6191 gcc_checking_assert (TREE_CODE (get_attribute_name (list)) 6192 == IDENTIFIER_NODE); 6193 6194 if (cmp_attrib_identifiers (attr_identifier, 6195 get_attribute_name (list))) 6196 /* Found it. */ 6197 break; 6198 list = TREE_CHAIN (list); 6199 } 6200 6201 return list; 6202 } 6203 6204 /* Remove any instances of attribute ATTR_NAME in LIST and return the 6205 modified list. */ 6206 6207 tree 6208 remove_attribute (const char *attr_name, tree list) 6209 { 6210 tree *p; 6211 size_t attr_len = strlen (attr_name); 6212 6213 gcc_checking_assert (attr_name[0] != '_'); 6214 6215 for (p = &list; *p; ) 6216 { 6217 tree l = *p; 6218 /* TODO: If we were storing attributes in normalized form, here 6219 we could use a simple strcmp(). */ 6220 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l))) 6221 *p = TREE_CHAIN (l); 6222 else 6223 p = &TREE_CHAIN (l); 6224 } 6225 6226 return list; 6227 } 6228 6229 /* Return an attribute list that is the union of a1 and a2. */ 6230 6231 tree 6232 merge_attributes (tree a1, tree a2) 6233 { 6234 tree attributes; 6235 6236 /* Either one unset? Take the set one. */ 6237 6238 if ((attributes = a1) == 0) 6239 attributes = a2; 6240 6241 /* One that completely contains the other? Take it. */ 6242 6243 else if (a2 != 0 && ! attribute_list_contained (a1, a2)) 6244 { 6245 if (attribute_list_contained (a2, a1)) 6246 attributes = a2; 6247 else 6248 { 6249 /* Pick the longest list, and hang on the other list. 
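	 For example, merging

	   a1 = { noinline, aligned (8) }
	   a2 = { aligned (8), used }

	 keeps a1 (the lists have equal length) and prepends a copy of
	 the 'used' attribute, yielding { used, noinline, aligned (8) };
	 the aligned (8) entry shared by both lists is not duplicated.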
*/ 6250 6251 if (list_length (a1) < list_length (a2)) 6252 attributes = a2, a2 = a1; 6253 6254 for (; a2 != 0; a2 = TREE_CHAIN (a2)) 6255 { 6256 tree a; 6257 for (a = lookup_ident_attribute (get_attribute_name (a2), 6258 attributes); 6259 a != NULL_TREE && !attribute_value_equal (a, a2); 6260 a = lookup_ident_attribute (get_attribute_name (a2), 6261 TREE_CHAIN (a))) 6262 ; 6263 if (a == NULL_TREE) 6264 { 6265 a1 = copy_node (a2); 6266 TREE_CHAIN (a1) = attributes; 6267 attributes = a1; 6268 } 6269 } 6270 } 6271 } 6272 return attributes; 6273 } 6274 6275 /* Given types T1 and T2, merge their attributes and return 6276 the result. */ 6277 6278 tree 6279 merge_type_attributes (tree t1, tree t2) 6280 { 6281 return merge_attributes (TYPE_ATTRIBUTES (t1), 6282 TYPE_ATTRIBUTES (t2)); 6283 } 6284 6285 /* Given decls OLDDECL and NEWDECL, merge their attributes and return 6286 the result. */ 6287 6288 tree 6289 merge_decl_attributes (tree olddecl, tree newdecl) 6290 { 6291 return merge_attributes (DECL_ATTRIBUTES (olddecl), 6292 DECL_ATTRIBUTES (newdecl)); 6293 } 6294 6295 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES 6296 6297 /* Specialization of merge_decl_attributes for various Windows targets. 6298 6299 This handles the following situation: 6300 6301 __declspec (dllimport) int foo; 6302 int foo; 6303 6304 The second instance of `foo' nullifies the dllimport. */ 6305 6306 tree 6307 merge_dllimport_decl_attributes (tree old, tree new_tree) 6308 { 6309 tree a; 6310 int delete_dllimport_p = 1; 6311 6312 /* What we need to do here is remove from `old' dllimport if it doesn't 6313 appear in `new'. dllimport behaves like extern: if a declaration is 6314 marked dllimport and a definition appears later, then the object 6315 is not dllimport'd. We also remove a `new' dllimport if the old list 6316 contains dllexport: dllexport always overrides dllimport, regardless 6317 of the order of declaration. */ 6318 if (!VAR_OR_FUNCTION_DECL_P (new_tree)) 6319 delete_dllimport_p = 0; 6320 else if (DECL_DLLIMPORT_P (new_tree) 6321 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old))) 6322 { 6323 DECL_DLLIMPORT_P (new_tree) = 0; 6324 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: " 6325 "dllimport ignored", new_tree); 6326 } 6327 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree)) 6328 { 6329 /* Warn about overriding a symbol that has already been used, e.g.: 6330 extern int __attribute__ ((dllimport)) foo; 6331 int* bar () {return &foo;} 6332 int foo; 6333 */ 6334 if (TREE_USED (old)) 6335 { 6336 warning (0, "%q+D redeclared without dllimport attribute " 6337 "after being referenced with dll linkage", new_tree); 6338 /* If we have used a variable's address with dllimport linkage, 6339 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the 6340 decl may already have had TREE_CONSTANT computed. 6341 We still remove the attribute so that assembler code refers 6342 to '&foo rather than '_imp__foo'. */ 6343 if (VAR_P (old) && TREE_ADDRESSABLE (old)) 6344 DECL_DLLIMPORT_P (new_tree) = 1; 6345 } 6346 6347 /* Let an inline definition silently override the external reference, 6348 but otherwise warn about attribute inconsistency. 
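	 For example, no warning is issued for

	   extern int __attribute__ ((dllimport)) f (void);
	   inline int f (void) { return 0; }

	 provided the dllimport declaration has not been referenced yet,
	 whereas redeclaring a dllimport'd variable without the attribute
	 still gets the warning below.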
*/ 6349 else if (VAR_P (new_tree) || !DECL_DECLARED_INLINE_P (new_tree)) 6350 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: " 6351 "previous dllimport ignored", new_tree); 6352 } 6353 else 6354 delete_dllimport_p = 0; 6355 6356 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree)); 6357 6358 if (delete_dllimport_p) 6359 a = remove_attribute ("dllimport", a); 6360 6361 return a; 6362 } 6363 6364 /* Handle a "dllimport" or "dllexport" attribute; arguments as in 6365 struct attribute_spec.handler. */ 6366 6367 tree 6368 handle_dll_attribute (tree * pnode, tree name, tree args, int flags, 6369 bool *no_add_attrs) 6370 { 6371 tree node = *pnode; 6372 bool is_dllimport; 6373 6374 /* These attributes may apply to structure and union types being created, 6375 but otherwise should pass to the declaration involved. */ 6376 if (!DECL_P (node)) 6377 { 6378 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT 6379 | (int) ATTR_FLAG_ARRAY_NEXT)) 6380 { 6381 *no_add_attrs = true; 6382 return tree_cons (name, args, NULL_TREE); 6383 } 6384 if (TREE_CODE (node) == RECORD_TYPE 6385 || TREE_CODE (node) == UNION_TYPE) 6386 { 6387 node = TYPE_NAME (node); 6388 if (!node) 6389 return NULL_TREE; 6390 } 6391 else 6392 { 6393 warning (OPT_Wattributes, "%qE attribute ignored", 6394 name); 6395 *no_add_attrs = true; 6396 return NULL_TREE; 6397 } 6398 } 6399 6400 if (!VAR_OR_FUNCTION_DECL_P (node) && TREE_CODE (node) != TYPE_DECL) 6401 { 6402 *no_add_attrs = true; 6403 warning (OPT_Wattributes, "%qE attribute ignored", 6404 name); 6405 return NULL_TREE; 6406 } 6407 6408 if (TREE_CODE (node) == TYPE_DECL 6409 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE 6410 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE) 6411 { 6412 *no_add_attrs = true; 6413 warning (OPT_Wattributes, "%qE attribute ignored", 6414 name); 6415 return NULL_TREE; 6416 } 6417 6418 is_dllimport = is_attribute_p ("dllimport", name); 6419 6420 /* Report error on dllimport ambiguities seen now before they cause 6421 any damage. */ 6422 if (is_dllimport) 6423 { 6424 /* Honor any target-specific overrides. */ 6425 if (!targetm.valid_dllimport_attribute_p (node)) 6426 *no_add_attrs = true; 6427 6428 else if (TREE_CODE (node) == FUNCTION_DECL 6429 && DECL_DECLARED_INLINE_P (node)) 6430 { 6431 warning (OPT_Wattributes, "inline function %q+D declared as " 6432 " dllimport: attribute ignored", node); 6433 *no_add_attrs = true; 6434 } 6435 /* Like MS, treat definition of dllimported variables and 6436 non-inlined functions on declaration as syntax errors. */ 6437 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node)) 6438 { 6439 error ("function %q+D definition is marked dllimport", node); 6440 *no_add_attrs = true; 6441 } 6442 6443 else if (VAR_P (node)) 6444 { 6445 if (DECL_INITIAL (node)) 6446 { 6447 error ("variable %q+D definition is marked dllimport", 6448 node); 6449 *no_add_attrs = true; 6450 } 6451 6452 /* `extern' needn't be specified with dllimport. 6453 Specify `extern' now and hope for the best. Sigh. */ 6454 DECL_EXTERNAL (node) = 1; 6455 /* Also, implicitly give dllimport'd variables declared within 6456 a function global scope, unless declared static. */ 6457 if (current_function_decl != NULL_TREE && !TREE_STATIC (node)) 6458 TREE_PUBLIC (node) = 1; 6459 /* Clear TREE_STATIC because DECL_EXTERNAL is set, unless 6460 it is a C++ static data member. 
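	     For a member such as

	       struct S { static __declspec (dllimport) int x; };

	     DECL_CONTEXT is the RECORD_TYPE for S, so TREE_STATIC is left
	     set; for an ordinary namespace-scope variable it is cleared
	     here.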
*/ 6461 if (DECL_CONTEXT (node) == NULL_TREE 6462 || !RECORD_OR_UNION_TYPE_P (DECL_CONTEXT (node))) 6463 TREE_STATIC (node) = 0; 6464 } 6465 6466 if (*no_add_attrs == false) 6467 DECL_DLLIMPORT_P (node) = 1; 6468 } 6469 else if (TREE_CODE (node) == FUNCTION_DECL 6470 && DECL_DECLARED_INLINE_P (node) 6471 && flag_keep_inline_dllexport) 6472 /* An exported function, even if inline, must be emitted. */ 6473 DECL_EXTERNAL (node) = 0; 6474 6475 /* Report error if symbol is not accessible at global scope. */ 6476 if (!TREE_PUBLIC (node) && VAR_OR_FUNCTION_DECL_P (node)) 6477 { 6478 error ("external linkage required for symbol %q+D because of " 6479 "%qE attribute", node, name); 6480 *no_add_attrs = true; 6481 } 6482 6483 /* A dllexport'd entity must have default visibility so that other 6484 program units (shared libraries or the main executable) can see 6485 it. A dllimport'd entity must have default visibility so that 6486 the linker knows that undefined references within this program 6487 unit can be resolved by the dynamic linker. */ 6488 if (!*no_add_attrs) 6489 { 6490 if (DECL_VISIBILITY_SPECIFIED (node) 6491 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT) 6492 error ("%qE implies default visibility, but %qD has already " 6493 "been declared with a different visibility", 6494 name, node); 6495 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT; 6496 DECL_VISIBILITY_SPECIFIED (node) = 1; 6497 } 6498 6499 return NULL_TREE; 6500 } 6501 6502 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */ 6503 6504 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask 6505 of the various TYPE_QUAL values. */ 6506 6507 static void 6508 set_type_quals (tree type, int type_quals) 6509 { 6510 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0; 6511 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0; 6512 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0; 6513 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0; 6514 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals); 6515 } 6516 6517 /* Returns true iff CAND and BASE have equivalent language-specific 6518 qualifiers. */ 6519 6520 bool 6521 check_lang_type (const_tree cand, const_tree base) 6522 { 6523 if (lang_hooks.types.type_hash_eq == NULL) 6524 return true; 6525 /* type_hash_eq currently only applies to these types. */ 6526 if (TREE_CODE (cand) != FUNCTION_TYPE 6527 && TREE_CODE (cand) != METHOD_TYPE) 6528 return true; 6529 return lang_hooks.types.type_hash_eq (cand, base); 6530 } 6531 6532 /* Returns true iff unqualified CAND and BASE are equivalent. */ 6533 6534 bool 6535 check_base_type (const_tree cand, const_tree base) 6536 { 6537 return (TYPE_NAME (cand) == TYPE_NAME (base) 6538 /* Apparently this is needed for Objective-C. */ 6539 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base) 6540 /* Check alignment. */ 6541 && TYPE_ALIGN (cand) == TYPE_ALIGN (base) 6542 && attribute_list_equal (TYPE_ATTRIBUTES (cand), 6543 TYPE_ATTRIBUTES (base))); 6544 } 6545 6546 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */ 6547 6548 bool 6549 check_qualified_type (const_tree cand, const_tree base, int type_quals) 6550 { 6551 return (TYPE_QUALS (cand) == type_quals 6552 && check_base_type (cand, base) 6553 && check_lang_type (cand, base)); 6554 } 6555 6556 /* Returns true iff CAND is equivalent to BASE with ALIGN. 
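   For instance, CAND may be a variant that an earlier call to
   build_aligned_type (BASE, ALIGN) created and chained onto BASE's
   main variant.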
*/ 6557 6558 static bool 6559 check_aligned_type (const_tree cand, const_tree base, unsigned int align) 6560 { 6561 return (TYPE_QUALS (cand) == TYPE_QUALS (base) 6562 && TYPE_NAME (cand) == TYPE_NAME (base) 6563 /* Apparently this is needed for Objective-C. */ 6564 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base) 6565 /* Check alignment. */ 6566 && TYPE_ALIGN (cand) == align 6567 && attribute_list_equal (TYPE_ATTRIBUTES (cand), 6568 TYPE_ATTRIBUTES (base)) 6569 && check_lang_type (cand, base)); 6570 } 6571 6572 /* This function checks to see if TYPE matches the size one of the built-in 6573 atomic types, and returns that core atomic type. */ 6574 6575 static tree 6576 find_atomic_core_type (tree type) 6577 { 6578 tree base_atomic_type; 6579 6580 /* Only handle complete types. */ 6581 if (TYPE_SIZE (type) == NULL_TREE) 6582 return NULL_TREE; 6583 6584 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type)); 6585 switch (type_size) 6586 { 6587 case 8: 6588 base_atomic_type = atomicQI_type_node; 6589 break; 6590 6591 case 16: 6592 base_atomic_type = atomicHI_type_node; 6593 break; 6594 6595 case 32: 6596 base_atomic_type = atomicSI_type_node; 6597 break; 6598 6599 case 64: 6600 base_atomic_type = atomicDI_type_node; 6601 break; 6602 6603 case 128: 6604 base_atomic_type = atomicTI_type_node; 6605 break; 6606 6607 default: 6608 base_atomic_type = NULL_TREE; 6609 } 6610 6611 return base_atomic_type; 6612 } 6613 6614 /* Return a version of the TYPE, qualified as indicated by the 6615 TYPE_QUALS, if one exists. If no qualified version exists yet, 6616 return NULL_TREE. */ 6617 6618 tree 6619 get_qualified_type (tree type, int type_quals) 6620 { 6621 tree t; 6622 6623 if (TYPE_QUALS (type) == type_quals) 6624 return type; 6625 6626 /* Search the chain of variants to see if there is already one there just 6627 like the one we need to have. If so, use that existing one. We must 6628 preserve the TYPE_NAME, since there is code that depends on this. */ 6629 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) 6630 if (check_qualified_type (t, type, type_quals)) 6631 return t; 6632 6633 return NULL_TREE; 6634 } 6635 6636 /* Like get_qualified_type, but creates the type if it does not 6637 exist. This function never returns NULL_TREE. */ 6638 6639 tree 6640 build_qualified_type (tree type, int type_quals) 6641 { 6642 tree t; 6643 6644 /* See if we already have the appropriate qualified variant. */ 6645 t = get_qualified_type (type, type_quals); 6646 6647 /* If not, build it. */ 6648 if (!t) 6649 { 6650 t = build_variant_type_copy (type); 6651 set_type_quals (t, type_quals); 6652 6653 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)) 6654 { 6655 /* See if this object can map to a basic atomic type. */ 6656 tree atomic_type = find_atomic_core_type (type); 6657 if (atomic_type) 6658 { 6659 /* Ensure the alignment of this type is compatible with 6660 the required alignment of the atomic type. */ 6661 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t)) 6662 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type)); 6663 } 6664 } 6665 6666 if (TYPE_STRUCTURAL_EQUALITY_P (type)) 6667 /* Propagate structural equality. */ 6668 SET_TYPE_STRUCTURAL_EQUALITY (t); 6669 else if (TYPE_CANONICAL (type) != type) 6670 /* Build the underlying canonical type, since it is different 6671 from TYPE. */ 6672 { 6673 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals); 6674 TYPE_CANONICAL (t) = TYPE_CANONICAL (c); 6675 } 6676 else 6677 /* T is its own canonical type. 
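	   (For instance, when the caller builds 'const int' directly from
	   the canonical 'int' node, the new variant becomes the canonical
	   'const int'.)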
*/ 6678 TYPE_CANONICAL (t) = t; 6679 6680 } 6681 6682 return t; 6683 } 6684 6685 /* Create a variant of type T with alignment ALIGN. */ 6686 6687 tree 6688 build_aligned_type (tree type, unsigned int align) 6689 { 6690 tree t; 6691 6692 if (TYPE_PACKED (type) 6693 || TYPE_ALIGN (type) == align) 6694 return type; 6695 6696 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) 6697 if (check_aligned_type (t, type, align)) 6698 return t; 6699 6700 t = build_variant_type_copy (type); 6701 SET_TYPE_ALIGN (t, align); 6702 TYPE_USER_ALIGN (t) = 1; 6703 6704 return t; 6705 } 6706 6707 /* Create a new distinct copy of TYPE. The new type is made its own 6708 MAIN_VARIANT. If TYPE requires structural equality checks, the 6709 resulting type requires structural equality checks; otherwise, its 6710 TYPE_CANONICAL points to itself. */ 6711 6712 tree 6713 build_distinct_type_copy (tree type) 6714 { 6715 tree t = copy_node (type); 6716 6717 TYPE_POINTER_TO (t) = 0; 6718 TYPE_REFERENCE_TO (t) = 0; 6719 6720 /* Set the canonical type either to a new equivalence class, or 6721 propagate the need for structural equality checks. */ 6722 if (TYPE_STRUCTURAL_EQUALITY_P (type)) 6723 SET_TYPE_STRUCTURAL_EQUALITY (t); 6724 else 6725 TYPE_CANONICAL (t) = t; 6726 6727 /* Make it its own variant. */ 6728 TYPE_MAIN_VARIANT (t) = t; 6729 TYPE_NEXT_VARIANT (t) = 0; 6730 6731 /* We do not record methods in type copies nor variants 6732 so we do not need to keep them up to date when new method 6733 is inserted. */ 6734 if (RECORD_OR_UNION_TYPE_P (t)) 6735 TYPE_METHODS (t) = NULL_TREE; 6736 6737 /* Note that it is now possible for TYPE_MIN_VALUE to be a value 6738 whose TREE_TYPE is not t. This can also happen in the Ada 6739 frontend when using subtypes. */ 6740 6741 return t; 6742 } 6743 6744 /* Create a new variant of TYPE, equivalent but distinct. This is so 6745 the caller can modify it. TYPE_CANONICAL for the return type will 6746 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types 6747 are considered equal by the language itself (or that both types 6748 require structural equality checks). */ 6749 6750 tree 6751 build_variant_type_copy (tree type) 6752 { 6753 tree t, m = TYPE_MAIN_VARIANT (type); 6754 6755 t = build_distinct_type_copy (type); 6756 6757 /* Since we're building a variant, assume that it is a non-semantic 6758 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */ 6759 TYPE_CANONICAL (t) = TYPE_CANONICAL (type); 6760 /* Type variants have no alias set defined. */ 6761 TYPE_ALIAS_SET (t) = -1; 6762 6763 /* Add the new type to the chain of variants of TYPE. */ 6764 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m); 6765 TYPE_NEXT_VARIANT (m) = t; 6766 TYPE_MAIN_VARIANT (t) = m; 6767 6768 return t; 6769 } 6770 6771 /* Return true if the from tree in both tree maps are equal. */ 6772 6773 int 6774 tree_map_base_eq (const void *va, const void *vb) 6775 { 6776 const struct tree_map_base *const a = (const struct tree_map_base *) va, 6777 *const b = (const struct tree_map_base *) vb; 6778 return (a->from == b->from); 6779 } 6780 6781 /* Hash a from tree in a tree_base_map. */ 6782 6783 unsigned int 6784 tree_map_base_hash (const void *item) 6785 { 6786 return htab_hash_pointer (((const struct tree_map_base *)item)->from); 6787 } 6788 6789 /* Return true if this tree map structure is marked for garbage collection 6790 purposes. We simply return true if the from tree is marked, so that this 6791 structure goes away when the from tree goes away. 
*/ 6792 6793 int 6794 tree_map_base_marked_p (const void *p) 6795 { 6796 return ggc_marked_p (((const struct tree_map_base *) p)->from); 6797 } 6798 6799 /* Hash a from tree in a tree_map. */ 6800 6801 unsigned int 6802 tree_map_hash (const void *item) 6803 { 6804 return (((const struct tree_map *) item)->hash); 6805 } 6806 6807 /* Hash a from tree in a tree_decl_map. */ 6808 6809 unsigned int 6810 tree_decl_map_hash (const void *item) 6811 { 6812 return DECL_UID (((const struct tree_decl_map *) item)->base.from); 6813 } 6814 6815 /* Return the initialization priority for DECL. */ 6816 6817 priority_type 6818 decl_init_priority_lookup (tree decl) 6819 { 6820 symtab_node *snode = symtab_node::get (decl); 6821 6822 if (!snode) 6823 return DEFAULT_INIT_PRIORITY; 6824 return 6825 snode->get_init_priority (); 6826 } 6827 6828 /* Return the finalization priority for DECL. */ 6829 6830 priority_type 6831 decl_fini_priority_lookup (tree decl) 6832 { 6833 cgraph_node *node = cgraph_node::get (decl); 6834 6835 if (!node) 6836 return DEFAULT_INIT_PRIORITY; 6837 return 6838 node->get_fini_priority (); 6839 } 6840 6841 /* Set the initialization priority for DECL to PRIORITY. */ 6842 6843 void 6844 decl_init_priority_insert (tree decl, priority_type priority) 6845 { 6846 struct symtab_node *snode; 6847 6848 if (priority == DEFAULT_INIT_PRIORITY) 6849 { 6850 snode = symtab_node::get (decl); 6851 if (!snode) 6852 return; 6853 } 6854 else if (VAR_P (decl)) 6855 snode = varpool_node::get_create (decl); 6856 else 6857 snode = cgraph_node::get_create (decl); 6858 snode->set_init_priority (priority); 6859 } 6860 6861 /* Set the finalization priority for DECL to PRIORITY. */ 6862 6863 void 6864 decl_fini_priority_insert (tree decl, priority_type priority) 6865 { 6866 struct cgraph_node *node; 6867 6868 if (priority == DEFAULT_INIT_PRIORITY) 6869 { 6870 node = cgraph_node::get (decl); 6871 if (!node) 6872 return; 6873 } 6874 else 6875 node = cgraph_node::get_create (decl); 6876 node->set_fini_priority (priority); 6877 } 6878 6879 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */ 6880 6881 static void 6882 print_debug_expr_statistics (void) 6883 { 6884 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n", 6885 (long) debug_expr_for_decl->size (), 6886 (long) debug_expr_for_decl->elements (), 6887 debug_expr_for_decl->collisions ()); 6888 } 6889 6890 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */ 6891 6892 static void 6893 print_value_expr_statistics (void) 6894 { 6895 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n", 6896 (long) value_expr_for_decl->size (), 6897 (long) value_expr_for_decl->elements (), 6898 value_expr_for_decl->collisions ()); 6899 } 6900 6901 /* Lookup a debug expression for FROM, and return it if we find one. */ 6902 6903 tree 6904 decl_debug_expr_lookup (tree from) 6905 { 6906 struct tree_decl_map *h, in; 6907 in.base.from = from; 6908 6909 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from)); 6910 if (h) 6911 return h->to; 6912 return NULL_TREE; 6913 } 6914 6915 /* Insert a mapping FROM->TO in the debug expression hashtable. */ 6916 6917 void 6918 decl_debug_expr_insert (tree from, tree to) 6919 { 6920 struct tree_decl_map *h; 6921 6922 h = ggc_alloc<tree_decl_map> (); 6923 h->base.from = from; 6924 h->to = to; 6925 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h; 6926 } 6927 6928 /* Lookup a value expression for FROM, and return it if we find one. 
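   For instance, a local variable that has been rewritten to live in a
   nested-function frame object typically has a DECL_VALUE_EXPR recorded
   here that expands to the COMPONENT_REF used to access the
   corresponding frame field.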
*/ 6929 6930 tree 6931 decl_value_expr_lookup (tree from) 6932 { 6933 struct tree_decl_map *h, in; 6934 in.base.from = from; 6935 6936 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from)); 6937 if (h) 6938 return h->to; 6939 return NULL_TREE; 6940 } 6941 6942 /* Insert a mapping FROM->TO in the value expression hashtable. */ 6943 6944 void 6945 decl_value_expr_insert (tree from, tree to) 6946 { 6947 struct tree_decl_map *h; 6948 6949 h = ggc_alloc<tree_decl_map> (); 6950 h->base.from = from; 6951 h->to = to; 6952 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h; 6953 } 6954 6955 /* Lookup a vector of debug arguments for FROM, and return it if we 6956 find one. */ 6957 6958 vec<tree, va_gc> ** 6959 decl_debug_args_lookup (tree from) 6960 { 6961 struct tree_vec_map *h, in; 6962 6963 if (!DECL_HAS_DEBUG_ARGS_P (from)) 6964 return NULL; 6965 gcc_checking_assert (debug_args_for_decl != NULL); 6966 in.base.from = from; 6967 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from)); 6968 if (h) 6969 return &h->to; 6970 return NULL; 6971 } 6972 6973 /* Insert a mapping FROM->empty vector of debug arguments in the value 6974 expression hashtable. */ 6975 6976 vec<tree, va_gc> ** 6977 decl_debug_args_insert (tree from) 6978 { 6979 struct tree_vec_map *h; 6980 tree_vec_map **loc; 6981 6982 if (DECL_HAS_DEBUG_ARGS_P (from)) 6983 return decl_debug_args_lookup (from); 6984 if (debug_args_for_decl == NULL) 6985 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64); 6986 h = ggc_alloc<tree_vec_map> (); 6987 h->base.from = from; 6988 h->to = NULL; 6989 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT); 6990 *loc = h; 6991 DECL_HAS_DEBUG_ARGS_P (from) = 1; 6992 return &h->to; 6993 } 6994 6995 /* Hashing of types so that we don't make duplicates. 6996 The entry point is `type_hash_canon'. */ 6997 6998 /* Compute a hash code for a list of types (chain of TREE_LIST nodes 6999 with types in the TREE_VALUE slots), by adding the hash codes 7000 of the individual types. */ 7001 7002 static void 7003 type_hash_list (const_tree list, inchash::hash &hstate) 7004 { 7005 const_tree tail; 7006 7007 for (tail = list; tail; tail = TREE_CHAIN (tail)) 7008 if (TREE_VALUE (tail) != error_mark_node) 7009 hstate.add_object (TYPE_HASH (TREE_VALUE (tail))); 7010 } 7011 7012 /* These are the Hashtable callback functions. */ 7013 7014 /* Returns true iff the types are equivalent. */ 7015 7016 bool 7017 type_cache_hasher::equal (type_hash *a, type_hash *b) 7018 { 7019 /* First test the things that are the same for all types. */ 7020 if (a->hash != b->hash 7021 || TREE_CODE (a->type) != TREE_CODE (b->type) 7022 || TREE_TYPE (a->type) != TREE_TYPE (b->type) 7023 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type), 7024 TYPE_ATTRIBUTES (b->type)) 7025 || (TREE_CODE (a->type) != COMPLEX_TYPE 7026 && TYPE_NAME (a->type) != TYPE_NAME (b->type))) 7027 return 0; 7028 7029 /* Be careful about comparing arrays before and after the element type 7030 has been completed; don't compare TYPE_ALIGN unless both types are 7031 complete. 
*/ 7032 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type) 7033 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type) 7034 || TYPE_MODE (a->type) != TYPE_MODE (b->type))) 7035 return 0; 7036 7037 switch (TREE_CODE (a->type)) 7038 { 7039 case VOID_TYPE: 7040 case COMPLEX_TYPE: 7041 case POINTER_TYPE: 7042 case REFERENCE_TYPE: 7043 case NULLPTR_TYPE: 7044 return 1; 7045 7046 case VECTOR_TYPE: 7047 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type); 7048 7049 case ENUMERAL_TYPE: 7050 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type) 7051 && !(TYPE_VALUES (a->type) 7052 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST 7053 && TYPE_VALUES (b->type) 7054 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST 7055 && type_list_equal (TYPE_VALUES (a->type), 7056 TYPE_VALUES (b->type)))) 7057 return 0; 7058 7059 /* fall through */ 7060 7061 case INTEGER_TYPE: 7062 case REAL_TYPE: 7063 case BOOLEAN_TYPE: 7064 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type)) 7065 return false; 7066 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type) 7067 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type), 7068 TYPE_MAX_VALUE (b->type))) 7069 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type) 7070 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type), 7071 TYPE_MIN_VALUE (b->type)))); 7072 7073 case FIXED_POINT_TYPE: 7074 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type); 7075 7076 case OFFSET_TYPE: 7077 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type); 7078 7079 case METHOD_TYPE: 7080 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type) 7081 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type) 7082 || (TYPE_ARG_TYPES (a->type) 7083 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST 7084 && TYPE_ARG_TYPES (b->type) 7085 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST 7086 && type_list_equal (TYPE_ARG_TYPES (a->type), 7087 TYPE_ARG_TYPES (b->type))))) 7088 break; 7089 return 0; 7090 case ARRAY_TYPE: 7091 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates, 7092 where the flag should be inherited from the element type 7093 and can change after ARRAY_TYPEs are created; on non-aggregates 7094 compare it and hash it, scalars will never have that flag set 7095 and we need to differentiate between arrays created by different 7096 front-ends or middle-end created arrays. 
*/ 7097 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type) 7098 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type)) 7099 || (TYPE_TYPELESS_STORAGE (a->type) 7100 == TYPE_TYPELESS_STORAGE (b->type)))); 7101 7102 case RECORD_TYPE: 7103 case UNION_TYPE: 7104 case QUAL_UNION_TYPE: 7105 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type) 7106 || (TYPE_FIELDS (a->type) 7107 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST 7108 && TYPE_FIELDS (b->type) 7109 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST 7110 && type_list_equal (TYPE_FIELDS (a->type), 7111 TYPE_FIELDS (b->type)))); 7112 7113 case FUNCTION_TYPE: 7114 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type) 7115 || (TYPE_ARG_TYPES (a->type) 7116 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST 7117 && TYPE_ARG_TYPES (b->type) 7118 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST 7119 && type_list_equal (TYPE_ARG_TYPES (a->type), 7120 TYPE_ARG_TYPES (b->type)))) 7121 break; 7122 return 0; 7123 7124 default: 7125 return 0; 7126 } 7127 7128 if (lang_hooks.types.type_hash_eq != NULL) 7129 return lang_hooks.types.type_hash_eq (a->type, b->type); 7130 7131 return 1; 7132 } 7133 7134 /* Given TYPE, and HASHCODE its hash code, return the canonical 7135 object for an identical type if one already exists. 7136 Otherwise, return TYPE, and record it as the canonical object. 7137 7138 To use this function, first create a type of the sort you want. 7139 Then compute its hash code from the fields of the type that 7140 make it different from other similar types. 7141 Then call this function and use the value. */ 7142 7143 tree 7144 type_hash_canon (unsigned int hashcode, tree type) 7145 { 7146 type_hash in; 7147 type_hash **loc; 7148 7149 /* The hash table only contains main variants, so ensure that's what we're 7150 being passed. */ 7151 gcc_assert (TYPE_MAIN_VARIANT (type) == type); 7152 7153 /* The TYPE_ALIGN field of a type is set by layout_type(), so we 7154 must call that routine before comparing TYPE_ALIGNs. */ 7155 layout_type (type); 7156 7157 in.hash = hashcode; 7158 in.type = type; 7159 7160 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT); 7161 if (*loc) 7162 { 7163 tree t1 = ((type_hash *) *loc)->type; 7164 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1); 7165 free_node (type); 7166 return t1; 7167 } 7168 else 7169 { 7170 struct type_hash *h; 7171 7172 h = ggc_alloc<type_hash> (); 7173 h->hash = hashcode; 7174 h->type = type; 7175 *loc = h; 7176 7177 return type; 7178 } 7179 } 7180 7181 static void 7182 print_type_hash_statistics (void) 7183 { 7184 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n", 7185 (long) type_hash_table->size (), 7186 (long) type_hash_table->elements (), 7187 type_hash_table->collisions ()); 7188 } 7189 7190 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes 7191 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots), 7192 by adding the hash codes of the individual attributes. */ 7193 7194 static void 7195 attribute_hash_list (const_tree list, inchash::hash &hstate) 7196 { 7197 const_tree tail; 7198 7199 for (tail = list; tail; tail = TREE_CHAIN (tail)) 7200 /* ??? Do we want to add in TREE_VALUE too? */ 7201 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail))); 7202 } 7203 7204 /* Given two lists of attributes, return true if list l2 is 7205 equivalent to l1. 
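   Order is irrelevant; for example, { noreturn, aligned (4) } and
   { aligned (4), noreturn } compare equal, while { noreturn } and
   { noreturn, aligned (4) } do not.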
*/ 7206 7207 int 7208 attribute_list_equal (const_tree l1, const_tree l2) 7209 { 7210 if (l1 == l2) 7211 return 1; 7212 7213 return attribute_list_contained (l1, l2) 7214 && attribute_list_contained (l2, l1); 7215 } 7216 7217 /* Given two lists of attributes, return true if list L2 is 7218 completely contained within L1. */ 7219 /* ??? This would be faster if attribute names were stored in a canonicalized 7220 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method 7221 must be used to show these elements are equivalent (which they are). */ 7222 /* ??? It's not clear that attributes with arguments will always be handled 7223 correctly. */ 7224 7225 int 7226 attribute_list_contained (const_tree l1, const_tree l2) 7227 { 7228 const_tree t1, t2; 7229 7230 /* First check the obvious, maybe the lists are identical. */ 7231 if (l1 == l2) 7232 return 1; 7233 7234 /* Maybe the lists are similar. */ 7235 for (t1 = l1, t2 = l2; 7236 t1 != 0 && t2 != 0 7237 && get_attribute_name (t1) == get_attribute_name (t2) 7238 && TREE_VALUE (t1) == TREE_VALUE (t2); 7239 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2)) 7240 ; 7241 7242 /* Maybe the lists are equal. */ 7243 if (t1 == 0 && t2 == 0) 7244 return 1; 7245 7246 for (; t2 != 0; t2 = TREE_CHAIN (t2)) 7247 { 7248 const_tree attr; 7249 /* This CONST_CAST is okay because lookup_attribute does not 7250 modify its argument and the return value is assigned to a 7251 const_tree. */ 7252 for (attr = lookup_ident_attribute (get_attribute_name (t2), 7253 CONST_CAST_TREE (l1)); 7254 attr != NULL_TREE && !attribute_value_equal (t2, attr); 7255 attr = lookup_ident_attribute (get_attribute_name (t2), 7256 TREE_CHAIN (attr))) 7257 ; 7258 7259 if (attr == NULL_TREE) 7260 return 0; 7261 } 7262 7263 return 1; 7264 } 7265 7266 /* Given two lists of types 7267 (chains of TREE_LIST nodes with types in the TREE_VALUE slots) 7268 return 1 if the lists contain the same types in the same order. 7269 Also, the TREE_PURPOSEs must match. */ 7270 7271 int 7272 type_list_equal (const_tree l1, const_tree l2) 7273 { 7274 const_tree t1, t2; 7275 7276 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2)) 7277 if (TREE_VALUE (t1) != TREE_VALUE (t2) 7278 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2) 7279 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2)) 7280 && (TREE_TYPE (TREE_PURPOSE (t1)) 7281 == TREE_TYPE (TREE_PURPOSE (t2)))))) 7282 return 0; 7283 7284 return t1 == t2; 7285 } 7286 7287 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE 7288 given by TYPE. If the argument list accepts variable arguments, 7289 then this function counts only the ordinary arguments. */ 7290 7291 int 7292 type_num_arguments (const_tree type) 7293 { 7294 int i = 0; 7295 tree t; 7296 7297 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t)) 7298 /* If the function does not take a variable number of arguments, 7299 the last element in the list will have type `void'. */ 7300 if (VOID_TYPE_P (TREE_VALUE (t))) 7301 break; 7302 else 7303 ++i; 7304 7305 return i; 7306 } 7307 7308 /* Nonzero if integer constants T1 and T2 7309 represent the same constant value. 
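   The comparison is by (sign- or zero-extended) value, not by type,
   so e.g. (char) 7 and (long) 7 compare equal.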
*/ 7310 7311 int 7312 tree_int_cst_equal (const_tree t1, const_tree t2) 7313 { 7314 if (t1 == t2) 7315 return 1; 7316 7317 if (t1 == 0 || t2 == 0) 7318 return 0; 7319 7320 if (TREE_CODE (t1) == INTEGER_CST 7321 && TREE_CODE (t2) == INTEGER_CST 7322 && wi::to_widest (t1) == wi::to_widest (t2)) 7323 return 1; 7324 7325 return 0; 7326 } 7327 7328 /* Return true if T is an INTEGER_CST whose numerical value (extended 7329 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */ 7330 7331 bool 7332 tree_fits_shwi_p (const_tree t) 7333 { 7334 return (t != NULL_TREE 7335 && TREE_CODE (t) == INTEGER_CST 7336 && wi::fits_shwi_p (wi::to_widest (t))); 7337 } 7338 7339 /* Return true if T is an INTEGER_CST whose numerical value (extended 7340 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */ 7341 7342 bool 7343 tree_fits_uhwi_p (const_tree t) 7344 { 7345 return (t != NULL_TREE 7346 && TREE_CODE (t) == INTEGER_CST 7347 && wi::fits_uhwi_p (wi::to_widest (t))); 7348 } 7349 7350 /* T is an INTEGER_CST whose numerical value (extended according to 7351 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that 7352 HOST_WIDE_INT. */ 7353 7354 HOST_WIDE_INT 7355 tree_to_shwi (const_tree t) 7356 { 7357 gcc_assert (tree_fits_shwi_p (t)); 7358 return TREE_INT_CST_LOW (t); 7359 } 7360 7361 /* T is an INTEGER_CST whose numerical value (extended according to 7362 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that 7363 HOST_WIDE_INT. */ 7364 7365 unsigned HOST_WIDE_INT 7366 tree_to_uhwi (const_tree t) 7367 { 7368 gcc_assert (tree_fits_uhwi_p (t)); 7369 return TREE_INT_CST_LOW (t); 7370 } 7371 7372 /* Return the most significant (sign) bit of T. */ 7373 7374 int 7375 tree_int_cst_sign_bit (const_tree t) 7376 { 7377 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1; 7378 7379 return wi::extract_uhwi (t, bitno, 1); 7380 } 7381 7382 /* Return an indication of the sign of the integer constant T. 7383 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0. 7384 Note that -1 will never be returned if T's type is unsigned. */ 7385 7386 int 7387 tree_int_cst_sgn (const_tree t) 7388 { 7389 if (wi::eq_p (t, 0)) 7390 return 0; 7391 else if (TYPE_UNSIGNED (TREE_TYPE (t))) 7392 return 1; 7393 else if (wi::neg_p (t)) 7394 return -1; 7395 else 7396 return 1; 7397 } 7398 7399 /* Return the minimum number of bits needed to represent VALUE in a 7400 signed or unsigned type, UNSIGNEDP says which. */ 7401 7402 unsigned int 7403 tree_int_cst_min_precision (tree value, signop sgn) 7404 { 7405 /* If the value is negative, compute its negative minus 1. The latter 7406 adjustment is because the absolute value of the largest negative value 7407 is one larger than the largest positive value. This is equivalent to 7408 a bit-wise negation, so use that operation instead. */ 7409 7410 if (tree_int_cst_sgn (value) < 0) 7411 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value); 7412 7413 /* Return the number of bits needed, taking into account the fact 7414 that we need one more bit for a signed than unsigned type. 7415 If value is 0 or -1, the minimum precision is 1 no matter 7416 whether unsignedp is true or false. */ 7417 7418 if (integer_zerop (value)) 7419 return 1; 7420 else 7421 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ; 7422 } 7423 7424 /* Return truthvalue of whether T1 is the same tree structure as T2. 7425 Return 1 if they are the same. 7426 Return 0 if they are understandably different. 
7427 Return -1 if either contains tree structure not understood by 7428 this function. */ 7429 7430 int 7431 simple_cst_equal (const_tree t1, const_tree t2) 7432 { 7433 enum tree_code code1, code2; 7434 int cmp; 7435 int i; 7436 7437 if (t1 == t2) 7438 return 1; 7439 if (t1 == 0 || t2 == 0) 7440 return 0; 7441 7442 code1 = TREE_CODE (t1); 7443 code2 = TREE_CODE (t2); 7444 7445 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR) 7446 { 7447 if (CONVERT_EXPR_CODE_P (code2) 7448 || code2 == NON_LVALUE_EXPR) 7449 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7450 else 7451 return simple_cst_equal (TREE_OPERAND (t1, 0), t2); 7452 } 7453 7454 else if (CONVERT_EXPR_CODE_P (code2) 7455 || code2 == NON_LVALUE_EXPR) 7456 return simple_cst_equal (t1, TREE_OPERAND (t2, 0)); 7457 7458 if (code1 != code2) 7459 return 0; 7460 7461 switch (code1) 7462 { 7463 case INTEGER_CST: 7464 return wi::to_widest (t1) == wi::to_widest (t2); 7465 7466 case REAL_CST: 7467 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2)); 7468 7469 case FIXED_CST: 7470 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2)); 7471 7472 case STRING_CST: 7473 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2) 7474 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), 7475 TREE_STRING_LENGTH (t1))); 7476 7477 case CONSTRUCTOR: 7478 { 7479 unsigned HOST_WIDE_INT idx; 7480 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1); 7481 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2); 7482 7483 if (vec_safe_length (v1) != vec_safe_length (v2)) 7484 return false; 7485 7486 for (idx = 0; idx < vec_safe_length (v1); ++idx) 7487 /* ??? Should we handle also fields here? */ 7488 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value)) 7489 return false; 7490 return true; 7491 } 7492 7493 case SAVE_EXPR: 7494 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7495 7496 case CALL_EXPR: 7497 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2)); 7498 if (cmp <= 0) 7499 return cmp; 7500 if (call_expr_nargs (t1) != call_expr_nargs (t2)) 7501 return 0; 7502 { 7503 const_tree arg1, arg2; 7504 const_call_expr_arg_iterator iter1, iter2; 7505 for (arg1 = first_const_call_expr_arg (t1, &iter1), 7506 arg2 = first_const_call_expr_arg (t2, &iter2); 7507 arg1 && arg2; 7508 arg1 = next_const_call_expr_arg (&iter1), 7509 arg2 = next_const_call_expr_arg (&iter2)) 7510 { 7511 cmp = simple_cst_equal (arg1, arg2); 7512 if (cmp <= 0) 7513 return cmp; 7514 } 7515 return arg1 == arg2; 7516 } 7517 7518 case TARGET_EXPR: 7519 /* Special case: if either target is an unallocated VAR_DECL, 7520 it means that it's going to be unified with whatever the 7521 TARGET_EXPR is really supposed to initialize, so treat it 7522 as being equivalent to anything. 
*/ 7523 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL 7524 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE 7525 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0))) 7526 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL 7527 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE 7528 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0)))) 7529 cmp = 1; 7530 else 7531 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7532 7533 if (cmp <= 0) 7534 return cmp; 7535 7536 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); 7537 7538 case WITH_CLEANUP_EXPR: 7539 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7540 if (cmp <= 0) 7541 return cmp; 7542 7543 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1)); 7544 7545 case COMPONENT_REF: 7546 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1)) 7547 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7548 7549 return 0; 7550 7551 case VAR_DECL: 7552 case PARM_DECL: 7553 case CONST_DECL: 7554 case FUNCTION_DECL: 7555 return 0; 7556 7557 default: 7558 break; 7559 } 7560 7561 /* This general rule works for most tree codes. All exceptions should be 7562 handled above. If this is a language-specific tree code, we can't 7563 trust what might be in the operand, so say we don't know 7564 the situation. */ 7565 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE) 7566 return -1; 7567 7568 switch (TREE_CODE_CLASS (code1)) 7569 { 7570 case tcc_unary: 7571 case tcc_binary: 7572 case tcc_comparison: 7573 case tcc_expression: 7574 case tcc_reference: 7575 case tcc_statement: 7576 cmp = 1; 7577 for (i = 0; i < TREE_CODE_LENGTH (code1); i++) 7578 { 7579 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i)); 7580 if (cmp <= 0) 7581 return cmp; 7582 } 7583 7584 return cmp; 7585 7586 default: 7587 return -1; 7588 } 7589 } 7590 7591 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value. 7592 Return -1, 0, or 1 if the value of T is less than, equal to, or greater 7593 than U, respectively. */ 7594 7595 int 7596 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u) 7597 { 7598 if (tree_int_cst_sgn (t) < 0) 7599 return -1; 7600 else if (!tree_fits_uhwi_p (t)) 7601 return 1; 7602 else if (TREE_INT_CST_LOW (t) == u) 7603 return 0; 7604 else if (TREE_INT_CST_LOW (t) < u) 7605 return -1; 7606 else 7607 return 1; 7608 } 7609 7610 /* Return true if SIZE represents a constant size that is in bounds of 7611 what the middle-end and the backend accepts (covering not more than 7612 half of the address-space). */ 7613 7614 bool 7615 valid_constant_size_p (const_tree size) 7616 { 7617 if (! tree_fits_uhwi_p (size) 7618 || TREE_OVERFLOW (size) 7619 || tree_int_cst_sign_bit (size) != 0) 7620 return false; 7621 return true; 7622 } 7623 7624 /* Return the precision of the type, or for a complex or vector type the 7625 precision of the type of its elements. */ 7626 7627 unsigned int 7628 element_precision (const_tree type) 7629 { 7630 if (!TYPE_P (type)) 7631 type = TREE_TYPE (type); 7632 enum tree_code code = TREE_CODE (type); 7633 if (code == COMPLEX_TYPE || code == VECTOR_TYPE) 7634 type = TREE_TYPE (type); 7635 7636 return TYPE_PRECISION (type); 7637 } 7638 7639 /* Return true if CODE represents an associative tree code. Otherwise 7640 return false. 
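   For example, PLUS_EXPR is associative ((a + b) + c computes the same
   value as a + (b + c), overflow questions aside), whereas MINUS_EXPR
   is not and therefore does not appear below.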
*/ 7641 bool 7642 associative_tree_code (enum tree_code code) 7643 { 7644 switch (code) 7645 { 7646 case BIT_IOR_EXPR: 7647 case BIT_AND_EXPR: 7648 case BIT_XOR_EXPR: 7649 case PLUS_EXPR: 7650 case MULT_EXPR: 7651 case MIN_EXPR: 7652 case MAX_EXPR: 7653 return true; 7654 7655 default: 7656 break; 7657 } 7658 return false; 7659 } 7660 7661 /* Return true if CODE represents a commutative tree code. Otherwise 7662 return false. */ 7663 bool 7664 commutative_tree_code (enum tree_code code) 7665 { 7666 switch (code) 7667 { 7668 case PLUS_EXPR: 7669 case MULT_EXPR: 7670 case MULT_HIGHPART_EXPR: 7671 case MIN_EXPR: 7672 case MAX_EXPR: 7673 case BIT_IOR_EXPR: 7674 case BIT_XOR_EXPR: 7675 case BIT_AND_EXPR: 7676 case NE_EXPR: 7677 case EQ_EXPR: 7678 case UNORDERED_EXPR: 7679 case ORDERED_EXPR: 7680 case UNEQ_EXPR: 7681 case LTGT_EXPR: 7682 case TRUTH_AND_EXPR: 7683 case TRUTH_XOR_EXPR: 7684 case TRUTH_OR_EXPR: 7685 case WIDEN_MULT_EXPR: 7686 case VEC_WIDEN_MULT_HI_EXPR: 7687 case VEC_WIDEN_MULT_LO_EXPR: 7688 case VEC_WIDEN_MULT_EVEN_EXPR: 7689 case VEC_WIDEN_MULT_ODD_EXPR: 7690 return true; 7691 7692 default: 7693 break; 7694 } 7695 return false; 7696 } 7697 7698 /* Return true if CODE represents a ternary tree code for which the 7699 first two operands are commutative. Otherwise return false. */ 7700 bool 7701 commutative_ternary_tree_code (enum tree_code code) 7702 { 7703 switch (code) 7704 { 7705 case WIDEN_MULT_PLUS_EXPR: 7706 case WIDEN_MULT_MINUS_EXPR: 7707 case DOT_PROD_EXPR: 7708 case FMA_EXPR: 7709 return true; 7710 7711 default: 7712 break; 7713 } 7714 return false; 7715 } 7716 7717 /* Returns true if CODE can overflow. */ 7718 7719 bool 7720 operation_can_overflow (enum tree_code code) 7721 { 7722 switch (code) 7723 { 7724 case PLUS_EXPR: 7725 case MINUS_EXPR: 7726 case MULT_EXPR: 7727 case LSHIFT_EXPR: 7728 /* Can overflow in various ways. */ 7729 return true; 7730 case TRUNC_DIV_EXPR: 7731 case EXACT_DIV_EXPR: 7732 case FLOOR_DIV_EXPR: 7733 case CEIL_DIV_EXPR: 7734 /* For INT_MIN / -1. */ 7735 return true; 7736 case NEGATE_EXPR: 7737 case ABS_EXPR: 7738 /* For -INT_MIN. */ 7739 return true; 7740 default: 7741 /* These operators cannot overflow. */ 7742 return false; 7743 } 7744 } 7745 7746 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or 7747 ftrapv doesn't generate trapping insns for CODE. */ 7748 7749 bool 7750 operation_no_trapping_overflow (tree type, enum tree_code code) 7751 { 7752 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type)); 7753 7754 /* We don't generate instructions that trap on overflow for complex or vector 7755 types. */ 7756 if (!INTEGRAL_TYPE_P (type)) 7757 return true; 7758 7759 if (!TYPE_OVERFLOW_TRAPS (type)) 7760 return true; 7761 7762 switch (code) 7763 { 7764 case PLUS_EXPR: 7765 case MINUS_EXPR: 7766 case MULT_EXPR: 7767 case NEGATE_EXPR: 7768 case ABS_EXPR: 7769 /* These operators can overflow, and -ftrapv generates trapping code for 7770 these. */ 7771 return false; 7772 case TRUNC_DIV_EXPR: 7773 case EXACT_DIV_EXPR: 7774 case FLOOR_DIV_EXPR: 7775 case CEIL_DIV_EXPR: 7776 case LSHIFT_EXPR: 7777 /* These operators can overflow, but -ftrapv does not generate trapping 7778 code for these. */ 7779 return true; 7780 default: 7781 /* These operators cannot overflow. */ 7782 return true; 7783 } 7784 } 7785 7786 namespace inchash 7787 { 7788 7789 /* Generate a hash value for an expression. This can be used iteratively 7790 by passing a previous result as the HSTATE argument. 
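   A typical caller mixes several operands into one running state,
   e.g. (OP0 and OP1 stand for whatever trees are being hashed):

	 inchash::hash hstate;
	 inchash::add_expr (op0, hstate, 0);
	 inchash::add_expr (op1, hstate, 0);
	 hashval_t h = hstate.end ();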
7791 7792 This function is intended to produce the same hash for expressions which 7793 would compare equal using operand_equal_p. */ 7794 void 7795 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags) 7796 { 7797 int i; 7798 enum tree_code code; 7799 enum tree_code_class tclass; 7800 7801 if (t == NULL_TREE || t == error_mark_node) 7802 { 7803 hstate.merge_hash (0); 7804 return; 7805 } 7806 7807 if (!(flags & OEP_ADDRESS_OF)) 7808 STRIP_NOPS (t); 7809 7810 code = TREE_CODE (t); 7811 7812 switch (code) 7813 { 7814 /* Alas, constants aren't shared, so we can't rely on pointer 7815 identity. */ 7816 case VOID_CST: 7817 hstate.merge_hash (0); 7818 return; 7819 case INTEGER_CST: 7820 gcc_checking_assert (!(flags & OEP_ADDRESS_OF)); 7821 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++) 7822 hstate.add_wide_int (TREE_INT_CST_ELT (t, i)); 7823 return; 7824 case REAL_CST: 7825 { 7826 unsigned int val2; 7827 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t)) 7828 val2 = rvc_zero; 7829 else 7830 val2 = real_hash (TREE_REAL_CST_PTR (t)); 7831 hstate.merge_hash (val2); 7832 return; 7833 } 7834 case FIXED_CST: 7835 { 7836 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t)); 7837 hstate.merge_hash (val2); 7838 return; 7839 } 7840 case STRING_CST: 7841 hstate.add ((const void *) TREE_STRING_POINTER (t), 7842 TREE_STRING_LENGTH (t)); 7843 return; 7844 case COMPLEX_CST: 7845 inchash::add_expr (TREE_REALPART (t), hstate, flags); 7846 inchash::add_expr (TREE_IMAGPART (t), hstate, flags); 7847 return; 7848 case VECTOR_CST: 7849 { 7850 unsigned i; 7851 for (i = 0; i < VECTOR_CST_NELTS (t); ++i) 7852 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags); 7853 return; 7854 } 7855 case SSA_NAME: 7856 /* We can just compare by pointer. */ 7857 hstate.add_wide_int (SSA_NAME_VERSION (t)); 7858 return; 7859 case PLACEHOLDER_EXPR: 7860 /* The node itself doesn't matter. */ 7861 return; 7862 case BLOCK: 7863 case OMP_CLAUSE: 7864 /* Ignore. */ 7865 return; 7866 case TREE_LIST: 7867 /* A list of expressions, for a CALL_EXPR or as the elements of a 7868 VECTOR_CST. */ 7869 for (; t; t = TREE_CHAIN (t)) 7870 inchash::add_expr (TREE_VALUE (t), hstate, flags); 7871 return; 7872 case CONSTRUCTOR: 7873 { 7874 unsigned HOST_WIDE_INT idx; 7875 tree field, value; 7876 flags &= ~OEP_ADDRESS_OF; 7877 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value) 7878 { 7879 inchash::add_expr (field, hstate, flags); 7880 inchash::add_expr (value, hstate, flags); 7881 } 7882 return; 7883 } 7884 case STATEMENT_LIST: 7885 { 7886 tree_stmt_iterator i; 7887 for (i = tsi_start (CONST_CAST_TREE (t)); 7888 !tsi_end_p (i); tsi_next (&i)) 7889 inchash::add_expr (tsi_stmt (i), hstate, flags); 7890 return; 7891 } 7892 case TREE_VEC: 7893 for (i = 0; i < TREE_VEC_LENGTH (t); ++i) 7894 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags); 7895 return; 7896 case IDENTIFIER_NODE: 7897 hstate.add_object (IDENTIFIER_HASH_VALUE (t)); 7898 return; 7899 case FUNCTION_DECL: 7900 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form. 7901 Otherwise nodes that compare equal according to operand_equal_p might 7902 get different hash codes. However, don't do this for machine specific 7903 or front end builtins, since the function code is overloaded in those 7904 cases. 
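	 For instance, the user-visible 'abs' decl and '__builtin_abs'
	 both carry DECL_FUNCTION_CODE == BUILT_IN_ABS, so both are
	 replaced by the decl builtin_decl_explicit returns and therefore
	 hash identically.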
*/ 7905 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL 7906 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t))) 7907 { 7908 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t)); 7909 code = TREE_CODE (t); 7910 } 7911 /* FALL THROUGH */ 7912 default: 7913 tclass = TREE_CODE_CLASS (code); 7914 7915 if (tclass == tcc_declaration) 7916 { 7917 /* DECL's have a unique ID */ 7918 hstate.add_wide_int (DECL_UID (t)); 7919 } 7920 else if (tclass == tcc_comparison && !commutative_tree_code (code)) 7921 { 7922 /* For comparisons that can be swapped, use the lower 7923 tree code. */ 7924 enum tree_code ccode = swap_tree_comparison (code); 7925 if (code < ccode) 7926 ccode = code; 7927 hstate.add_object (ccode); 7928 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags); 7929 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags); 7930 } 7931 else if (CONVERT_EXPR_CODE_P (code)) 7932 { 7933 /* NOP_EXPR and CONVERT_EXPR are considered equal by 7934 operand_equal_p. */ 7935 enum tree_code ccode = NOP_EXPR; 7936 hstate.add_object (ccode); 7937 7938 /* Don't hash the type, that can lead to having nodes which 7939 compare equal according to operand_equal_p, but which 7940 have different hash codes. Make sure to include signedness 7941 in the hash computation. */ 7942 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t))); 7943 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags); 7944 } 7945 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */ 7946 else if (code == MEM_REF 7947 && (flags & OEP_ADDRESS_OF) != 0 7948 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR 7949 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) 7950 && integer_zerop (TREE_OPERAND (t, 1))) 7951 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 7952 hstate, flags); 7953 /* Don't ICE on FE specific trees, or their arguments etc. 7954 during operand_equal_p hash verification. */ 7955 else if (!IS_EXPR_CODE_CLASS (tclass)) 7956 gcc_assert (flags & OEP_HASH_CHECK); 7957 else 7958 { 7959 unsigned int sflags = flags; 7960 7961 hstate.add_object (code); 7962 7963 switch (code) 7964 { 7965 case ADDR_EXPR: 7966 gcc_checking_assert (!(flags & OEP_ADDRESS_OF)); 7967 flags |= OEP_ADDRESS_OF; 7968 sflags = flags; 7969 break; 7970 7971 case INDIRECT_REF: 7972 case MEM_REF: 7973 case TARGET_MEM_REF: 7974 flags &= ~OEP_ADDRESS_OF; 7975 sflags = flags; 7976 break; 7977 7978 case ARRAY_REF: 7979 case ARRAY_RANGE_REF: 7980 case COMPONENT_REF: 7981 case BIT_FIELD_REF: 7982 sflags &= ~OEP_ADDRESS_OF; 7983 break; 7984 7985 case COND_EXPR: 7986 flags &= ~OEP_ADDRESS_OF; 7987 break; 7988 7989 case FMA_EXPR: 7990 case WIDEN_MULT_PLUS_EXPR: 7991 case WIDEN_MULT_MINUS_EXPR: 7992 { 7993 /* The multiplication operands are commutative. */ 7994 inchash::hash one, two; 7995 inchash::add_expr (TREE_OPERAND (t, 0), one, flags); 7996 inchash::add_expr (TREE_OPERAND (t, 1), two, flags); 7997 hstate.add_commutative (one, two); 7998 inchash::add_expr (TREE_OPERAND (t, 2), two, flags); 7999 return; 8000 } 8001 8002 case CALL_EXPR: 8003 if (CALL_EXPR_FN (t) == NULL_TREE) 8004 hstate.add_int (CALL_EXPR_IFN (t)); 8005 break; 8006 8007 case TARGET_EXPR: 8008 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT. 8009 Usually different TARGET_EXPRs just should use 8010 different temporaries in their slots. 
*/ 8011 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags); 8012 return; 8013 8014 default: 8015 break; 8016 } 8017 8018 /* Don't hash the type, that can lead to having nodes which 8019 compare equal according to operand_equal_p, but which 8020 have different hash codes. */ 8021 if (code == NON_LVALUE_EXPR) 8022 { 8023 /* Make sure to include signness in the hash computation. */ 8024 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t))); 8025 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags); 8026 } 8027 8028 else if (commutative_tree_code (code)) 8029 { 8030 /* It's a commutative expression. We want to hash it the same 8031 however it appears. We do this by first hashing both operands 8032 and then rehashing based on the order of their independent 8033 hashes. */ 8034 inchash::hash one, two; 8035 inchash::add_expr (TREE_OPERAND (t, 0), one, flags); 8036 inchash::add_expr (TREE_OPERAND (t, 1), two, flags); 8037 hstate.add_commutative (one, two); 8038 } 8039 else 8040 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i) 8041 inchash::add_expr (TREE_OPERAND (t, i), hstate, 8042 i == 0 ? flags : sflags); 8043 } 8044 return; 8045 } 8046 } 8047 8048 } 8049 8050 /* Constructors for pointer, array and function types. 8051 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are 8052 constructed by language-dependent code, not here.) */ 8053 8054 /* Construct, lay out and return the type of pointers to TO_TYPE with 8055 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can 8056 reference all of memory. If such a type has already been 8057 constructed, reuse it. */ 8058 8059 tree 8060 build_pointer_type_for_mode (tree to_type, machine_mode mode, 8061 bool can_alias_all) 8062 { 8063 tree t; 8064 bool could_alias = can_alias_all; 8065 8066 if (to_type == error_mark_node) 8067 return error_mark_node; 8068 8069 /* If the pointed-to type has the may_alias attribute set, force 8070 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */ 8071 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type))) 8072 can_alias_all = true; 8073 8074 /* In some cases, languages will have things that aren't a POINTER_TYPE 8075 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO. 8076 In that case, return that type without regard to the rest of our 8077 operands. 8078 8079 ??? This is a kludge, but consistent with the way this function has 8080 always operated and there doesn't seem to be a good way to avoid this 8081 at the moment. */ 8082 if (TYPE_POINTER_TO (to_type) != 0 8083 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE) 8084 return TYPE_POINTER_TO (to_type); 8085 8086 /* First, if we already have a type for pointers to TO_TYPE and it's 8087 the proper mode, use it. */ 8088 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t)) 8089 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all) 8090 return t; 8091 8092 t = make_node (POINTER_TYPE); 8093 8094 TREE_TYPE (t) = to_type; 8095 SET_TYPE_MODE (t, mode); 8096 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all; 8097 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type); 8098 TYPE_POINTER_TO (to_type) = t; 8099 8100 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */ 8101 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p) 8102 SET_TYPE_STRUCTURAL_EQUALITY (t); 8103 else if (TYPE_CANONICAL (to_type) != to_type || could_alias) 8104 TYPE_CANONICAL (t) 8105 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type), 8106 mode, false); 8107 8108 /* Lay out the type. 
This function has many callers that are concerned 8109 with expression-construction, and this simplifies them all. */ 8110 layout_type (t); 8111 8112 return t; 8113 } 8114 8115 /* By default build pointers in ptr_mode. */ 8116 8117 tree 8118 build_pointer_type (tree to_type) 8119 { 8120 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC 8121 : TYPE_ADDR_SPACE (to_type); 8122 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as); 8123 return build_pointer_type_for_mode (to_type, pointer_mode, false); 8124 } 8125 8126 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */ 8127 8128 tree 8129 build_reference_type_for_mode (tree to_type, machine_mode mode, 8130 bool can_alias_all) 8131 { 8132 tree t; 8133 bool could_alias = can_alias_all; 8134 8135 if (to_type == error_mark_node) 8136 return error_mark_node; 8137 8138 /* If the pointed-to type has the may_alias attribute set, force 8139 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */ 8140 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type))) 8141 can_alias_all = true; 8142 8143 /* In some cases, languages will have things that aren't a REFERENCE_TYPE 8144 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO. 8145 In that case, return that type without regard to the rest of our 8146 operands. 8147 8148 ??? This is a kludge, but consistent with the way this function has 8149 always operated and there doesn't seem to be a good way to avoid this 8150 at the moment. */ 8151 if (TYPE_REFERENCE_TO (to_type) != 0 8152 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE) 8153 return TYPE_REFERENCE_TO (to_type); 8154 8155 /* First, if we already have a type for pointers to TO_TYPE and it's 8156 the proper mode, use it. */ 8157 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t)) 8158 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all) 8159 return t; 8160 8161 t = make_node (REFERENCE_TYPE); 8162 8163 TREE_TYPE (t) = to_type; 8164 SET_TYPE_MODE (t, mode); 8165 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all; 8166 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type); 8167 TYPE_REFERENCE_TO (to_type) = t; 8168 8169 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */ 8170 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p) 8171 SET_TYPE_STRUCTURAL_EQUALITY (t); 8172 else if (TYPE_CANONICAL (to_type) != to_type || could_alias) 8173 TYPE_CANONICAL (t) 8174 = build_reference_type_for_mode (TYPE_CANONICAL (to_type), 8175 mode, false); 8176 8177 layout_type (t); 8178 8179 return t; 8180 } 8181 8182 8183 /* Build the node for the type of references-to-TO_TYPE by default 8184 in ptr_mode. */ 8185 8186 tree 8187 build_reference_type (tree to_type) 8188 { 8189 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC 8190 : TYPE_ADDR_SPACE (to_type); 8191 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as); 8192 return build_reference_type_for_mode (to_type, pointer_mode, false); 8193 } 8194 8195 #define MAX_INT_CACHED_PREC \ 8196 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64) 8197 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2]; 8198 8199 /* Builds a signed or unsigned integer type of precision PRECISION. 8200 Used for C bitfields whose precision does not match that of 8201 built-in target types. 
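   As a quick usage sketch (the variable name is illustrative only), the
   type of a 3-bit unsigned bit-field can be obtained with

     tree bf_type = build_nonstandard_integer_type (3, 1);

   which yields a 3-bit unsigned INTEGER_TYPE; results for small
   precisions are cached, so asking again for the same precision and
   signedness hands back the same node.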
*/ 8202 tree 8203 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision, 8204 int unsignedp) 8205 { 8206 tree itype, ret; 8207 8208 if (unsignedp) 8209 unsignedp = MAX_INT_CACHED_PREC + 1; 8210 8211 if (precision <= MAX_INT_CACHED_PREC) 8212 { 8213 itype = nonstandard_integer_type_cache[precision + unsignedp]; 8214 if (itype) 8215 return itype; 8216 } 8217 8218 itype = make_node (INTEGER_TYPE); 8219 TYPE_PRECISION (itype) = precision; 8220 8221 if (unsignedp) 8222 fixup_unsigned_type (itype); 8223 else 8224 fixup_signed_type (itype); 8225 8226 ret = itype; 8227 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype))) 8228 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype); 8229 if (precision <= MAX_INT_CACHED_PREC) 8230 nonstandard_integer_type_cache[precision + unsignedp] = ret; 8231 8232 return ret; 8233 } 8234 8235 #define MAX_BOOL_CACHED_PREC \ 8236 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64) 8237 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1]; 8238 8239 /* Builds a boolean type of precision PRECISION. 8240 Used for boolean vectors to choose proper vector element size. */ 8241 tree 8242 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision) 8243 { 8244 tree type; 8245 8246 if (precision <= MAX_BOOL_CACHED_PREC) 8247 { 8248 type = nonstandard_boolean_type_cache[precision]; 8249 if (type) 8250 return type; 8251 } 8252 8253 type = make_node (BOOLEAN_TYPE); 8254 TYPE_PRECISION (type) = precision; 8255 fixup_signed_type (type); 8256 8257 if (precision <= MAX_INT_CACHED_PREC) 8258 nonstandard_boolean_type_cache[precision] = type; 8259 8260 return type; 8261 } 8262 8263 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE 8264 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED 8265 is true, reuse such a type that has already been constructed. */ 8266 8267 static tree 8268 build_range_type_1 (tree type, tree lowval, tree highval, bool shared) 8269 { 8270 tree itype = make_node (INTEGER_TYPE); 8271 inchash::hash hstate; 8272 8273 TREE_TYPE (itype) = type; 8274 8275 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval); 8276 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL; 8277 8278 TYPE_PRECISION (itype) = TYPE_PRECISION (type); 8279 SET_TYPE_MODE (itype, TYPE_MODE (type)); 8280 TYPE_SIZE (itype) = TYPE_SIZE (type); 8281 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type); 8282 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type)); 8283 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type); 8284 8285 if (!shared) 8286 return itype; 8287 8288 if ((TYPE_MIN_VALUE (itype) 8289 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST) 8290 || (TYPE_MAX_VALUE (itype) 8291 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)) 8292 { 8293 /* Since we cannot reliably merge this type, we need to compare it using 8294 structural equality checks. */ 8295 SET_TYPE_STRUCTURAL_EQUALITY (itype); 8296 return itype; 8297 } 8298 8299 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate); 8300 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate); 8301 hstate.merge_hash (TYPE_HASH (type)); 8302 itype = type_hash_canon (hstate.end (), itype); 8303 8304 return itype; 8305 } 8306 8307 /* Wrapper around build_range_type_1 with SHARED set to true. */ 8308 8309 tree 8310 build_range_type (tree type, tree lowval, tree highval) 8311 { 8312 return build_range_type_1 (type, lowval, highval, true); 8313 } 8314 8315 /* Wrapper around build_range_type_1 with SHARED set to false. 
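   For illustration (a rough sketch with made-up bounds), a subrange
   1 .. 10 of the standard integer type could be created as

     tree lo  = build_int_cst (integer_type_node, 1);
     tree hi  = build_int_cst (integer_type_node, 10);
     tree rng = build_nonshared_range_type (integer_type_node, lo, hi);

   the difference from build_range_type being that the node built here is
   never entered into the type hash table, so every call yields a fresh
   INTEGER_TYPE.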
*/ 8316 8317 tree 8318 build_nonshared_range_type (tree type, tree lowval, tree highval) 8319 { 8320 return build_range_type_1 (type, lowval, highval, false); 8321 } 8322 8323 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE. 8324 MAXVAL should be the maximum value in the domain 8325 (one less than the length of the array). 8326 8327 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT. 8328 We don't enforce this limit, that is up to caller (e.g. language front end). 8329 The limit exists because the result is a signed type and we don't handle 8330 sizes that use more than one HOST_WIDE_INT. */ 8331 8332 tree 8333 build_index_type (tree maxval) 8334 { 8335 return build_range_type (sizetype, size_zero_node, maxval); 8336 } 8337 8338 /* Return true if the debug information for TYPE, a subtype, should be emitted 8339 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the 8340 high bound, respectively. Sometimes doing so unnecessarily obfuscates the 8341 debug info and doesn't reflect the source code. */ 8342 8343 bool 8344 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval) 8345 { 8346 tree base_type = TREE_TYPE (type), low, high; 8347 8348 /* Subrange types have a base type which is an integral type. */ 8349 if (!INTEGRAL_TYPE_P (base_type)) 8350 return false; 8351 8352 /* Get the real bounds of the subtype. */ 8353 if (lang_hooks.types.get_subrange_bounds) 8354 lang_hooks.types.get_subrange_bounds (type, &low, &high); 8355 else 8356 { 8357 low = TYPE_MIN_VALUE (type); 8358 high = TYPE_MAX_VALUE (type); 8359 } 8360 8361 /* If the type and its base type have the same representation and the same 8362 name, then the type is not a subrange but a copy of the base type. */ 8363 if ((TREE_CODE (base_type) == INTEGER_TYPE 8364 || TREE_CODE (base_type) == BOOLEAN_TYPE) 8365 && int_size_in_bytes (type) == int_size_in_bytes (base_type) 8366 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type)) 8367 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type)) 8368 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type)) 8369 return false; 8370 8371 if (lowval) 8372 *lowval = low; 8373 if (highval) 8374 *highval = high; 8375 return true; 8376 } 8377 8378 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE 8379 and number of elements specified by the range of values of INDEX_TYPE. 8380 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type. 8381 If SHARED is true, reuse such a type that has already been constructed. */ 8382 8383 static tree 8384 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage, 8385 bool shared) 8386 { 8387 tree t; 8388 8389 if (TREE_CODE (elt_type) == FUNCTION_TYPE) 8390 { 8391 error ("arrays of functions are not meaningful"); 8392 elt_type = integer_type_node; 8393 } 8394 8395 t = make_node (ARRAY_TYPE); 8396 TREE_TYPE (t) = elt_type; 8397 TYPE_DOMAIN (t) = index_type; 8398 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type); 8399 TYPE_TYPELESS_STORAGE (t) = typeless_storage; 8400 layout_type (t); 8401 8402 /* If the element type is incomplete at this point we get marked for 8403 structural equality. Do not record these types in the canonical 8404 type hashtable. 
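   For such arrays TYPE_CANONICAL simply stays unset, so later type
   comparisons fall back to structural equality rather than canonical
   type pointers.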
*/ 8405 if (TYPE_STRUCTURAL_EQUALITY_P (t)) 8406 return t; 8407 8408 if (shared) 8409 { 8410 inchash::hash hstate; 8411 hstate.add_object (TYPE_HASH (elt_type)); 8412 if (index_type) 8413 hstate.add_object (TYPE_HASH (index_type)); 8414 if (!AGGREGATE_TYPE_P (elt_type)) 8415 hstate.add_flag (TYPE_TYPELESS_STORAGE (t)); 8416 t = type_hash_canon (hstate.end (), t); 8417 } 8418 8419 if (TYPE_CANONICAL (t) == t) 8420 { 8421 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type) 8422 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)) 8423 || in_lto_p) 8424 SET_TYPE_STRUCTURAL_EQUALITY (t); 8425 else if (TYPE_CANONICAL (elt_type) != elt_type 8426 || (index_type && TYPE_CANONICAL (index_type) != index_type)) 8427 TYPE_CANONICAL (t) 8428 = build_array_type_1 (TYPE_CANONICAL (elt_type), 8429 index_type 8430 ? TYPE_CANONICAL (index_type) : NULL_TREE, 8431 typeless_storage, shared); 8432 } 8433 8434 return t; 8435 } 8436 8437 /* Wrapper around build_array_type_1 with SHARED set to true. */ 8438 8439 tree 8440 build_array_type (tree elt_type, tree index_type, bool typeless_storage) 8441 { 8442 return build_array_type_1 (elt_type, index_type, typeless_storage, true); 8443 } 8444 8445 /* Wrapper around build_array_type_1 with SHARED set to false. */ 8446 8447 tree 8448 build_nonshared_array_type (tree elt_type, tree index_type) 8449 { 8450 return build_array_type_1 (elt_type, index_type, false, false); 8451 } 8452 8453 /* Return a representation of ELT_TYPE[NELTS], using indices of type 8454 sizetype. */ 8455 8456 tree 8457 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts) 8458 { 8459 return build_array_type (elt_type, build_index_type (size_int (nelts - 1))); 8460 } 8461 8462 /* Recursively examines the array elements of TYPE, until a non-array 8463 element type is found. */ 8464 8465 tree 8466 strip_array_types (tree type) 8467 { 8468 while (TREE_CODE (type) == ARRAY_TYPE) 8469 type = TREE_TYPE (type); 8470 8471 return type; 8472 } 8473 8474 /* Computes the canonical argument types from the argument type list 8475 ARGTYPES. 8476 8477 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true 8478 on entry to this function, or if any of the ARGTYPES are 8479 structural. 8480 8481 Upon return, *ANY_NONCANONICAL_P will be true iff either it was 8482 true on entry to this function, or if any of the ARGTYPES are 8483 non-canonical. 8484 8485 Returns a canonical argument list, which may be ARGTYPES when the 8486 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is 8487 true) or would not differ from ARGTYPES. */ 8488 8489 static tree 8490 maybe_canonicalize_argtypes (tree argtypes, 8491 bool *any_structural_p, 8492 bool *any_noncanonical_p) 8493 { 8494 tree arg; 8495 bool any_noncanonical_argtypes_p = false; 8496 8497 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg)) 8498 { 8499 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node) 8500 /* Fail gracefully by stating that the type is structural. */ 8501 *any_structural_p = true; 8502 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg))) 8503 *any_structural_p = true; 8504 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg) 8505 || TREE_PURPOSE (arg)) 8506 /* If the argument has a default argument, we consider it 8507 non-canonical even though the type itself is canonical. 8508 That way, different variants of function and method types 8509 with default arguments will all point to the variant with 8510 no defaults as their canonical type. 
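   A minimal sketch of the effect (plain tree-building calls, with
   integer_zero_node standing in for some default argument):

     tree args = tree_cons (integer_zero_node, integer_type_node,
                            void_list_node);
     tree fn   = build_function_type (void_type_node, args);

   Here TYPE_CANONICAL (fn) ends up being the ordinary void (int)
   function type, whose argument list carries no TREE_PURPOSE.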
*/ 8511 any_noncanonical_argtypes_p = true; 8512 } 8513 8514 if (*any_structural_p) 8515 return argtypes; 8516 8517 if (any_noncanonical_argtypes_p) 8518 { 8519 /* Build the canonical list of argument types. */ 8520 tree canon_argtypes = NULL_TREE; 8521 bool is_void = false; 8522 8523 for (arg = argtypes; arg; arg = TREE_CHAIN (arg)) 8524 { 8525 if (arg == void_list_node) 8526 is_void = true; 8527 else 8528 canon_argtypes = tree_cons (NULL_TREE, 8529 TYPE_CANONICAL (TREE_VALUE (arg)), 8530 canon_argtypes); 8531 } 8532 8533 canon_argtypes = nreverse (canon_argtypes); 8534 if (is_void) 8535 canon_argtypes = chainon (canon_argtypes, void_list_node); 8536 8537 /* There is a non-canonical type. */ 8538 *any_noncanonical_p = true; 8539 return canon_argtypes; 8540 } 8541 8542 /* The canonical argument types are the same as ARGTYPES. */ 8543 return argtypes; 8544 } 8545 8546 /* Construct, lay out and return 8547 the type of functions returning type VALUE_TYPE 8548 given arguments of types ARG_TYPES. 8549 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs 8550 are data type nodes for the arguments of the function. 8551 If such a type has already been constructed, reuse it. */ 8552 8553 tree 8554 build_function_type (tree value_type, tree arg_types) 8555 { 8556 tree t; 8557 inchash::hash hstate; 8558 bool any_structural_p, any_noncanonical_p; 8559 tree canon_argtypes; 8560 8561 if (TREE_CODE (value_type) == FUNCTION_TYPE) 8562 { 8563 error ("function return type cannot be function"); 8564 value_type = integer_type_node; 8565 } 8566 8567 /* Make a node of the sort we want. */ 8568 t = make_node (FUNCTION_TYPE); 8569 TREE_TYPE (t) = value_type; 8570 TYPE_ARG_TYPES (t) = arg_types; 8571 8572 /* If we already have such a type, use the old one. */ 8573 hstate.add_object (TYPE_HASH (value_type)); 8574 type_hash_list (arg_types, hstate); 8575 t = type_hash_canon (hstate.end (), t); 8576 8577 /* Set up the canonical type. */ 8578 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type); 8579 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type; 8580 canon_argtypes = maybe_canonicalize_argtypes (arg_types, 8581 &any_structural_p, 8582 &any_noncanonical_p); 8583 if (any_structural_p) 8584 SET_TYPE_STRUCTURAL_EQUALITY (t); 8585 else if (any_noncanonical_p) 8586 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type), 8587 canon_argtypes); 8588 8589 if (!COMPLETE_TYPE_P (t)) 8590 layout_type (t); 8591 return t; 8592 } 8593 8594 /* Build a function type. The RETURN_TYPE is the type returned by the 8595 function. If VAARGS is set, no void_type_node is appended to the 8596 list. ARGP must be always be terminated be a NULL_TREE. */ 8597 8598 static tree 8599 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp) 8600 { 8601 tree t, args, last; 8602 8603 t = va_arg (argp, tree); 8604 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree)) 8605 args = tree_cons (NULL_TREE, t, args); 8606 8607 if (vaargs) 8608 { 8609 last = args; 8610 if (args != NULL_TREE) 8611 args = nreverse (args); 8612 gcc_assert (last != void_list_node); 8613 } 8614 else if (args == NULL_TREE) 8615 args = void_list_node; 8616 else 8617 { 8618 last = args; 8619 args = nreverse (args); 8620 TREE_CHAIN (last) = void_list_node; 8621 } 8622 args = build_function_type (return_type, args); 8623 8624 return args; 8625 } 8626 8627 /* Build a function type. The RETURN_TYPE is the type returned by the 8628 function. If additional arguments are provided, they are 8629 additional argument types. 
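   For example (an illustrative sketch, not a declaration anything here
   depends on), a strlen-like type taking a character pointer and
   returning size_t can be built with

     tree fntype = build_function_type_list (size_type_node,
                                             build_pointer_type (char_type_node),
                                             NULL_TREE);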
The list of argument types must always 8630 be terminated by NULL_TREE. */ 8631 8632 tree 8633 build_function_type_list (tree return_type, ...) 8634 { 8635 tree args; 8636 va_list p; 8637 8638 va_start (p, return_type); 8639 args = build_function_type_list_1 (false, return_type, p); 8640 va_end (p); 8641 return args; 8642 } 8643 8644 /* Build a variable argument function type. The RETURN_TYPE is the 8645 type returned by the function. If additional arguments are provided, 8646 they are additional argument types. The list of argument types must 8647 always be terminated by NULL_TREE. */ 8648 8649 tree 8650 build_varargs_function_type_list (tree return_type, ...) 8651 { 8652 tree args; 8653 va_list p; 8654 8655 va_start (p, return_type); 8656 args = build_function_type_list_1 (true, return_type, p); 8657 va_end (p); 8658 8659 return args; 8660 } 8661 8662 /* Build a function type. RETURN_TYPE is the type returned by the 8663 function; VAARGS indicates whether the function takes varargs. The 8664 function takes N named arguments, the types of which are provided in 8665 ARG_TYPES. */ 8666 8667 static tree 8668 build_function_type_array_1 (bool vaargs, tree return_type, int n, 8669 tree *arg_types) 8670 { 8671 int i; 8672 tree t = vaargs ? NULL_TREE : void_list_node; 8673 8674 for (i = n - 1; i >= 0; i--) 8675 t = tree_cons (NULL_TREE, arg_types[i], t); 8676 8677 return build_function_type (return_type, t); 8678 } 8679 8680 /* Build a function type. RETURN_TYPE is the type returned by the 8681 function. The function takes N named arguments, the types of which 8682 are provided in ARG_TYPES. */ 8683 8684 tree 8685 build_function_type_array (tree return_type, int n, tree *arg_types) 8686 { 8687 return build_function_type_array_1 (false, return_type, n, arg_types); 8688 } 8689 8690 /* Build a variable argument function type. RETURN_TYPE is the type 8691 returned by the function. The function takes N named arguments, the 8692 types of which are provided in ARG_TYPES. */ 8693 8694 tree 8695 build_varargs_function_type_array (tree return_type, int n, tree *arg_types) 8696 { 8697 return build_function_type_array_1 (true, return_type, n, arg_types); 8698 } 8699 8700 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE) 8701 and ARGTYPES (a TREE_LIST) are the return type and arguments types 8702 for the method. An implicit additional parameter (of type 8703 pointer-to-BASETYPE) is added to the ARGTYPES. */ 8704 8705 tree 8706 build_method_type_directly (tree basetype, 8707 tree rettype, 8708 tree argtypes) 8709 { 8710 tree t; 8711 tree ptype; 8712 inchash::hash hstate; 8713 bool any_structural_p, any_noncanonical_p; 8714 tree canon_argtypes; 8715 8716 /* Make a node of the sort we want. */ 8717 t = make_node (METHOD_TYPE); 8718 8719 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype); 8720 TREE_TYPE (t) = rettype; 8721 ptype = build_pointer_type (basetype); 8722 8723 /* The actual arglist for this function includes a "hidden" argument 8724 which is "this". Put it into the list of argument types. */ 8725 argtypes = tree_cons (NULL_TREE, ptype, argtypes); 8726 TYPE_ARG_TYPES (t) = argtypes; 8727 8728 /* If we already have such a type, use the old one. */ 8729 hstate.add_object (TYPE_HASH (basetype)); 8730 hstate.add_object (TYPE_HASH (rettype)); 8731 type_hash_list (argtypes, hstate); 8732 t = type_hash_canon (hstate.end (), t); 8733 8734 /* Set up the canonical type. 
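   As with build_function_type above, this mirrors the canonical base,
   return and argument types, falling back to structural equality when
   any of them requires it.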
*/ 8735 any_structural_p 8736 = (TYPE_STRUCTURAL_EQUALITY_P (basetype) 8737 || TYPE_STRUCTURAL_EQUALITY_P (rettype)); 8738 any_noncanonical_p 8739 = (TYPE_CANONICAL (basetype) != basetype 8740 || TYPE_CANONICAL (rettype) != rettype); 8741 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes), 8742 &any_structural_p, 8743 &any_noncanonical_p); 8744 if (any_structural_p) 8745 SET_TYPE_STRUCTURAL_EQUALITY (t); 8746 else if (any_noncanonical_p) 8747 TYPE_CANONICAL (t) 8748 = build_method_type_directly (TYPE_CANONICAL (basetype), 8749 TYPE_CANONICAL (rettype), 8750 canon_argtypes); 8751 if (!COMPLETE_TYPE_P (t)) 8752 layout_type (t); 8753 8754 return t; 8755 } 8756 8757 /* Construct, lay out and return the type of methods belonging to class 8758 BASETYPE and whose arguments and values are described by TYPE. 8759 If that type exists already, reuse it. 8760 TYPE must be a FUNCTION_TYPE node. */ 8761 8762 tree 8763 build_method_type (tree basetype, tree type) 8764 { 8765 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE); 8766 8767 return build_method_type_directly (basetype, 8768 TREE_TYPE (type), 8769 TYPE_ARG_TYPES (type)); 8770 } 8771 8772 /* Construct, lay out and return the type of offsets to a value 8773 of type TYPE, within an object of type BASETYPE. 8774 If a suitable offset type exists already, reuse it. */ 8775 8776 tree 8777 build_offset_type (tree basetype, tree type) 8778 { 8779 tree t; 8780 inchash::hash hstate; 8781 8782 /* Make a node of the sort we want. */ 8783 t = make_node (OFFSET_TYPE); 8784 8785 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype); 8786 TREE_TYPE (t) = type; 8787 8788 /* If we already have such a type, use the old one. */ 8789 hstate.add_object (TYPE_HASH (basetype)); 8790 hstate.add_object (TYPE_HASH (type)); 8791 t = type_hash_canon (hstate.end (), t); 8792 8793 if (!COMPLETE_TYPE_P (t)) 8794 layout_type (t); 8795 8796 if (TYPE_CANONICAL (t) == t) 8797 { 8798 if (TYPE_STRUCTURAL_EQUALITY_P (basetype) 8799 || TYPE_STRUCTURAL_EQUALITY_P (type)) 8800 SET_TYPE_STRUCTURAL_EQUALITY (t); 8801 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype 8802 || TYPE_CANONICAL (type) != type) 8803 TYPE_CANONICAL (t) 8804 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)), 8805 TYPE_CANONICAL (type)); 8806 } 8807 8808 return t; 8809 } 8810 8811 /* Create a complex type whose components are COMPONENT_TYPE. 8812 8813 If NAMED is true, the type is given a TYPE_NAME. We do not always 8814 do so because this creates a DECL node and thus make the DECL_UIDs 8815 dependent on the type canonicalization hashtable, which is GC-ed, 8816 so the DECL_UIDs would not be stable wrt garbage collection. */ 8817 8818 tree 8819 build_complex_type (tree component_type, bool named) 8820 { 8821 tree t; 8822 inchash::hash hstate; 8823 8824 gcc_assert (INTEGRAL_TYPE_P (component_type) 8825 || SCALAR_FLOAT_TYPE_P (component_type) 8826 || FIXED_POINT_TYPE_P (component_type)); 8827 8828 /* Make a node of the sort we want. */ 8829 t = make_node (COMPLEX_TYPE); 8830 8831 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type); 8832 8833 /* If we already have such a type, use the old one. 
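   The hash is keyed on the component type alone, so for instance a later
   build_complex_type (double_type_node, false) call is expected to land
   on the node already recorded for complex double instead of allocating
   a new COMPLEX_TYPE.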
*/ 8834 hstate.add_object (TYPE_HASH (component_type)); 8835 t = type_hash_canon (hstate.end (), t); 8836 8837 if (!COMPLETE_TYPE_P (t)) 8838 layout_type (t); 8839 8840 if (TYPE_CANONICAL (t) == t) 8841 { 8842 if (TYPE_STRUCTURAL_EQUALITY_P (component_type)) 8843 SET_TYPE_STRUCTURAL_EQUALITY (t); 8844 else if (TYPE_CANONICAL (component_type) != component_type) 8845 TYPE_CANONICAL (t) 8846 = build_complex_type (TYPE_CANONICAL (component_type), named); 8847 } 8848 8849 /* We need to create a name, since complex is a fundamental type. */ 8850 if (!TYPE_NAME (t) && named) 8851 { 8852 const char *name; 8853 if (component_type == char_type_node) 8854 name = "complex char"; 8855 else if (component_type == signed_char_type_node) 8856 name = "complex signed char"; 8857 else if (component_type == unsigned_char_type_node) 8858 name = "complex unsigned char"; 8859 else if (component_type == short_integer_type_node) 8860 name = "complex short int"; 8861 else if (component_type == short_unsigned_type_node) 8862 name = "complex short unsigned int"; 8863 else if (component_type == integer_type_node) 8864 name = "complex int"; 8865 else if (component_type == unsigned_type_node) 8866 name = "complex unsigned int"; 8867 else if (component_type == long_integer_type_node) 8868 name = "complex long int"; 8869 else if (component_type == long_unsigned_type_node) 8870 name = "complex long unsigned int"; 8871 else if (component_type == long_long_integer_type_node) 8872 name = "complex long long int"; 8873 else if (component_type == long_long_unsigned_type_node) 8874 name = "complex long long unsigned int"; 8875 else 8876 name = 0; 8877 8878 if (name != 0) 8879 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL, 8880 get_identifier (name), t); 8881 } 8882 8883 return build_qualified_type (t, TYPE_QUALS (component_type)); 8884 } 8885 8886 /* If TYPE is a real or complex floating-point type and the target 8887 does not directly support arithmetic on TYPE then return the wider 8888 type to be used for arithmetic on TYPE. Otherwise, return 8889 NULL_TREE. */ 8890 8891 tree 8892 excess_precision_type (tree type) 8893 { 8894 /* The target can give two different responses to the question of 8895 which excess precision mode it would like depending on whether we 8896 are in -fexcess-precision=standard or -fexcess-precision=fast. */ 8897 8898 enum excess_precision_type requested_type 8899 = (flag_excess_precision == EXCESS_PRECISION_FAST 8900 ? EXCESS_PRECISION_TYPE_FAST 8901 : EXCESS_PRECISION_TYPE_STANDARD); 8902 8903 enum flt_eval_method target_flt_eval_method 8904 = targetm.c.excess_precision (requested_type); 8905 8906 /* The target should not ask for unpredictable float evaluation (though 8907 it might advertise that implicitly the evaluation is unpredictable, 8908 but we don't care about that here, it will have been reported 8909 elsewhere). If it does ask for unpredictable evaluation, we have 8910 nothing to do here. */ 8911 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE); 8912 8913 /* Nothing to do. The target has asked for all types we know about 8914 to be computed with their native precision and range. */ 8915 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16) 8916 return NULL_TREE; 8917 8918 /* The target will promote this type in a target-dependent way, so excess 8919 precision ought to leave it alone. */ 8920 if (targetm.promoted_type (type) != NULL_TREE) 8921 return NULL_TREE; 8922 8923 machine_mode float16_type_mode = (float16_type_node 8924 ? 
TYPE_MODE (float16_type_node) 8925 : VOIDmode); 8926 machine_mode float_type_mode = TYPE_MODE (float_type_node); 8927 machine_mode double_type_mode = TYPE_MODE (double_type_node); 8928 8929 switch (TREE_CODE (type)) 8930 { 8931 case REAL_TYPE: 8932 { 8933 machine_mode type_mode = TYPE_MODE (type); 8934 switch (target_flt_eval_method) 8935 { 8936 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT: 8937 if (type_mode == float16_type_mode) 8938 return float_type_node; 8939 break; 8940 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE: 8941 if (type_mode == float16_type_mode 8942 || type_mode == float_type_mode) 8943 return double_type_node; 8944 break; 8945 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE: 8946 if (type_mode == float16_type_mode 8947 || type_mode == float_type_mode 8948 || type_mode == double_type_mode) 8949 return long_double_type_node; 8950 break; 8951 default: 8952 gcc_unreachable (); 8953 } 8954 break; 8955 } 8956 case COMPLEX_TYPE: 8957 { 8958 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE) 8959 return NULL_TREE; 8960 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type)); 8961 switch (target_flt_eval_method) 8962 { 8963 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT: 8964 if (type_mode == float16_type_mode) 8965 return complex_float_type_node; 8966 break; 8967 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE: 8968 if (type_mode == float16_type_mode 8969 || type_mode == float_type_mode) 8970 return complex_double_type_node; 8971 break; 8972 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE: 8973 if (type_mode == float16_type_mode 8974 || type_mode == float_type_mode 8975 || type_mode == double_type_mode) 8976 return complex_long_double_type_node; 8977 break; 8978 default: 8979 gcc_unreachable (); 8980 } 8981 break; 8982 } 8983 default: 8984 break; 8985 } 8986 8987 return NULL_TREE; 8988 } 8989 8990 /* Return OP, stripped of any conversions to wider types as much as is safe. 8991 Converting the value back to OP's type makes a value equivalent to OP. 8992 8993 If FOR_TYPE is nonzero, we return a value which, if converted to 8994 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE. 8995 8996 OP must have integer, real or enumeral type. Pointers are not allowed! 8997 8998 There are some cases where the obvious value we could return 8999 would regenerate to OP if converted to OP's type, 9000 but would not extend like OP to wider types. 9001 If FOR_TYPE indicates such extension is contemplated, we eschew such values. 9002 For example, if OP is (unsigned short)(signed char)-1, 9003 we avoid returning (signed char)-1 if FOR_TYPE is int, 9004 even though extending that to an unsigned short would regenerate OP, 9005 since the result of extending (signed char)-1 to (int) 9006 is different from (int) OP. */ 9007 9008 tree 9009 get_unwidened (tree op, tree for_type) 9010 { 9011 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */ 9012 tree type = TREE_TYPE (op); 9013 unsigned final_prec 9014 = TYPE_PRECISION (for_type != 0 ? for_type : type); 9015 int uns 9016 = (for_type != 0 && for_type != type 9017 && final_prec > TYPE_PRECISION (type) 9018 && TYPE_UNSIGNED (type)); 9019 tree win = op; 9020 9021 while (CONVERT_EXPR_P (op)) 9022 { 9023 int bitschange; 9024 9025 /* TYPE_PRECISION on vector types has different meaning 9026 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions, 9027 so avoid them here. 
*/ 9028 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE) 9029 break; 9030 9031 bitschange = TYPE_PRECISION (TREE_TYPE (op)) 9032 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))); 9033 9034 /* Truncations are many-one so cannot be removed. 9035 Unless we are later going to truncate down even farther. */ 9036 if (bitschange < 0 9037 && final_prec > TYPE_PRECISION (TREE_TYPE (op))) 9038 break; 9039 9040 /* See what's inside this conversion. If we decide to strip it, 9041 we will set WIN. */ 9042 op = TREE_OPERAND (op, 0); 9043 9044 /* If we have not stripped any zero-extensions (uns is 0), 9045 we can strip any kind of extension. 9046 If we have previously stripped a zero-extension, 9047 only zero-extensions can safely be stripped. 9048 Any extension can be stripped if the bits it would produce 9049 are all going to be discarded later by truncating to FOR_TYPE. */ 9050 9051 if (bitschange > 0) 9052 { 9053 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op))) 9054 win = op; 9055 /* TYPE_UNSIGNED says whether this is a zero-extension. 9056 Let's avoid computing it if it does not affect WIN 9057 and if UNS will not be needed again. */ 9058 if ((uns 9059 || CONVERT_EXPR_P (op)) 9060 && TYPE_UNSIGNED (TREE_TYPE (op))) 9061 { 9062 uns = 1; 9063 win = op; 9064 } 9065 } 9066 } 9067 9068 /* If we finally reach a constant see if it fits in sth smaller and 9069 in that case convert it. */ 9070 if (TREE_CODE (win) == INTEGER_CST) 9071 { 9072 tree wtype = TREE_TYPE (win); 9073 unsigned prec = wi::min_precision (win, TYPE_SIGN (wtype)); 9074 if (for_type) 9075 prec = MAX (prec, final_prec); 9076 if (prec < TYPE_PRECISION (wtype)) 9077 { 9078 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype)); 9079 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype)) 9080 win = fold_convert (t, win); 9081 } 9082 } 9083 9084 return win; 9085 } 9086 9087 /* Return OP or a simpler expression for a narrower value 9088 which can be sign-extended or zero-extended to give back OP. 9089 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended 9090 or 0 if the value should be sign-extended. */ 9091 9092 tree 9093 get_narrower (tree op, int *unsignedp_ptr) 9094 { 9095 int uns = 0; 9096 int first = 1; 9097 tree win = op; 9098 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op)); 9099 9100 while (TREE_CODE (op) == NOP_EXPR) 9101 { 9102 int bitschange 9103 = (TYPE_PRECISION (TREE_TYPE (op)) 9104 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)))); 9105 9106 /* Truncations are many-one so cannot be removed. */ 9107 if (bitschange < 0) 9108 break; 9109 9110 /* See what's inside this conversion. If we decide to strip it, 9111 we will set WIN. */ 9112 9113 if (bitschange > 0) 9114 { 9115 op = TREE_OPERAND (op, 0); 9116 /* An extension: the outermost one can be stripped, 9117 but remember whether it is zero or sign extension. */ 9118 if (first) 9119 uns = TYPE_UNSIGNED (TREE_TYPE (op)); 9120 /* Otherwise, if a sign extension has been stripped, 9121 only sign extensions can now be stripped; 9122 if a zero extension has been stripped, only zero-extensions. */ 9123 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op))) 9124 break; 9125 first = 0; 9126 } 9127 else /* bitschange == 0 */ 9128 { 9129 /* A change in nominal type can always be stripped, but we must 9130 preserve the unsignedness. 
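   (For instance, a NOP_EXPR converting between int and unsigned int of
   the same precision is stripped here.)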
*/ 9131 if (first) 9132 uns = TYPE_UNSIGNED (TREE_TYPE (op)); 9133 first = 0; 9134 op = TREE_OPERAND (op, 0); 9135 /* Keep trying to narrow, but don't assign op to win if it 9136 would turn an integral type into something else. */ 9137 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p) 9138 continue; 9139 } 9140 9141 win = op; 9142 } 9143 9144 if (TREE_CODE (op) == COMPONENT_REF 9145 /* Since type_for_size always gives an integer type. */ 9146 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE 9147 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE 9148 /* Ensure field is laid out already. */ 9149 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0 9150 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1)))) 9151 { 9152 unsigned HOST_WIDE_INT innerprec 9153 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1))); 9154 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1)) 9155 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1)))); 9156 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp); 9157 9158 /* We can get this structure field in a narrower type that fits it, 9159 but the resulting extension to its nominal type (a fullword type) 9160 must satisfy the same conditions as for other extensions. 9161 9162 Do this only for fields that are aligned (not bit-fields), 9163 because when bit-field insns will be used there is no 9164 advantage in doing this. */ 9165 9166 if (innerprec < TYPE_PRECISION (TREE_TYPE (op)) 9167 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1)) 9168 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1))) 9169 && type != 0) 9170 { 9171 if (first) 9172 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1)); 9173 win = fold_convert (type, op); 9174 } 9175 } 9176 9177 *unsignedp_ptr = uns; 9178 return win; 9179 } 9180 9181 /* Return true if integer constant C has a value that is permissible 9182 for TYPE, an integral type. */ 9183 9184 bool 9185 int_fits_type_p (const_tree c, const_tree type) 9186 { 9187 tree type_low_bound, type_high_bound; 9188 bool ok_for_low_bound, ok_for_high_bound; 9189 signop sgn_c = TYPE_SIGN (TREE_TYPE (c)); 9190 9191 /* Non-standard boolean types can have arbitrary precision but various 9192 transformations assume that they can only take values 0 and +/-1. */ 9193 if (TREE_CODE (type) == BOOLEAN_TYPE) 9194 return wi::fits_to_boolean_p (c, type); 9195 9196 retry: 9197 type_low_bound = TYPE_MIN_VALUE (type); 9198 type_high_bound = TYPE_MAX_VALUE (type); 9199 9200 /* If at least one bound of the type is a constant integer, we can check 9201 ourselves and maybe make a decision. If no such decision is possible, but 9202 this type is a subtype, try checking against that. Otherwise, use 9203 fits_to_tree_p, which checks against the precision. 9204 9205 Compute the status for each possibly constant bound, and return if we see 9206 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1 9207 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1 9208 for "constant known to fit". */ 9209 9210 /* Check if c >= type_low_bound. */ 9211 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST) 9212 { 9213 if (tree_int_cst_lt (c, type_low_bound)) 9214 return false; 9215 ok_for_low_bound = true; 9216 } 9217 else 9218 ok_for_low_bound = false; 9219 9220 /* Check if c <= type_high_bound. 
*/ 9221 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST) 9222 { 9223 if (tree_int_cst_lt (type_high_bound, c)) 9224 return false; 9225 ok_for_high_bound = true; 9226 } 9227 else 9228 ok_for_high_bound = false; 9229 9230 /* If the constant fits both bounds, the result is known. */ 9231 if (ok_for_low_bound && ok_for_high_bound) 9232 return true; 9233 9234 /* Perform some generic filtering which may allow making a decision 9235 even if the bounds are not constant. First, negative integers 9236 never fit in unsigned types, */ 9237 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c)) 9238 return false; 9239 9240 /* Second, narrower types always fit in wider ones. */ 9241 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c))) 9242 return true; 9243 9244 /* Third, unsigned integers with top bit set never fit signed types. */ 9245 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED) 9246 { 9247 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1; 9248 if (prec < TYPE_PRECISION (TREE_TYPE (c))) 9249 { 9250 /* When a tree_cst is converted to a wide-int, the precision 9251 is taken from the type. However, if the precision of the 9252 mode underneath the type is smaller than that, it is 9253 possible that the value will not fit. The test below 9254 fails if any bit is set between the sign bit of the 9255 underlying mode and the top bit of the type. */ 9256 if (wi::ne_p (wi::zext (c, prec - 1), c)) 9257 return false; 9258 } 9259 else if (wi::neg_p (c)) 9260 return false; 9261 } 9262 9263 /* If we haven't been able to decide at this point, there nothing more we 9264 can check ourselves here. Look at the base type if we have one and it 9265 has the same precision. */ 9266 if (TREE_CODE (type) == INTEGER_TYPE 9267 && TREE_TYPE (type) != 0 9268 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type))) 9269 { 9270 type = TREE_TYPE (type); 9271 goto retry; 9272 } 9273 9274 /* Or to fits_to_tree_p, if nothing else. */ 9275 return wi::fits_to_tree_p (c, type); 9276 } 9277 9278 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant 9279 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be 9280 represented (assuming two's-complement arithmetic) within the bit 9281 precision of the type are returned instead. */ 9282 9283 void 9284 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max) 9285 { 9286 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type) 9287 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST) 9288 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type)); 9289 else 9290 { 9291 if (TYPE_UNSIGNED (type)) 9292 mpz_set_ui (min, 0); 9293 else 9294 { 9295 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED); 9296 wi::to_mpz (mn, min, SIGNED); 9297 } 9298 } 9299 9300 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type) 9301 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST) 9302 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type)); 9303 else 9304 { 9305 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type)); 9306 wi::to_mpz (mn, max, TYPE_SIGN (type)); 9307 } 9308 } 9309 9310 /* Return true if VAR is an automatic variable defined in function FN. */ 9311 9312 bool 9313 auto_var_in_fn_p (const_tree var, const_tree fn) 9314 { 9315 return (DECL_P (var) && DECL_CONTEXT (var) == fn 9316 && ((((VAR_P (var) && ! DECL_EXTERNAL (var)) 9317 || TREE_CODE (var) == PARM_DECL) 9318 && ! 
TREE_STATIC (var)) 9319 || TREE_CODE (var) == LABEL_DECL 9320 || TREE_CODE (var) == RESULT_DECL)); 9321 } 9322 9323 /* Subprogram of following function. Called by walk_tree. 9324 9325 Return *TP if it is an automatic variable or parameter of the 9326 function passed in as DATA. */ 9327 9328 static tree 9329 find_var_from_fn (tree *tp, int *walk_subtrees, void *data) 9330 { 9331 tree fn = (tree) data; 9332 9333 if (TYPE_P (*tp)) 9334 *walk_subtrees = 0; 9335 9336 else if (DECL_P (*tp) 9337 && auto_var_in_fn_p (*tp, fn)) 9338 return *tp; 9339 9340 return NULL_TREE; 9341 } 9342 9343 /* Returns true if T is, contains, or refers to a type with variable 9344 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the 9345 arguments, but not the return type. If FN is nonzero, only return 9346 true if a modifier of the type or position of FN is a variable or 9347 parameter inside FN. 9348 9349 This concept is more general than that of C99 'variably modified types': 9350 in C99, a struct type is never variably modified because a VLA may not 9351 appear as a structure member. However, in GNU C code like: 9352 9353 struct S { int i[f()]; }; 9354 9355 is valid, and other languages may define similar constructs. */ 9356 9357 bool 9358 variably_modified_type_p (tree type, tree fn) 9359 { 9360 tree t; 9361 9362 /* Test if T is either variable (if FN is zero) or an expression containing 9363 a variable in FN. If TYPE isn't gimplified, return true also if 9364 gimplify_one_sizepos would gimplify the expression into a local 9365 variable. */ 9366 #define RETURN_TRUE_IF_VAR(T) \ 9367 do { tree _t = (T); \ 9368 if (_t != NULL_TREE \ 9369 && _t != error_mark_node \ 9370 && TREE_CODE (_t) != INTEGER_CST \ 9371 && TREE_CODE (_t) != PLACEHOLDER_EXPR \ 9372 && (!fn \ 9373 || (!TYPE_SIZES_GIMPLIFIED (type) \ 9374 && !is_gimple_sizepos (_t)) \ 9375 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \ 9376 return true; } while (0) 9377 9378 if (type == error_mark_node) 9379 return false; 9380 9381 /* If TYPE itself has variable size, it is variably modified. */ 9382 RETURN_TRUE_IF_VAR (TYPE_SIZE (type)); 9383 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type)); 9384 9385 switch (TREE_CODE (type)) 9386 { 9387 case POINTER_TYPE: 9388 case REFERENCE_TYPE: 9389 case VECTOR_TYPE: 9390 if (variably_modified_type_p (TREE_TYPE (type), fn)) 9391 return true; 9392 break; 9393 9394 case FUNCTION_TYPE: 9395 case METHOD_TYPE: 9396 /* If TYPE is a function type, it is variably modified if the 9397 return type is variably modified. */ 9398 if (variably_modified_type_p (TREE_TYPE (type), fn)) 9399 return true; 9400 break; 9401 9402 case INTEGER_TYPE: 9403 case REAL_TYPE: 9404 case FIXED_POINT_TYPE: 9405 case ENUMERAL_TYPE: 9406 case BOOLEAN_TYPE: 9407 /* Scalar types are variably modified if their end points 9408 aren't constant. */ 9409 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type)); 9410 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type)); 9411 break; 9412 9413 case RECORD_TYPE: 9414 case UNION_TYPE: 9415 case QUAL_UNION_TYPE: 9416 /* We can't see if any of the fields are variably-modified by the 9417 definition we normally use, since that would produce infinite 9418 recursion via pointers. */ 9419 /* This is variably modified if some field's type is. 
*/ 9420 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t)) 9421 if (TREE_CODE (t) == FIELD_DECL) 9422 { 9423 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t)); 9424 RETURN_TRUE_IF_VAR (DECL_SIZE (t)); 9425 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t)); 9426 9427 if (TREE_CODE (type) == QUAL_UNION_TYPE) 9428 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t)); 9429 } 9430 break; 9431 9432 case ARRAY_TYPE: 9433 /* Do not call ourselves to avoid infinite recursion. This is 9434 variably modified if the element type is. */ 9435 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type))); 9436 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type))); 9437 break; 9438 9439 default: 9440 break; 9441 } 9442 9443 /* The current language may have other cases to check, but in general, 9444 all other types are not variably modified. */ 9445 return lang_hooks.tree_inlining.var_mod_type_p (type, fn); 9446 9447 #undef RETURN_TRUE_IF_VAR 9448 } 9449 9450 /* Given a DECL or TYPE, return the scope in which it was declared, or 9451 NULL_TREE if there is no containing scope. */ 9452 9453 tree 9454 get_containing_scope (const_tree t) 9455 { 9456 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t)); 9457 } 9458 9459 /* Return the innermost context enclosing DECL that is 9460 a FUNCTION_DECL, or zero if none. */ 9461 9462 tree 9463 decl_function_context (const_tree decl) 9464 { 9465 tree context; 9466 9467 if (TREE_CODE (decl) == ERROR_MARK) 9468 return 0; 9469 9470 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable 9471 where we look up the function at runtime. Such functions always take 9472 a first argument of type 'pointer to real context'. 9473 9474 C++ should really be fixed to use DECL_CONTEXT for the real context, 9475 and use something else for the "virtual context". */ 9476 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl)) 9477 context 9478 = TYPE_MAIN_VARIANT 9479 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))))); 9480 else 9481 context = DECL_CONTEXT (decl); 9482 9483 while (context && TREE_CODE (context) != FUNCTION_DECL) 9484 { 9485 if (TREE_CODE (context) == BLOCK) 9486 context = BLOCK_SUPERCONTEXT (context); 9487 else 9488 context = get_containing_scope (context); 9489 } 9490 9491 return context; 9492 } 9493 9494 /* Return the innermost context enclosing DECL that is 9495 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none. 9496 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */ 9497 9498 tree 9499 decl_type_context (const_tree decl) 9500 { 9501 tree context = DECL_CONTEXT (decl); 9502 9503 while (context) 9504 switch (TREE_CODE (context)) 9505 { 9506 case NAMESPACE_DECL: 9507 case TRANSLATION_UNIT_DECL: 9508 return NULL_TREE; 9509 9510 case RECORD_TYPE: 9511 case UNION_TYPE: 9512 case QUAL_UNION_TYPE: 9513 return context; 9514 9515 case TYPE_DECL: 9516 case FUNCTION_DECL: 9517 context = DECL_CONTEXT (context); 9518 break; 9519 9520 case BLOCK: 9521 context = BLOCK_SUPERCONTEXT (context); 9522 break; 9523 9524 default: 9525 gcc_unreachable (); 9526 } 9527 9528 return NULL_TREE; 9529 } 9530 9531 /* CALL is a CALL_EXPR. Return the declaration for the function 9532 called, or NULL_TREE if the called function cannot be 9533 determined. */ 9534 9535 tree 9536 get_callee_fndecl (const_tree call) 9537 { 9538 tree addr; 9539 9540 if (call == error_mark_node) 9541 return error_mark_node; 9542 9543 /* It's invalid to call this function with anything but a 9544 CALL_EXPR. 
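   A typical valid use is the rough sketch below, where EXP stands for
   some CALL_EXPR the caller is inspecting and handle_builtin_call is a
   placeholder for whatever the caller does next:

     tree fndecl = get_callee_fndecl (exp);
     if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
       handle_builtin_call (fndecl);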
*/ 9545 gcc_assert (TREE_CODE (call) == CALL_EXPR); 9546 9547 /* The first operand to the CALL is the address of the function 9548 called. */ 9549 addr = CALL_EXPR_FN (call); 9550 9551 /* If there is no function, return early. */ 9552 if (addr == NULL_TREE) 9553 return NULL_TREE; 9554 9555 STRIP_NOPS (addr); 9556 9557 /* If this is a readonly function pointer, extract its initial value. */ 9558 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL 9559 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr) 9560 && DECL_INITIAL (addr)) 9561 addr = DECL_INITIAL (addr); 9562 9563 /* If the address is just `&f' for some function `f', then we know 9564 that `f' is being called. */ 9565 if (TREE_CODE (addr) == ADDR_EXPR 9566 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL) 9567 return TREE_OPERAND (addr, 0); 9568 9569 /* We couldn't figure out what was being called. */ 9570 return NULL_TREE; 9571 } 9572 9573 /* If CALL_EXPR CALL calls a normal built-in function or an internal function, 9574 return the associated function code, otherwise return CFN_LAST. */ 9575 9576 combined_fn 9577 get_call_combined_fn (const_tree call) 9578 { 9579 /* It's invalid to call this function with anything but a CALL_EXPR. */ 9580 gcc_assert (TREE_CODE (call) == CALL_EXPR); 9581 9582 if (!CALL_EXPR_FN (call)) 9583 return as_combined_fn (CALL_EXPR_IFN (call)); 9584 9585 tree fndecl = get_callee_fndecl (call); 9586 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 9587 return as_combined_fn (DECL_FUNCTION_CODE (fndecl)); 9588 9589 return CFN_LAST; 9590 } 9591 9592 #define TREE_MEM_USAGE_SPACES 40 9593 9594 /* Print debugging information about tree nodes generated during the compile, 9595 and any language-specific information. */ 9596 9597 void 9598 dump_tree_statistics (void) 9599 { 9600 if (GATHER_STATISTICS) 9601 { 9602 int i; 9603 int total_nodes, total_bytes; 9604 fprintf (stderr, "\nKind Nodes Bytes\n"); 9605 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9606 total_nodes = total_bytes = 0; 9607 for (i = 0; i < (int) all_kinds; i++) 9608 { 9609 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i], 9610 tree_node_counts[i], tree_node_sizes[i]); 9611 total_nodes += tree_node_counts[i]; 9612 total_bytes += tree_node_sizes[i]; 9613 } 9614 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9615 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes); 9616 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9617 fprintf (stderr, "Code Nodes\n"); 9618 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9619 for (i = 0; i < (int) MAX_TREE_CODES; i++) 9620 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i), 9621 tree_code_counts[i]); 9622 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9623 fprintf (stderr, "\n"); 9624 ssanames_print_statistics (); 9625 fprintf (stderr, "\n"); 9626 phinodes_print_statistics (); 9627 fprintf (stderr, "\n"); 9628 } 9629 else 9630 fprintf (stderr, "(No per-node statistics)\n"); 9631 9632 print_type_hash_statistics (); 9633 print_debug_expr_statistics (); 9634 print_value_expr_statistics (); 9635 lang_hooks.print_statistics (); 9636 } 9637 9638 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s" 9639 9640 /* Generate a crc32 of a byte. */ 9641 9642 static unsigned 9643 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits) 9644 { 9645 unsigned ix; 9646 9647 for (ix = bits; ix--; value <<= 1) 9648 { 9649 unsigned feedback; 9650 9651 feedback = (value ^ chksum) & 0x80000000 ? 
0x04c11db7 : 0; 9652 chksum <<= 1; 9653 chksum ^= feedback; 9654 } 9655 return chksum; 9656 } 9657 9658 /* Generate a crc32 of a 32-bit unsigned. */ 9659 9660 unsigned 9661 crc32_unsigned (unsigned chksum, unsigned value) 9662 { 9663 return crc32_unsigned_bits (chksum, value, 32); 9664 } 9665 9666 /* Generate a crc32 of a byte. */ 9667 9668 unsigned 9669 crc32_byte (unsigned chksum, char byte) 9670 { 9671 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8); 9672 } 9673 9674 /* Generate a crc32 of a string. */ 9675 9676 unsigned 9677 crc32_string (unsigned chksum, const char *string) 9678 { 9679 do 9680 { 9681 chksum = crc32_byte (chksum, *string); 9682 } 9683 while (*string++); 9684 return chksum; 9685 } 9686 9687 /* P is a string that will be used in a symbol. Mask out any characters 9688 that are not valid in that context. */ 9689 9690 void 9691 clean_symbol_name (char *p) 9692 { 9693 for (; *p; p++) 9694 if (! (ISALNUM (*p) 9695 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */ 9696 || *p == '$' 9697 #endif 9698 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */ 9699 || *p == '.' 9700 #endif 9701 )) 9702 *p = '_'; 9703 } 9704 9705 /* For anonymous aggregate types, we need some sort of name to 9706 hold on to. In practice, this should not appear, but it should 9707 not be harmful if it does. */ 9708 bool 9709 anon_aggrname_p(const_tree id_node) 9710 { 9711 #ifndef NO_DOT_IN_LABEL 9712 return (IDENTIFIER_POINTER (id_node)[0] == '.' 9713 && IDENTIFIER_POINTER (id_node)[1] == '_'); 9714 #else /* NO_DOT_IN_LABEL */ 9715 #ifndef NO_DOLLAR_IN_LABEL 9716 return (IDENTIFIER_POINTER (id_node)[0] == '$' \ 9717 && IDENTIFIER_POINTER (id_node)[1] == '_'); 9718 #else /* NO_DOLLAR_IN_LABEL */ 9719 #define ANON_AGGRNAME_PREFIX "__anon_" 9720 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX, 9721 sizeof (ANON_AGGRNAME_PREFIX) - 1)); 9722 #endif /* NO_DOLLAR_IN_LABEL */ 9723 #endif /* NO_DOT_IN_LABEL */ 9724 } 9725 9726 /* Return a format for an anonymous aggregate name. */ 9727 const char * 9728 anon_aggrname_format() 9729 { 9730 #ifndef NO_DOT_IN_LABEL 9731 return "._%d"; 9732 #else /* NO_DOT_IN_LABEL */ 9733 #ifndef NO_DOLLAR_IN_LABEL 9734 return "$_%d"; 9735 #else /* NO_DOLLAR_IN_LABEL */ 9736 return "__anon_%d"; 9737 #endif /* NO_DOLLAR_IN_LABEL */ 9738 #endif /* NO_DOT_IN_LABEL */ 9739 } 9740 9741 /* Generate a name for a special-purpose function. 9742 The generated name may need to be unique across the whole link. 9743 Changes to this function may also require corresponding changes to 9744 xstrdup_mask_random. 9745 TYPE is some string to identify the purpose of this function to the 9746 linker or collect2; it must start with an uppercase letter, 9747 one of: 9748 I - for constructors 9749 D - for destructors 9750 N - for C++ anonymous namespaces 9751 F - for DWARF unwind frame information. */ 9752 9753 tree 9754 get_file_function_name (const char *type) 9755 { 9756 char *buf; 9757 const char *p; 9758 char *q; 9759 9760 /* If we already have a name we know to be unique, just use that. */ 9761 if (first_global_object_name) 9762 p = q = ASTRDUP (first_global_object_name); 9763 /* If the target is handling the constructors/destructors, they 9764 will be local to this file and the name is only necessary for 9765 debugging purposes. 9766 We also assign sub_I and sub_D sufixes to constructors called from 9767 the global static constructors. These are always local. 
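   In these cases the name is derived from the basename of the main input
   file, and the identifier returned at the end comes out as
   FILE_FUNCTION_FORMAT expanded with the TYPE string, e.g.
   "_GLOBAL__sub_I_" followed by that basename after clean_symbol_name
   has run over it (the exact spelling depends on the target's label
   character restrictions).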
*/ 9768 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors) 9769 || (strncmp (type, "sub_", 4) == 0 9770 && (type[4] == 'I' || type[4] == 'D'))) 9771 { 9772 const char *file = main_input_filename; 9773 if (! file) 9774 file = LOCATION_FILE (input_location); 9775 /* Just use the file's basename, because the full pathname 9776 might be quite long. */ 9777 p = q = ASTRDUP (lbasename (file)); 9778 } 9779 else 9780 { 9781 /* Otherwise, the name must be unique across the entire link. 9782 We don't have anything that we know to be unique to this translation 9783 unit, so use what we do have and throw in some randomness. */ 9784 unsigned len; 9785 const char *name = weak_global_object_name; 9786 const char *file = main_input_filename; 9787 9788 if (! name) 9789 name = ""; 9790 if (! file) 9791 file = LOCATION_FILE (input_location); 9792 9793 len = strlen (file); 9794 q = (char *) alloca (9 + 19 + len + 1); 9795 memcpy (q, file, len + 1); 9796 9797 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX, 9798 crc32_string (0, name), get_random_seed (false)); 9799 9800 p = q; 9801 } 9802 9803 clean_symbol_name (q); 9804 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p) 9805 + strlen (type)); 9806 9807 /* Set up the name of the file-level functions we may need. 9808 Use a global object (which is already required to be unique over 9809 the program) rather than the file name (which imposes extra 9810 constraints). */ 9811 sprintf (buf, FILE_FUNCTION_FORMAT, type, p); 9812 9813 return get_identifier (buf); 9814 } 9815 9816 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007) 9817 9818 /* Complain that the tree code of NODE does not match the expected 0 9819 terminated list of trailing codes. The trailing code list can be 9820 empty, for a more vague error message. FILE, LINE, and FUNCTION 9821 are of the caller. */ 9822 9823 void 9824 tree_check_failed (const_tree node, const char *file, 9825 int line, const char *function, ...) 9826 { 9827 va_list args; 9828 const char *buffer; 9829 unsigned length = 0; 9830 enum tree_code code; 9831 9832 va_start (args, function); 9833 while ((code = (enum tree_code) va_arg (args, int))) 9834 length += 4 + strlen (get_tree_code_name (code)); 9835 va_end (args); 9836 if (length) 9837 { 9838 char *tmp; 9839 va_start (args, function); 9840 length += strlen ("expected "); 9841 buffer = tmp = (char *) alloca (length); 9842 length = 0; 9843 while ((code = (enum tree_code) va_arg (args, int))) 9844 { 9845 const char *prefix = length ? " or " : "expected "; 9846 9847 strcpy (tmp + length, prefix); 9848 length += strlen (prefix); 9849 strcpy (tmp + length, get_tree_code_name (code)); 9850 length += strlen (get_tree_code_name (code)); 9851 } 9852 va_end (args); 9853 } 9854 else 9855 buffer = "unexpected node"; 9856 9857 internal_error ("tree check: %s, have %s in %s, at %s:%d", 9858 buffer, get_tree_code_name (TREE_CODE (node)), 9859 function, trim_filename (file), line); 9860 } 9861 9862 /* Complain that the tree code of NODE does match the expected 0 9863 terminated list of trailing codes. FILE, LINE, and FUNCTION are of 9864 the caller. */ 9865 9866 void 9867 tree_not_check_failed (const_tree node, const char *file, 9868 int line, const char *function, ...) 
9869 { 9870 va_list args; 9871 char *buffer; 9872 unsigned length = 0; 9873 enum tree_code code; 9874 9875 va_start (args, function); 9876 while ((code = (enum tree_code) va_arg (args, int))) 9877 length += 4 + strlen (get_tree_code_name (code)); 9878 va_end (args); 9879 va_start (args, function); 9880 buffer = (char *) alloca (length); 9881 length = 0; 9882 while ((code = (enum tree_code) va_arg (args, int))) 9883 { 9884 if (length) 9885 { 9886 strcpy (buffer + length, " or "); 9887 length += 4; 9888 } 9889 strcpy (buffer + length, get_tree_code_name (code)); 9890 length += strlen (get_tree_code_name (code)); 9891 } 9892 va_end (args); 9893 9894 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d", 9895 buffer, get_tree_code_name (TREE_CODE (node)), 9896 function, trim_filename (file), line); 9897 } 9898 9899 /* Similar to tree_check_failed, except that we check for a class of tree 9900 code, given in CL. */ 9901 9902 void 9903 tree_class_check_failed (const_tree node, const enum tree_code_class cl, 9904 const char *file, int line, const char *function) 9905 { 9906 internal_error 9907 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d", 9908 TREE_CODE_CLASS_STRING (cl), 9909 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))), 9910 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line); 9911 } 9912 9913 /* Similar to tree_check_failed, except that instead of specifying a 9914 dozen codes, use the knowledge that they're all sequential. */ 9915 9916 void 9917 tree_range_check_failed (const_tree node, const char *file, int line, 9918 const char *function, enum tree_code c1, 9919 enum tree_code c2) 9920 { 9921 char *buffer; 9922 unsigned length = 0; 9923 unsigned int c; 9924 9925 for (c = c1; c <= c2; ++c) 9926 length += 4 + strlen (get_tree_code_name ((enum tree_code) c)); 9927 9928 length += strlen ("expected "); 9929 buffer = (char *) alloca (length); 9930 length = 0; 9931 9932 for (c = c1; c <= c2; ++c) 9933 { 9934 const char *prefix = length ? " or " : "expected "; 9935 9936 strcpy (buffer + length, prefix); 9937 length += strlen (prefix); 9938 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c)); 9939 length += strlen (get_tree_code_name ((enum tree_code) c)); 9940 } 9941 9942 internal_error ("tree check: %s, have %s in %s, at %s:%d", 9943 buffer, get_tree_code_name (TREE_CODE (node)), 9944 function, trim_filename (file), line); 9945 } 9946 9947 9948 /* Similar to tree_check_failed, except that we check that a tree does 9949 not have the specified code, given in CL. */ 9950 9951 void 9952 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl, 9953 const char *file, int line, const char *function) 9954 { 9955 internal_error 9956 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d", 9957 TREE_CODE_CLASS_STRING (cl), 9958 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))), 9959 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line); 9960 } 9961 9962 9963 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. 
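   This is typically reached from the OMP_CLAUSE_SUBCODE_CHECK macro in
   tree.h when an accessor such as OMP_CLAUSE_PRIVATE_DEBUG is applied to
   a clause of the wrong subcode; the resulting message names both the
   expected and the actual clause, e.g. (purely illustrative)
   "tree check: expected omp_clause private, have shared in ..., at ...".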
*/ 9964 9965 void 9966 omp_clause_check_failed (const_tree node, const char *file, int line, 9967 const char *function, enum omp_clause_code code) 9968 { 9969 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d", 9970 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)), 9971 function, trim_filename (file), line); 9972 } 9973 9974 9975 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */ 9976 9977 void 9978 omp_clause_range_check_failed (const_tree node, const char *file, int line, 9979 const char *function, enum omp_clause_code c1, 9980 enum omp_clause_code c2) 9981 { 9982 char *buffer; 9983 unsigned length = 0; 9984 unsigned int c; 9985 9986 for (c = c1; c <= c2; ++c) 9987 length += 4 + strlen (omp_clause_code_name[c]); 9988 9989 length += strlen ("expected "); 9990 buffer = (char *) alloca (length); 9991 length = 0; 9992 9993 for (c = c1; c <= c2; ++c) 9994 { 9995 const char *prefix = length ? " or " : "expected "; 9996 9997 strcpy (buffer + length, prefix); 9998 length += strlen (prefix); 9999 strcpy (buffer + length, omp_clause_code_name[c]); 10000 length += strlen (omp_clause_code_name[c]); 10001 } 10002 10003 internal_error ("tree check: %s, have %s in %s, at %s:%d", 10004 buffer, omp_clause_code_name[TREE_CODE (node)], 10005 function, trim_filename (file), line); 10006 } 10007 10008 10009 #undef DEFTREESTRUCT 10010 #define DEFTREESTRUCT(VAL, NAME) NAME, 10011 10012 static const char *ts_enum_names[] = { 10013 #include "treestruct.def" 10014 }; 10015 #undef DEFTREESTRUCT 10016 10017 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)]) 10018 10019 /* Similar to tree_class_check_failed, except that we check for 10020 whether CODE contains the tree structure identified by EN. */ 10021 10022 void 10023 tree_contains_struct_check_failed (const_tree node, 10024 const enum tree_node_structure_enum en, 10025 const char *file, int line, 10026 const char *function) 10027 { 10028 internal_error 10029 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d", 10030 TS_ENUM_NAME (en), 10031 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line); 10032 } 10033 10034 10035 /* Similar to above, except that the check is for the bounds of a TREE_VEC's 10036 (dynamically sized) vector. */ 10037 10038 void 10039 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line, 10040 const char *function) 10041 { 10042 internal_error 10043 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d", 10044 idx + 1, len, function, trim_filename (file), line); 10045 } 10046 10047 /* Similar to above, except that the check is for the bounds of a TREE_VEC's 10048 (dynamically sized) vector. */ 10049 10050 void 10051 tree_vec_elt_check_failed (int idx, int len, const char *file, int line, 10052 const char *function) 10053 { 10054 internal_error 10055 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d", 10056 idx + 1, len, function, trim_filename (file), line); 10057 } 10058 10059 /* Similar to above, except that the check is for the bounds of the operand 10060 vector of an expression node EXP. 
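   For example, evaluating TREE_OPERAND (t, 2) on a PLUS_EXPR (which has
   only operands 0 and 1) with checking enabled funnels through
   TREE_OPERAND_CHECK and ends up here, reporting that operand 3 of a
   node with 2 operands was accessed.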
*/ 10061 10062 void 10063 tree_operand_check_failed (int idx, const_tree exp, const char *file, 10064 int line, const char *function) 10065 { 10066 enum tree_code code = TREE_CODE (exp); 10067 internal_error 10068 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d", 10069 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp), 10070 function, trim_filename (file), line); 10071 } 10072 10073 /* Similar to above, except that the check is for the number of 10074 operands of an OMP_CLAUSE node. */ 10075 10076 void 10077 omp_clause_operand_check_failed (int idx, const_tree t, const char *file, 10078 int line, const char *function) 10079 { 10080 internal_error 10081 ("tree check: accessed operand %d of omp_clause %s with %d operands " 10082 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)], 10083 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function, 10084 trim_filename (file), line); 10085 } 10086 #endif /* ENABLE_TREE_CHECKING */ 10087 10088 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE, 10089 and mapped to the machine mode MODE. Initialize its fields and build 10090 the information necessary for debugging output. */ 10091 10092 static tree 10093 make_vector_type (tree innertype, int nunits, machine_mode mode) 10094 { 10095 tree t; 10096 inchash::hash hstate; 10097 tree mv_innertype = TYPE_MAIN_VARIANT (innertype); 10098 10099 t = make_node (VECTOR_TYPE); 10100 TREE_TYPE (t) = mv_innertype; 10101 SET_TYPE_VECTOR_SUBPARTS (t, nunits); 10102 SET_TYPE_MODE (t, mode); 10103 10104 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p) 10105 SET_TYPE_STRUCTURAL_EQUALITY (t); 10106 else if ((TYPE_CANONICAL (mv_innertype) != innertype 10107 || mode != VOIDmode) 10108 && !VECTOR_BOOLEAN_TYPE_P (t)) 10109 TYPE_CANONICAL (t) 10110 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode); 10111 10112 layout_type (t); 10113 10114 hstate.add_wide_int (VECTOR_TYPE); 10115 hstate.add_wide_int (nunits); 10116 hstate.add_wide_int (mode); 10117 hstate.add_object (TYPE_HASH (TREE_TYPE (t))); 10118 t = type_hash_canon (hstate.end (), t); 10119 10120 /* We have built a main variant, based on the main variant of the 10121 inner type. Use it to build the variant we return. */ 10122 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype)) 10123 && TREE_TYPE (t) != innertype) 10124 return build_type_attribute_qual_variant (t, 10125 TYPE_ATTRIBUTES (innertype), 10126 TYPE_QUALS (innertype)); 10127 10128 return t; 10129 } 10130 10131 static tree 10132 make_or_reuse_type (unsigned size, int unsignedp) 10133 { 10134 int i; 10135 10136 if (size == INT_TYPE_SIZE) 10137 return unsignedp ? unsigned_type_node : integer_type_node; 10138 if (size == CHAR_TYPE_SIZE) 10139 return unsignedp ? unsigned_char_type_node : signed_char_type_node; 10140 if (size == SHORT_TYPE_SIZE) 10141 return unsignedp ? short_unsigned_type_node : short_integer_type_node; 10142 if (size == LONG_TYPE_SIZE) 10143 return unsignedp ? long_unsigned_type_node : long_integer_type_node; 10144 if (size == LONG_LONG_TYPE_SIZE) 10145 return (unsignedp ? long_long_unsigned_type_node 10146 : long_long_integer_type_node); 10147 10148 for (i = 0; i < NUM_INT_N_ENTS; i ++) 10149 if (size == int_n_data[i].bitsize 10150 && int_n_enabled_p[i]) 10151 return (unsignedp ? 
int_n_trees[i].unsigned_type 10152 : int_n_trees[i].signed_type); 10153 10154 if (unsignedp) 10155 return make_unsigned_type (size); 10156 else 10157 return make_signed_type (size); 10158 } 10159 10160 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */ 10161 10162 static tree 10163 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp) 10164 { 10165 if (satp) 10166 { 10167 if (size == SHORT_FRACT_TYPE_SIZE) 10168 return unsignedp ? sat_unsigned_short_fract_type_node 10169 : sat_short_fract_type_node; 10170 if (size == FRACT_TYPE_SIZE) 10171 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node; 10172 if (size == LONG_FRACT_TYPE_SIZE) 10173 return unsignedp ? sat_unsigned_long_fract_type_node 10174 : sat_long_fract_type_node; 10175 if (size == LONG_LONG_FRACT_TYPE_SIZE) 10176 return unsignedp ? sat_unsigned_long_long_fract_type_node 10177 : sat_long_long_fract_type_node; 10178 } 10179 else 10180 { 10181 if (size == SHORT_FRACT_TYPE_SIZE) 10182 return unsignedp ? unsigned_short_fract_type_node 10183 : short_fract_type_node; 10184 if (size == FRACT_TYPE_SIZE) 10185 return unsignedp ? unsigned_fract_type_node : fract_type_node; 10186 if (size == LONG_FRACT_TYPE_SIZE) 10187 return unsignedp ? unsigned_long_fract_type_node 10188 : long_fract_type_node; 10189 if (size == LONG_LONG_FRACT_TYPE_SIZE) 10190 return unsignedp ? unsigned_long_long_fract_type_node 10191 : long_long_fract_type_node; 10192 } 10193 10194 return make_fract_type (size, unsignedp, satp); 10195 } 10196 10197 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */ 10198 10199 static tree 10200 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp) 10201 { 10202 if (satp) 10203 { 10204 if (size == SHORT_ACCUM_TYPE_SIZE) 10205 return unsignedp ? sat_unsigned_short_accum_type_node 10206 : sat_short_accum_type_node; 10207 if (size == ACCUM_TYPE_SIZE) 10208 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node; 10209 if (size == LONG_ACCUM_TYPE_SIZE) 10210 return unsignedp ? sat_unsigned_long_accum_type_node 10211 : sat_long_accum_type_node; 10212 if (size == LONG_LONG_ACCUM_TYPE_SIZE) 10213 return unsignedp ? sat_unsigned_long_long_accum_type_node 10214 : sat_long_long_accum_type_node; 10215 } 10216 else 10217 { 10218 if (size == SHORT_ACCUM_TYPE_SIZE) 10219 return unsignedp ? unsigned_short_accum_type_node 10220 : short_accum_type_node; 10221 if (size == ACCUM_TYPE_SIZE) 10222 return unsignedp ? unsigned_accum_type_node : accum_type_node; 10223 if (size == LONG_ACCUM_TYPE_SIZE) 10224 return unsignedp ? unsigned_long_accum_type_node 10225 : long_accum_type_node; 10226 if (size == LONG_LONG_ACCUM_TYPE_SIZE) 10227 return unsignedp ? unsigned_long_long_accum_type_node 10228 : long_long_accum_type_node; 10229 } 10230 10231 return make_accum_type (size, unsignedp, satp); 10232 } 10233 10234 10235 /* Create an atomic variant node for TYPE. This routine is called 10236 during initialization of data types to create the 5 basic atomic 10237 types. The generic build_variant_type function requires these to 10238 already be set up in order to function properly, so cannot be 10239 called from there. If ALIGN is non-zero, then ensure alignment is 10240 overridden to this value. */ 10241 10242 static tree 10243 build_atomic_base (tree type, unsigned int align) 10244 { 10245 tree t; 10246 10247 /* Make sure its not already registered. 
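   (get_qualified_type walks the TYPE_NEXT_VARIANT chain of TYPE's main
   variant, so if an atomic variant was already created it is found and
   returned here instead of being built a second time.)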
*/ 10248 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC))) 10249 return t; 10250 10251 t = build_variant_type_copy (type); 10252 set_type_quals (t, TYPE_QUAL_ATOMIC); 10253 10254 if (align) 10255 SET_TYPE_ALIGN (t, align); 10256 10257 return t; 10258 } 10259 10260 /* Information about the _FloatN and _FloatNx types. This must be in 10261 the same order as the corresponding TI_* enum values. */ 10262 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] = 10263 { 10264 { 16, false }, 10265 { 32, false }, 10266 { 64, false }, 10267 { 128, false }, 10268 { 32, true }, 10269 { 64, true }, 10270 { 128, true }, 10271 }; 10272 10273 10274 /* Create nodes for all integer types (and error_mark_node) using the sizes 10275 of C datatypes. SIGNED_CHAR specifies whether char is signed. */ 10276 10277 void 10278 build_common_tree_nodes (bool signed_char) 10279 { 10280 int i; 10281 10282 error_mark_node = make_node (ERROR_MARK); 10283 TREE_TYPE (error_mark_node) = error_mark_node; 10284 10285 initialize_sizetypes (); 10286 10287 /* Define both `signed char' and `unsigned char'. */ 10288 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE); 10289 TYPE_STRING_FLAG (signed_char_type_node) = 1; 10290 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE); 10291 TYPE_STRING_FLAG (unsigned_char_type_node) = 1; 10292 10293 /* Define `char', which is like either `signed char' or `unsigned char' 10294 but not the same as either. */ 10295 char_type_node 10296 = (signed_char 10297 ? make_signed_type (CHAR_TYPE_SIZE) 10298 : make_unsigned_type (CHAR_TYPE_SIZE)); 10299 TYPE_STRING_FLAG (char_type_node) = 1; 10300 10301 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE); 10302 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE); 10303 integer_type_node = make_signed_type (INT_TYPE_SIZE); 10304 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE); 10305 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE); 10306 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE); 10307 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE); 10308 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE); 10309 10310 for (i = 0; i < NUM_INT_N_ENTS; i ++) 10311 { 10312 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize); 10313 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize); 10314 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize); 10315 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize); 10316 10317 if (int_n_enabled_p[i]) 10318 { 10319 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type; 10320 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type; 10321 } 10322 } 10323 10324 /* Define a boolean type. This type only represents boolean values but 10325 may be larger than char depending on the value of BOOL_TYPE_SIZE. */ 10326 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE); 10327 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE); 10328 TYPE_PRECISION (boolean_type_node) = 1; 10329 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1); 10330 10331 /* Define what type to use for size_t. 
*/ 10332 if (strcmp (SIZE_TYPE, "unsigned int") == 0) 10333 size_type_node = unsigned_type_node; 10334 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0) 10335 size_type_node = long_unsigned_type_node; 10336 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0) 10337 size_type_node = long_long_unsigned_type_node; 10338 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0) 10339 size_type_node = short_unsigned_type_node; 10340 else 10341 { 10342 int i; 10343 10344 size_type_node = NULL_TREE; 10345 for (i = 0; i < NUM_INT_N_ENTS; i++) 10346 if (int_n_enabled_p[i]) 10347 { 10348 char name[50]; 10349 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize); 10350 10351 if (strcmp (name, SIZE_TYPE) == 0) 10352 { 10353 size_type_node = int_n_trees[i].unsigned_type; 10354 } 10355 } 10356 if (size_type_node == NULL_TREE) 10357 gcc_unreachable (); 10358 } 10359 10360 /* Define what type to use for ptrdiff_t. */ 10361 if (strcmp (PTRDIFF_TYPE, "int") == 0) 10362 ptrdiff_type_node = integer_type_node; 10363 else if (strcmp (PTRDIFF_TYPE, "long int") == 0) 10364 ptrdiff_type_node = long_integer_type_node; 10365 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0) 10366 ptrdiff_type_node = long_long_integer_type_node; 10367 else if (strcmp (PTRDIFF_TYPE, "short int") == 0) 10368 ptrdiff_type_node = short_integer_type_node; 10369 else 10370 { 10371 ptrdiff_type_node = NULL_TREE; 10372 for (int i = 0; i < NUM_INT_N_ENTS; i++) 10373 if (int_n_enabled_p[i]) 10374 { 10375 char name[50]; 10376 sprintf (name, "__int%d", int_n_data[i].bitsize); 10377 if (strcmp (name, PTRDIFF_TYPE) == 0) 10378 ptrdiff_type_node = int_n_trees[i].signed_type; 10379 } 10380 if (ptrdiff_type_node == NULL_TREE) 10381 gcc_unreachable (); 10382 } 10383 10384 /* Fill in the rest of the sized types. Reuse existing type nodes 10385 when possible. */ 10386 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0); 10387 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0); 10388 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0); 10389 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0); 10390 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0); 10391 10392 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1); 10393 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1); 10394 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1); 10395 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1); 10396 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1); 10397 10398 /* Don't call build_qualified type for atomics. That routine does 10399 special processing for atomics, and until they are initialized 10400 it's better not to make that call. 10401 10402 Check to see if there is a target override for atomic types. 
*/ 10403 10404 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node, 10405 targetm.atomic_align_for_mode (QImode)); 10406 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node, 10407 targetm.atomic_align_for_mode (HImode)); 10408 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node, 10409 targetm.atomic_align_for_mode (SImode)); 10410 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node, 10411 targetm.atomic_align_for_mode (DImode)); 10412 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node, 10413 targetm.atomic_align_for_mode (TImode)); 10414 10415 access_public_node = get_identifier ("public"); 10416 access_protected_node = get_identifier ("protected"); 10417 access_private_node = get_identifier ("private"); 10418 10419 /* Define these next since types below may used them. */ 10420 integer_zero_node = build_int_cst (integer_type_node, 0); 10421 integer_one_node = build_int_cst (integer_type_node, 1); 10422 integer_three_node = build_int_cst (integer_type_node, 3); 10423 integer_minus_one_node = build_int_cst (integer_type_node, -1); 10424 10425 size_zero_node = size_int (0); 10426 size_one_node = size_int (1); 10427 bitsize_zero_node = bitsize_int (0); 10428 bitsize_one_node = bitsize_int (1); 10429 bitsize_unit_node = bitsize_int (BITS_PER_UNIT); 10430 10431 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node); 10432 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node); 10433 10434 void_type_node = make_node (VOID_TYPE); 10435 layout_type (void_type_node); 10436 10437 pointer_bounds_type_node = targetm.chkp_bound_type (); 10438 10439 /* We are not going to have real types in C with less than byte alignment, 10440 so we might as well not have any types that claim to have it. */ 10441 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT); 10442 TYPE_USER_ALIGN (void_type_node) = 0; 10443 10444 void_node = make_node (VOID_CST); 10445 TREE_TYPE (void_node) = void_type_node; 10446 10447 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0); 10448 layout_type (TREE_TYPE (null_pointer_node)); 10449 10450 ptr_type_node = build_pointer_type (void_type_node); 10451 const_ptr_type_node 10452 = build_pointer_type (build_type_variant (void_type_node, 1, 0)); 10453 fileptr_type_node = ptr_type_node; 10454 const_tm_ptr_type_node = const_ptr_type_node; 10455 10456 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1); 10457 10458 float_type_node = make_node (REAL_TYPE); 10459 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE; 10460 layout_type (float_type_node); 10461 10462 double_type_node = make_node (REAL_TYPE); 10463 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE; 10464 layout_type (double_type_node); 10465 10466 long_double_type_node = make_node (REAL_TYPE); 10467 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE; 10468 layout_type (long_double_type_node); 10469 10470 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) 10471 { 10472 int n = floatn_nx_types[i].n; 10473 bool extended = floatn_nx_types[i].extended; 10474 machine_mode mode = targetm.floatn_mode (n, extended); 10475 if (mode == VOIDmode) 10476 continue; 10477 int precision = GET_MODE_PRECISION (mode); 10478 /* Work around the rs6000 KFmode having precision 113 not 10479 128. 
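   As a worked example, for IEEE binary128 the format has p == 113,
   emin == -16381 and emax == 16384, so the assertion below holds
   (emin + emax == 3) and min_precision is 113 + ceil_log2 (32765)
   == 113 + 15 == 128, i.e. exactly N for _Float128; modes like KFmode
   that advertise a precision of 113 are bumped up to that value.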
*/ 10480 const struct real_format *fmt = REAL_MODE_FORMAT (mode); 10481 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3); 10482 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin); 10483 if (!extended) 10484 gcc_assert (min_precision == n); 10485 if (precision < min_precision) 10486 precision = min_precision; 10487 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE); 10488 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision; 10489 layout_type (FLOATN_NX_TYPE_NODE (i)); 10490 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode); 10491 } 10492 10493 float_ptr_type_node = build_pointer_type (float_type_node); 10494 double_ptr_type_node = build_pointer_type (double_type_node); 10495 long_double_ptr_type_node = build_pointer_type (long_double_type_node); 10496 integer_ptr_type_node = build_pointer_type (integer_type_node); 10497 10498 /* Fixed size integer types. */ 10499 uint16_type_node = make_or_reuse_type (16, 1); 10500 uint32_type_node = make_or_reuse_type (32, 1); 10501 uint64_type_node = make_or_reuse_type (64, 1); 10502 10503 /* Decimal float types. */ 10504 dfloat32_type_node = make_node (REAL_TYPE); 10505 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE; 10506 SET_TYPE_MODE (dfloat32_type_node, SDmode); 10507 layout_type (dfloat32_type_node); 10508 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node); 10509 10510 dfloat64_type_node = make_node (REAL_TYPE); 10511 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE; 10512 SET_TYPE_MODE (dfloat64_type_node, DDmode); 10513 layout_type (dfloat64_type_node); 10514 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node); 10515 10516 dfloat128_type_node = make_node (REAL_TYPE); 10517 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE; 10518 SET_TYPE_MODE (dfloat128_type_node, TDmode); 10519 layout_type (dfloat128_type_node); 10520 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node); 10521 10522 complex_integer_type_node = build_complex_type (integer_type_node, true); 10523 complex_float_type_node = build_complex_type (float_type_node, true); 10524 complex_double_type_node = build_complex_type (double_type_node, true); 10525 complex_long_double_type_node = build_complex_type (long_double_type_node, 10526 true); 10527 10528 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) 10529 { 10530 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE) 10531 COMPLEX_FLOATN_NX_TYPE_NODE (i) 10532 = build_complex_type (FLOATN_NX_TYPE_NODE (i)); 10533 } 10534 10535 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */ 10536 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \ 10537 sat_ ## KIND ## _type_node = \ 10538 make_sat_signed_ ## KIND ## _type (SIZE); \ 10539 sat_unsigned_ ## KIND ## _type_node = \ 10540 make_sat_unsigned_ ## KIND ## _type (SIZE); \ 10541 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \ 10542 unsigned_ ## KIND ## _type_node = \ 10543 make_unsigned_ ## KIND ## _type (SIZE); 10544 10545 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \ 10546 sat_ ## WIDTH ## KIND ## _type_node = \ 10547 make_sat_signed_ ## KIND ## _type (SIZE); \ 10548 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \ 10549 make_sat_unsigned_ ## KIND ## _type (SIZE); \ 10550 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \ 10551 unsigned_ ## WIDTH ## KIND ## _type_node = \ 10552 make_unsigned_ ## KIND ## _type (SIZE); 10553 10554 /* Make fixed-point type nodes based on four different widths. 
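   For instance, MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT) used further
   down expands into sixteen assignments covering short_fract_type_node,
   fract_type_node, long_fract_type_node and long_long_fract_type_node
   together with their unsigned_, sat_ and sat_unsigned_ counterparts,
   each sized by the matching *_FRACT_TYPE_SIZE macro.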
*/ 10555 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \ 10556 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \ 10557 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \ 10558 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \ 10559 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE) 10560 10561 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */ 10562 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \ 10563 NAME ## _type_node = \ 10564 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \ 10565 u ## NAME ## _type_node = \ 10566 make_or_reuse_unsigned_ ## KIND ## _type \ 10567 (GET_MODE_BITSIZE (U ## MODE ## mode)); \ 10568 sat_ ## NAME ## _type_node = \ 10569 make_or_reuse_sat_signed_ ## KIND ## _type \ 10570 (GET_MODE_BITSIZE (MODE ## mode)); \ 10571 sat_u ## NAME ## _type_node = \ 10572 make_or_reuse_sat_unsigned_ ## KIND ## _type \ 10573 (GET_MODE_BITSIZE (U ## MODE ## mode)); 10574 10575 /* Fixed-point type and mode nodes. */ 10576 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT) 10577 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM) 10578 MAKE_FIXED_MODE_NODE (fract, qq, QQ) 10579 MAKE_FIXED_MODE_NODE (fract, hq, HQ) 10580 MAKE_FIXED_MODE_NODE (fract, sq, SQ) 10581 MAKE_FIXED_MODE_NODE (fract, dq, DQ) 10582 MAKE_FIXED_MODE_NODE (fract, tq, TQ) 10583 MAKE_FIXED_MODE_NODE (accum, ha, HA) 10584 MAKE_FIXED_MODE_NODE (accum, sa, SA) 10585 MAKE_FIXED_MODE_NODE (accum, da, DA) 10586 MAKE_FIXED_MODE_NODE (accum, ta, TA) 10587 10588 { 10589 tree t = targetm.build_builtin_va_list (); 10590 10591 /* Many back-ends define record types without setting TYPE_NAME. 10592 If we copied the record type here, we'd keep the original 10593 record type without a name. This breaks name mangling. So, 10594 don't copy record types and let c_common_nodes_and_builtins() 10595 declare the type to be __builtin_va_list. */ 10596 if (TREE_CODE (t) != RECORD_TYPE) 10597 t = build_variant_type_copy (t); 10598 10599 va_list_type_node = t; 10600 } 10601 } 10602 10603 /* Modify DECL for given flags. 10604 TM_PURE attribute is set only on types, so the function will modify 10605 DECL's type when ECF_TM_PURE is used. */ 10606 10607 void 10608 set_call_expr_flags (tree decl, int flags) 10609 { 10610 if (flags & ECF_NOTHROW) 10611 TREE_NOTHROW (decl) = 1; 10612 if (flags & ECF_CONST) 10613 TREE_READONLY (decl) = 1; 10614 if (flags & ECF_PURE) 10615 DECL_PURE_P (decl) = 1; 10616 if (flags & ECF_LOOPING_CONST_OR_PURE) 10617 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1; 10618 if (flags & ECF_NOVOPS) 10619 DECL_IS_NOVOPS (decl) = 1; 10620 if (flags & ECF_NORETURN) 10621 TREE_THIS_VOLATILE (decl) = 1; 10622 if (flags & ECF_MALLOC) 10623 DECL_IS_MALLOC (decl) = 1; 10624 if (flags & ECF_RETURNS_TWICE) 10625 DECL_IS_RETURNS_TWICE (decl) = 1; 10626 if (flags & ECF_LEAF) 10627 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"), 10628 NULL, DECL_ATTRIBUTES (decl)); 10629 if (flags & ECF_RET1) 10630 DECL_ATTRIBUTES (decl) 10631 = tree_cons (get_identifier ("fn spec"), 10632 build_tree_list (NULL_TREE, build_string (1, "1")), 10633 DECL_ATTRIBUTES (decl)); 10634 if ((flags & ECF_TM_PURE) && flag_tm) 10635 apply_tm_attr (decl, get_identifier ("transaction_pure")); 10636 /* Looping const or pure is implied by noreturn. 10637 There is currently no way to declare looping const or looping pure alone. 
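   In other words, the assertion below accepts ECF_LOOPING_CONST_OR_PURE
   only when ECF_NORETURN and at least one of ECF_CONST or ECF_PURE are
   also present; a flag set such as ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
   | ECF_CONST (used for __builtin_unreachable below) trivially passes.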
*/ 10638 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE) 10639 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE)))); 10640 } 10641 10642 10643 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */ 10644 10645 static void 10646 local_define_builtin (const char *name, tree type, enum built_in_function code, 10647 const char *library_name, int ecf_flags) 10648 { 10649 tree decl; 10650 10651 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL, 10652 library_name, NULL_TREE); 10653 set_call_expr_flags (decl, ecf_flags); 10654 10655 set_builtin_decl (code, decl, true); 10656 } 10657 10658 /* Call this function after instantiating all builtins that the language 10659 front end cares about. This will build the rest of the builtins 10660 and internal functions that are relied upon by the tree optimizers and 10661 the middle-end. */ 10662 10663 void 10664 build_common_builtin_nodes (void) 10665 { 10666 tree tmp, ftype; 10667 int ecf_flags; 10668 10669 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE) 10670 || !builtin_decl_explicit_p (BUILT_IN_ABORT)) 10671 { 10672 ftype = build_function_type (void_type_node, void_list_node); 10673 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)) 10674 local_define_builtin ("__builtin_unreachable", ftype, 10675 BUILT_IN_UNREACHABLE, 10676 "__builtin_unreachable", 10677 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN 10678 | ECF_CONST); 10679 if (!builtin_decl_explicit_p (BUILT_IN_ABORT)) 10680 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT, 10681 "abort", 10682 ECF_LEAF | ECF_NORETURN | ECF_CONST); 10683 } 10684 10685 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY) 10686 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE)) 10687 { 10688 ftype = build_function_type_list (ptr_type_node, 10689 ptr_type_node, const_ptr_type_node, 10690 size_type_node, NULL_TREE); 10691 10692 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)) 10693 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY, 10694 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1); 10695 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE)) 10696 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE, 10697 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1); 10698 } 10699 10700 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP)) 10701 { 10702 ftype = build_function_type_list (integer_type_node, const_ptr_type_node, 10703 const_ptr_type_node, size_type_node, 10704 NULL_TREE); 10705 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP, 10706 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF); 10707 } 10708 10709 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET)) 10710 { 10711 ftype = build_function_type_list (ptr_type_node, 10712 ptr_type_node, integer_type_node, 10713 size_type_node, NULL_TREE); 10714 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET, 10715 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1); 10716 } 10717 10718 /* If we're checking the stack, `alloca' can throw. */ 10719 const int alloca_flags 10720 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 
0 : ECF_NOTHROW); 10721 10722 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA)) 10723 { 10724 ftype = build_function_type_list (ptr_type_node, 10725 size_type_node, NULL_TREE); 10726 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA, 10727 "alloca", alloca_flags); 10728 } 10729 10730 ftype = build_function_type_list (ptr_type_node, size_type_node, 10731 size_type_node, NULL_TREE); 10732 local_define_builtin ("__builtin_alloca_with_align", ftype, 10733 BUILT_IN_ALLOCA_WITH_ALIGN, 10734 "__builtin_alloca_with_align", 10735 alloca_flags); 10736 10737 ftype = build_function_type_list (void_type_node, 10738 ptr_type_node, ptr_type_node, 10739 ptr_type_node, NULL_TREE); 10740 local_define_builtin ("__builtin_init_trampoline", ftype, 10741 BUILT_IN_INIT_TRAMPOLINE, 10742 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF); 10743 local_define_builtin ("__builtin_init_heap_trampoline", ftype, 10744 BUILT_IN_INIT_HEAP_TRAMPOLINE, 10745 "__builtin_init_heap_trampoline", 10746 ECF_NOTHROW | ECF_LEAF); 10747 local_define_builtin ("__builtin_init_descriptor", ftype, 10748 BUILT_IN_INIT_DESCRIPTOR, 10749 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF); 10750 10751 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE); 10752 local_define_builtin ("__builtin_adjust_trampoline", ftype, 10753 BUILT_IN_ADJUST_TRAMPOLINE, 10754 "__builtin_adjust_trampoline", 10755 ECF_CONST | ECF_NOTHROW); 10756 local_define_builtin ("__builtin_adjust_descriptor", ftype, 10757 BUILT_IN_ADJUST_DESCRIPTOR, 10758 "__builtin_adjust_descriptor", 10759 ECF_CONST | ECF_NOTHROW); 10760 10761 ftype = build_function_type_list (void_type_node, 10762 ptr_type_node, ptr_type_node, NULL_TREE); 10763 local_define_builtin ("__builtin_nonlocal_goto", ftype, 10764 BUILT_IN_NONLOCAL_GOTO, 10765 "__builtin_nonlocal_goto", 10766 ECF_NORETURN | ECF_NOTHROW); 10767 10768 ftype = build_function_type_list (void_type_node, 10769 ptr_type_node, ptr_type_node, NULL_TREE); 10770 local_define_builtin ("__builtin_setjmp_setup", ftype, 10771 BUILT_IN_SETJMP_SETUP, 10772 "__builtin_setjmp_setup", ECF_NOTHROW); 10773 10774 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE); 10775 local_define_builtin ("__builtin_setjmp_receiver", ftype, 10776 BUILT_IN_SETJMP_RECEIVER, 10777 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF); 10778 10779 ftype = build_function_type_list (ptr_type_node, NULL_TREE); 10780 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE, 10781 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF); 10782 10783 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE); 10784 local_define_builtin ("__builtin_stack_restore", ftype, 10785 BUILT_IN_STACK_RESTORE, 10786 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF); 10787 10788 ftype = build_function_type_list (integer_type_node, const_ptr_type_node, 10789 const_ptr_type_node, size_type_node, 10790 NULL_TREE); 10791 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ, 10792 "__builtin_memcmp_eq", 10793 ECF_PURE | ECF_NOTHROW | ECF_LEAF); 10794 10795 /* If there's a possibility that we might use the ARM EABI, build the 10796 alternate __cxa_end_cleanup node used to resume from C++ and Java. 
*/ 10797 if (targetm.arm_eabi_unwinder) 10798 { 10799 ftype = build_function_type_list (void_type_node, NULL_TREE); 10800 local_define_builtin ("__builtin_cxa_end_cleanup", ftype, 10801 BUILT_IN_CXA_END_CLEANUP, 10802 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF); 10803 } 10804 10805 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE); 10806 local_define_builtin ("__builtin_unwind_resume", ftype, 10807 BUILT_IN_UNWIND_RESUME, 10808 ((targetm_common.except_unwind_info (&global_options) 10809 == UI_SJLJ) 10810 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"), 10811 ECF_NORETURN); 10812 10813 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE) 10814 { 10815 ftype = build_function_type_list (ptr_type_node, integer_type_node, 10816 NULL_TREE); 10817 local_define_builtin ("__builtin_return_address", ftype, 10818 BUILT_IN_RETURN_ADDRESS, 10819 "__builtin_return_address", 10820 ECF_NOTHROW); 10821 } 10822 10823 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER) 10824 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT)) 10825 { 10826 ftype = build_function_type_list (void_type_node, ptr_type_node, 10827 ptr_type_node, NULL_TREE); 10828 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)) 10829 local_define_builtin ("__cyg_profile_func_enter", ftype, 10830 BUILT_IN_PROFILE_FUNC_ENTER, 10831 "__cyg_profile_func_enter", 0); 10832 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT)) 10833 local_define_builtin ("__cyg_profile_func_exit", ftype, 10834 BUILT_IN_PROFILE_FUNC_EXIT, 10835 "__cyg_profile_func_exit", 0); 10836 } 10837 10838 /* The exception object and filter values from the runtime. The argument 10839 must be zero before exception lowering, i.e. from the front end. After 10840 exception lowering, it will be the region number for the exception 10841 landing pad. These functions are PURE instead of CONST to prevent 10842 them from being hoisted past the exception edge that will initialize 10843 its value in the landing pad. */ 10844 ftype = build_function_type_list (ptr_type_node, 10845 integer_type_node, NULL_TREE); 10846 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF; 10847 /* Only use TM_PURE if we have TM language support. */ 10848 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1)) 10849 ecf_flags |= ECF_TM_PURE; 10850 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER, 10851 "__builtin_eh_pointer", ecf_flags); 10852 10853 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0); 10854 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE); 10855 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER, 10856 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF); 10857 10858 ftype = build_function_type_list (void_type_node, 10859 integer_type_node, integer_type_node, 10860 NULL_TREE); 10861 local_define_builtin ("__builtin_eh_copy_values", ftype, 10862 BUILT_IN_EH_COPY_VALUES, 10863 "__builtin_eh_copy_values", ECF_NOTHROW); 10864 10865 /* Complex multiplication and division. These are handled as builtins 10866 rather than optabs because emit_library_call_value doesn't support 10867 complex. Further, we can do slightly better with folding these 10868 beasties if the real and complex parts of the arguments are separate. 
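   For SCmode (complex float), for example, the loop below registers
   builtins named __mulsc3 and __divsc3 (or __gnu_mulsc3 and __gnu_divsc3
   when targetm.libfunc_gnu_prefix is set), each declared roughly as

     _Complex float __mulsc3 (float a, float b, float c, float d);

   computing the product (for __mulsc3) or quotient (for __divsc3) of
   a + bi and c + di, with the real and imaginary parts of both operands
   passed separately.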
*/ 10869 { 10870 int mode; 10871 10872 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode) 10873 { 10874 char mode_name_buf[4], *q; 10875 const char *p; 10876 enum built_in_function mcode, dcode; 10877 tree type, inner_type; 10878 const char *prefix = "__"; 10879 10880 if (targetm.libfunc_gnu_prefix) 10881 prefix = "__gnu_"; 10882 10883 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0); 10884 if (type == NULL) 10885 continue; 10886 inner_type = TREE_TYPE (type); 10887 10888 ftype = build_function_type_list (type, inner_type, inner_type, 10889 inner_type, inner_type, NULL_TREE); 10890 10891 mcode = ((enum built_in_function) 10892 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT)); 10893 dcode = ((enum built_in_function) 10894 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT)); 10895 10896 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++) 10897 *q = TOLOWER (*p); 10898 *q = '\0'; 10899 10900 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3", 10901 NULL); 10902 local_define_builtin (built_in_names[mcode], ftype, mcode, 10903 built_in_names[mcode], 10904 ECF_CONST | ECF_NOTHROW | ECF_LEAF); 10905 10906 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3", 10907 NULL); 10908 local_define_builtin (built_in_names[dcode], ftype, dcode, 10909 built_in_names[dcode], 10910 ECF_CONST | ECF_NOTHROW | ECF_LEAF); 10911 } 10912 } 10913 10914 init_internal_fns (); 10915 } 10916 10917 /* HACK. GROSS. This is absolutely disgusting. I wish there was a 10918 better way. 10919 10920 If we requested a pointer to a vector, build up the pointers that 10921 we stripped off while looking for the inner type. Similarly for 10922 return values from functions. 10923 10924 The argument TYPE is the top of the chain, and BOTTOM is the 10925 new type which we will point to. */ 10926 10927 tree 10928 reconstruct_complex_type (tree type, tree bottom) 10929 { 10930 tree inner, outer; 10931 10932 if (TREE_CODE (type) == POINTER_TYPE) 10933 { 10934 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10935 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type), 10936 TYPE_REF_CAN_ALIAS_ALL (type)); 10937 } 10938 else if (TREE_CODE (type) == REFERENCE_TYPE) 10939 { 10940 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10941 outer = build_reference_type_for_mode (inner, TYPE_MODE (type), 10942 TYPE_REF_CAN_ALIAS_ALL (type)); 10943 } 10944 else if (TREE_CODE (type) == ARRAY_TYPE) 10945 { 10946 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10947 outer = build_array_type (inner, TYPE_DOMAIN (type)); 10948 } 10949 else if (TREE_CODE (type) == FUNCTION_TYPE) 10950 { 10951 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10952 outer = build_function_type (inner, TYPE_ARG_TYPES (type)); 10953 } 10954 else if (TREE_CODE (type) == METHOD_TYPE) 10955 { 10956 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10957 /* The build_method_type_directly() routine prepends 'this' to argument list, 10958 so we must compensate by getting rid of it. 
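   (Concretely: TREE_VALUE (TYPE_ARG_TYPES (type)) is the original `this'
   pointer type, so its TREE_TYPE supplies the basetype argument below,
   while TREE_CHAIN skips that first entry so `this' is not added twice.)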
*/ 10959 outer 10960 = build_method_type_directly 10961 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))), 10962 inner, 10963 TREE_CHAIN (TYPE_ARG_TYPES (type))); 10964 } 10965 else if (TREE_CODE (type) == OFFSET_TYPE) 10966 { 10967 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10968 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner); 10969 } 10970 else 10971 return bottom; 10972 10973 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type), 10974 TYPE_QUALS (type)); 10975 } 10976 10977 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and 10978 the inner type. */ 10979 tree 10980 build_vector_type_for_mode (tree innertype, machine_mode mode) 10981 { 10982 int nunits; 10983 10984 switch (GET_MODE_CLASS (mode)) 10985 { 10986 case MODE_VECTOR_INT: 10987 case MODE_VECTOR_FLOAT: 10988 case MODE_VECTOR_FRACT: 10989 case MODE_VECTOR_UFRACT: 10990 case MODE_VECTOR_ACCUM: 10991 case MODE_VECTOR_UACCUM: 10992 nunits = GET_MODE_NUNITS (mode); 10993 break; 10994 10995 case MODE_INT: 10996 /* Check that there are no leftover bits. */ 10997 gcc_assert (GET_MODE_BITSIZE (mode) 10998 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0); 10999 11000 nunits = GET_MODE_BITSIZE (mode) 11001 / TREE_INT_CST_LOW (TYPE_SIZE (innertype)); 11002 break; 11003 11004 default: 11005 gcc_unreachable (); 11006 } 11007 11008 return make_vector_type (innertype, nunits, mode); 11009 } 11010 11011 /* Similarly, but takes the inner type and number of units, which must be 11012 a power of two. */ 11013 11014 tree 11015 build_vector_type (tree innertype, int nunits) 11016 { 11017 return make_vector_type (innertype, nunits, VOIDmode); 11018 } 11019 11020 /* Build truth vector with specified length and number of units. */ 11021 11022 tree 11023 build_truth_vector_type (unsigned nunits, unsigned vector_size) 11024 { 11025 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits, 11026 vector_size); 11027 11028 gcc_assert (mask_mode != VOIDmode); 11029 11030 unsigned HOST_WIDE_INT vsize; 11031 if (mask_mode == BLKmode) 11032 vsize = vector_size * BITS_PER_UNIT; 11033 else 11034 vsize = GET_MODE_BITSIZE (mask_mode); 11035 11036 unsigned HOST_WIDE_INT esize = vsize / nunits; 11037 gcc_assert (esize * nunits == vsize); 11038 11039 tree bool_type = build_nonstandard_boolean_type (esize); 11040 11041 return make_vector_type (bool_type, nunits, mask_mode); 11042 } 11043 11044 /* Returns a vector type corresponding to a comparison of VECTYPE. */ 11045 11046 tree 11047 build_same_sized_truth_vector_type (tree vectype) 11048 { 11049 if (VECTOR_BOOLEAN_TYPE_P (vectype)) 11050 return vectype; 11051 11052 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype)); 11053 11054 if (!size) 11055 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype)); 11056 11057 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size); 11058 } 11059 11060 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */ 11061 11062 tree 11063 build_opaque_vector_type (tree innertype, int nunits) 11064 { 11065 tree t = make_vector_type (innertype, nunits, VOIDmode); 11066 tree cand; 11067 /* We always build the non-opaque variant before the opaque one, 11068 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */ 11069 cand = TYPE_NEXT_VARIANT (t); 11070 if (cand 11071 && TYPE_VECTOR_OPAQUE (cand) 11072 && check_qualified_type (cand, t, TYPE_QUALS (t))) 11073 return cand; 11074 /* Othewise build a variant type and make sure to queue it after 11075 the non-opaque type. 
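   (Linking the copy in as TYPE_NEXT_VARIANT (t) preserves the invariant
   relied upon by the lookup above: if an opaque variant exists at all,
   it sits immediately after the non-opaque type, so repeated calls reuse
   it instead of growing the variant list.)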
*/ 11076 cand = build_distinct_type_copy (t); 11077 TYPE_VECTOR_OPAQUE (cand) = true; 11078 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t); 11079 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t); 11080 TYPE_NEXT_VARIANT (t) = cand; 11081 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t); 11082 return cand; 11083 } 11084 11085 11086 /* Given an initializer INIT, return TRUE if INIT is zero or some 11087 aggregate of zeros. Otherwise return FALSE. */ 11088 bool 11089 initializer_zerop (const_tree init) 11090 { 11091 tree elt; 11092 11093 STRIP_NOPS (init); 11094 11095 switch (TREE_CODE (init)) 11096 { 11097 case INTEGER_CST: 11098 return integer_zerop (init); 11099 11100 case REAL_CST: 11101 /* ??? Note that this is not correct for C4X float formats. There, 11102 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most 11103 negative exponent. */ 11104 return real_zerop (init) 11105 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)); 11106 11107 case FIXED_CST: 11108 return fixed_zerop (init); 11109 11110 case COMPLEX_CST: 11111 return integer_zerop (init) 11112 || (real_zerop (init) 11113 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init))) 11114 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))); 11115 11116 case VECTOR_CST: 11117 { 11118 unsigned i; 11119 for (i = 0; i < VECTOR_CST_NELTS (init); ++i) 11120 if (!initializer_zerop (VECTOR_CST_ELT (init, i))) 11121 return false; 11122 return true; 11123 } 11124 11125 case CONSTRUCTOR: 11126 { 11127 unsigned HOST_WIDE_INT idx; 11128 11129 if (TREE_CLOBBER_P (init)) 11130 return false; 11131 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt) 11132 if (!initializer_zerop (elt)) 11133 return false; 11134 return true; 11135 } 11136 11137 case STRING_CST: 11138 { 11139 int i; 11140 11141 /* We need to loop through all elements to handle cases like 11142 "\0" and "\0foobar". */ 11143 for (i = 0; i < TREE_STRING_LENGTH (init); ++i) 11144 if (TREE_STRING_POINTER (init)[i] != '\0') 11145 return false; 11146 11147 return true; 11148 } 11149 11150 default: 11151 return false; 11152 } 11153 } 11154 11155 /* Check if vector VEC consists of all the equal elements and 11156 that the number of elements corresponds to the type of VEC. 11157 The function returns first element of the vector 11158 or NULL_TREE if the vector is not uniform. */ 11159 tree 11160 uniform_vector_p (const_tree vec) 11161 { 11162 tree first, t; 11163 unsigned i; 11164 11165 if (vec == NULL_TREE) 11166 return NULL_TREE; 11167 11168 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec))); 11169 11170 if (TREE_CODE (vec) == VECTOR_CST) 11171 { 11172 first = VECTOR_CST_ELT (vec, 0); 11173 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i) 11174 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0)) 11175 return NULL_TREE; 11176 11177 return first; 11178 } 11179 11180 else if (TREE_CODE (vec) == CONSTRUCTOR) 11181 { 11182 first = error_mark_node; 11183 11184 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t) 11185 { 11186 if (i == 0) 11187 { 11188 first = t; 11189 continue; 11190 } 11191 if (!operand_equal_p (first, t, 0)) 11192 return NULL_TREE; 11193 } 11194 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec))) 11195 return NULL_TREE; 11196 11197 return first; 11198 } 11199 11200 return NULL_TREE; 11201 } 11202 11203 /* Build an empty statement at location LOC. 
*/ 11204 11205 tree 11206 build_empty_stmt (location_t loc) 11207 { 11208 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node); 11209 SET_EXPR_LOCATION (t, loc); 11210 return t; 11211 } 11212 11213 11214 /* Build an OpenMP clause with code CODE. LOC is the location of the 11215 clause. */ 11216 11217 tree 11218 build_omp_clause (location_t loc, enum omp_clause_code code) 11219 { 11220 tree t; 11221 int size, length; 11222 11223 length = omp_clause_num_ops[code]; 11224 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree)); 11225 11226 record_node_allocation_statistics (OMP_CLAUSE, size); 11227 11228 t = (tree) ggc_internal_alloc (size); 11229 memset (t, 0, size); 11230 TREE_SET_CODE (t, OMP_CLAUSE); 11231 OMP_CLAUSE_SET_CODE (t, code); 11232 OMP_CLAUSE_LOCATION (t) = loc; 11233 11234 return t; 11235 } 11236 11237 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN 11238 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1. 11239 Except for the CODE and operand count field, other storage for the 11240 object is initialized to zeros. */ 11241 11242 tree 11243 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL) 11244 { 11245 tree t; 11246 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp); 11247 11248 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp); 11249 gcc_assert (len >= 1); 11250 11251 record_node_allocation_statistics (code, length); 11252 11253 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); 11254 11255 TREE_SET_CODE (t, code); 11256 11257 /* Can't use TREE_OPERAND to store the length because if checking is 11258 enabled, it will try to check the length before we store it. :-P */ 11259 t->exp.operands[0] = build_int_cst (sizetype, len); 11260 11261 return t; 11262 } 11263 11264 /* Helper function for build_call_* functions; build a CALL_EXPR with 11265 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of 11266 the argument slots. */ 11267 11268 static tree 11269 build_call_1 (tree return_type, tree fn, int nargs) 11270 { 11271 tree t; 11272 11273 t = build_vl_exp (CALL_EXPR, nargs + 3); 11274 TREE_TYPE (t) = return_type; 11275 CALL_EXPR_FN (t) = fn; 11276 CALL_EXPR_STATIC_CHAIN (t) = NULL; 11277 11278 return t; 11279 } 11280 11281 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and 11282 FN and a null static chain slot. NARGS is the number of call arguments 11283 which are specified as "..." arguments. */ 11284 11285 tree 11286 build_call_nary (tree return_type, tree fn, int nargs, ...) 11287 { 11288 tree ret; 11289 va_list args; 11290 va_start (args, nargs); 11291 ret = build_call_valist (return_type, fn, nargs, args); 11292 va_end (args); 11293 return ret; 11294 } 11295 11296 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and 11297 FN and a null static chain slot. NARGS is the number of call arguments 11298 which are specified as a va_list ARGS. */ 11299 11300 tree 11301 build_call_valist (tree return_type, tree fn, int nargs, va_list args) 11302 { 11303 tree t; 11304 int i; 11305 11306 t = build_call_1 (return_type, fn, nargs); 11307 for (i = 0; i < nargs; i++) 11308 CALL_EXPR_ARG (t, i) = va_arg (args, tree); 11309 process_call_operands (t); 11310 return t; 11311 } 11312 11313 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and 11314 FN and a null static chain slot. NARGS is the number of call arguments 11315 which are specified as a tree array ARGS. 
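   A minimal illustrative use (the argument names are hypothetical) is

     tree args[2] = { lhs, rhs };
     tree call = build_call_array_loc (loc, double_type_node, fn, 2, args);

   where FN is the function address (normally an ADDR_EXPR of a
   FUNCTION_DECL), since it is stored directly as CALL_EXPR_FN.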
*/ 11316 11317 tree 11318 build_call_array_loc (location_t loc, tree return_type, tree fn, 11319 int nargs, const tree *args) 11320 { 11321 tree t; 11322 int i; 11323 11324 t = build_call_1 (return_type, fn, nargs); 11325 for (i = 0; i < nargs; i++) 11326 CALL_EXPR_ARG (t, i) = args[i]; 11327 process_call_operands (t); 11328 SET_EXPR_LOCATION (t, loc); 11329 return t; 11330 } 11331 11332 /* Like build_call_array, but takes a vec. */ 11333 11334 tree 11335 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args) 11336 { 11337 tree ret, t; 11338 unsigned int ix; 11339 11340 ret = build_call_1 (return_type, fn, vec_safe_length (args)); 11341 FOR_EACH_VEC_SAFE_ELT (args, ix, t) 11342 CALL_EXPR_ARG (ret, ix) = t; 11343 process_call_operands (ret); 11344 return ret; 11345 } 11346 11347 /* Conveniently construct a function call expression. FNDECL names the 11348 function to be called and N arguments are passed in the array 11349 ARGARRAY. */ 11350 11351 tree 11352 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray) 11353 { 11354 tree fntype = TREE_TYPE (fndecl); 11355 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl); 11356 11357 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray); 11358 } 11359 11360 /* Conveniently construct a function call expression. FNDECL names the 11361 function to be called and the arguments are passed in the vector 11362 VEC. */ 11363 11364 tree 11365 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec) 11366 { 11367 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec), 11368 vec_safe_address (vec)); 11369 } 11370 11371 11372 /* Conveniently construct a function call expression. FNDECL names the 11373 function to be called, N is the number of arguments, and the "..." 11374 parameters are the argument expressions. */ 11375 11376 tree 11377 build_call_expr_loc (location_t loc, tree fndecl, int n, ...) 11378 { 11379 va_list ap; 11380 tree *argarray = XALLOCAVEC (tree, n); 11381 int i; 11382 11383 va_start (ap, n); 11384 for (i = 0; i < n; i++) 11385 argarray[i] = va_arg (ap, tree); 11386 va_end (ap); 11387 return build_call_expr_loc_array (loc, fndecl, n, argarray); 11388 } 11389 11390 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because 11391 varargs macros aren't supported by all bootstrap compilers. */ 11392 11393 tree 11394 build_call_expr (tree fndecl, int n, ...) 11395 { 11396 va_list ap; 11397 tree *argarray = XALLOCAVEC (tree, n); 11398 int i; 11399 11400 va_start (ap, n); 11401 for (i = 0; i < n; i++) 11402 argarray[i] = va_arg (ap, tree); 11403 va_end (ap); 11404 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray); 11405 } 11406 11407 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return 11408 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL. 11409 It will get gimplified later into an ordinary internal function. */ 11410 11411 tree 11412 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn, 11413 tree type, int n, const tree *args) 11414 { 11415 tree t = build_call_1 (type, NULL_TREE, n); 11416 for (int i = 0; i < n; ++i) 11417 CALL_EXPR_ARG (t, i) = args[i]; 11418 SET_EXPR_LOCATION (t, loc); 11419 CALL_EXPR_IFN (t) = ifn; 11420 return t; 11421 } 11422 11423 /* Build internal call expression. This is just like CALL_EXPR, except 11424 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary 11425 internal function. 
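   For instance (ARG being some tree of the appropriate type), something
   like

     tree call = build_call_expr_internal_loc (loc, IFN_SQRT,
                                               double_type_node, 1, arg);

   yields a CALL_EXPR whose CALL_EXPR_IFN is IFN_SQRT and whose
   CALL_EXPR_FN is NULL_TREE.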
*/ 11426 11427 tree 11428 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn, 11429 tree type, int n, ...) 11430 { 11431 va_list ap; 11432 tree *argarray = XALLOCAVEC (tree, n); 11433 int i; 11434 11435 va_start (ap, n); 11436 for (i = 0; i < n; i++) 11437 argarray[i] = va_arg (ap, tree); 11438 va_end (ap); 11439 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray); 11440 } 11441 11442 /* Return a function call to FN, if the target is guaranteed to support it, 11443 or null otherwise. 11444 11445 N is the number of arguments, passed in the "...", and TYPE is the 11446 type of the return value. */ 11447 11448 tree 11449 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type, 11450 int n, ...) 11451 { 11452 va_list ap; 11453 tree *argarray = XALLOCAVEC (tree, n); 11454 int i; 11455 11456 va_start (ap, n); 11457 for (i = 0; i < n; i++) 11458 argarray[i] = va_arg (ap, tree); 11459 va_end (ap); 11460 if (internal_fn_p (fn)) 11461 { 11462 internal_fn ifn = as_internal_fn (fn); 11463 if (direct_internal_fn_p (ifn)) 11464 { 11465 tree_pair types = direct_internal_fn_types (ifn, type, argarray); 11466 if (!direct_internal_fn_supported_p (ifn, types, 11467 OPTIMIZE_FOR_BOTH)) 11468 return NULL_TREE; 11469 } 11470 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray); 11471 } 11472 else 11473 { 11474 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn)); 11475 if (!fndecl) 11476 return NULL_TREE; 11477 return build_call_expr_loc_array (loc, fndecl, n, argarray); 11478 } 11479 } 11480 11481 /* Create a new constant string literal and return a char* pointer to it. 11482 The STRING_CST value is the LEN characters at STR. */ 11483 tree 11484 build_string_literal (int len, const char *str) 11485 { 11486 tree t, elem, index, type; 11487 11488 t = build_string (len, str); 11489 elem = build_type_variant (char_type_node, 1, 0); 11490 index = build_index_type (size_int (len - 1)); 11491 type = build_array_type (elem, index); 11492 TREE_TYPE (t) = type; 11493 TREE_CONSTANT (t) = 1; 11494 TREE_READONLY (t) = 1; 11495 TREE_STATIC (t) = 1; 11496 11497 type = build_pointer_type (elem); 11498 t = build1 (ADDR_EXPR, type, 11499 build4 (ARRAY_REF, elem, 11500 t, integer_zero_node, NULL_TREE, NULL_TREE)); 11501 return t; 11502 } 11503 11504 11505 11506 /* Return true if T (assumed to be a DECL) must be assigned a memory 11507 location. */ 11508 11509 bool 11510 needs_to_live_in_memory (const_tree t) 11511 { 11512 return (TREE_ADDRESSABLE (t) 11513 || is_global_var (t) 11514 || (TREE_CODE (t) == RESULT_DECL 11515 && !DECL_BY_REFERENCE (t) 11516 && aggregate_value_p (t, current_function_decl))); 11517 } 11518 11519 /* Return value of a constant X and sign-extend it. */ 11520 11521 HOST_WIDE_INT 11522 int_cst_value (const_tree x) 11523 { 11524 unsigned bits = TYPE_PRECISION (TREE_TYPE (x)); 11525 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x); 11526 11527 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. 
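     As a worked example of the sign extension below (illustrative only):
     for an 8-bit signed type whose low bits are 0xfe, BITS is 8 and bit 7
     is set, so the mask HOST_WIDE_INT_M1U << 7 << 1 fills every bit above
     bit 7 and VAL becomes -2; for 0x7e the same mask is cleared from VAL
     instead and the result stays 126.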
*/ 11528 gcc_assert (cst_and_fits_in_hwi (x)); 11529 11530 if (bits < HOST_BITS_PER_WIDE_INT) 11531 { 11532 bool negative = ((val >> (bits - 1)) & 1) != 0; 11533 if (negative) 11534 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1; 11535 else 11536 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1); 11537 } 11538 11539 return val; 11540 } 11541 11542 /* If TYPE is an integral or pointer type, return an integer type with 11543 the same precision which is unsigned iff UNSIGNEDP is true, or itself 11544 if TYPE is already an integer type of signedness UNSIGNEDP. */ 11545 11546 tree 11547 signed_or_unsigned_type_for (int unsignedp, tree type) 11548 { 11549 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp) 11550 return type; 11551 11552 if (TREE_CODE (type) == VECTOR_TYPE) 11553 { 11554 tree inner = TREE_TYPE (type); 11555 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner); 11556 if (!inner2) 11557 return NULL_TREE; 11558 if (inner == inner2) 11559 return type; 11560 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type)); 11561 } 11562 11563 if (!INTEGRAL_TYPE_P (type) 11564 && !POINTER_TYPE_P (type) 11565 && TREE_CODE (type) != OFFSET_TYPE) 11566 return NULL_TREE; 11567 11568 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp); 11569 } 11570 11571 /* If TYPE is an integral or pointer type, return an integer type with 11572 the same precision which is unsigned, or itself if TYPE is already an 11573 unsigned integer type. */ 11574 11575 tree 11576 unsigned_type_for (tree type) 11577 { 11578 return signed_or_unsigned_type_for (1, type); 11579 } 11580 11581 /* If TYPE is an integral or pointer type, return an integer type with 11582 the same precision which is signed, or itself if TYPE is already a 11583 signed integer type. */ 11584 11585 tree 11586 signed_type_for (tree type) 11587 { 11588 return signed_or_unsigned_type_for (0, type); 11589 } 11590 11591 /* If TYPE is a vector type, return a signed integer vector type with the 11592 same width and number of subparts. Otherwise return boolean_type_node. */ 11593 11594 tree 11595 truth_type_for (tree type) 11596 { 11597 if (TREE_CODE (type) == VECTOR_TYPE) 11598 { 11599 if (VECTOR_BOOLEAN_TYPE_P (type)) 11600 return type; 11601 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type), 11602 GET_MODE_SIZE (TYPE_MODE (type))); 11603 } 11604 else 11605 return boolean_type_node; 11606 } 11607 11608 /* Returns the largest value obtainable by casting something in INNER type to 11609 OUTER type. */ 11610 11611 tree 11612 upper_bound_in_type (tree outer, tree inner) 11613 { 11614 unsigned int det = 0; 11615 unsigned oprec = TYPE_PRECISION (outer); 11616 unsigned iprec = TYPE_PRECISION (inner); 11617 unsigned prec; 11618 11619 /* Compute a unique number for every combination. */ 11620 det |= (oprec > iprec) ? 4 : 0; 11621 det |= TYPE_UNSIGNED (outer) ? 2 : 0; 11622 det |= TYPE_UNSIGNED (inner) ? 1 : 0; 11623 11624 /* Determine the exponent to use. */ 11625 switch (det) 11626 { 11627 case 0: 11628 case 1: 11629 /* oprec <= iprec, outer: signed, inner: don't care. */ 11630 prec = oprec - 1; 11631 break; 11632 case 2: 11633 case 3: 11634 /* oprec <= iprec, outer: unsigned, inner: don't care. */ 11635 prec = oprec; 11636 break; 11637 case 4: 11638 /* oprec > iprec, outer: signed, inner: signed. */ 11639 prec = iprec - 1; 11640 break; 11641 case 5: 11642 /* oprec > iprec, outer: signed, inner: unsigned. 
*/ 11643 prec = iprec; 11644 break; 11645 case 6: 11646 /* oprec > iprec, outer: unsigned, inner: signed. */ 11647 prec = oprec; 11648 break; 11649 case 7: 11650 /* oprec > iprec, outer: unsigned, inner: unsigned. */ 11651 prec = iprec; 11652 break; 11653 default: 11654 gcc_unreachable (); 11655 } 11656 11657 return wide_int_to_tree (outer, 11658 wi::mask (prec, false, TYPE_PRECISION (outer))); 11659 } 11660 11661 /* Returns the smallest value obtainable by casting something in INNER type to 11662 OUTER type. */ 11663 11664 tree 11665 lower_bound_in_type (tree outer, tree inner) 11666 { 11667 unsigned oprec = TYPE_PRECISION (outer); 11668 unsigned iprec = TYPE_PRECISION (inner); 11669 11670 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type 11671 and obtain 0. */ 11672 if (TYPE_UNSIGNED (outer) 11673 /* If we are widening something of an unsigned type, OUTER type 11674 contains all values of INNER type. In particular, both INNER 11675 and OUTER types have zero in common. */ 11676 || (oprec > iprec && TYPE_UNSIGNED (inner))) 11677 return build_int_cst (outer, 0); 11678 else 11679 { 11680 /* If we are widening a signed type to another signed type, we 11681 want to obtain -2^^(iprec-1). If we are keeping the 11682 precision or narrowing to a signed type, we want to obtain 11683 -2^(oprec-1). */ 11684 unsigned prec = oprec > iprec ? iprec : oprec; 11685 return wide_int_to_tree (outer, 11686 wi::mask (prec - 1, true, 11687 TYPE_PRECISION (outer))); 11688 } 11689 } 11690 11691 /* Return nonzero if two operands that are suitable for PHI nodes are 11692 necessarily equal. Specifically, both ARG0 and ARG1 must be either 11693 SSA_NAME or invariant. Note that this is strictly an optimization. 11694 That is, callers of this function can directly call operand_equal_p 11695 and get the same result, only slower. */ 11696 11697 int 11698 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1) 11699 { 11700 if (arg0 == arg1) 11701 return 1; 11702 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME) 11703 return 0; 11704 return operand_equal_p (arg0, arg1, 0); 11705 } 11706 11707 /* Returns number of zeros at the end of binary representation of X. */ 11708 11709 tree 11710 num_ending_zeros (const_tree x) 11711 { 11712 return build_int_cst (TREE_TYPE (x), wi::ctz (x)); 11713 } 11714 11715 11716 #define WALK_SUBTREE(NODE) \ 11717 do \ 11718 { \ 11719 result = walk_tree_1 (&(NODE), func, data, pset, lh); \ 11720 if (result) \ 11721 return result; \ 11722 } \ 11723 while (0) 11724 11725 /* This is a subroutine of walk_tree that walks field of TYPE that are to 11726 be walked whenever a type is seen in the tree. Rest of operands and return 11727 value are as for walk_tree. */ 11728 11729 static tree 11730 walk_type_fields (tree type, walk_tree_fn func, void *data, 11731 hash_set<tree> *pset, walk_tree_lh lh) 11732 { 11733 tree result = NULL_TREE; 11734 11735 switch (TREE_CODE (type)) 11736 { 11737 case POINTER_TYPE: 11738 case REFERENCE_TYPE: 11739 case VECTOR_TYPE: 11740 /* We have to worry about mutually recursive pointers. These can't 11741 be written in C. They can in Ada. It's pathological, but 11742 there's an ACATS test (c38102a) that checks it. Deal with this 11743 by checking if we're pointing to another pointer, that one 11744 points to another pointer, that one does too, and we have no htab. 11745 If so, get a hash table. We check three levels deep to avoid 11746 the cost of the hash table if we don't need one. 
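     For illustration, a hypothetical chain in which TREE_TYPE (type),
     TREE_TYPE (TREE_TYPE (type)) and the level below that are all
     pointer types is exactly what the three POINTER_TYPE_P tests below
     detect before handing the walk to walk_tree_without_duplicates,
     which supplies its own visited set.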
*/ 11747 if (POINTER_TYPE_P (TREE_TYPE (type)) 11748 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type))) 11749 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type)))) 11750 && !pset) 11751 { 11752 result = walk_tree_without_duplicates (&TREE_TYPE (type), 11753 func, data); 11754 if (result) 11755 return result; 11756 11757 break; 11758 } 11759 11760 /* fall through */ 11761 11762 case COMPLEX_TYPE: 11763 WALK_SUBTREE (TREE_TYPE (type)); 11764 break; 11765 11766 case METHOD_TYPE: 11767 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type)); 11768 11769 /* Fall through. */ 11770 11771 case FUNCTION_TYPE: 11772 WALK_SUBTREE (TREE_TYPE (type)); 11773 { 11774 tree arg; 11775 11776 /* We never want to walk into default arguments. */ 11777 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg)) 11778 WALK_SUBTREE (TREE_VALUE (arg)); 11779 } 11780 break; 11781 11782 case ARRAY_TYPE: 11783 /* Don't follow this nodes's type if a pointer for fear that 11784 we'll have infinite recursion. If we have a PSET, then we 11785 need not fear. */ 11786 if (pset 11787 || (!POINTER_TYPE_P (TREE_TYPE (type)) 11788 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)) 11789 WALK_SUBTREE (TREE_TYPE (type)); 11790 WALK_SUBTREE (TYPE_DOMAIN (type)); 11791 break; 11792 11793 case OFFSET_TYPE: 11794 WALK_SUBTREE (TREE_TYPE (type)); 11795 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type)); 11796 break; 11797 11798 default: 11799 break; 11800 } 11801 11802 return NULL_TREE; 11803 } 11804 11805 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is 11806 called with the DATA and the address of each sub-tree. If FUNC returns a 11807 non-NULL value, the traversal is stopped, and the value returned by FUNC 11808 is returned. If PSET is non-NULL it is used to record the nodes visited, 11809 and to avoid visiting a node more than once. */ 11810 11811 tree 11812 walk_tree_1 (tree *tp, walk_tree_fn func, void *data, 11813 hash_set<tree> *pset, walk_tree_lh lh) 11814 { 11815 enum tree_code code; 11816 int walk_subtrees; 11817 tree result; 11818 11819 #define WALK_SUBTREE_TAIL(NODE) \ 11820 do \ 11821 { \ 11822 tp = & (NODE); \ 11823 goto tail_recurse; \ 11824 } \ 11825 while (0) 11826 11827 tail_recurse: 11828 /* Skip empty subtrees. */ 11829 if (!*tp) 11830 return NULL_TREE; 11831 11832 /* Don't walk the same tree twice, if the user has requested 11833 that we avoid doing so. */ 11834 if (pset && pset->add (*tp)) 11835 return NULL_TREE; 11836 11837 /* Call the function. */ 11838 walk_subtrees = 1; 11839 result = (*func) (tp, &walk_subtrees, data); 11840 11841 /* If we found something, return it. */ 11842 if (result) 11843 return result; 11844 11845 code = TREE_CODE (*tp); 11846 11847 /* Even if we didn't, FUNC may have decided that there was nothing 11848 interesting below this point in the tree. */ 11849 if (!walk_subtrees) 11850 { 11851 /* But we still need to check our siblings. 
*/ 11852 if (code == TREE_LIST) 11853 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp)); 11854 else if (code == OMP_CLAUSE) 11855 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 11856 else 11857 return NULL_TREE; 11858 } 11859 11860 if (lh) 11861 { 11862 result = (*lh) (tp, &walk_subtrees, func, data, pset); 11863 if (result || !walk_subtrees) 11864 return result; 11865 } 11866 11867 switch (code) 11868 { 11869 case ERROR_MARK: 11870 case IDENTIFIER_NODE: 11871 case INTEGER_CST: 11872 case REAL_CST: 11873 case FIXED_CST: 11874 case VECTOR_CST: 11875 case STRING_CST: 11876 case BLOCK: 11877 case PLACEHOLDER_EXPR: 11878 case SSA_NAME: 11879 case FIELD_DECL: 11880 case RESULT_DECL: 11881 /* None of these have subtrees other than those already walked 11882 above. */ 11883 break; 11884 11885 case TREE_LIST: 11886 WALK_SUBTREE (TREE_VALUE (*tp)); 11887 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp)); 11888 break; 11889 11890 case TREE_VEC: 11891 { 11892 int len = TREE_VEC_LENGTH (*tp); 11893 11894 if (len == 0) 11895 break; 11896 11897 /* Walk all elements but the first. */ 11898 while (--len) 11899 WALK_SUBTREE (TREE_VEC_ELT (*tp, len)); 11900 11901 /* Now walk the first one as a tail call. */ 11902 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0)); 11903 } 11904 11905 case COMPLEX_CST: 11906 WALK_SUBTREE (TREE_REALPART (*tp)); 11907 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp)); 11908 11909 case CONSTRUCTOR: 11910 { 11911 unsigned HOST_WIDE_INT idx; 11912 constructor_elt *ce; 11913 11914 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce); 11915 idx++) 11916 WALK_SUBTREE (ce->value); 11917 } 11918 break; 11919 11920 case SAVE_EXPR: 11921 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0)); 11922 11923 case BIND_EXPR: 11924 { 11925 tree decl; 11926 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl)) 11927 { 11928 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk 11929 into declarations that are just mentioned, rather than 11930 declared; they don't really belong to this part of the tree. 11931 And, we can see cycles: the initializer for a declaration 11932 can refer to the declaration itself. 
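         A minimal self-referential example (illustrative C, not taken
         from this file) is

           void *p = &p;

         where DECL_INITIAL of P contains an ADDR_EXPR of P itself, which
         is precisely the kind of cycle described above.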
*/ 11933 WALK_SUBTREE (DECL_INITIAL (decl)); 11934 WALK_SUBTREE (DECL_SIZE (decl)); 11935 WALK_SUBTREE (DECL_SIZE_UNIT (decl)); 11936 } 11937 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp)); 11938 } 11939 11940 case STATEMENT_LIST: 11941 { 11942 tree_stmt_iterator i; 11943 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i)) 11944 WALK_SUBTREE (*tsi_stmt_ptr (i)); 11945 } 11946 break; 11947 11948 case OMP_CLAUSE: 11949 switch (OMP_CLAUSE_CODE (*tp)) 11950 { 11951 case OMP_CLAUSE_GANG: 11952 case OMP_CLAUSE__GRIDDIM_: 11953 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1)); 11954 /* FALLTHRU */ 11955 11956 case OMP_CLAUSE_ASYNC: 11957 case OMP_CLAUSE_WAIT: 11958 case OMP_CLAUSE_WORKER: 11959 case OMP_CLAUSE_VECTOR: 11960 case OMP_CLAUSE_NUM_GANGS: 11961 case OMP_CLAUSE_NUM_WORKERS: 11962 case OMP_CLAUSE_VECTOR_LENGTH: 11963 case OMP_CLAUSE_PRIVATE: 11964 case OMP_CLAUSE_SHARED: 11965 case OMP_CLAUSE_FIRSTPRIVATE: 11966 case OMP_CLAUSE_COPYIN: 11967 case OMP_CLAUSE_COPYPRIVATE: 11968 case OMP_CLAUSE_FINAL: 11969 case OMP_CLAUSE_IF: 11970 case OMP_CLAUSE_NUM_THREADS: 11971 case OMP_CLAUSE_SCHEDULE: 11972 case OMP_CLAUSE_UNIFORM: 11973 case OMP_CLAUSE_DEPEND: 11974 case OMP_CLAUSE_NUM_TEAMS: 11975 case OMP_CLAUSE_THREAD_LIMIT: 11976 case OMP_CLAUSE_DEVICE: 11977 case OMP_CLAUSE_DIST_SCHEDULE: 11978 case OMP_CLAUSE_SAFELEN: 11979 case OMP_CLAUSE_SIMDLEN: 11980 case OMP_CLAUSE_ORDERED: 11981 case OMP_CLAUSE_PRIORITY: 11982 case OMP_CLAUSE_GRAINSIZE: 11983 case OMP_CLAUSE_NUM_TASKS: 11984 case OMP_CLAUSE_HINT: 11985 case OMP_CLAUSE_TO_DECLARE: 11986 case OMP_CLAUSE_LINK: 11987 case OMP_CLAUSE_USE_DEVICE_PTR: 11988 case OMP_CLAUSE_IS_DEVICE_PTR: 11989 case OMP_CLAUSE__LOOPTEMP_: 11990 case OMP_CLAUSE__SIMDUID_: 11991 case OMP_CLAUSE__CILK_FOR_COUNT_: 11992 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0)); 11993 /* FALLTHRU */ 11994 11995 case OMP_CLAUSE_INDEPENDENT: 11996 case OMP_CLAUSE_NOWAIT: 11997 case OMP_CLAUSE_DEFAULT: 11998 case OMP_CLAUSE_UNTIED: 11999 case OMP_CLAUSE_MERGEABLE: 12000 case OMP_CLAUSE_PROC_BIND: 12001 case OMP_CLAUSE_INBRANCH: 12002 case OMP_CLAUSE_NOTINBRANCH: 12003 case OMP_CLAUSE_FOR: 12004 case OMP_CLAUSE_PARALLEL: 12005 case OMP_CLAUSE_SECTIONS: 12006 case OMP_CLAUSE_TASKGROUP: 12007 case OMP_CLAUSE_NOGROUP: 12008 case OMP_CLAUSE_THREADS: 12009 case OMP_CLAUSE_SIMD: 12010 case OMP_CLAUSE_DEFAULTMAP: 12011 case OMP_CLAUSE_AUTO: 12012 case OMP_CLAUSE_SEQ: 12013 case OMP_CLAUSE_TILE: 12014 case OMP_CLAUSE__SIMT_: 12015 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12016 12017 case OMP_CLAUSE_LASTPRIVATE: 12018 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp)); 12019 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp)); 12020 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12021 12022 case OMP_CLAUSE_COLLAPSE: 12023 { 12024 int i; 12025 for (i = 0; i < 3; i++) 12026 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i)); 12027 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12028 } 12029 12030 case OMP_CLAUSE_LINEAR: 12031 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp)); 12032 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp)); 12033 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp)); 12034 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12035 12036 case OMP_CLAUSE_ALIGNED: 12037 case OMP_CLAUSE_FROM: 12038 case OMP_CLAUSE_TO: 12039 case OMP_CLAUSE_MAP: 12040 case OMP_CLAUSE__CACHE_: 12041 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp)); 12042 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1)); 12043 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12044 12045 case OMP_CLAUSE_REDUCTION: 12046 { 12047 int i; 12048 for (i = 0; i < 5; i++) 12049 WALK_SUBTREE (OMP_CLAUSE_OPERAND 
(*tp, i)); 12050 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12051 } 12052 12053 default: 12054 gcc_unreachable (); 12055 } 12056 break; 12057 12058 case TARGET_EXPR: 12059 { 12060 int i, len; 12061 12062 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same. 12063 But, we only want to walk once. */ 12064 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3; 12065 for (i = 0; i < len; ++i) 12066 WALK_SUBTREE (TREE_OPERAND (*tp, i)); 12067 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len)); 12068 } 12069 12070 case DECL_EXPR: 12071 /* If this is a TYPE_DECL, walk into the fields of the type that it's 12072 defining. We only want to walk into these fields of a type in this 12073 case and not in the general case of a mere reference to the type. 12074 12075 The criterion is as follows: if the field can be an expression, it 12076 must be walked only here. This should be in keeping with the fields 12077 that are directly gimplified in gimplify_type_sizes in order for the 12078 mark/copy-if-shared/unmark machinery of the gimplifier to work with 12079 variable-sized types. 12080 12081 Note that DECLs get walked as part of processing the BIND_EXPR. */ 12082 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL) 12083 { 12084 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp)); 12085 if (TREE_CODE (*type_p) == ERROR_MARK) 12086 return NULL_TREE; 12087 12088 /* Call the function for the type. See if it returns anything or 12089 doesn't want us to continue. If we are to continue, walk both 12090 the normal fields and those for the declaration case. */ 12091 result = (*func) (type_p, &walk_subtrees, data); 12092 if (result || !walk_subtrees) 12093 return result; 12094 12095 /* But do not walk a pointed-to type since it may itself need to 12096 be walked in the declaration case if it isn't anonymous. */ 12097 if (!POINTER_TYPE_P (*type_p)) 12098 { 12099 result = walk_type_fields (*type_p, func, data, pset, lh); 12100 if (result) 12101 return result; 12102 } 12103 12104 /* If this is a record type, also walk the fields. */ 12105 if (RECORD_OR_UNION_TYPE_P (*type_p)) 12106 { 12107 tree field; 12108 12109 for (field = TYPE_FIELDS (*type_p); field; 12110 field = DECL_CHAIN (field)) 12111 { 12112 /* We'd like to look at the type of the field, but we can 12113 easily get infinite recursion. So assume it's pointed 12114 to elsewhere in the tree. Also, ignore things that 12115 aren't fields. */ 12116 if (TREE_CODE (field) != FIELD_DECL) 12117 continue; 12118 12119 WALK_SUBTREE (DECL_FIELD_OFFSET (field)); 12120 WALK_SUBTREE (DECL_SIZE (field)); 12121 WALK_SUBTREE (DECL_SIZE_UNIT (field)); 12122 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE) 12123 WALK_SUBTREE (DECL_QUALIFIER (field)); 12124 } 12125 } 12126 12127 /* Same for scalar types. */ 12128 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE 12129 || TREE_CODE (*type_p) == ENUMERAL_TYPE 12130 || TREE_CODE (*type_p) == INTEGER_TYPE 12131 || TREE_CODE (*type_p) == FIXED_POINT_TYPE 12132 || TREE_CODE (*type_p) == REAL_TYPE) 12133 { 12134 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p)); 12135 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p)); 12136 } 12137 12138 WALK_SUBTREE (TYPE_SIZE (*type_p)); 12139 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p)); 12140 } 12141 /* FALLTHRU */ 12142 12143 default: 12144 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) 12145 { 12146 int i, len; 12147 12148 /* Walk over all the sub-trees of this operand. */ 12149 len = TREE_OPERAND_LENGTH (*tp); 12150 12151 /* Go through the subtrees. 
We need to do this in forward order so 12152 that the scope of a FOR_EXPR is handled properly. */ 12153 if (len) 12154 { 12155 for (i = 0; i < len - 1; ++i) 12156 WALK_SUBTREE (TREE_OPERAND (*tp, i)); 12157 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1)); 12158 } 12159 } 12160 /* If this is a type, walk the needed fields in the type. */ 12161 else if (TYPE_P (*tp)) 12162 return walk_type_fields (*tp, func, data, pset, lh); 12163 break; 12164 } 12165 12166 /* We didn't find what we were looking for. */ 12167 return NULL_TREE; 12168 12169 #undef WALK_SUBTREE_TAIL 12170 } 12171 #undef WALK_SUBTREE 12172 12173 /* Like walk_tree, but does not walk duplicate nodes more than once. */ 12174 12175 tree 12176 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data, 12177 walk_tree_lh lh) 12178 { 12179 tree result; 12180 12181 hash_set<tree> pset; 12182 result = walk_tree_1 (tp, func, data, &pset, lh); 12183 return result; 12184 } 12185 12186 12187 tree 12188 tree_block (tree t) 12189 { 12190 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t)); 12191 12192 if (IS_EXPR_CODE_CLASS (c)) 12193 return LOCATION_BLOCK (t->exp.locus); 12194 gcc_unreachable (); 12195 return NULL; 12196 } 12197 12198 void 12199 tree_set_block (tree t, tree b) 12200 { 12201 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t)); 12202 12203 if (IS_EXPR_CODE_CLASS (c)) 12204 { 12205 t->exp.locus = set_block (t->exp.locus, b); 12206 } 12207 else 12208 gcc_unreachable (); 12209 } 12210 12211 /* Create a nameless artificial label and put it in the current 12212 function context. The label has a location of LOC. Returns the 12213 newly created label. */ 12214 12215 tree 12216 create_artificial_label (location_t loc) 12217 { 12218 tree lab = build_decl (loc, 12219 LABEL_DECL, NULL_TREE, void_type_node); 12220 12221 DECL_ARTIFICIAL (lab) = 1; 12222 DECL_IGNORED_P (lab) = 1; 12223 DECL_CONTEXT (lab) = current_function_decl; 12224 return lab; 12225 } 12226 12227 /* Given a tree, try to return a useful variable name that we can use 12228 to prefix a temporary that is being assigned the value of the tree. 12229 I.E. given <temp> = &A, return A. */ 12230 12231 const char * 12232 get_name (tree t) 12233 { 12234 tree stripped_decl; 12235 12236 stripped_decl = t; 12237 STRIP_NOPS (stripped_decl); 12238 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl)) 12239 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl)); 12240 else if (TREE_CODE (stripped_decl) == SSA_NAME) 12241 { 12242 tree name = SSA_NAME_IDENTIFIER (stripped_decl); 12243 if (!name) 12244 return NULL; 12245 return IDENTIFIER_POINTER (name); 12246 } 12247 else 12248 { 12249 switch (TREE_CODE (stripped_decl)) 12250 { 12251 case ADDR_EXPR: 12252 return get_name (TREE_OPERAND (stripped_decl, 0)); 12253 default: 12254 return NULL; 12255 } 12256 } 12257 } 12258 12259 /* Return true if TYPE has a variable argument list. */ 12260 12261 bool 12262 stdarg_p (const_tree fntype) 12263 { 12264 function_args_iterator args_iter; 12265 tree n = NULL_TREE, t; 12266 12267 if (!fntype) 12268 return false; 12269 12270 FOREACH_FUNCTION_ARGS (fntype, t, args_iter) 12271 { 12272 n = t; 12273 } 12274 12275 return n != NULL_TREE && n != void_type_node; 12276 } 12277 12278 /* Return true if TYPE has a prototype. 
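   For example (illustrative C declarations):

     int f ();          TYPE_ARG_TYPES is NULL, so prototype_p is false
     int g (void);      prototype_p is true
     int h (int, ...);  prototype_p is true, and only H's type also
                        satisfies stdarg_p above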
*/

bool
prototype_p (const_tree fntype)
{
  tree t;

  gcc_assert (fntype != NULL_TREE);

  t = TYPE_ARG_TYPES (fntype);
  return (t != NULL_TREE);
}

/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return a pointer to the location from which it has been
   called.  */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  while (block && TREE_CODE (block) == BLOCK
         && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);

      while (TREE_CODE (ao) == BLOCK
             && BLOCK_ABSTRACT_ORIGIN (ao)
             && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
        ao = BLOCK_ABSTRACT_ORIGIN (ao);

      if (TREE_CODE (ao) == FUNCTION_DECL)
        {
          /* If AO is an artificial inline, point RET to the
             call site locus at which it has been inlined and continue
             the loop, in case AO's caller is also an artificial
             inline.  */
          if (DECL_DECLARED_INLINE_P (ao)
              && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
            ret = &BLOCK_SOURCE_LOCATION (block);
          else
            break;
        }
      else if (TREE_CODE (ao) != BLOCK)
        break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}


/* If EXP is inlined from an __attribute__((__artificial__))
   function, return the location of the original call expression.  */

location_t
tree_nonartificial_location (tree exp)
{
  location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));

  if (loc)
    return *loc;
  else
    return EXPR_LOCATION (exp);
}


/* These are the hash table functions for the hash table of
   OPTIMIZATION_NODE nodes.  */

/* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */

hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    return cl_target_option_hash (TREE_TARGET_OPTION (t));

  else
    gcc_unreachable ();

  /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}

/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that given by *Y, which is a
   node of the same kind.
*/ 12383 12384 bool 12385 cl_option_hasher::equal (tree x, tree y) 12386 { 12387 const_tree const xt = x; 12388 const_tree const yt = y; 12389 const char *xp; 12390 const char *yp; 12391 size_t len; 12392 12393 if (TREE_CODE (xt) != TREE_CODE (yt)) 12394 return 0; 12395 12396 if (TREE_CODE (xt) == OPTIMIZATION_NODE) 12397 { 12398 xp = (const char *)TREE_OPTIMIZATION (xt); 12399 yp = (const char *)TREE_OPTIMIZATION (yt); 12400 len = sizeof (struct cl_optimization); 12401 } 12402 12403 else if (TREE_CODE (xt) == TARGET_OPTION_NODE) 12404 { 12405 return cl_target_option_eq (TREE_TARGET_OPTION (xt), 12406 TREE_TARGET_OPTION (yt)); 12407 } 12408 12409 else 12410 gcc_unreachable (); 12411 12412 return (memcmp (xp, yp, len) == 0); 12413 } 12414 12415 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */ 12416 12417 tree 12418 build_optimization_node (struct gcc_options *opts) 12419 { 12420 tree t; 12421 12422 /* Use the cache of optimization nodes. */ 12423 12424 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node), 12425 opts); 12426 12427 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT); 12428 t = *slot; 12429 if (!t) 12430 { 12431 /* Insert this one into the hash table. */ 12432 t = cl_optimization_node; 12433 *slot = t; 12434 12435 /* Make a new node for next time round. */ 12436 cl_optimization_node = make_node (OPTIMIZATION_NODE); 12437 } 12438 12439 return t; 12440 } 12441 12442 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */ 12443 12444 tree 12445 build_target_option_node (struct gcc_options *opts) 12446 { 12447 tree t; 12448 12449 /* Use the cache of optimization nodes. */ 12450 12451 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node), 12452 opts); 12453 12454 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT); 12455 t = *slot; 12456 if (!t) 12457 { 12458 /* Insert this one into the hash table. */ 12459 t = cl_target_option_node; 12460 *slot = t; 12461 12462 /* Make a new node for next time round. */ 12463 cl_target_option_node = make_node (TARGET_OPTION_NODE); 12464 } 12465 12466 return t; 12467 } 12468 12469 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees, 12470 so that they aren't saved during PCH writing. */ 12471 12472 void 12473 prepare_target_option_nodes_for_pch (void) 12474 { 12475 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin (); 12476 for (; iter != cl_option_hash_table->end (); ++iter) 12477 if (TREE_CODE (*iter) == TARGET_OPTION_NODE) 12478 TREE_TARGET_GLOBALS (*iter) = NULL; 12479 } 12480 12481 /* Determine the "ultimate origin" of a block. The block may be an inlined 12482 instance of an inlined instance of a block which is local to an inline 12483 function, so we have to trace all of the way back through the origin chain 12484 to find out what sort of node actually served as the original seed for the 12485 given block. */ 12486 12487 tree 12488 block_ultimate_origin (const_tree block) 12489 { 12490 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block); 12491 12492 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if 12493 we're trying to output the abstract instance of this function. */ 12494 if (BLOCK_ABSTRACT (block) && immediate_origin == block) 12495 return NULL_TREE; 12496 12497 if (immediate_origin == NULL_TREE) 12498 return NULL_TREE; 12499 else 12500 { 12501 tree ret_val; 12502 tree lookahead = immediate_origin; 12503 12504 do 12505 { 12506 ret_val = lookahead; 12507 lookahead = (TREE_CODE (ret_val) == BLOCK 12508 ? 
BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL); 12509 } 12510 while (lookahead != NULL && lookahead != ret_val); 12511 12512 /* The block's abstract origin chain may not be the *ultimate* origin of 12513 the block. It could lead to a DECL that has an abstract origin set. 12514 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN 12515 will give us if it has one). Note that DECL's abstract origins are 12516 supposed to be the most distant ancestor (or so decl_ultimate_origin 12517 claims), so we don't need to loop following the DECL origins. */ 12518 if (DECL_P (ret_val)) 12519 return DECL_ORIGIN (ret_val); 12520 12521 return ret_val; 12522 } 12523 } 12524 12525 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates 12526 no instruction. */ 12527 12528 bool 12529 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type) 12530 { 12531 /* Do not strip casts into or out of differing address spaces. */ 12532 if (POINTER_TYPE_P (outer_type) 12533 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC) 12534 { 12535 if (!POINTER_TYPE_P (inner_type) 12536 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) 12537 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))) 12538 return false; 12539 } 12540 else if (POINTER_TYPE_P (inner_type) 12541 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC) 12542 { 12543 /* We already know that outer_type is not a pointer with 12544 a non-generic address space. */ 12545 return false; 12546 } 12547 12548 /* Use precision rather then machine mode when we can, which gives 12549 the correct answer even for submode (bit-field) types. */ 12550 if ((INTEGRAL_TYPE_P (outer_type) 12551 || POINTER_TYPE_P (outer_type) 12552 || TREE_CODE (outer_type) == OFFSET_TYPE) 12553 && (INTEGRAL_TYPE_P (inner_type) 12554 || POINTER_TYPE_P (inner_type) 12555 || TREE_CODE (inner_type) == OFFSET_TYPE)) 12556 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type); 12557 12558 /* Otherwise fall back on comparing machine modes (e.g. for 12559 aggregate types, floats). */ 12560 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type); 12561 } 12562 12563 /* Return true iff conversion in EXP generates no instruction. Mark 12564 it inline so that we fully inline into the stripping functions even 12565 though we have two uses of this function. */ 12566 12567 static inline bool 12568 tree_nop_conversion (const_tree exp) 12569 { 12570 tree outer_type, inner_type; 12571 12572 if (!CONVERT_EXPR_P (exp) 12573 && TREE_CODE (exp) != NON_LVALUE_EXPR) 12574 return false; 12575 if (TREE_OPERAND (exp, 0) == error_mark_node) 12576 return false; 12577 12578 outer_type = TREE_TYPE (exp); 12579 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 12580 12581 if (!inner_type) 12582 return false; 12583 12584 return tree_nop_conversion_p (outer_type, inner_type); 12585 } 12586 12587 /* Return true iff conversion in EXP generates no instruction. Don't 12588 consider conversions changing the signedness. */ 12589 12590 static bool 12591 tree_sign_nop_conversion (const_tree exp) 12592 { 12593 tree outer_type, inner_type; 12594 12595 if (!tree_nop_conversion (exp)) 12596 return false; 12597 12598 outer_type = TREE_TYPE (exp); 12599 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 12600 12601 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type) 12602 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type)); 12603 } 12604 12605 /* Strip conversions from EXP according to tree_nop_conversion and 12606 return the resulting expression. 
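   For instance (illustrative, assuming a 32-bit int and a 64-bit long
   long), an expression such as (unsigned int) (int) X is a chain of two
   conversions that both satisfy tree_nop_conversion, so X itself is
   returned, whereas a widening conversion like (long long) X is kept
   because the precisions differ.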
*/ 12607 12608 tree 12609 tree_strip_nop_conversions (tree exp) 12610 { 12611 while (tree_nop_conversion (exp)) 12612 exp = TREE_OPERAND (exp, 0); 12613 return exp; 12614 } 12615 12616 /* Strip conversions from EXP according to tree_sign_nop_conversion 12617 and return the resulting expression. */ 12618 12619 tree 12620 tree_strip_sign_nop_conversions (tree exp) 12621 { 12622 while (tree_sign_nop_conversion (exp)) 12623 exp = TREE_OPERAND (exp, 0); 12624 return exp; 12625 } 12626 12627 /* Avoid any floating point extensions from EXP. */ 12628 tree 12629 strip_float_extensions (tree exp) 12630 { 12631 tree sub, expt, subt; 12632 12633 /* For floating point constant look up the narrowest type that can hold 12634 it properly and handle it like (type)(narrowest_type)constant. 12635 This way we can optimize for instance a=a*2.0 where "a" is float 12636 but 2.0 is double constant. */ 12637 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp))) 12638 { 12639 REAL_VALUE_TYPE orig; 12640 tree type = NULL; 12641 12642 orig = TREE_REAL_CST (exp); 12643 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node) 12644 && exact_real_truncate (TYPE_MODE (float_type_node), &orig)) 12645 type = float_type_node; 12646 else if (TYPE_PRECISION (TREE_TYPE (exp)) 12647 > TYPE_PRECISION (double_type_node) 12648 && exact_real_truncate (TYPE_MODE (double_type_node), &orig)) 12649 type = double_type_node; 12650 if (type) 12651 return build_real_truncate (type, orig); 12652 } 12653 12654 if (!CONVERT_EXPR_P (exp)) 12655 return exp; 12656 12657 sub = TREE_OPERAND (exp, 0); 12658 subt = TREE_TYPE (sub); 12659 expt = TREE_TYPE (exp); 12660 12661 if (!FLOAT_TYPE_P (subt)) 12662 return exp; 12663 12664 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt)) 12665 return exp; 12666 12667 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt)) 12668 return exp; 12669 12670 return strip_float_extensions (sub); 12671 } 12672 12673 /* Strip out all handled components that produce invariant 12674 offsets. */ 12675 12676 const_tree 12677 strip_invariant_refs (const_tree op) 12678 { 12679 while (handled_component_p (op)) 12680 { 12681 switch (TREE_CODE (op)) 12682 { 12683 case ARRAY_REF: 12684 case ARRAY_RANGE_REF: 12685 if (!is_gimple_constant (TREE_OPERAND (op, 1)) 12686 || TREE_OPERAND (op, 2) != NULL_TREE 12687 || TREE_OPERAND (op, 3) != NULL_TREE) 12688 return NULL; 12689 break; 12690 12691 case COMPONENT_REF: 12692 if (TREE_OPERAND (op, 2) != NULL_TREE) 12693 return NULL; 12694 break; 12695 12696 default:; 12697 } 12698 op = TREE_OPERAND (op, 0); 12699 } 12700 12701 return op; 12702 } 12703 12704 static GTY(()) tree gcc_eh_personality_decl; 12705 12706 /* Return the GCC personality function decl. */ 12707 12708 tree 12709 lhd_gcc_personality (void) 12710 { 12711 if (!gcc_eh_personality_decl) 12712 gcc_eh_personality_decl = build_personality_function ("gcc"); 12713 return gcc_eh_personality_decl; 12714 } 12715 12716 /* TARGET is a call target of GIMPLE call statement 12717 (obtained by gimple_call_fn). Return true if it is 12718 OBJ_TYPE_REF representing an virtual call of C++ method. 12719 (As opposed to OBJ_TYPE_REF representing objc calls 12720 through a cast where middle-end devirtualization machinery 12721 can't apply.) 
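   For illustration, a C++ call such as P->F () with F virtual reaches the
   middle end with a call target that is an OBJ_TYPE_REF of METHOD_TYPE and
   passes the checks below, while an Objective-C style OBJ_TYPE_REF built
   over a FUNCTION_TYPE is rejected, as noted in obj_type_ref_class.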
*/ 12722 12723 bool 12724 virtual_method_call_p (const_tree target) 12725 { 12726 if (TREE_CODE (target) != OBJ_TYPE_REF) 12727 return false; 12728 tree t = TREE_TYPE (target); 12729 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE); 12730 t = TREE_TYPE (t); 12731 if (TREE_CODE (t) == FUNCTION_TYPE) 12732 return false; 12733 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE); 12734 /* If we do not have BINFO associated, it means that type was built 12735 without devirtualization enabled. Do not consider this a virtual 12736 call. */ 12737 if (!TYPE_BINFO (obj_type_ref_class (target))) 12738 return false; 12739 return true; 12740 } 12741 12742 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */ 12743 12744 tree 12745 obj_type_ref_class (const_tree ref) 12746 { 12747 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF); 12748 ref = TREE_TYPE (ref); 12749 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE); 12750 ref = TREE_TYPE (ref); 12751 /* We look for type THIS points to. ObjC also builds 12752 OBJ_TYPE_REF with non-method calls, Their first parameter 12753 ID however also corresponds to class type. */ 12754 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE 12755 || TREE_CODE (ref) == FUNCTION_TYPE); 12756 ref = TREE_VALUE (TYPE_ARG_TYPES (ref)); 12757 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE); 12758 return TREE_TYPE (ref); 12759 } 12760 12761 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */ 12762 12763 static tree 12764 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos) 12765 { 12766 unsigned int i; 12767 tree base_binfo, b; 12768 12769 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) 12770 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo)) 12771 && types_same_for_odr (TREE_TYPE (base_binfo), type)) 12772 return base_binfo; 12773 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL) 12774 return b; 12775 return NULL; 12776 } 12777 12778 /* Try to find a base info of BINFO that would have its field decl at offset 12779 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be 12780 found, return, otherwise return NULL_TREE. */ 12781 12782 tree 12783 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type) 12784 { 12785 tree type = BINFO_TYPE (binfo); 12786 12787 while (true) 12788 { 12789 HOST_WIDE_INT pos, size; 12790 tree fld; 12791 int i; 12792 12793 if (types_same_for_odr (type, expected_type)) 12794 return binfo; 12795 if (offset < 0) 12796 return NULL_TREE; 12797 12798 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld)) 12799 { 12800 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld)) 12801 continue; 12802 12803 pos = int_bit_position (fld); 12804 size = tree_to_uhwi (DECL_SIZE (fld)); 12805 if (pos <= offset && (pos + size) > offset) 12806 break; 12807 } 12808 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE) 12809 return NULL_TREE; 12810 12811 /* Offset 0 indicates the primary base, whose vtable contents are 12812 represented in the binfo for the derived class. */ 12813 else if (offset != 0) 12814 { 12815 tree found_binfo = NULL, base_binfo; 12816 /* Offsets in BINFO are in bytes relative to the whole structure 12817 while POS is in bits relative to the containing field. 
*/ 12818 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos 12819 / BITS_PER_UNIT); 12820 12821 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) 12822 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset 12823 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld))) 12824 { 12825 found_binfo = base_binfo; 12826 break; 12827 } 12828 if (found_binfo) 12829 binfo = found_binfo; 12830 else 12831 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld), 12832 binfo_offset); 12833 } 12834 12835 type = TREE_TYPE (fld); 12836 offset -= pos; 12837 } 12838 } 12839 12840 /* Returns true if X is a typedef decl. */ 12841 12842 bool 12843 is_typedef_decl (const_tree x) 12844 { 12845 return (x && TREE_CODE (x) == TYPE_DECL 12846 && DECL_ORIGINAL_TYPE (x) != NULL_TREE); 12847 } 12848 12849 /* Returns true iff TYPE is a type variant created for a typedef. */ 12850 12851 bool 12852 typedef_variant_p (const_tree type) 12853 { 12854 return is_typedef_decl (TYPE_NAME (type)); 12855 } 12856 12857 /* Warn about a use of an identifier which was marked deprecated. */ 12858 void 12859 warn_deprecated_use (tree node, tree attr) 12860 { 12861 const char *msg; 12862 12863 if (node == 0 || !warn_deprecated_decl) 12864 return; 12865 12866 if (!attr) 12867 { 12868 if (DECL_P (node)) 12869 attr = DECL_ATTRIBUTES (node); 12870 else if (TYPE_P (node)) 12871 { 12872 tree decl = TYPE_STUB_DECL (node); 12873 if (decl) 12874 attr = lookup_attribute ("deprecated", 12875 TYPE_ATTRIBUTES (TREE_TYPE (decl))); 12876 } 12877 } 12878 12879 if (attr) 12880 attr = lookup_attribute ("deprecated", attr); 12881 12882 if (attr) 12883 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))); 12884 else 12885 msg = NULL; 12886 12887 bool w; 12888 if (DECL_P (node)) 12889 { 12890 if (msg) 12891 w = warning (OPT_Wdeprecated_declarations, 12892 "%qD is deprecated: %s", node, msg); 12893 else 12894 w = warning (OPT_Wdeprecated_declarations, 12895 "%qD is deprecated", node); 12896 if (w) 12897 inform (DECL_SOURCE_LOCATION (node), "declared here"); 12898 } 12899 else if (TYPE_P (node)) 12900 { 12901 tree what = NULL_TREE; 12902 tree decl = TYPE_STUB_DECL (node); 12903 12904 if (TYPE_NAME (node)) 12905 { 12906 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE) 12907 what = TYPE_NAME (node); 12908 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL 12909 && DECL_NAME (TYPE_NAME (node))) 12910 what = DECL_NAME (TYPE_NAME (node)); 12911 } 12912 12913 if (decl) 12914 { 12915 if (what) 12916 { 12917 if (msg) 12918 w = warning (OPT_Wdeprecated_declarations, 12919 "%qE is deprecated: %s", what, msg); 12920 else 12921 w = warning (OPT_Wdeprecated_declarations, 12922 "%qE is deprecated", what); 12923 } 12924 else 12925 { 12926 if (msg) 12927 w = warning (OPT_Wdeprecated_declarations, 12928 "type is deprecated: %s", msg); 12929 else 12930 w = warning (OPT_Wdeprecated_declarations, 12931 "type is deprecated"); 12932 } 12933 if (w) 12934 inform (DECL_SOURCE_LOCATION (decl), "declared here"); 12935 } 12936 else 12937 { 12938 if (what) 12939 { 12940 if (msg) 12941 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s", 12942 what, msg); 12943 else 12944 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what); 12945 } 12946 else 12947 { 12948 if (msg) 12949 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s", 12950 msg); 12951 else 12952 warning (OPT_Wdeprecated_declarations, "type is deprecated"); 12953 } 12954 } 12955 } 12956 } 12957 12958 /* Return true if REF has a COMPONENT_REF with a 
bit-field field declaration 12959 somewhere in it. */ 12960 12961 bool 12962 contains_bitfld_component_ref_p (const_tree ref) 12963 { 12964 while (handled_component_p (ref)) 12965 { 12966 if (TREE_CODE (ref) == COMPONENT_REF 12967 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))) 12968 return true; 12969 ref = TREE_OPERAND (ref, 0); 12970 } 12971 12972 return false; 12973 } 12974 12975 /* Try to determine whether a TRY_CATCH expression can fall through. 12976 This is a subroutine of block_may_fallthru. */ 12977 12978 static bool 12979 try_catch_may_fallthru (const_tree stmt) 12980 { 12981 tree_stmt_iterator i; 12982 12983 /* If the TRY block can fall through, the whole TRY_CATCH can 12984 fall through. */ 12985 if (block_may_fallthru (TREE_OPERAND (stmt, 0))) 12986 return true; 12987 12988 i = tsi_start (TREE_OPERAND (stmt, 1)); 12989 switch (TREE_CODE (tsi_stmt (i))) 12990 { 12991 case CATCH_EXPR: 12992 /* We expect to see a sequence of CATCH_EXPR trees, each with a 12993 catch expression and a body. The whole TRY_CATCH may fall 12994 through iff any of the catch bodies falls through. */ 12995 for (; !tsi_end_p (i); tsi_next (&i)) 12996 { 12997 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i)))) 12998 return true; 12999 } 13000 return false; 13001 13002 case EH_FILTER_EXPR: 13003 /* The exception filter expression only matters if there is an 13004 exception. If the exception does not match EH_FILTER_TYPES, 13005 we will execute EH_FILTER_FAILURE, and we will fall through 13006 if that falls through. If the exception does match 13007 EH_FILTER_TYPES, the stack unwinder will continue up the 13008 stack, so we will not fall through. We don't know whether we 13009 will throw an exception which matches EH_FILTER_TYPES or not, 13010 so we just ignore EH_FILTER_TYPES and assume that we might 13011 throw an exception which doesn't match. */ 13012 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i))); 13013 13014 default: 13015 /* This case represents statements to be executed when an 13016 exception occurs. Those statements are implicitly followed 13017 by a RESX statement to resume execution after the exception. 13018 So in this case the TRY_CATCH never falls through. */ 13019 return false; 13020 } 13021 } 13022 13023 /* Try to determine if we can fall out of the bottom of BLOCK. This guess 13024 need not be 100% accurate; simply be conservative and return true if we 13025 don't know. This is used only to avoid stupidly generating extra code. 13026 If we're wrong, we'll just delete the extra code later. */ 13027 13028 bool 13029 block_may_fallthru (const_tree block) 13030 { 13031 /* This CONST_CAST is okay because expr_last returns its argument 13032 unmodified and we assign it to a const_tree. */ 13033 const_tree stmt = expr_last (CONST_CAST_TREE (block)); 13034 13035 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK) 13036 { 13037 case GOTO_EXPR: 13038 case RETURN_EXPR: 13039 /* Easy cases. If the last statement of the block implies 13040 control transfer, then we can't fall through. */ 13041 return false; 13042 13043 case SWITCH_EXPR: 13044 /* If SWITCH_LABELS is set, this is lowered, and represents a 13045 branch to a selected label and hence can not fall through. 13046 Otherwise SWITCH_BODY is set, and the switch can fall 13047 through. 
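         For example, an unlowered front-end switch such as
         switch (x) { case 1: break; } still has SWITCH_BODY set and is
         conservatively treated as able to fall through, whereas once it
         is lowered every destination is listed in SWITCH_LABELS and the
         result below is false.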
*/ 13048 return SWITCH_LABELS (stmt) == NULL_TREE; 13049 13050 case COND_EXPR: 13051 if (block_may_fallthru (COND_EXPR_THEN (stmt))) 13052 return true; 13053 return block_may_fallthru (COND_EXPR_ELSE (stmt)); 13054 13055 case BIND_EXPR: 13056 return block_may_fallthru (BIND_EXPR_BODY (stmt)); 13057 13058 case TRY_CATCH_EXPR: 13059 return try_catch_may_fallthru (stmt); 13060 13061 case TRY_FINALLY_EXPR: 13062 /* The finally clause is always executed after the try clause, 13063 so if it does not fall through, then the try-finally will not 13064 fall through. Otherwise, if the try clause does not fall 13065 through, then when the finally clause falls through it will 13066 resume execution wherever the try clause was going. So the 13067 whole try-finally will only fall through if both the try 13068 clause and the finally clause fall through. */ 13069 return (block_may_fallthru (TREE_OPERAND (stmt, 0)) 13070 && block_may_fallthru (TREE_OPERAND (stmt, 1))); 13071 13072 case MODIFY_EXPR: 13073 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR) 13074 stmt = TREE_OPERAND (stmt, 1); 13075 else 13076 return true; 13077 /* FALLTHRU */ 13078 13079 case CALL_EXPR: 13080 /* Functions that do not return do not fall through. */ 13081 return (call_expr_flags (stmt) & ECF_NORETURN) == 0; 13082 13083 case CLEANUP_POINT_EXPR: 13084 return block_may_fallthru (TREE_OPERAND (stmt, 0)); 13085 13086 case TARGET_EXPR: 13087 return block_may_fallthru (TREE_OPERAND (stmt, 1)); 13088 13089 case ERROR_MARK: 13090 return true; 13091 13092 default: 13093 return lang_hooks.block_may_fallthru (stmt); 13094 } 13095 } 13096 13097 /* True if we are using EH to handle cleanups. */ 13098 static bool using_eh_for_cleanups_flag = false; 13099 13100 /* This routine is called from front ends to indicate eh should be used for 13101 cleanups. */ 13102 void 13103 using_eh_for_cleanups (void) 13104 { 13105 using_eh_for_cleanups_flag = true; 13106 } 13107 13108 /* Query whether EH is used for cleanups. */ 13109 bool 13110 using_eh_for_cleanups_p (void) 13111 { 13112 return using_eh_for_cleanups_flag; 13113 } 13114 13115 /* Wrapper for tree_code_name to ensure that tree code is valid */ 13116 const char * 13117 get_tree_code_name (enum tree_code code) 13118 { 13119 const char *invalid = "<invalid tree code>"; 13120 13121 if (code >= MAX_TREE_CODES) 13122 return invalid; 13123 13124 return tree_code_name[code]; 13125 } 13126 13127 /* Drops the TREE_OVERFLOW flag from T. */ 13128 13129 tree 13130 drop_tree_overflow (tree t) 13131 { 13132 gcc_checking_assert (TREE_OVERFLOW (t)); 13133 13134 /* For tree codes with a sharing machinery re-build the result. */ 13135 if (TREE_CODE (t) == INTEGER_CST) 13136 return wide_int_to_tree (TREE_TYPE (t), t); 13137 13138 /* Otherwise, as all tcc_constants are possibly shared, copy the node 13139 and drop the flag. */ 13140 t = copy_node (t); 13141 TREE_OVERFLOW (t) = 0; 13142 return t; 13143 } 13144 13145 /* Given a memory reference expression T, return its base address. 13146 The base address of a memory reference expression is the main 13147 object being referenced. For instance, the base address for 13148 'array[i].fld[j]' is 'array'. You can think of this as stripping 13149 away the offset part from a memory address. 13150 13151 This function calls handled_component_p to strip away all the inner 13152 parts of the memory reference until it reaches the base object. 
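   As a further illustration, when the stripped reference is a MEM_REF
   whose address operand is an ADDR_EXPR of a declaration A, the code
   below looks through the address and returns A itself; for an indirect
   reference through a pointer P the base remains the MEM_REF *P.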
*/ 13153 13154 tree 13155 get_base_address (tree t) 13156 { 13157 while (handled_component_p (t)) 13158 t = TREE_OPERAND (t, 0); 13159 13160 if ((TREE_CODE (t) == MEM_REF 13161 || TREE_CODE (t) == TARGET_MEM_REF) 13162 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR) 13163 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0); 13164 13165 /* ??? Either the alias oracle or all callers need to properly deal 13166 with WITH_SIZE_EXPRs before we can look through those. */ 13167 if (TREE_CODE (t) == WITH_SIZE_EXPR) 13168 return NULL_TREE; 13169 13170 return t; 13171 } 13172 13173 /* Return a tree of sizetype representing the size, in bytes, of the element 13174 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ 13175 13176 tree 13177 array_ref_element_size (tree exp) 13178 { 13179 tree aligned_size = TREE_OPERAND (exp, 3); 13180 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))); 13181 location_t loc = EXPR_LOCATION (exp); 13182 13183 /* If a size was specified in the ARRAY_REF, it's the size measured 13184 in alignment units of the element type. So multiply by that value. */ 13185 if (aligned_size) 13186 { 13187 /* ??? tree_ssa_useless_type_conversion will eliminate casts to 13188 sizetype from another type of the same width and signedness. */ 13189 if (TREE_TYPE (aligned_size) != sizetype) 13190 aligned_size = fold_convert_loc (loc, sizetype, aligned_size); 13191 return size_binop_loc (loc, MULT_EXPR, aligned_size, 13192 size_int (TYPE_ALIGN_UNIT (elmt_type))); 13193 } 13194 13195 /* Otherwise, take the size from that of the element type. Substitute 13196 any PLACEHOLDER_EXPR that we have. */ 13197 else 13198 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp); 13199 } 13200 13201 /* Return a tree representing the lower bound of the array mentioned in 13202 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ 13203 13204 tree 13205 array_ref_low_bound (tree exp) 13206 { 13207 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); 13208 13209 /* If a lower bound is specified in EXP, use it. */ 13210 if (TREE_OPERAND (exp, 2)) 13211 return TREE_OPERAND (exp, 2); 13212 13213 /* Otherwise, if there is a domain type and it has a lower bound, use it, 13214 substituting for a PLACEHOLDER_EXPR as needed. */ 13215 if (domain_type && TYPE_MIN_VALUE (domain_type)) 13216 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp); 13217 13218 /* Otherwise, return a zero of the appropriate type. */ 13219 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0); 13220 } 13221 13222 /* Return a tree representing the upper bound of the array mentioned in 13223 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ 13224 13225 tree 13226 array_ref_up_bound (tree exp) 13227 { 13228 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); 13229 13230 /* If there is a domain type and it has an upper bound, use it, substituting 13231 for a PLACEHOLDER_EXPR as needed. */ 13232 if (domain_type && TYPE_MAX_VALUE (domain_type)) 13233 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp); 13234 13235 /* Otherwise fail. */ 13236 return NULL_TREE; 13237 } 13238 13239 /* Returns true if REF is an array reference or a component reference 13240 to an array at the end of a structure. 13241 If this is the case, the array may be allocated larger 13242 than its upper bound implies. 
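   A typical example (illustrative only) is a C99 flexible array member:

     struct s { int n; int data[]; };

   an access such as Q->DATA[I] refers to an array at the end of the
   structure, so the function returns true and callers must not rely on
   the array's declared upper bound.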
*/

bool
array_at_struct_end_p (tree ref)
{
  tree atype;

  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
           && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else
    return false;

  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
         non-union type and there follows another field the reference
         is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
        {
          if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
            {
              tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
              while (nextf && TREE_CODE (nextf) != FIELD_DECL)
                nextf = DECL_CHAIN (nextf);
              if (nextf)
                return false;
            }
        }
      /* If we have a multi-dimensional array we do not consider
         a non-innermost dimension as flex array if the whole
         multi-dimensional array is at struct end.
         Same for an array of aggregates with a trailing array
         member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
        return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
        ;
      /* If we view an underlying object as something else, then what we
         gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        break;
      else
        gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extension, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype))
    return true;

  tree size = NULL;

  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    {
      size = TYPE_SIZE (TREE_TYPE (ref));
      ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
    }

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons; see
     PR/69368.)  */
  if (DECL_P (ref)
      /* Be sure the size of the MEM_REF target matches.  For example:

           char buf[10];
           struct foo *str = (struct foo *)&buf;

           str->trailing_array[2] = 1;

         is valid because BUF allocates enough space.  */

      && (!size || (DECL_SIZE (ref) != NULL
                    && operand_equal_p (DECL_SIZE (ref), size, 0)))
      && !(flag_unconstrained_commons
           && VAR_P (ref) && DECL_COMMON (ref)))
    return false;

  return true;
}

/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.
     */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}

/* Return the machine mode of T.  For vectors, returns the mode of the
   inner type.  The main use case is to feed the result to HONOR_NANS,
   avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */

machine_mode
element_mode (const_tree t)
{
  if (!TYPE_P (t))
    t = TREE_TYPE (t);
  if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    t = TREE_TYPE (t);
  return TYPE_MODE (t);
}


/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specific variant, i.e. the main variant.  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variants can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - the main variant may be TYPE_COMPLETE_P and variant types
       !TYPE_COMPLETE_P; in this case some values may not be set in the
       variant types (see the TYPE_COMPLETE_P checks).
     - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial
       type
     - by TYPE_NAME and attributes (i.e. when the variant originates from a
       typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
       this is necessary to make it possible to merge types from different
       TUs
     - arrays, pointers and references may have a TREE_TYPE that is a
       variant of the TREE_TYPE of their main variants.
     - aggregates may have a new TYPE_FIELDS list that lists variants of
       the main variant's TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
     - TYPE_METHODS is always NULL for variant types and maintained for
       the main variant only.  */

  /* Convenience macro for matching individual fields.  */
#define verify_variant_match(flag)			\
  do {							\
    if (flag (tv) != flag (t))				\
      {							\
	error ("type variant differs by " #flag ".");	\
	debug_tree (tv);				\
	return false;					\
      }							\
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
  if (TYPE_ARTIFICIAL (tv) && 0)
    verify_variant_match (TYPE_ARTIFICIAL);
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for the Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during the libstdc++ build.
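     For reference, a call such as verify_variant_match (TYPE_FINAL_P)
     behaves roughly like

       if (TYPE_FINAL_P (tv) != TYPE_FINAL_P (t))
	 {
	   error ("type variant differs by TYPE_FINAL_P.");
	   debug_tree (tv);
	   return false;
	 }

     (an illustrative expansion of the macro defined above, not literal
     preprocessor output).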
*/ 13434 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0) 13435 verify_variant_match (TYPE_FINAL_P); 13436 13437 /* tree_type_common checks. */ 13438 13439 if (COMPLETE_TYPE_P (t)) 13440 { 13441 verify_variant_match (TYPE_MODE); 13442 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR 13443 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR) 13444 verify_variant_match (TYPE_SIZE); 13445 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR 13446 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR 13447 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)) 13448 { 13449 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t), 13450 TYPE_SIZE_UNIT (tv), 0)); 13451 error ("type variant has different TYPE_SIZE_UNIT"); 13452 debug_tree (tv); 13453 error ("type variant's TYPE_SIZE_UNIT"); 13454 debug_tree (TYPE_SIZE_UNIT (tv)); 13455 error ("type's TYPE_SIZE_UNIT"); 13456 debug_tree (TYPE_SIZE_UNIT (t)); 13457 return false; 13458 } 13459 } 13460 verify_variant_match (TYPE_PRECISION); 13461 verify_variant_match (TYPE_NEEDS_CONSTRUCTING); 13462 if (RECORD_OR_UNION_TYPE_P (t)) 13463 verify_variant_match (TYPE_TRANSPARENT_AGGR); 13464 else if (TREE_CODE (t) == ARRAY_TYPE) 13465 verify_variant_match (TYPE_NONALIASED_COMPONENT); 13466 /* During LTO we merge variant lists from diferent translation units 13467 that may differ BY TYPE_CONTEXT that in turn may point 13468 to TRANSLATION_UNIT_DECL. 13469 Ada also builds variants of types with different TYPE_CONTEXT. */ 13470 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0) 13471 verify_variant_match (TYPE_CONTEXT); 13472 verify_variant_match (TYPE_STRING_FLAG); 13473 if (TYPE_ALIAS_SET_KNOWN_P (t)) 13474 { 13475 error ("type variant with TYPE_ALIAS_SET_KNOWN_P"); 13476 debug_tree (tv); 13477 return false; 13478 } 13479 13480 /* tree_type_non_common checks. */ 13481 13482 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS 13483 and dangle the pointer from time to time. */ 13484 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv) 13485 && (in_lto_p || !TYPE_VFIELD (tv) 13486 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST)) 13487 { 13488 error ("type variant has different TYPE_VFIELD"); 13489 debug_tree (tv); 13490 return false; 13491 } 13492 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t)) 13493 || TREE_CODE (t) == INTEGER_TYPE 13494 || TREE_CODE (t) == BOOLEAN_TYPE 13495 || TREE_CODE (t) == REAL_TYPE 13496 || TREE_CODE (t) == FIXED_POINT_TYPE) 13497 { 13498 verify_variant_match (TYPE_MAX_VALUE); 13499 verify_variant_match (TYPE_MIN_VALUE); 13500 } 13501 if (TREE_CODE (t) == METHOD_TYPE) 13502 verify_variant_match (TYPE_METHOD_BASETYPE); 13503 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t)) 13504 { 13505 error ("type variant has TYPE_METHODS"); 13506 debug_tree (tv); 13507 return false; 13508 } 13509 if (TREE_CODE (t) == OFFSET_TYPE) 13510 verify_variant_match (TYPE_OFFSET_BASETYPE); 13511 if (TREE_CODE (t) == ARRAY_TYPE) 13512 verify_variant_match (TYPE_ARRAY_MAX_SIZE); 13513 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types 13514 or even type's main variant. This is needed to make bootstrap pass 13515 and the bug seems new in GCC 5. 13516 C++ FE should be updated to make this consistent and we should check 13517 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there 13518 is a match with main variant. 
13519 13520 Also disable the check for Java for now because of parser hack that builds 13521 first an dummy BINFO and then sometimes replace it by real BINFO in some 13522 of the copies. */ 13523 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv) 13524 && TYPE_BINFO (t) != TYPE_BINFO (tv) 13525 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types. 13526 Since there is no cheap way to tell C++/Java type w/o LTO, do checking 13527 at LTO time only. */ 13528 && (in_lto_p && odr_type_p (t))) 13529 { 13530 error ("type variant has different TYPE_BINFO"); 13531 debug_tree (tv); 13532 error ("type variant's TYPE_BINFO"); 13533 debug_tree (TYPE_BINFO (tv)); 13534 error ("type's TYPE_BINFO"); 13535 debug_tree (TYPE_BINFO (t)); 13536 return false; 13537 } 13538 13539 /* Check various uses of TYPE_VALUES_RAW. */ 13540 if (TREE_CODE (t) == ENUMERAL_TYPE) 13541 verify_variant_match (TYPE_VALUES); 13542 else if (TREE_CODE (t) == ARRAY_TYPE) 13543 verify_variant_match (TYPE_DOMAIN); 13544 /* Permit incomplete variants of complete type. While FEs may complete 13545 all variants, this does not happen for C++ templates in all cases. */ 13546 else if (RECORD_OR_UNION_TYPE_P (t) 13547 && COMPLETE_TYPE_P (t) 13548 && TYPE_FIELDS (t) != TYPE_FIELDS (tv)) 13549 { 13550 tree f1, f2; 13551 13552 /* Fortran builds qualified variants as new records with items of 13553 qualified type. Verify that they looks same. */ 13554 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv); 13555 f1 && f2; 13556 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2)) 13557 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL 13558 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1)) 13559 != TYPE_MAIN_VARIANT (TREE_TYPE (f2)) 13560 /* FIXME: gfc_nonrestricted_type builds all types as variants 13561 with exception of pointer types. It deeply copies the type 13562 which means that we may end up with a variant type 13563 referring non-variant pointer. We may change it to 13564 produce types as variants, too, like 13565 objc_get_protocol_qualified_type does. */ 13566 && !POINTER_TYPE_P (TREE_TYPE (f1))) 13567 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2) 13568 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2)) 13569 break; 13570 if (f1 || f2) 13571 { 13572 error ("type variant has different TYPE_FIELDS"); 13573 debug_tree (tv); 13574 error ("first mismatch is field"); 13575 debug_tree (f1); 13576 error ("and field"); 13577 debug_tree (f2); 13578 return false; 13579 } 13580 } 13581 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)) 13582 verify_variant_match (TYPE_ARG_TYPES); 13583 /* For C++ the qualified variant of array type is really an array type 13584 of qualified TREE_TYPE. 13585 objc builds variants of pointer where pointer to type is a variant, too 13586 in objc_get_protocol_qualified_type. 
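     For instance (illustration only), the C++ declaration

       const int a[4];

     gives a qualified ARRAY_TYPE variant whose TREE_TYPE is "const int",
     itself a variant of "int"; hence for arrays and pointers the check
     below only requires that
       TYPE_MAIN_VARIANT (TREE_TYPE (t)) == TYPE_MAIN_VARIANT (TREE_TYPE (tv)).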
*/ 13587 if (TREE_TYPE (t) != TREE_TYPE (tv) 13588 && ((TREE_CODE (t) != ARRAY_TYPE 13589 && !POINTER_TYPE_P (t)) 13590 || TYPE_MAIN_VARIANT (TREE_TYPE (t)) 13591 != TYPE_MAIN_VARIANT (TREE_TYPE (tv)))) 13592 { 13593 error ("type variant has different TREE_TYPE"); 13594 debug_tree (tv); 13595 error ("type variant's TREE_TYPE"); 13596 debug_tree (TREE_TYPE (tv)); 13597 error ("type's TREE_TYPE"); 13598 debug_tree (TREE_TYPE (t)); 13599 return false; 13600 } 13601 if (type_with_alias_set_p (t) 13602 && !gimple_canonical_types_compatible_p (t, tv, false)) 13603 { 13604 error ("type is not compatible with its variant"); 13605 debug_tree (tv); 13606 error ("type variant's TREE_TYPE"); 13607 debug_tree (TREE_TYPE (tv)); 13608 error ("type's TREE_TYPE"); 13609 debug_tree (TREE_TYPE (t)); 13610 return false; 13611 } 13612 return true; 13613 #undef verify_variant_match 13614 } 13615 13616 13617 /* The TYPE_CANONICAL merging machinery. It should closely resemble 13618 the middle-end types_compatible_p function. It needs to avoid 13619 claiming types are different for types that should be treated 13620 the same with respect to TBAA. Canonical types are also used 13621 for IL consistency checks via the useless_type_conversion_p 13622 predicate which does not handle all type kinds itself but falls 13623 back to pointer-comparison of TYPE_CANONICAL for aggregates 13624 for example. */ 13625 13626 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical 13627 type calculation because we need to allow inter-operability between signed 13628 and unsigned variants. */ 13629 13630 bool 13631 type_with_interoperable_signedness (const_tree type) 13632 { 13633 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both 13634 signed char and unsigned char. Similarly fortran FE builds 13635 C_SIZE_T as signed type, while C defines it unsigned. */ 13636 13637 return tree_code_for_canonical_type_merging (TREE_CODE (type)) 13638 == INTEGER_TYPE 13639 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node) 13640 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node)); 13641 } 13642 13643 /* Return true iff T1 and T2 are structurally identical for what 13644 TBAA is concerned. 13645 This function is used both by lto.c canonical type merging and by the 13646 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types 13647 that have TYPE_CANONICAL defined and assume them equivalent. This is useful 13648 only for LTO because only in these cases TYPE_CANONICAL equivalence 13649 correspond to one defined by gimple_canonical_types_compatible_p. */ 13650 13651 bool 13652 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2, 13653 bool trust_type_canonical) 13654 { 13655 /* Type variants should be same as the main variant. When not doing sanity 13656 checking to verify this fact, go to main variants and save some work. */ 13657 if (trust_type_canonical) 13658 { 13659 t1 = TYPE_MAIN_VARIANT (t1); 13660 t2 = TYPE_MAIN_VARIANT (t2); 13661 } 13662 13663 /* Check first for the obvious case of pointer identity. */ 13664 if (t1 == t2) 13665 return true; 13666 13667 /* Check that we have two types to compare. */ 13668 if (t1 == NULL_TREE || t2 == NULL_TREE) 13669 return false; 13670 13671 /* We consider complete types always compatible with incomplete type. 13672 This does not make sense for canonical type calculation and thus we 13673 need to ensure that we are never called on it. 
13674 13675 FIXME: For more correctness the function probably should have three modes 13676 1) mode assuming that types are complete mathcing their structure 13677 2) mode allowing incomplete types but producing equivalence classes 13678 and thus ignoring all info from complete types 13679 3) mode allowing incomplete types to match complete but checking 13680 compatibility between complete types. 13681 13682 1 and 2 can be used for canonical type calculation. 3 is the real 13683 definition of type compatibility that can be used i.e. for warnings during 13684 declaration merging. */ 13685 13686 gcc_assert (!trust_type_canonical 13687 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2))); 13688 /* If the types have been previously registered and found equal 13689 they still are. */ 13690 13691 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2) 13692 && trust_type_canonical) 13693 { 13694 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types 13695 they are always NULL, but they are set to non-NULL for types 13696 constructed by build_pointer_type and variants. In this case the 13697 TYPE_CANONICAL is more fine grained than the equivalnce we test (where 13698 all pointers are considered equal. Be sure to not return false 13699 negatives. */ 13700 gcc_checking_assert (canonical_type_used_p (t1) 13701 && canonical_type_used_p (t2)); 13702 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2); 13703 } 13704 13705 /* Can't be the same type if the types don't have the same code. */ 13706 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1)); 13707 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2))) 13708 return false; 13709 13710 /* Qualifiers do not matter for canonical type comparison purposes. */ 13711 13712 /* Void types and nullptr types are always the same. */ 13713 if (TREE_CODE (t1) == VOID_TYPE 13714 || TREE_CODE (t1) == NULLPTR_TYPE) 13715 return true; 13716 13717 /* Can't be the same type if they have different mode. */ 13718 if (TYPE_MODE (t1) != TYPE_MODE (t2)) 13719 return false; 13720 13721 /* Non-aggregate types can be handled cheaply. */ 13722 if (INTEGRAL_TYPE_P (t1) 13723 || SCALAR_FLOAT_TYPE_P (t1) 13724 || FIXED_POINT_TYPE_P (t1) 13725 || TREE_CODE (t1) == VECTOR_TYPE 13726 || TREE_CODE (t1) == COMPLEX_TYPE 13727 || TREE_CODE (t1) == OFFSET_TYPE 13728 || POINTER_TYPE_P (t1)) 13729 { 13730 /* Can't be the same type if they have different recision. */ 13731 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)) 13732 return false; 13733 13734 /* In some cases the signed and unsigned types are required to be 13735 inter-operable. */ 13736 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2) 13737 && !type_with_interoperable_signedness (t1)) 13738 return false; 13739 13740 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be 13741 interoperable with "signed char". Unless all frontends are revisited 13742 to agree on these types, we must ignore the flag completely. */ 13743 13744 /* Fortran standard define C_PTR type that is compatible with every 13745 C pointer. For this reason we need to glob all pointers into one. 13746 Still pointers in different address spaces are not compatible. */ 13747 if (POINTER_TYPE_P (t1)) 13748 { 13749 if (TYPE_ADDR_SPACE (TREE_TYPE (t1)) 13750 != TYPE_ADDR_SPACE (TREE_TYPE (t2))) 13751 return false; 13752 } 13753 13754 /* Tail-recurse to components. 
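     For instance (illustration only), given

       typedef int v4si __attribute__ ((vector_size (16)));

     and another 16-byte vector type built over a canonically equal 32-bit
     integer type, the two vector types compare equal exactly when the
     recursive call below succeeds on their element types; the same
     element-wise rule applies to COMPLEX_TYPEs.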
*/ 13755 if (TREE_CODE (t1) == VECTOR_TYPE 13756 || TREE_CODE (t1) == COMPLEX_TYPE) 13757 return gimple_canonical_types_compatible_p (TREE_TYPE (t1), 13758 TREE_TYPE (t2), 13759 trust_type_canonical); 13760 13761 return true; 13762 } 13763 13764 /* Do type-specific comparisons. */ 13765 switch (TREE_CODE (t1)) 13766 { 13767 case ARRAY_TYPE: 13768 /* Array types are the same if the element types are the same and 13769 the number of elements are the same. */ 13770 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2), 13771 trust_type_canonical) 13772 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2) 13773 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2) 13774 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2)) 13775 return false; 13776 else 13777 { 13778 tree i1 = TYPE_DOMAIN (t1); 13779 tree i2 = TYPE_DOMAIN (t2); 13780 13781 /* For an incomplete external array, the type domain can be 13782 NULL_TREE. Check this condition also. */ 13783 if (i1 == NULL_TREE && i2 == NULL_TREE) 13784 return true; 13785 else if (i1 == NULL_TREE || i2 == NULL_TREE) 13786 return false; 13787 else 13788 { 13789 tree min1 = TYPE_MIN_VALUE (i1); 13790 tree min2 = TYPE_MIN_VALUE (i2); 13791 tree max1 = TYPE_MAX_VALUE (i1); 13792 tree max2 = TYPE_MAX_VALUE (i2); 13793 13794 /* The minimum/maximum values have to be the same. */ 13795 if ((min1 == min2 13796 || (min1 && min2 13797 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR 13798 && TREE_CODE (min2) == PLACEHOLDER_EXPR) 13799 || operand_equal_p (min1, min2, 0)))) 13800 && (max1 == max2 13801 || (max1 && max2 13802 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR 13803 && TREE_CODE (max2) == PLACEHOLDER_EXPR) 13804 || operand_equal_p (max1, max2, 0))))) 13805 return true; 13806 else 13807 return false; 13808 } 13809 } 13810 13811 case METHOD_TYPE: 13812 case FUNCTION_TYPE: 13813 /* Function types are the same if the return type and arguments types 13814 are the same. */ 13815 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2), 13816 trust_type_canonical)) 13817 return false; 13818 13819 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)) 13820 return true; 13821 else 13822 { 13823 tree parms1, parms2; 13824 13825 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2); 13826 parms1 && parms2; 13827 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2)) 13828 { 13829 if (!gimple_canonical_types_compatible_p 13830 (TREE_VALUE (parms1), TREE_VALUE (parms2), 13831 trust_type_canonical)) 13832 return false; 13833 } 13834 13835 if (parms1 || parms2) 13836 return false; 13837 13838 return true; 13839 } 13840 13841 case RECORD_TYPE: 13842 case UNION_TYPE: 13843 case QUAL_UNION_TYPE: 13844 { 13845 tree f1, f2; 13846 13847 /* Don't try to compare variants of an incomplete type, before 13848 TYPE_FIELDS has been copied around. */ 13849 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2)) 13850 return true; 13851 13852 13853 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)) 13854 return false; 13855 13856 /* For aggregate types, all the fields must be the same. */ 13857 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2); 13858 f1 || f2; 13859 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2)) 13860 { 13861 /* Skip non-fields and zero-sized fields. 
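     For example (illustration only), when comparing

       struct A { int i; };
       struct B { int i; int pad[0]; };

     the zero-sized "pad" member of B is skipped here, so the field walk
     below pairs up only the "i" members.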
*/ 13862 while (f1 && (TREE_CODE (f1) != FIELD_DECL 13863 || (DECL_SIZE (f1) 13864 && integer_zerop (DECL_SIZE (f1))))) 13865 f1 = TREE_CHAIN (f1); 13866 while (f2 && (TREE_CODE (f2) != FIELD_DECL 13867 || (DECL_SIZE (f2) 13868 && integer_zerop (DECL_SIZE (f2))))) 13869 f2 = TREE_CHAIN (f2); 13870 if (!f1 || !f2) 13871 break; 13872 /* The fields must have the same name, offset and type. */ 13873 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2) 13874 || !gimple_compare_field_offset (f1, f2) 13875 || !gimple_canonical_types_compatible_p 13876 (TREE_TYPE (f1), TREE_TYPE (f2), 13877 trust_type_canonical)) 13878 return false; 13879 } 13880 13881 /* If one aggregate has more fields than the other, they 13882 are not the same. */ 13883 if (f1 || f2) 13884 return false; 13885 13886 return true; 13887 } 13888 13889 default: 13890 /* Consider all types with language specific trees in them mutually 13891 compatible. This is executed only from verify_type and false 13892 positives can be tolerated. */ 13893 gcc_assert (!in_lto_p); 13894 return true; 13895 } 13896 } 13897 13898 /* Verify type T. */ 13899 13900 void 13901 verify_type (const_tree t) 13902 { 13903 bool error_found = false; 13904 tree mv = TYPE_MAIN_VARIANT (t); 13905 if (!mv) 13906 { 13907 error ("Main variant is not defined"); 13908 error_found = true; 13909 } 13910 else if (mv != TYPE_MAIN_VARIANT (mv)) 13911 { 13912 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT"); 13913 debug_tree (mv); 13914 error_found = true; 13915 } 13916 else if (t != mv && !verify_type_variant (t, mv)) 13917 error_found = true; 13918 13919 tree ct = TYPE_CANONICAL (t); 13920 if (!ct) 13921 ; 13922 else if (TYPE_CANONICAL (t) != ct) 13923 { 13924 error ("TYPE_CANONICAL has different TYPE_CANONICAL"); 13925 debug_tree (ct); 13926 error_found = true; 13927 } 13928 /* Method and function types can not be used to address memory and thus 13929 TYPE_CANONICAL really matters only for determining useless conversions. 13930 13931 FIXME: C++ FE produce declarations of builtin functions that are not 13932 compatible with main variants. */ 13933 else if (TREE_CODE (t) == FUNCTION_TYPE) 13934 ; 13935 else if (t != ct 13936 /* FIXME: gimple_canonical_types_compatible_p can not compare types 13937 with variably sized arrays because their sizes possibly 13938 gimplified to different variables. */ 13939 && !variably_modified_type_p (ct, NULL) 13940 && !gimple_canonical_types_compatible_p (t, ct, false)) 13941 { 13942 error ("TYPE_CANONICAL is not compatible"); 13943 debug_tree (ct); 13944 error_found = true; 13945 } 13946 13947 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t) 13948 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t))) 13949 { 13950 error ("TYPE_MODE of TYPE_CANONICAL is not compatible"); 13951 debug_tree (ct); 13952 error_found = true; 13953 } 13954 /* FIXME: this is violated by the C++ FE as discussed in PR70029, when 13955 FUNCTION_*_QUALIFIED flags are set. */ 13956 if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct) 13957 { 13958 error ("TYPE_CANONICAL of main variant is not main variant"); 13959 debug_tree (ct); 13960 debug_tree (TYPE_MAIN_VARIANT (ct)); 13961 error_found = true; 13962 } 13963 13964 13965 /* Check various uses of TYPE_MINVAL. */ 13966 if (RECORD_OR_UNION_TYPE_P (t)) 13967 { 13968 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS 13969 and danagle the pointer from time to time. 
*/ 13970 if (TYPE_VFIELD (t) 13971 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL 13972 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST) 13973 { 13974 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST"); 13975 debug_tree (TYPE_VFIELD (t)); 13976 error_found = true; 13977 } 13978 } 13979 else if (TREE_CODE (t) == POINTER_TYPE) 13980 { 13981 if (TYPE_NEXT_PTR_TO (t) 13982 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE) 13983 { 13984 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE"); 13985 debug_tree (TYPE_NEXT_PTR_TO (t)); 13986 error_found = true; 13987 } 13988 } 13989 else if (TREE_CODE (t) == REFERENCE_TYPE) 13990 { 13991 if (TYPE_NEXT_REF_TO (t) 13992 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE) 13993 { 13994 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE"); 13995 debug_tree (TYPE_NEXT_REF_TO (t)); 13996 error_found = true; 13997 } 13998 } 13999 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE 14000 || TREE_CODE (t) == FIXED_POINT_TYPE) 14001 { 14002 /* FIXME: The following check should pass: 14003 useless_type_conversion_p (const_cast <tree> (t), 14004 TREE_TYPE (TYPE_MIN_VALUE (t)) 14005 but does not for C sizetypes in LTO. */ 14006 } 14007 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */ 14008 else if (TYPE_MINVAL (t) 14009 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE) 14010 || in_lto_p)) 14011 { 14012 error ("TYPE_MINVAL non-NULL"); 14013 debug_tree (TYPE_MINVAL (t)); 14014 error_found = true; 14015 } 14016 14017 /* Check various uses of TYPE_MAXVAL. */ 14018 if (RECORD_OR_UNION_TYPE_P (t)) 14019 { 14020 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL 14021 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL 14022 && TYPE_METHODS (t) != error_mark_node) 14023 { 14024 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node"); 14025 debug_tree (TYPE_METHODS (t)); 14026 error_found = true; 14027 } 14028 } 14029 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE) 14030 { 14031 if (TYPE_METHOD_BASETYPE (t) 14032 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE 14033 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE) 14034 { 14035 error ("TYPE_METHOD_BASETYPE is not record nor union"); 14036 debug_tree (TYPE_METHOD_BASETYPE (t)); 14037 error_found = true; 14038 } 14039 } 14040 else if (TREE_CODE (t) == OFFSET_TYPE) 14041 { 14042 if (TYPE_OFFSET_BASETYPE (t) 14043 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE 14044 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE) 14045 { 14046 error ("TYPE_OFFSET_BASETYPE is not record nor union"); 14047 debug_tree (TYPE_OFFSET_BASETYPE (t)); 14048 error_found = true; 14049 } 14050 } 14051 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE 14052 || TREE_CODE (t) == FIXED_POINT_TYPE) 14053 { 14054 /* FIXME: The following check should pass: 14055 useless_type_conversion_p (const_cast <tree> (t), 14056 TREE_TYPE (TYPE_MAX_VALUE (t)) 14057 but does not for C sizetypes in LTO. */ 14058 } 14059 else if (TREE_CODE (t) == ARRAY_TYPE) 14060 { 14061 if (TYPE_ARRAY_MAX_SIZE (t) 14062 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST) 14063 { 14064 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST"); 14065 debug_tree (TYPE_ARRAY_MAX_SIZE (t)); 14066 error_found = true; 14067 } 14068 } 14069 else if (TYPE_MAXVAL (t)) 14070 { 14071 error ("TYPE_MAXVAL non-NULL"); 14072 debug_tree (TYPE_MAXVAL (t)); 14073 error_found = true; 14074 } 14075 14076 /* Check various uses of TYPE_BINFO. 
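     For a C++ class such as "struct D : B { };", TYPE_BINFO (D) is a
     TREE_BINFO whose BINFO_BASE_BINFOS vector describes the base B; for
     other types the same slot is TYPE_LANG_SLOT_1 and is expected to be
     NULL in LTO, as checked below.  (Illustration only.)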
*/ 14077 if (RECORD_OR_UNION_TYPE_P (t)) 14078 { 14079 if (!TYPE_BINFO (t)) 14080 ; 14081 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO) 14082 { 14083 error ("TYPE_BINFO is not TREE_BINFO"); 14084 debug_tree (TYPE_BINFO (t)); 14085 error_found = true; 14086 } 14087 /* FIXME: Java builds invalid empty binfos that do not have 14088 TREE_TYPE set. */ 14089 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0) 14090 { 14091 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT"); 14092 debug_tree (TREE_TYPE (TYPE_BINFO (t))); 14093 error_found = true; 14094 } 14095 } 14096 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p) 14097 { 14098 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL"); 14099 debug_tree (TYPE_LANG_SLOT_1 (t)); 14100 error_found = true; 14101 } 14102 14103 /* Check various uses of TYPE_VALUES_RAW. */ 14104 if (TREE_CODE (t) == ENUMERAL_TYPE) 14105 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l)) 14106 { 14107 tree value = TREE_VALUE (l); 14108 tree name = TREE_PURPOSE (l); 14109 14110 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses 14111 CONST_DECL of ENUMERAL TYPE. */ 14112 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL) 14113 { 14114 error ("Enum value is not CONST_DECL or INTEGER_CST"); 14115 debug_tree (value); 14116 debug_tree (name); 14117 error_found = true; 14118 } 14119 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE 14120 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value))) 14121 { 14122 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum"); 14123 debug_tree (value); 14124 debug_tree (name); 14125 error_found = true; 14126 } 14127 if (TREE_CODE (name) != IDENTIFIER_NODE) 14128 { 14129 error ("Enum value name is not IDENTIFIER_NODE"); 14130 debug_tree (value); 14131 debug_tree (name); 14132 error_found = true; 14133 } 14134 } 14135 else if (TREE_CODE (t) == ARRAY_TYPE) 14136 { 14137 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE) 14138 { 14139 error ("Array TYPE_DOMAIN is not integer type"); 14140 debug_tree (TYPE_DOMAIN (t)); 14141 error_found = true; 14142 } 14143 } 14144 else if (RECORD_OR_UNION_TYPE_P (t)) 14145 { 14146 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p) 14147 { 14148 error ("TYPE_FIELDS defined in incomplete type"); 14149 error_found = true; 14150 } 14151 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld)) 14152 { 14153 /* TODO: verify properties of decls. 
*/ 14154 if (TREE_CODE (fld) == FIELD_DECL) 14155 ; 14156 else if (TREE_CODE (fld) == TYPE_DECL) 14157 ; 14158 else if (TREE_CODE (fld) == CONST_DECL) 14159 ; 14160 else if (VAR_P (fld)) 14161 ; 14162 else if (TREE_CODE (fld) == TEMPLATE_DECL) 14163 ; 14164 else if (TREE_CODE (fld) == USING_DECL) 14165 ; 14166 else 14167 { 14168 error ("Wrong tree in TYPE_FIELDS list"); 14169 debug_tree (fld); 14170 error_found = true; 14171 } 14172 } 14173 } 14174 else if (TREE_CODE (t) == INTEGER_TYPE 14175 || TREE_CODE (t) == BOOLEAN_TYPE 14176 || TREE_CODE (t) == OFFSET_TYPE 14177 || TREE_CODE (t) == REFERENCE_TYPE 14178 || TREE_CODE (t) == NULLPTR_TYPE 14179 || TREE_CODE (t) == POINTER_TYPE) 14180 { 14181 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL)) 14182 { 14183 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p", 14184 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t)); 14185 error_found = true; 14186 } 14187 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC) 14188 { 14189 error ("TYPE_CACHED_VALUES is not TREE_VEC"); 14190 debug_tree (TYPE_CACHED_VALUES (t)); 14191 error_found = true; 14192 } 14193 /* Verify just enough of cache to ensure that no one copied it to new type. 14194 All copying should go by copy_node that should clear it. */ 14195 else if (TYPE_CACHED_VALUES_P (t)) 14196 { 14197 int i; 14198 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++) 14199 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i) 14200 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t) 14201 { 14202 error ("wrong TYPE_CACHED_VALUES entry"); 14203 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)); 14204 error_found = true; 14205 break; 14206 } 14207 } 14208 } 14209 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE) 14210 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l)) 14211 { 14212 /* C++ FE uses TREE_PURPOSE to store initial values. */ 14213 if (TREE_PURPOSE (l) && in_lto_p) 14214 { 14215 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list"); 14216 debug_tree (l); 14217 error_found = true; 14218 } 14219 if (!TYPE_P (TREE_VALUE (l))) 14220 { 14221 error ("Wrong entry in TYPE_ARG_TYPES list"); 14222 debug_tree (l); 14223 error_found = true; 14224 } 14225 } 14226 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t)) 14227 { 14228 error ("TYPE_VALUES_RAW field is non-NULL"); 14229 debug_tree (TYPE_VALUES_RAW (t)); 14230 error_found = true; 14231 } 14232 if (TREE_CODE (t) != INTEGER_TYPE 14233 && TREE_CODE (t) != BOOLEAN_TYPE 14234 && TREE_CODE (t) != OFFSET_TYPE 14235 && TREE_CODE (t) != REFERENCE_TYPE 14236 && TREE_CODE (t) != NULLPTR_TYPE 14237 && TREE_CODE (t) != POINTER_TYPE 14238 && TYPE_CACHED_VALUES_P (t)) 14239 { 14240 error ("TYPE_CACHED_VALUES_P is set while it should not"); 14241 error_found = true; 14242 } 14243 if (TYPE_STRING_FLAG (t) 14244 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE) 14245 { 14246 error ("TYPE_STRING_FLAG is set on wrong type code"); 14247 error_found = true; 14248 } 14249 else if (TYPE_STRING_FLAG (t)) 14250 { 14251 const_tree b = t; 14252 if (TREE_CODE (b) == ARRAY_TYPE) 14253 b = TREE_TYPE (t); 14254 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type 14255 that is 32bits. 
*/ 14256 if (TREE_CODE (b) != INTEGER_TYPE) 14257 { 14258 error ("TYPE_STRING_FLAG is set on type that does not look like " 14259 "char nor array of chars"); 14260 error_found = true; 14261 } 14262 } 14263 14264 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always 14265 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns 14266 of a type. */ 14267 if (TREE_CODE (t) == METHOD_TYPE 14268 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t)) 14269 { 14270 error ("TYPE_METHOD_BASETYPE is not main variant"); 14271 error_found = true; 14272 } 14273 14274 if (error_found) 14275 { 14276 debug_tree (const_cast <tree> (t)); 14277 internal_error ("verify_type failed"); 14278 } 14279 } 14280 14281 14282 /* Return 1 if ARG interpreted as signed in its precision is known to be 14283 always positive or 2 if ARG is known to be always negative, or 3 if 14284 ARG may be positive or negative. */ 14285 14286 int 14287 get_range_pos_neg (tree arg) 14288 { 14289 if (arg == error_mark_node) 14290 return 3; 14291 14292 int prec = TYPE_PRECISION (TREE_TYPE (arg)); 14293 int cnt = 0; 14294 if (TREE_CODE (arg) == INTEGER_CST) 14295 { 14296 wide_int w = wi::sext (arg, prec); 14297 if (wi::neg_p (w)) 14298 return 2; 14299 else 14300 return 1; 14301 } 14302 while (CONVERT_EXPR_P (arg) 14303 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0))) 14304 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec) 14305 { 14306 arg = TREE_OPERAND (arg, 0); 14307 /* Narrower value zero extended into wider type 14308 will always result in positive values. */ 14309 if (TYPE_UNSIGNED (TREE_TYPE (arg)) 14310 && TYPE_PRECISION (TREE_TYPE (arg)) < prec) 14311 return 1; 14312 prec = TYPE_PRECISION (TREE_TYPE (arg)); 14313 if (++cnt > 30) 14314 return 3; 14315 } 14316 14317 if (TREE_CODE (arg) != SSA_NAME) 14318 return 3; 14319 wide_int arg_min, arg_max; 14320 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE) 14321 { 14322 gimple *g = SSA_NAME_DEF_STMT (arg); 14323 if (is_gimple_assign (g) 14324 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g))) 14325 { 14326 tree t = gimple_assign_rhs1 (g); 14327 if (INTEGRAL_TYPE_P (TREE_TYPE (t)) 14328 && TYPE_PRECISION (TREE_TYPE (t)) <= prec) 14329 { 14330 if (TYPE_UNSIGNED (TREE_TYPE (t)) 14331 && TYPE_PRECISION (TREE_TYPE (t)) < prec) 14332 return 1; 14333 prec = TYPE_PRECISION (TREE_TYPE (t)); 14334 arg = t; 14335 if (++cnt > 30) 14336 return 3; 14337 continue; 14338 } 14339 } 14340 return 3; 14341 } 14342 if (TYPE_UNSIGNED (TREE_TYPE (arg))) 14343 { 14344 /* For unsigned values, the "positive" range comes 14345 below the "negative" range. */ 14346 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED)) 14347 return 1; 14348 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED)) 14349 return 2; 14350 } 14351 else 14352 { 14353 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED)) 14354 return 1; 14355 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED)) 14356 return 2; 14357 } 14358 return 3; 14359 } 14360 14361 14362 14363 14364 /* Return true if ARG is marked with the nonnull attribute in the 14365 current function signature. */ 14366 14367 bool 14368 nonnull_arg_p (const_tree arg) 14369 { 14370 tree t, attrs, fntype; 14371 unsigned HOST_WIDE_INT arg_num; 14372 14373 gcc_assert (TREE_CODE (arg) == PARM_DECL 14374 && (POINTER_TYPE_P (TREE_TYPE (arg)) 14375 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE)); 14376 14377 /* The static chain decl is always non null. 
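     For example (illustration only), for the GNU C nested function

       void outer (void)
       {
	 int x = 0;
	 void inner (void) { x++; }
	 inner ();
       }

     the static chain passed to "inner" points at "outer"'s frame and can
     never be null.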
*/ 14378 if (arg == cfun->static_chain_decl) 14379 return true; 14380 14381 /* THIS argument of method is always non-NULL. */ 14382 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE 14383 && arg == DECL_ARGUMENTS (cfun->decl) 14384 && flag_delete_null_pointer_checks) 14385 return true; 14386 14387 /* Values passed by reference are always non-NULL. */ 14388 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE 14389 && flag_delete_null_pointer_checks) 14390 return true; 14391 14392 fntype = TREE_TYPE (cfun->decl); 14393 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs)) 14394 { 14395 attrs = lookup_attribute ("nonnull", attrs); 14396 14397 /* If "nonnull" wasn't specified, we know nothing about the argument. */ 14398 if (attrs == NULL_TREE) 14399 return false; 14400 14401 /* If "nonnull" applies to all the arguments, then ARG is non-null. */ 14402 if (TREE_VALUE (attrs) == NULL_TREE) 14403 return true; 14404 14405 /* Get the position number for ARG in the function signature. */ 14406 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl); 14407 t; 14408 t = DECL_CHAIN (t), arg_num++) 14409 { 14410 if (t == arg) 14411 break; 14412 } 14413 14414 gcc_assert (t == arg); 14415 14416 /* Now see if ARG_NUM is mentioned in the nonnull list. */ 14417 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t)) 14418 { 14419 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0) 14420 return true; 14421 } 14422 } 14423 14424 return false; 14425 } 14426 14427 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range 14428 information. */ 14429 14430 location_t 14431 set_block (location_t loc, tree block) 14432 { 14433 location_t pure_loc = get_pure_location (loc); 14434 source_range src_range = get_range_from_loc (line_table, loc); 14435 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block); 14436 } 14437 14438 location_t 14439 set_source_range (tree expr, location_t start, location_t finish) 14440 { 14441 source_range src_range; 14442 src_range.m_start = start; 14443 src_range.m_finish = finish; 14444 return set_source_range (expr, src_range); 14445 } 14446 14447 location_t 14448 set_source_range (tree expr, source_range src_range) 14449 { 14450 if (!EXPR_P (expr)) 14451 return UNKNOWN_LOCATION; 14452 14453 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr)); 14454 location_t adhoc = COMBINE_LOCATION_DATA (line_table, 14455 pure_loc, 14456 src_range, 14457 NULL); 14458 SET_EXPR_LOCATION (expr, adhoc); 14459 return adhoc; 14460 } 14461 14462 /* Return the name of combined function FN, for debugging purposes. */ 14463 14464 const char * 14465 combined_fn_name (combined_fn fn) 14466 { 14467 if (builtin_fn_p (fn)) 14468 { 14469 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn)); 14470 return IDENTIFIER_POINTER (DECL_NAME (fndecl)); 14471 } 14472 else 14473 return internal_fn_name (as_internal_fn (fn)); 14474 } 14475 14476 /* Return a bitmap with a bit set corresponding to each argument in 14477 a function call type FNTYPE declared with attribute nonnull, 14478 or null if none of the function's argument are nonnull. The caller 14479 must free the bitmap. */ 14480 14481 bitmap 14482 get_nonnull_args (const_tree fntype) 14483 { 14484 if (fntype == NULL_TREE) 14485 return NULL; 14486 14487 tree attrs = TYPE_ATTRIBUTES (fntype); 14488 if (!attrs) 14489 return NULL; 14490 14491 bitmap argmap = NULL; 14492 14493 /* A function declaration can specify multiple attribute nonnull, 14494 each with zero or more arguments. 
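     For example (illustration only):

       __attribute__ ((nonnull (1), nonnull (3)))
       void f (void *a, void *b, void *c);

     yields a bitmap with bits 0 and 2 set, whereas a bare
     __attribute__ ((nonnull)) marks every pointer argument and yields an
     empty bitmap.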
The loop below creates a bitmap 14495 representing a union of all the arguments. An empty (but non-null) 14496 bitmap means that all arguments have been declaraed nonnull. */ 14497 for ( ; attrs; attrs = TREE_CHAIN (attrs)) 14498 { 14499 attrs = lookup_attribute ("nonnull", attrs); 14500 if (!attrs) 14501 break; 14502 14503 if (!argmap) 14504 argmap = BITMAP_ALLOC (NULL); 14505 14506 if (!TREE_VALUE (attrs)) 14507 { 14508 /* Clear the bitmap in case a previous attribute nonnull 14509 set it and this one overrides it for all arguments. */ 14510 bitmap_clear (argmap); 14511 return argmap; 14512 } 14513 14514 /* Iterate over the indices of the format arguments declared nonnull 14515 and set a bit for each. */ 14516 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx)) 14517 { 14518 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1; 14519 bitmap_set_bit (argmap, val); 14520 } 14521 } 14522 14523 return argmap; 14524 } 14525 14526 /* Return true if an expression with CODE has to have the same result type as 14527 its first operand. */ 14528 14529 bool 14530 expr_type_first_operand_type_p (tree_code code) 14531 { 14532 switch (code) 14533 { 14534 case NEGATE_EXPR: 14535 case ABS_EXPR: 14536 case BIT_NOT_EXPR: 14537 case PAREN_EXPR: 14538 case CONJ_EXPR: 14539 14540 case PLUS_EXPR: 14541 case MINUS_EXPR: 14542 case MULT_EXPR: 14543 case TRUNC_DIV_EXPR: 14544 case CEIL_DIV_EXPR: 14545 case FLOOR_DIV_EXPR: 14546 case ROUND_DIV_EXPR: 14547 case TRUNC_MOD_EXPR: 14548 case CEIL_MOD_EXPR: 14549 case FLOOR_MOD_EXPR: 14550 case ROUND_MOD_EXPR: 14551 case RDIV_EXPR: 14552 case EXACT_DIV_EXPR: 14553 case MIN_EXPR: 14554 case MAX_EXPR: 14555 case BIT_IOR_EXPR: 14556 case BIT_XOR_EXPR: 14557 case BIT_AND_EXPR: 14558 14559 case LSHIFT_EXPR: 14560 case RSHIFT_EXPR: 14561 case LROTATE_EXPR: 14562 case RROTATE_EXPR: 14563 return true; 14564 14565 default: 14566 return false; 14567 } 14568 } 14569 14570 #if CHECKING_P 14571 14572 namespace selftest { 14573 14574 /* Selftests for tree. */ 14575 14576 /* Verify that integer constants are sane. */ 14577 14578 static void 14579 test_integer_constants () 14580 { 14581 ASSERT_TRUE (integer_type_node != NULL); 14582 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL); 14583 14584 tree type = integer_type_node; 14585 14586 tree zero = build_zero_cst (type); 14587 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero)); 14588 ASSERT_EQ (type, TREE_TYPE (zero)); 14589 14590 tree one = build_int_cst (type, 1); 14591 ASSERT_EQ (INTEGER_CST, TREE_CODE (one)); 14592 ASSERT_EQ (type, TREE_TYPE (zero)); 14593 } 14594 14595 /* Verify identifiers. */ 14596 14597 static void 14598 test_identifiers () 14599 { 14600 tree identifier = get_identifier ("foo"); 14601 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier)); 14602 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier)); 14603 } 14604 14605 /* Verify LABEL_DECL. */ 14606 14607 static void 14608 test_labels () 14609 { 14610 tree identifier = get_identifier ("err"); 14611 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL, 14612 identifier, void_type_node); 14613 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl)); 14614 ASSERT_FALSE (FORCED_LABEL (label_decl)); 14615 } 14616 14617 /* Run all of the selftests within this file. */ 14618 14619 void 14620 tree_c_tests () 14621 { 14622 test_integer_constants (); 14623 test_identifiers (); 14624 test_labels (); 14625 } 14626 14627 } // namespace selftest 14628 14629 #endif /* CHECKING_P */ 14630 14631 #include "gt-tree.h" 14632
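
/* Editor's illustrative sketch (an assumption, not part of GCC proper and
   deliberately not registered in tree_c_tests): exercise the ARRAY_REF
   accessors defined above on a simple "int a[5]" array to show what they
   return.  */

#if CHECKING_P

namespace selftest {

/* Build the ARRAY_REF a[2] for a variable "a" of type int[5] and check
   the low bound, upper bound, element size and base address helpers.  */

static void ATTRIBUTE_UNUSED
example_array_ref_accessors ()
{
  /* The domain [0, 4] and the array type int[5].  */
  tree domain = build_index_type (size_int (4));
  tree arr_type = build_array_type (integer_type_node, domain);
  tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			  get_identifier ("a"), arr_type);
  /* a[2]; operands 2 and 3 are left NULL so the accessors fall back to
     the domain and the element type.  */
  tree ref = build4 (ARRAY_REF, integer_type_node, decl,
		     size_int (2), NULL_TREE, NULL_TREE);

  ASSERT_TRUE (integer_zerop (array_ref_low_bound (ref)));
  ASSERT_TRUE (tree_int_cst_equal (array_ref_up_bound (ref), size_int (4)));
  ASSERT_TRUE (tree_int_cst_equal (array_ref_element_size (ref),
				   TYPE_SIZE_UNIT (integer_type_node)));
  ASSERT_EQ (decl, get_base_address (ref));
}

} // namespace selftest

#endif /* CHECKING_P */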