/* Language-independent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but occasionally
   calls language-dependent routines.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "attribs.h"
#include "toplev.h" /* get_random_seed */
#include "output.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "params.h"
#include "langhooks-def.h"
#include "tree-diagnostic.h"
#include "except.h"
#include "builtins.h"
#include "print-tree.h"
#include "ipa-utils.h"
#include "selftest.h"

/* Tree code classes.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
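
/* As an illustration of how the three DEFTREECODE expansions above fit
   together: a tree.def entry such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes "tcc_binary" to tree_code_type, "2" to tree_code_length
   and "plus_expr" to tree_code_name, all at the index of PLUS_EXPR.  */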

/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};

/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  */

static int tree_code_counts[MAX_TREE_CODES];
int tree_node_counts[(int) all_kinds];
int tree_node_sizes[(int) all_kinds];

/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};

/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) int next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;

/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
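
/* Note that the GTY ((cache)) tables in this file act as weak caches:
   their hashers' keep_cache_entry hooks (or the ggc_cache_ptr_hash
   default) only retain an entry while the keyed tree is still marked by
   the garbage collector, so the caches never keep otherwise-dead nodes
   alive.  */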

/* General tree->tree mapping structure for use in hash tables.  */

static GTY ((cache))
  hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
  hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
  hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;

static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
static void type_hash_list (const_tree, inchash::hash &);
static void attribute_hash_list (const_tree, inchash::hash &);

tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

unsigned char tree_contains_struct[MAX_TREE_CODES][64];

/* Number of operands for each OpenMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  1, /* OMP_CLAUSE__CILK_FOR_COUNT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  2, /* OMP_CLAUSE__GRIDDIM_  */
};

const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "uniform",
  "to",
  "link",
  "from",
  "to",
  "map",
  "use_device_ptr",
  "is_device_ptr",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "_simduid_",
  "_simt_",
  "_Cilk_for_count_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "_griddim_"
};


/* Return the tree node structure used by tree code CODE.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      {
        switch (code)
          {
          case FIELD_DECL:
            return TS_FIELD_DECL;
          case PARM_DECL:
            return TS_PARM_DECL;
          case VAR_DECL:
            return TS_VAR_DECL;
          case LABEL_DECL:
            return TS_LABEL_DECL;
          case RESULT_DECL:
            return TS_RESULT_DECL;
          case DEBUG_EXPR_DECL:
            return TS_DECL_WRTL;
          case CONST_DECL:
            return TS_CONST_DECL;
          case TYPE_DECL:
            return TS_TYPE_DECL;
          case FUNCTION_DECL:
            return TS_FUNCTION_DECL;
          case TRANSLATION_UNIT_DECL:
            return TS_TRANSLATION_UNIT_DECL;
          default:
            return TS_DECL_NON_COMMON;
          }
      }
    case tcc_type:
      return TS_TYPE_NON_COMMON;
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
    case tcc_statement:
    case tcc_vl_exp:
      return TS_EXP;
    default:  /* tcc_constant and tcc_exceptional */
      break;
    }
  switch (code)
    {
      /* tcc_constant cases.  */
    case VOID_CST:             return TS_TYPED;
    case INTEGER_CST:          return TS_INT_CST;
    case REAL_CST:             return TS_REAL_CST;
    case FIXED_CST:            return TS_FIXED_CST;
    case COMPLEX_CST:          return TS_COMPLEX;
    case VECTOR_CST:           return TS_VECTOR;
    case STRING_CST:           return TS_STRING;
      /* tcc_exceptional cases.  */
    case ERROR_MARK:           return TS_COMMON;
    case IDENTIFIER_NODE:      return TS_IDENTIFIER;
    case TREE_LIST:            return TS_LIST;
    case TREE_VEC:             return TS_VEC;
    case SSA_NAME:             return TS_SSA_NAME;
    case PLACEHOLDER_EXPR:     return TS_COMMON;
    case STATEMENT_LIST:       return TS_STATEMENT_LIST;
    case BLOCK:                return TS_BLOCK;
    case CONSTRUCTOR:          return TS_CONSTRUCTOR;
    case TREE_BINFO:           return TS_BINFO;
    case OMP_CLAUSE:           return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:    return TS_OPTIMIZATION;
    case TARGET_OPTION_NODE:   return TS_TARGET_OPTION;

    default:
      gcc_unreachable ();
    }
}


/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  */
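
/* For instance, VAR_DECL maps to TS_VAR_DECL above, so the switch below
   calls MARK_TS_DECL_WITH_VIS for it; that macro in turn marks
   TS_DECL_WRTL, TS_DECL_COMMON, TS_DECL_MINIMAL, TS_COMMON, TS_TYPED and
   TS_BASE, which is what allows a VAR_DECL to be used with the accessors
   of all those base structures.  */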

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
        {
        case TS_TYPED:
        case TS_BLOCK:
        case TS_OPTIMIZATION:
        case TS_TARGET_OPTION:
          MARK_TS_BASE (code);
          break;

        case TS_COMMON:
        case TS_INT_CST:
        case TS_REAL_CST:
        case TS_FIXED_CST:
        case TS_VECTOR:
        case TS_STRING:
        case TS_COMPLEX:
        case TS_SSA_NAME:
        case TS_CONSTRUCTOR:
        case TS_EXP:
        case TS_STATEMENT_LIST:
          MARK_TS_TYPED (code);
          break;

        case TS_IDENTIFIER:
        case TS_DECL_MINIMAL:
        case TS_TYPE_COMMON:
        case TS_LIST:
        case TS_VEC:
        case TS_BINFO:
        case TS_OMP_CLAUSE:
          MARK_TS_COMMON (code);
          break;

        case TS_TYPE_WITH_LANG_SPECIFIC:
          MARK_TS_TYPE_COMMON (code);
          break;

        case TS_TYPE_NON_COMMON:
          MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
          break;

        case TS_DECL_COMMON:
          MARK_TS_DECL_MINIMAL (code);
          break;

        case TS_DECL_WRTL:
        case TS_CONST_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_DECL_NON_COMMON:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_DECL_WITH_VIS:
        case TS_PARM_DECL:
        case TS_LABEL_DECL:
        case TS_RESULT_DECL:
          MARK_TS_DECL_WRTL (code);
          break;

        case TS_FIELD_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_VAR_DECL:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_TYPE_DECL:
        case TS_FUNCTION_DECL:
          MARK_TS_DECL_NON_COMMON (code);
          break;

        case TS_TRANSLATION_UNIT_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}


/* Init tree.c.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}


/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.  */
tree
decl_assembler_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    lang_hooks.set_decl_assembler_name (decl);
  return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
}

/* When the target supports COMDAT groups, this indicates which group the
   DECL is associated with.  This can be either an IDENTIFIER_NODE or a
   decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */
tree
decl_comdat_group (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group ();
}

/* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */
tree
decl_comdat_group_id (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group_id ();
}

/* When the target supports named sections, return the section name of
   NODE as a string, or NULL if it is in no section.  */
const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}

/* Set the section name of NODE to the string VALUE, or clear it when
   VALUE is NULL.  */
void
set_decl_section_name (tree node, const char *value)
{
  struct symtab_node *snode;

  if (value == NULL)
    {
      snode = symtab_node::get (node);
      if (!snode)
        return;
    }
  else if (VAR_P (node))
    snode = varpool_node::get_create (node);
  else
    snode = cgraph_node::get_create (node);
  snode->set_section (value);
}

/* Return the TLS model of the variable NODE.  */
enum tls_model
decl_tls_model (const_tree node)
{
  struct varpool_node *snode = varpool_node::get (node);
  if (!snode)
    return TLS_MODEL_NONE;
  return snode->tls_model;
}

/* Set the TLS model of variable NODE to MODEL.  */
void
set_decl_tls_model (tree node, enum tls_model model)
{
  struct varpool_node *vnode;

  if (model == TLS_MODEL_NONE)
    {
      vnode = varpool_node::get (node);
      if (!vnode)
        return;
    }
  else
    vnode = varpool_node::get_create (node);
  vnode->tls_model = model;
}
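
/* Apart from the assembler name, the properties above (comdat group,
   section name, TLS model) live in the associated symtab_node or
   varpool_node rather than in the tree itself, which is why the getters
   return NULL or TLS_MODEL_NONE for declarations that have no symbol
   table entry yet.  */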

/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      {
        switch (code)
          {
          case FIELD_DECL:
            return sizeof (struct tree_field_decl);
          case PARM_DECL:
            return sizeof (struct tree_parm_decl);
          case VAR_DECL:
            return sizeof (struct tree_var_decl);
          case LABEL_DECL:
            return sizeof (struct tree_label_decl);
          case RESULT_DECL:
            return sizeof (struct tree_result_decl);
          case CONST_DECL:
            return sizeof (struct tree_const_decl);
          case TYPE_DECL:
            return sizeof (struct tree_type_decl);
          case FUNCTION_DECL:
            return sizeof (struct tree_function_decl);
          case DEBUG_EXPR_DECL:
            return sizeof (struct tree_decl_with_rtl);
          case TRANSLATION_UNIT_DECL:
            return sizeof (struct tree_translation_unit_decl);
          case NAMESPACE_DECL:
          case IMPORTED_DECL:
          case NAMELIST_DECL:
            return sizeof (struct tree_decl_non_common);
          default:
            return lang_hooks.tree_size (code);
          }
      }

    case tcc_type:  /* a type node */
      return sizeof (struct tree_type_non_common);

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      return (sizeof (struct tree_exp)
              + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
        {
        case VOID_CST:       return sizeof (struct tree_typed);
        case INTEGER_CST:    gcc_unreachable ();
        case REAL_CST:       return sizeof (struct tree_real_cst);
        case FIXED_CST:      return sizeof (struct tree_fixed_cst);
        case COMPLEX_CST:    return sizeof (struct tree_complex);
        case VECTOR_CST:     return sizeof (struct tree_vector);
        case STRING_CST:     gcc_unreachable ();
        default:
          return lang_hooks.tree_size (code);
        }

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
        {
        case IDENTIFIER_NODE:   return lang_hooks.identifier_size;
        case TREE_LIST:         return sizeof (struct tree_list);

        case ERROR_MARK:
        case PLACEHOLDER_EXPR:  return sizeof (struct tree_common);

        case TREE_VEC:
        case OMP_CLAUSE:        gcc_unreachable ();

        case SSA_NAME:          return sizeof (struct tree_ssa_name);

        case STATEMENT_LIST:    return sizeof (struct tree_statement_list);
        case BLOCK:             return sizeof (struct tree_block);
        case CONSTRUCTOR:       return sizeof (struct tree_constructor);
        case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
        case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);

        default:
          return lang_hooks.tree_size (code);
        }

    default:
      gcc_unreachable ();
    }
}

/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes.  */
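
/* For example, a binary expression such as PLUS_EXPR has TREE_CODE_LENGTH
   equal to 2, so tree_code_size above returns
   sizeof (struct tree_exp) + sizeof (tree), since struct tree_exp already
   embeds one operand slot.  tree_size below extends the same idea to the
   variable-sized codes that tree_code_size rejects.  */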
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      return (sizeof (struct tree_int_cst)
              + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
              + vec<tree, va_gc>
                  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
              + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
              + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));

    case STRING_CST:
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
              + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
                * sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
        return (sizeof (struct tree_exp)
                + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
        return tree_code_size (code);
    }
}

/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.  */

static void
record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
                                   size_t length ATTRIBUTE_UNUSED)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);
  tree_node_kind kind;

  if (!GATHER_STATISTICS)
    return;

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      kind = d_kind;
      break;

    case tcc_type:  /* a type node */
      kind = t_kind;
      break;

    case tcc_statement:  /* an expression with side effects */
      kind = s_kind;
      break;

    case tcc_reference:  /* a reference */
      kind = r_kind;
      break;

    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      kind = e_kind;
      break;

    case tcc_constant:  /* a constant */
      kind = c_kind;
      break;

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
        {
        case IDENTIFIER_NODE:
          kind = id_kind;
          break;

        case TREE_VEC:
          kind = vec_kind;
          break;

        case TREE_BINFO:
          kind = binfo_kind;
          break;

        case SSA_NAME:
          kind = ssa_name_kind;
          break;

        case BLOCK:
          kind = b_kind;
          break;

        case CONSTRUCTOR:
          kind = constr_kind;
          break;

        case OMP_CLAUSE:
          kind = omp_clause_kind;
          break;

        default:
          kind = x_kind;
          break;
        }
      break;

    case tcc_vl_exp:
      kind = e_kind;
      break;

    default:
      gcc_unreachable ();
    }

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
}

/* Allocate and return a new UID from the DECL_UID namespace.  */

int
allocate_decl_uid (void)
{
  return next_decl_uid++;
}

/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */
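
/* A minimal illustrative use would be

     tree lab = make_node (LABEL_DECL);

   after which the caller is expected to fill in fields such as DECL_NAME
   and TREE_TYPE; front ends usually reach this function through wrappers
   such as build_decl rather than calling it directly.  */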

tree
make_node_stat (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
          if (code == FUNCTION_DECL)
            {
              SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
              SET_DECL_MODE (t, FUNCTION_MODE);
            }
          else
            SET_DECL_ALIGN (t, 1);
        }
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          SET_DECL_PT_UID (t, -1);
        }
      if (TREE_CODE (t) == LABEL_DECL)
        LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          TREE_SIDE_EFFECTS (t) = 1;
          break;

        default:
          break;
        }
      break;

    case tcc_exceptional:
      switch (code)
        {
        case TARGET_OPTION_NODE:
          TREE_TARGET_OPTION (t)
            = ggc_cleared_alloc<struct cl_target_option> ();
          break;

        case OPTIMIZATION_NODE:
          TREE_OPTIMIZATION (t)
            = ggc_cleared_alloc<struct cl_optimization> ();
          break;

        default:
          break;
        }
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}

/* Free tree node.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) t_kind]--;
      tree_node_sizes[(int) t_kind] -= tree_size (node);
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  ggc_free (node);
}

/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node_stat (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      if (code == DEBUG_EXPR_DECL)
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          if (DECL_PT_UID_SET_P (node))
            SET_DECL_PT_UID (t, DECL_PT_UID (node));
        }
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
          && DECL_HAS_VALUE_EXPR_P (node))
        {
          SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
          DECL_HAS_VALUE_EXPR_P (t) = 1;
        }
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
        {
          DECL_HAS_DEBUG_EXPR_P (t) = 0;
          t->decl_with_vis.symtab_node = NULL;
        }
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
        {
          SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
          DECL_HAS_INIT_PRIORITY_P (t) = 1;
        }
      if (TREE_CODE (node) == FUNCTION_DECL)
        {
          DECL_STRUCT_FUNCTION (t) = NULL;
          t->decl_with_vis.symtab_node = NULL;
        }
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
         the copy is different from the original type.
         The two statements usually duplicate each other
         (because they clear fields of the same union),
         but the optimizer should catch that.  */
      TYPE_SYMTAB_POINTER (t) = 0;
      TYPE_SYMTAB_ADDRESS (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
        {
          TYPE_CACHED_VALUES_P (t) = 0;
          TYPE_CACHED_VALUES (t) = NULL_TREE;
        }
    }
  else if (code == TARGET_OPTION_NODE)
    {
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option> ();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
              sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization> ();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
              sizeof (struct cl_optimization));
    }

  return t;
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  */

tree
copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = copy_node (next);
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}
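
/* An example of the extended INTEGER_CST layout computed below: on a host
   with a 64-bit HOST_WIDE_INT, the value 0xffffffffffffffff of a 64-bit
   unsigned type has its upper bit set, so it is stored with
   TREE_INT_CST_NUNITS == 1 but TREE_INT_CST_EXT_NUNITS == 2; the extra
   element records the zero extension so that the value reads as
   non-negative when accessed at a wider precision.  */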

/* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   INTEGER_CST with value CST and type TYPE.  */

static unsigned int
get_int_cst_ext_nunits (tree type, const wide_int &cst)
{
  gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
  /* We need extra HWIs if CST is an unsigned integer with its
     upper bit set.  */
  if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
    return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
  return cst.get_len ();
}

/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
        = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
        TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
           && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      len--;
      TREE_INT_CST_ELT (nt, len)
        = zext_hwi (cst.elt (len),
                    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}

/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst (tree type, HOST_WIDE_INT low)
{
  /* Support legacy code.  */
  if (!type)
    type = integer_type_node;

  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}

tree
build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}

/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst_type (tree type, HOST_WIDE_INT low)
{
  gcc_assert (type);
  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}

/* Construct a tree of type TYPE with the value given by CST.  The
   signedness of CST is assumed to be the same as the signedness of
   TYPE.  */

tree
double_int_to_tree (tree type, double_int cst)
{
  return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
}

/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   the value to be within range of the type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type (tree type, const wide_int_ref &cst,
                int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign == SIGNED))
        {
          wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
          tree t = build_new_int_cst (type, tmp);
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code of X, an INTEGER_CST.  */

hashval_t
int_cst_hasher::hash (tree x)
{
  const_tree const t = x;
  hashval_t code = TYPE_UID (TREE_TYPE (t));
  int i;

  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
    code = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), code);

  return code;
}

/* Return nonzero if the value represented by X (an INTEGER_CST tree node)
   is the same as that given by Y.  */

bool
int_cst_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  if (TREE_TYPE (xt) != TREE_TYPE (yt)
      || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
      || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
    return false;

  for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
    if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
      return false;

  return true;
}

/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

tree
wide_int_to_tree (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
        gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
        gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
        hwi = cst.to_uhwi ();
      else
        hwi = cst.to_shwi ();

      switch (TREE_CODE (type))
        {
        case NULLPTR_TYPE:
          gcc_assert (hwi == 0);
          /* Fallthru.  */

        case POINTER_TYPE:
        case REFERENCE_TYPE:
        case POINTER_BOUNDS_TYPE:
          /* Cache NULL pointer and zero bounds.  */
          if (hwi == 0)
            {
              limit = 1;
              ix = 0;
            }
          break;

        case BOOLEAN_TYPE:
          /* Cache false or true.  */
          limit = 2;
          if (IN_RANGE (hwi, 0, 1))
            ix = hwi;
          break;

        case INTEGER_TYPE:
        case OFFSET_TYPE:
          if (TYPE_SIGN (type) == UNSIGNED)
            {
              /* Cache [0, N).  */
              limit = INTEGER_SHARE_LIMIT;
              if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
                ix = hwi;
            }
          else
            {
              /* Cache [-1, N).  */
              limit = INTEGER_SHARE_LIMIT + 1;
              if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
                ix = hwi + 1;
            }
          break;

        case ENUMERAL_TYPE:
          break;

        default:
          gcc_unreachable ();
        }

      if (ix >= 0)
        {
          /* Look for it in the type's vector of small shared ints.  */
          if (!TYPE_CACHED_VALUES_P (type))
            {
              TYPE_CACHED_VALUES_P (type) = 1;
              TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
            }

          t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
          if (t)
            /* Make sure no one is clobbering the shared constant.  */
            gcc_checking_assert (TREE_TYPE (t) == type
                                 && TREE_INT_CST_NUNITS (t) == 1
                                 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
                                 && TREE_INT_CST_EXT_NUNITS (t) == 1
                                 && TREE_INT_CST_ELT (t, 0) == hwi);
          else
            {
              /* Create a new shared int.  */
              t = build_new_int_cst (type, cst);
              TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
            }
        }
      else
        {
          /* Use the cache of larger shared ints, using int_cst_node as
             a temporary.  */

          TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
          TREE_TYPE (int_cst_node) = type;

          tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
          t = *slot;
          if (!t)
            {
              /* Insert this one into the hash table.  */
              t = int_cst_node;
              *slot = t;
              /* Make a new node for next time round.  */
              int_cst_node = make_int_cst (1, 1);
            }
        }
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
         for the gc to take care of.  There will not be enough of them
         to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
        {
          /* Insert this one into the hash table.  */
          t = nt;
          *slot = t;
        }
    }

  return t;
}

void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
        {
          limit = 1;
          ix = 0;
        }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (t, 2))
        ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
        {
          /* Cache 0..N.  */
          limit = INTEGER_SHARE_LIMIT;

          /* This is a little hokey, but if the prec is smaller than
             what is necessary to hold INTEGER_SHARE_LIMIT, then the
             obvious test will not get the correct answer.  */
          if (prec < HOST_BITS_PER_WIDE_INT)
            {
              if (tree_to_uhwi (t)
                  < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
                ix = tree_to_uhwi (t);
            }
          else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
            ix = tree_to_uhwi (t);
        }
      else
        {
          /* Cache -1..N.  */
          limit = INTEGER_SHARE_LIMIT + 1;

          if (integer_minus_onep (t))
            ix = 0;
          else if (!wi::neg_p (t))
            {
              if (prec < HOST_BITS_PER_WIDE_INT)
                {
                  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
                    ix = tree_to_shwi (t) + 1;
                }
              else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
                ix = tree_to_shwi (t) + 1;
            }
        }
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
        {
          TYPE_CACHED_VALUES_P (type) = 1;
          TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
        }

      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
         same.  */
      if (*slot)
        gcc_assert (wi::eq_p (tree (*slot), t));
      else
        /* Otherwise insert this one into the hash table.  */
        *slot = t;
    }
}


/* Builds an integer constant in TYPE such that lowest BITS bits are ones
   and the rest are zeros.  */

tree
build_low_bits_mask (tree type, unsigned bits)
{
  gcc_assert (bits <= TYPE_PRECISION (type));

  return wide_int_to_tree (type, wi::mask (bits, false,
                                           TYPE_PRECISION (type)));
}

/* Checks that X is an integer constant that can be expressed in (unsigned)
   HOST_WIDE_INT without loss of precision.  */

bool
cst_and_fits_in_hwi (const_tree x)
{
  return (TREE_CODE (x) == INTEGER_CST
          && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
}

/* Build a newly constructed VECTOR_CST node of length LEN.  */

tree
make_vector_stat (unsigned len MEM_STAT_DECL)
{
  tree t;
  unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);

  record_node_allocation_statistics (VECTOR_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, VECTOR_CST);
  TREE_CONSTANT (t) = 1;

  return t;
}

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are in a list pointed to by VALS.  */

tree
build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
{
  int over = 0;
  unsigned cnt = 0;
  tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
  TREE_TYPE (v) = type;

  /* Iterate through elements and check for overflow.  */
  for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
    {
      tree value = vals[cnt];

      VECTOR_CST_ELT (v, cnt) = value;

      /* Don't crash if we get an address constant.  */
      if (!CONSTANT_CLASS_P (value))
        continue;

      over |= TREE_OVERFLOW (value);
    }

  TREE_OVERFLOW (v) = over;
  return v;
}
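
/* For instance, a four-element integer vector constant {1, 2, 3, 4} can
   be built by filling a four-element array of INTEGER_CSTs and passing
   it to build_vector; build_vector_from_ctor and build_vector_from_val
   below are convenience wrappers that obtain the elements from a
   CONSTRUCTOR or by replicating a single scalar (falling back to a
   CONSTRUCTOR when that scalar is not a constant).  */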

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
  unsigned HOST_WIDE_INT idx, pos = 0;
  tree value;

  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
        for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
          vec[pos++] = VECTOR_CST_ELT (value, i);
      else
        vec[pos++] = value;
    }
  while (pos < TYPE_VECTOR_SUBPARTS (type))
    vec[pos++] = build_zero_cst (TREE_TYPE (type));

  return build_vector (type, vec);
}

/* Build a vector of type VECTYPE where all the elements are SCs.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
                                           TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      tree *v = XALLOCAVEC (tree, nunits);
      for (i = 0; i < nunits; ++i)
        v[i] = sc;
      return build_vector (vectype, v);
    }
  else
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}

/* Something has messed with the elements of CONSTRUCTOR C after it was built;
   calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */

void
recompute_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = true;
  bool side_effects_p = false;
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      /* Mostly ctors will have elts that don't have side-effects, so
         the usual case is to scan all the elements.  Hence a single
         loop for both const and side effects, rather than one loop
         each (with early outs).  */
      if (!TREE_CONSTANT (val))
        constant_p = false;
      if (TREE_SIDE_EFFECTS (val))
        side_effects_p = true;
    }

  TREE_SIDE_EFFECTS (c) = side_effects_p;
  TREE_CONSTANT (c) = constant_p;
}

/* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
   CONSTRUCTOR C.  */

void
verify_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = TREE_CONSTANT (c);
  bool side_effects_p = TREE_SIDE_EFFECTS (c);
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      if (constant_p && !TREE_CONSTANT (val))
        internal_error ("non-constant element in constant CONSTRUCTOR");
      if (!side_effects_p && TREE_SIDE_EFFECTS (val))
        internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
    }
}
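
/* For example, an aggregate initializer { [0] = x, [1] = y } can be
   represented by pushing the index/value pairs with
   CONSTRUCTOR_APPEND_ELT into a vec<constructor_elt, va_gc> and handing
   it to build_constructor below, or more compactly with
   build_constructor_va (type, 2, index0, x, index1, y), where the
   index trees here are only illustrative names.  */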

/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in the vec pointed to by VALS.  */
tree
build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
{
  tree c = make_node (CONSTRUCTOR);

  TREE_TYPE (c) = type;
  CONSTRUCTOR_ELTS (c) = vals;

  recompute_constructor_flags (c);

  return c;
}

/* Build a CONSTRUCTOR node made of a single initializer, with the specified
   INDEX and VALUE.  */
tree
build_constructor_single (tree type, tree index, tree value)
{
  vec<constructor_elt, va_gc> *v;
  constructor_elt elt = {index, value};

  vec_alloc (v, 1);
  v->quick_push (elt);

  return build_constructor (type, v);
}


/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a list pointed to by VALS.  */
tree
build_constructor_from_list (tree type, tree vals)
{
  tree t;
  vec<constructor_elt, va_gc> *v = NULL;

  if (vals)
    {
      vec_alloc (v, list_length (vals));
      for (t = vals; t; t = TREE_CHAIN (t))
        CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
    }

  return build_constructor (type, v);
}

/* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
   of elements, provided as index/value pairs.  */

tree
build_constructor_va (tree type, int nelts, ...)
{
  vec<constructor_elt, va_gc> *v = NULL;
  va_list p;

  va_start (p, nelts);
  vec_alloc (v, nelts);
  while (nelts--)
    {
      tree index = va_arg (p, tree);
      tree value = va_arg (p, tree);
      CONSTRUCTOR_APPEND_ELT (v, index, value);
    }
  va_end (p);
  return build_constructor (type, v);
}

/* Return a new FIXED_CST node whose type is TYPE and value is F.  */

tree
build_fixed (tree type, FIXED_VALUE_TYPE f)
{
  tree v;
  FIXED_VALUE_TYPE *fp;

  v = make_node (FIXED_CST);
  fp = ggc_alloc<fixed_value> ();
  memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_FIXED_CST_PTR (v) = fp;
  return v;
}

/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  REAL_VALUE_TYPE *dp;
  int overflow = 0;

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  dp = ggc_alloc<real_value> ();
  memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_REAL_CST_PTR (v) = dp;
  TREE_OVERFLOW (v) = overflow;
  return v;
}

/* Like build_real, but first truncate D to the type.  */

tree
build_real_truncate (tree type, REAL_VALUE_TYPE d)
{
  return build_real (type, real_value_truncate (TYPE_MODE (type), d));
}

/* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
   node I, converted for the floating-point type TYPE.  */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
                     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}

/* Given a tree representing an integer constant I, return a tree
   representing the same value as a floating-point constant of type TYPE.  */

tree
build_real_from_int_cst (tree type, const_tree i)
{
  tree v;
  int overflow = TREE_OVERFLOW (i);

  v = build_real (type, real_value_from_int_cst (type, i));

  TREE_OVERFLOW (v) |= overflow;
  return v;
}

/* Return a newly constructed STRING_CST node whose value is
   the LEN characters at STR.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (int len, const char *str)
{
  tree s;
  size_t length;

  /* Do not waste bytes provided by padding of struct tree_string.  */
  length = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, length);

  s = (tree) ggc_internal_alloc (length);

  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  memcpy (s->string.str, str, len);
  s->string.str[len] = '\0';

  return s;
}

/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}

/* Build a complex (inf +- 0i), such as for the result of cproj.
   TYPE is the complex tree type of the result.  If NEG is true, the
   imaginary zero is negative.  */

tree
build_complex_inf (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
                        build_real (TREE_TYPE (type), rzero));
}

/* Return the constant 1 in type TYPE.  If TYPE has several elements, each
   element is set to 1.  In particular, this is 1 + i for complex types.  */

tree
build_each_one_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_one_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_one_cst (type);
}

/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
*/ 2079 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type))); 2080 return build_fixed (type, FCONST1 (TYPE_MODE (type))); 2081 2082 case VECTOR_TYPE: 2083 { 2084 tree scalar = build_one_cst (TREE_TYPE (type)); 2085 2086 return build_vector_from_val (type, scalar); 2087 } 2088 2089 case COMPLEX_TYPE: 2090 return build_complex (type, 2091 build_one_cst (TREE_TYPE (type)), 2092 build_zero_cst (TREE_TYPE (type))); 2093 2094 default: 2095 gcc_unreachable (); 2096 } 2097 } 2098 2099 /* Return an integer of type TYPE containing all 1's in as much precision as 2100 it contains, or a complex or vector whose subparts are such integers. */ 2101 2102 tree 2103 build_all_ones_cst (tree type) 2104 { 2105 if (TREE_CODE (type) == COMPLEX_TYPE) 2106 { 2107 tree scalar = build_all_ones_cst (TREE_TYPE (type)); 2108 return build_complex (type, scalar, scalar); 2109 } 2110 else 2111 return build_minus_one_cst (type); 2112 } 2113 2114 /* Return a constant of arithmetic type TYPE which is the 2115 opposite of the multiplicative identity of the set TYPE. */ 2116 2117 tree 2118 build_minus_one_cst (tree type) 2119 { 2120 switch (TREE_CODE (type)) 2121 { 2122 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2123 case POINTER_TYPE: case REFERENCE_TYPE: 2124 case OFFSET_TYPE: 2125 return build_int_cst (type, -1); 2126 2127 case REAL_TYPE: 2128 return build_real (type, dconstm1); 2129 2130 case FIXED_POINT_TYPE: 2131 /* We can only generate 1 for accum types. */ 2132 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type))); 2133 return build_fixed (type, fixed_from_double_int (double_int_minus_one, 2134 TYPE_MODE (type))); 2135 2136 case VECTOR_TYPE: 2137 { 2138 tree scalar = build_minus_one_cst (TREE_TYPE (type)); 2139 2140 return build_vector_from_val (type, scalar); 2141 } 2142 2143 case COMPLEX_TYPE: 2144 return build_complex (type, 2145 build_minus_one_cst (TREE_TYPE (type)), 2146 build_zero_cst (TREE_TYPE (type))); 2147 2148 default: 2149 gcc_unreachable (); 2150 } 2151 } 2152 2153 /* Build 0 constant of type TYPE. This is used by constructor folding 2154 and thus the constant should be represented in memory by 2155 zero(es). */ 2156 2157 tree 2158 build_zero_cst (tree type) 2159 { 2160 switch (TREE_CODE (type)) 2161 { 2162 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2163 case POINTER_TYPE: case REFERENCE_TYPE: 2164 case OFFSET_TYPE: case NULLPTR_TYPE: 2165 return build_int_cst (type, 0); 2166 2167 case REAL_TYPE: 2168 return build_real (type, dconst0); 2169 2170 case FIXED_POINT_TYPE: 2171 return build_fixed (type, FCONST0 (TYPE_MODE (type))); 2172 2173 case VECTOR_TYPE: 2174 { 2175 tree scalar = build_zero_cst (TREE_TYPE (type)); 2176 2177 return build_vector_from_val (type, scalar); 2178 } 2179 2180 case COMPLEX_TYPE: 2181 { 2182 tree zero = build_zero_cst (TREE_TYPE (type)); 2183 2184 return build_complex (type, zero, zero); 2185 } 2186 2187 default: 2188 if (!AGGREGATE_TYPE_P (type)) 2189 return fold_convert (type, integer_zero_node); 2190 return build_constructor (type, NULL); 2191 } 2192 } 2193 2194 2195 /* Build a BINFO with LEN language slots. 
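   In practice the argument is the number of BINFO_BASE_BINFOS slots to
   reserve, as the embedded vector allocation below shows.  A rough usage
   sketch only, assuming the usual make_tree_binfo wrapper from tree.h and
   purely hypothetical trees derived_type, base1_binfo and base2_binfo:

     tree binfo = make_tree_binfo (2);
     BINFO_TYPE (binfo) = derived_type;
     BINFO_BASE_APPEND (binfo, base1_binfo);
     BINFO_BASE_APPEND (binfo, base2_binfo);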
*/ 2196 2197 tree 2198 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL) 2199 { 2200 tree t; 2201 size_t length = (offsetof (struct tree_binfo, base_binfos) 2202 + vec<tree, va_gc>::embedded_size (base_binfos)); 2203 2204 record_node_allocation_statistics (TREE_BINFO, length); 2205 2206 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT); 2207 2208 memset (t, 0, offsetof (struct tree_binfo, base_binfos)); 2209 2210 TREE_SET_CODE (t, TREE_BINFO); 2211 2212 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos); 2213 2214 return t; 2215 } 2216 2217 /* Create a CASE_LABEL_EXPR tree node and return it. */ 2218 2219 tree 2220 build_case_label (tree low_value, tree high_value, tree label_decl) 2221 { 2222 tree t = make_node (CASE_LABEL_EXPR); 2223 2224 TREE_TYPE (t) = void_type_node; 2225 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl)); 2226 2227 CASE_LOW (t) = low_value; 2228 CASE_HIGH (t) = high_value; 2229 CASE_LABEL (t) = label_decl; 2230 CASE_CHAIN (t) = NULL_TREE; 2231 2232 return t; 2233 } 2234 2235 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the 2236 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively. 2237 The latter determines the length of the HOST_WIDE_INT vector. */ 2238 2239 tree 2240 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL) 2241 { 2242 tree t; 2243 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT) 2244 + sizeof (struct tree_int_cst)); 2245 2246 gcc_assert (len); 2247 record_node_allocation_statistics (INTEGER_CST, length); 2248 2249 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); 2250 2251 TREE_SET_CODE (t, INTEGER_CST); 2252 TREE_INT_CST_NUNITS (t) = len; 2253 TREE_INT_CST_EXT_NUNITS (t) = ext_len; 2254 /* to_offset can only be applied to trees that are offset_int-sized 2255 or smaller. EXT_LEN is correct if it fits, otherwise the constant 2256 must be exactly the precision of offset_int and so LEN is correct. */ 2257 if (ext_len <= OFFSET_INT_ELTS) 2258 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len; 2259 else 2260 TREE_INT_CST_OFFSET_NUNITS (t) = len; 2261 2262 TREE_CONSTANT (t) = 1; 2263 2264 return t; 2265 } 2266 2267 /* Build a newly constructed TREE_VEC node of length LEN. */ 2268 2269 tree 2270 make_tree_vec_stat (int len MEM_STAT_DECL) 2271 { 2272 tree t; 2273 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec); 2274 2275 record_node_allocation_statistics (TREE_VEC, length); 2276 2277 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); 2278 2279 TREE_SET_CODE (t, TREE_VEC); 2280 TREE_VEC_LENGTH (t) = len; 2281 2282 return t; 2283 } 2284 2285 /* Grow a TREE_VEC node to new length LEN. */ 2286 2287 tree 2288 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL) 2289 { 2290 gcc_assert (TREE_CODE (v) == TREE_VEC); 2291 2292 int oldlen = TREE_VEC_LENGTH (v); 2293 gcc_assert (len > oldlen); 2294 2295 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec); 2296 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec); 2297 2298 record_node_allocation_statistics (TREE_VEC, length - oldlength); 2299 2300 v = (tree) ggc_realloc (v, length PASS_MEM_STAT); 2301 2302 TREE_VEC_LENGTH (v) = len; 2303 2304 return v; 2305 } 2306 2307 /* Return 1 if EXPR is the constant zero, whether it is integral, float or 2308 fixed, and scalar, complex or vector. 
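   A typical use is deciding whether an initializer can simply be treated as
   all-zero storage; a minimal sketch, with init standing for a hypothetical
   tree holding an already-folded initializer:

     if (init && zerop (init))
       init = NULL_TREE;   // nothing to emit beyond zero-initialization

   Note that, via real_zerop below, a REAL_CST of decimal floating type is
   deliberately not accepted, since its trailing zeroes are significant.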
*/ 2309 2310 int 2311 zerop (const_tree expr) 2312 { 2313 return (integer_zerop (expr) 2314 || real_zerop (expr) 2315 || fixed_zerop (expr)); 2316 } 2317 2318 /* Return 1 if EXPR is the integer constant zero or a complex constant 2319 of zero. */ 2320 2321 int 2322 integer_zerop (const_tree expr) 2323 { 2324 switch (TREE_CODE (expr)) 2325 { 2326 case INTEGER_CST: 2327 return wi::eq_p (expr, 0); 2328 case COMPLEX_CST: 2329 return (integer_zerop (TREE_REALPART (expr)) 2330 && integer_zerop (TREE_IMAGPART (expr))); 2331 case VECTOR_CST: 2332 { 2333 unsigned i; 2334 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2335 if (!integer_zerop (VECTOR_CST_ELT (expr, i))) 2336 return false; 2337 return true; 2338 } 2339 default: 2340 return false; 2341 } 2342 } 2343 2344 /* Return 1 if EXPR is the integer constant one or the corresponding 2345 complex constant. */ 2346 2347 int 2348 integer_onep (const_tree expr) 2349 { 2350 switch (TREE_CODE (expr)) 2351 { 2352 case INTEGER_CST: 2353 return wi::eq_p (wi::to_widest (expr), 1); 2354 case COMPLEX_CST: 2355 return (integer_onep (TREE_REALPART (expr)) 2356 && integer_zerop (TREE_IMAGPART (expr))); 2357 case VECTOR_CST: 2358 { 2359 unsigned i; 2360 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2361 if (!integer_onep (VECTOR_CST_ELT (expr, i))) 2362 return false; 2363 return true; 2364 } 2365 default: 2366 return false; 2367 } 2368 } 2369 2370 /* Return 1 if EXPR is the integer constant one. For complex and vector, 2371 return 1 if every piece is the integer constant one. */ 2372 2373 int 2374 integer_each_onep (const_tree expr) 2375 { 2376 if (TREE_CODE (expr) == COMPLEX_CST) 2377 return (integer_onep (TREE_REALPART (expr)) 2378 && integer_onep (TREE_IMAGPART (expr))); 2379 else 2380 return integer_onep (expr); 2381 } 2382 2383 /* Return 1 if EXPR is an integer containing all 1's in as much precision as 2384 it contains, or a complex or vector whose subparts are such integers. */ 2385 2386 int 2387 integer_all_onesp (const_tree expr) 2388 { 2389 if (TREE_CODE (expr) == COMPLEX_CST 2390 && integer_all_onesp (TREE_REALPART (expr)) 2391 && integer_all_onesp (TREE_IMAGPART (expr))) 2392 return 1; 2393 2394 else if (TREE_CODE (expr) == VECTOR_CST) 2395 { 2396 unsigned i; 2397 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2398 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i))) 2399 return 0; 2400 return 1; 2401 } 2402 2403 else if (TREE_CODE (expr) != INTEGER_CST) 2404 return 0; 2405 2406 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr; 2407 } 2408 2409 /* Return 1 if EXPR is the integer constant minus one. */ 2410 2411 int 2412 integer_minus_onep (const_tree expr) 2413 { 2414 if (TREE_CODE (expr) == COMPLEX_CST) 2415 return (integer_all_onesp (TREE_REALPART (expr)) 2416 && integer_zerop (TREE_IMAGPART (expr))); 2417 else 2418 return integer_all_onesp (expr); 2419 } 2420 2421 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only 2422 one bit on). */ 2423 2424 int 2425 integer_pow2p (const_tree expr) 2426 { 2427 if (TREE_CODE (expr) == COMPLEX_CST 2428 && integer_pow2p (TREE_REALPART (expr)) 2429 && integer_zerop (TREE_IMAGPART (expr))) 2430 return 1; 2431 2432 if (TREE_CODE (expr) != INTEGER_CST) 2433 return 0; 2434 2435 return wi::popcount (expr) == 1; 2436 } 2437 2438 /* Return 1 if EXPR is an integer constant other than zero or a 2439 complex constant other than zero. 
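   This is not simply the negation of integer_zerop: both predicates answer 0
   for trees they do not recognize (an SSA_NAME, say), so a non-constant
   expression is neither known zero nor known nonzero.  A minimal sketch,
   with cond standing for a hypothetical tree:

     if (integer_nonzerop (cond))
       ;   // condition is statically true
     else if (integer_zerop (cond))
       ;   // condition is statically false
     else
       ;   // cannot tell from the tree alone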
*/ 2440 2441 int 2442 integer_nonzerop (const_tree expr) 2443 { 2444 return ((TREE_CODE (expr) == INTEGER_CST 2445 && !wi::eq_p (expr, 0)) 2446 || (TREE_CODE (expr) == COMPLEX_CST 2447 && (integer_nonzerop (TREE_REALPART (expr)) 2448 || integer_nonzerop (TREE_IMAGPART (expr))))); 2449 } 2450 2451 /* Return 1 if EXPR is the integer constant one. For vector, 2452 return 1 if every piece is the integer constant minus one 2453 (representing the value TRUE). */ 2454 2455 int 2456 integer_truep (const_tree expr) 2457 { 2458 if (TREE_CODE (expr) == VECTOR_CST) 2459 return integer_all_onesp (expr); 2460 return integer_onep (expr); 2461 } 2462 2463 /* Return 1 if EXPR is the fixed-point constant zero. */ 2464 2465 int 2466 fixed_zerop (const_tree expr) 2467 { 2468 return (TREE_CODE (expr) == FIXED_CST 2469 && TREE_FIXED_CST (expr).data.is_zero ()); 2470 } 2471 2472 /* Return the power of two represented by a tree node known to be a 2473 power of two. */ 2474 2475 int 2476 tree_log2 (const_tree expr) 2477 { 2478 if (TREE_CODE (expr) == COMPLEX_CST) 2479 return tree_log2 (TREE_REALPART (expr)); 2480 2481 return wi::exact_log2 (expr); 2482 } 2483 2484 /* Similar, but return the largest integer Y such that 2 ** Y is less 2485 than or equal to EXPR. */ 2486 2487 int 2488 tree_floor_log2 (const_tree expr) 2489 { 2490 if (TREE_CODE (expr) == COMPLEX_CST) 2491 return tree_log2 (TREE_REALPART (expr)); 2492 2493 return wi::floor_log2 (expr); 2494 } 2495 2496 /* Return number of known trailing zero bits in EXPR, or, if the value of 2497 EXPR is known to be zero, the precision of its type. */ 2498 2499 unsigned int 2500 tree_ctz (const_tree expr) 2501 { 2502 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr)) 2503 && !POINTER_TYPE_P (TREE_TYPE (expr))) 2504 return 0; 2505 2506 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr)); 2507 switch (TREE_CODE (expr)) 2508 { 2509 case INTEGER_CST: 2510 ret1 = wi::ctz (expr); 2511 return MIN (ret1, prec); 2512 case SSA_NAME: 2513 ret1 = wi::ctz (get_nonzero_bits (expr)); 2514 return MIN (ret1, prec); 2515 case PLUS_EXPR: 2516 case MINUS_EXPR: 2517 case BIT_IOR_EXPR: 2518 case BIT_XOR_EXPR: 2519 case MIN_EXPR: 2520 case MAX_EXPR: 2521 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2522 if (ret1 == 0) 2523 return ret1; 2524 ret2 = tree_ctz (TREE_OPERAND (expr, 1)); 2525 return MIN (ret1, ret2); 2526 case POINTER_PLUS_EXPR: 2527 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2528 ret2 = tree_ctz (TREE_OPERAND (expr, 1)); 2529 /* Second operand is sizetype, which could be in theory 2530 wider than pointer's precision. Make sure we never 2531 return more than prec.
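   As a worked example: with 32-bit pointers and a 64-bit sizetype, an
   offset known to be a multiple of 2^40 gives ret2 == 40 even though only
   32 result bits exist, so the MIN just below clamps it back to prec.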
*/ 2532 ret2 = MIN (ret2, prec); 2533 return MIN (ret1, ret2); 2534 case BIT_AND_EXPR: 2535 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2536 ret2 = tree_ctz (TREE_OPERAND (expr, 1)); 2537 return MAX (ret1, ret2); 2538 case MULT_EXPR: 2539 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2540 ret2 = tree_ctz (TREE_OPERAND (expr, 1)); 2541 return MIN (ret1 + ret2, prec); 2542 case LSHIFT_EXPR: 2543 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2544 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1)) 2545 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec)) 2546 { 2547 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1)); 2548 return MIN (ret1 + ret2, prec); 2549 } 2550 return ret1; 2551 case RSHIFT_EXPR: 2552 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1)) 2553 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec)) 2554 { 2555 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2556 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1)); 2557 if (ret1 > ret2) 2558 return ret1 - ret2; 2559 } 2560 return 0; 2561 case TRUNC_DIV_EXPR: 2562 case CEIL_DIV_EXPR: 2563 case FLOOR_DIV_EXPR: 2564 case ROUND_DIV_EXPR: 2565 case EXACT_DIV_EXPR: 2566 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST 2567 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1) 2568 { 2569 int l = tree_log2 (TREE_OPERAND (expr, 1)); 2570 if (l >= 0) 2571 { 2572 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2573 ret2 = l; 2574 if (ret1 > ret2) 2575 return ret1 - ret2; 2576 } 2577 } 2578 return 0; 2579 CASE_CONVERT: 2580 ret1 = tree_ctz (TREE_OPERAND (expr, 0)); 2581 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0)))) 2582 ret1 = prec; 2583 return MIN (ret1, prec); 2584 case SAVE_EXPR: 2585 return tree_ctz (TREE_OPERAND (expr, 0)); 2586 case COND_EXPR: 2587 ret1 = tree_ctz (TREE_OPERAND (expr, 1)); 2588 if (ret1 == 0) 2589 return 0; 2590 ret2 = tree_ctz (TREE_OPERAND (expr, 2)); 2591 return MIN (ret1, ret2); 2592 case COMPOUND_EXPR: 2593 return tree_ctz (TREE_OPERAND (expr, 1)); 2594 case ADDR_EXPR: 2595 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr)); 2596 if (ret1 > BITS_PER_UNIT) 2597 { 2598 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT); 2599 return MIN (ret1, prec); 2600 } 2601 return 0; 2602 default: 2603 return 0; 2604 } 2605 } 2606 2607 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for 2608 decimal float constants, so don't return 1 for them. */ 2609 2610 int 2611 real_zerop (const_tree expr) 2612 { 2613 switch (TREE_CODE (expr)) 2614 { 2615 case REAL_CST: 2616 return real_equal (&TREE_REAL_CST (expr), &dconst0) 2617 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))); 2618 case COMPLEX_CST: 2619 return real_zerop (TREE_REALPART (expr)) 2620 && real_zerop (TREE_IMAGPART (expr)); 2621 case VECTOR_CST: 2622 { 2623 unsigned i; 2624 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2625 if (!real_zerop (VECTOR_CST_ELT (expr, i))) 2626 return false; 2627 return true; 2628 } 2629 default: 2630 return false; 2631 } 2632 } 2633 2634 /* Return 1 if EXPR is the real constant one in real or complex form. 2635 Trailing zeroes matter for decimal float constants, so don't return 2636 1 for them. 
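   In other words, for binary floating types real_onep (build_real (type,
   dconst1)) is 1, while for decimal types the quantum exponent makes 1 and
   1.00 distinct representations, so the predicate conservatively answers 0
   there.  A simplification such as x * 1.0 -> x would typically be guarded
   by this predicate for exactly that reason.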
*/ 2637 2638 int 2639 real_onep (const_tree expr) 2640 { 2641 switch (TREE_CODE (expr)) 2642 { 2643 case REAL_CST: 2644 return real_equal (&TREE_REAL_CST (expr), &dconst1) 2645 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))); 2646 case COMPLEX_CST: 2647 return real_onep (TREE_REALPART (expr)) 2648 && real_zerop (TREE_IMAGPART (expr)); 2649 case VECTOR_CST: 2650 { 2651 unsigned i; 2652 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2653 if (!real_onep (VECTOR_CST_ELT (expr, i))) 2654 return false; 2655 return true; 2656 } 2657 default: 2658 return false; 2659 } 2660 } 2661 2662 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes 2663 matter for decimal float constants, so don't return 1 for them. */ 2664 2665 int 2666 real_minus_onep (const_tree expr) 2667 { 2668 switch (TREE_CODE (expr)) 2669 { 2670 case REAL_CST: 2671 return real_equal (&TREE_REAL_CST (expr), &dconstm1) 2672 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))); 2673 case COMPLEX_CST: 2674 return real_minus_onep (TREE_REALPART (expr)) 2675 && real_zerop (TREE_IMAGPART (expr)); 2676 case VECTOR_CST: 2677 { 2678 unsigned i; 2679 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2680 if (!real_minus_onep (VECTOR_CST_ELT (expr, i))) 2681 return false; 2682 return true; 2683 } 2684 default: 2685 return false; 2686 } 2687 } 2688 2689 /* Nonzero if EXP is a constant or a cast of a constant. */ 2690 2691 int 2692 really_constant_p (const_tree exp) 2693 { 2694 /* This is not quite the same as STRIP_NOPS. It does more. */ 2695 while (CONVERT_EXPR_P (exp) 2696 || TREE_CODE (exp) == NON_LVALUE_EXPR) 2697 exp = TREE_OPERAND (exp, 0); 2698 return TREE_CONSTANT (exp); 2699 } 2700 2701 /* Return first list element whose TREE_VALUE is ELEM. 2702 Return 0 if ELEM is not in LIST. */ 2703 2704 tree 2705 value_member (tree elem, tree list) 2706 { 2707 while (list) 2708 { 2709 if (elem == TREE_VALUE (list)) 2710 return list; 2711 list = TREE_CHAIN (list); 2712 } 2713 return NULL_TREE; 2714 } 2715 2716 /* Return first list element whose TREE_PURPOSE is ELEM. 2717 Return 0 if ELEM is not in LIST. */ 2718 2719 tree 2720 purpose_member (const_tree elem, tree list) 2721 { 2722 while (list) 2723 { 2724 if (elem == TREE_PURPOSE (list)) 2725 return list; 2726 list = TREE_CHAIN (list); 2727 } 2728 return NULL_TREE; 2729 } 2730 2731 /* Return true if ELEM is in V. */ 2732 2733 bool 2734 vec_member (const_tree elem, vec<tree, va_gc> *v) 2735 { 2736 unsigned ix; 2737 tree t; 2738 FOR_EACH_VEC_SAFE_ELT (v, ix, t) 2739 if (elem == t) 2740 return true; 2741 return false; 2742 } 2743 2744 /* Returns element number IDX (zero-origin) of chain CHAIN, or 2745 NULL_TREE. */ 2746 2747 tree 2748 chain_index (int idx, tree chain) 2749 { 2750 for (; chain && idx > 0; --idx) 2751 chain = TREE_CHAIN (chain); 2752 return chain; 2753 } 2754 2755 /* Return nonzero if ELEM is part of the chain CHAIN. */ 2756 2757 int 2758 chain_member (const_tree elem, const_tree chain) 2759 { 2760 while (chain) 2761 { 2762 if (elem == chain) 2763 return 1; 2764 chain = DECL_CHAIN (chain); 2765 } 2766 2767 return 0; 2768 } 2769 2770 /* Return the length of a chain of nodes chained through TREE_CHAIN. 2771 We expect a null pointer to mark the end of the chain. 2772 This is the Lisp primitive `length'. 
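   When ENABLE_TREE_CHECKING is defined, the body below also walks a second
   pointer at half speed (the classic tortoise-and-hare check) and asserts
   the two never meet, so a circular TREE_CHAIN is caught rather than looped
   on forever.  A minimal sketch, with args standing for a hypothetical
   TREE_LIST chain:

     int nargs = list_length (args);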
*/ 2773 2774 int 2775 list_length (const_tree t) 2776 { 2777 const_tree p = t; 2778 #ifdef ENABLE_TREE_CHECKING 2779 const_tree q = t; 2780 #endif 2781 int len = 0; 2782 2783 while (p) 2784 { 2785 p = TREE_CHAIN (p); 2786 #ifdef ENABLE_TREE_CHECKING 2787 if (len % 2) 2788 q = TREE_CHAIN (q); 2789 gcc_assert (p != q); 2790 #endif 2791 len++; 2792 } 2793 2794 return len; 2795 } 2796 2797 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or 2798 UNION_TYPE TYPE, or NULL_TREE if none. */ 2799 2800 tree 2801 first_field (const_tree type) 2802 { 2803 tree t = TYPE_FIELDS (type); 2804 while (t && TREE_CODE (t) != FIELD_DECL) 2805 t = TREE_CHAIN (t); 2806 return t; 2807 } 2808 2809 /* Concatenate two chains of nodes (chained through TREE_CHAIN) 2810 by modifying the last node in chain 1 to point to chain 2. 2811 This is the Lisp primitive `nconc'. */ 2812 2813 tree 2814 chainon (tree op1, tree op2) 2815 { 2816 tree t1; 2817 2818 if (!op1) 2819 return op2; 2820 if (!op2) 2821 return op1; 2822 2823 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1)) 2824 continue; 2825 TREE_CHAIN (t1) = op2; 2826 2827 #ifdef ENABLE_TREE_CHECKING 2828 { 2829 tree t2; 2830 for (t2 = op2; t2; t2 = TREE_CHAIN (t2)) 2831 gcc_assert (t2 != t1); 2832 } 2833 #endif 2834 2835 return op1; 2836 } 2837 2838 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */ 2839 2840 tree 2841 tree_last (tree chain) 2842 { 2843 tree next; 2844 if (chain) 2845 while ((next = TREE_CHAIN (chain))) 2846 chain = next; 2847 return chain; 2848 } 2849 2850 /* Reverse the order of elements in the chain T, 2851 and return the new head of the chain (old last element). */ 2852 2853 tree 2854 nreverse (tree t) 2855 { 2856 tree prev = 0, decl, next; 2857 for (decl = t; decl; decl = next) 2858 { 2859 /* We shouldn't be using this function to reverse BLOCK chains; we 2860 have blocks_nreverse for that. */ 2861 gcc_checking_assert (TREE_CODE (decl) != BLOCK); 2862 next = TREE_CHAIN (decl); 2863 TREE_CHAIN (decl) = prev; 2864 prev = decl; 2865 } 2866 return prev; 2867 } 2868 2869 /* Return a newly created TREE_LIST node whose 2870 purpose and value fields are PARM and VALUE. */ 2871 2872 tree 2873 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL) 2874 { 2875 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT); 2876 TREE_PURPOSE (t) = parm; 2877 TREE_VALUE (t) = value; 2878 return t; 2879 } 2880 2881 /* Build a chain of TREE_LIST nodes from a vector. */ 2882 2883 tree 2884 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL) 2885 { 2886 tree ret = NULL_TREE; 2887 tree *pp = &ret; 2888 unsigned int i; 2889 tree t; 2890 FOR_EACH_VEC_SAFE_ELT (vec, i, t) 2891 { 2892 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT); 2893 pp = &TREE_CHAIN (*pp); 2894 } 2895 return ret; 2896 } 2897 2898 /* Return a newly created TREE_LIST node whose 2899 purpose and value fields are PURPOSE and VALUE 2900 and whose TREE_CHAIN is CHAIN. 
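   Callers normally reach this through the tree_cons wrapper, consing a new
   element onto the front of an existing chain much as in Lisp.  A minimal
   sketch, with attr_name and attrs standing for hypothetical trees:

     attrs = tree_cons (attr_name, NULL_TREE, attrs);

   Chains built front-first this way often end up in reverse order and are
   then fixed up with nreverse above.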
*/ 2901 2902 tree 2903 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL) 2904 { 2905 tree node; 2906 2907 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT); 2908 memset (node, 0, sizeof (struct tree_common)); 2909 2910 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list)); 2911 2912 TREE_SET_CODE (node, TREE_LIST); 2913 TREE_CHAIN (node) = chain; 2914 TREE_PURPOSE (node) = purpose; 2915 TREE_VALUE (node) = value; 2916 return node; 2917 } 2918 2919 /* Return the values of the elements of a CONSTRUCTOR as a vector of 2920 trees. */ 2921 2922 vec<tree, va_gc> * 2923 ctor_to_vec (tree ctor) 2924 { 2925 vec<tree, va_gc> *vec; 2926 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor)); 2927 unsigned int ix; 2928 tree val; 2929 2930 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val) 2931 vec->quick_push (val); 2932 2933 return vec; 2934 } 2935 2936 /* Return the size nominally occupied by an object of type TYPE 2937 when it resides in memory. The value is measured in units of bytes, 2938 and its data type is that normally used for type sizes 2939 (which is the first type created by make_signed_type or 2940 make_unsigned_type). */ 2941 2942 tree 2943 size_in_bytes_loc (location_t loc, const_tree type) 2944 { 2945 tree t; 2946 2947 if (type == error_mark_node) 2948 return integer_zero_node; 2949 2950 type = TYPE_MAIN_VARIANT (type); 2951 t = TYPE_SIZE_UNIT (type); 2952 2953 if (t == 0) 2954 { 2955 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type); 2956 return size_zero_node; 2957 } 2958 2959 return t; 2960 } 2961 2962 /* Return the size of TYPE (in bytes) as a wide integer 2963 or return -1 if the size can vary or is larger than an integer. */ 2964 2965 HOST_WIDE_INT 2966 int_size_in_bytes (const_tree type) 2967 { 2968 tree t; 2969 2970 if (type == error_mark_node) 2971 return 0; 2972 2973 type = TYPE_MAIN_VARIANT (type); 2974 t = TYPE_SIZE_UNIT (type); 2975 2976 if (t && tree_fits_uhwi_p (t)) 2977 return TREE_INT_CST_LOW (t); 2978 else 2979 return -1; 2980 } 2981 2982 /* Return the maximum size of TYPE (in bytes) as a wide integer 2983 or return -1 if the size can vary or is larger than an integer. */ 2984 2985 HOST_WIDE_INT 2986 max_int_size_in_bytes (const_tree type) 2987 { 2988 HOST_WIDE_INT size = -1; 2989 tree size_tree; 2990 2991 /* If this is an array type, check for a possible MAX_SIZE attached. */ 2992 2993 if (TREE_CODE (type) == ARRAY_TYPE) 2994 { 2995 size_tree = TYPE_ARRAY_MAX_SIZE (type); 2996 2997 if (size_tree && tree_fits_uhwi_p (size_tree)) 2998 size = tree_to_uhwi (size_tree); 2999 } 3000 3001 /* If we still haven't been able to get a size, see if the language 3002 can compute a maximum size. */ 3003 3004 if (size == -1) 3005 { 3006 size_tree = lang_hooks.types.max_size (type); 3007 3008 if (size_tree && tree_fits_uhwi_p (size_tree)) 3009 size = tree_to_uhwi (size_tree); 3010 } 3011 3012 return size; 3013 } 3014 3015 /* Return the bit position of FIELD, in bits from the start of the record. 3016 This is a tree of type bitsizetype. */ 3017 3018 tree 3019 bit_position (const_tree field) 3020 { 3021 return bit_from_pos (DECL_FIELD_OFFSET (field), 3022 DECL_FIELD_BIT_OFFSET (field)); 3023 } 3024 3025 /* Return the byte position of FIELD, in bytes from the start of the record. 3026 This is a tree of type sizetype. 
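   Like bit_position just above, this merely recombines the split
   representation of a field's position: DECL_FIELD_OFFSET counts bytes (as
   a possibly variable tree) and DECL_FIELD_BIT_OFFSET the remaining bits,
   so the result is, roughly,

     DECL_FIELD_OFFSET (field) + DECL_FIELD_BIT_OFFSET (field) / BITS_PER_UNIT

   with byte_from_pos doing the actual folding.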
*/ 3028 tree 3029 byte_position (const_tree field) 3030 { 3031 return byte_from_pos (DECL_FIELD_OFFSET (field), 3032 DECL_FIELD_BIT_OFFSET (field)); 3033 } 3034 3035 /* Likewise, but return as an integer. It must be representable in 3036 that way (since it could be a signed value, we don't have the 3037 option of returning -1 like int_size_in_bytes can). */ 3038 3039 HOST_WIDE_INT 3040 int_byte_position (const_tree field) 3041 { 3042 return tree_to_shwi (byte_position (field)); 3043 } 3044 3045 /* Return the strictest alignment, in bits, that T is known to have. */ 3046 3047 unsigned int 3048 expr_align (const_tree t) 3049 { 3050 unsigned int align0, align1; 3051 3052 switch (TREE_CODE (t)) 3053 { 3054 CASE_CONVERT: case NON_LVALUE_EXPR: 3055 /* If we have conversions, we know that the alignment of the 3056 object must meet each of the alignments of the types. */ 3057 align0 = expr_align (TREE_OPERAND (t, 0)); 3058 align1 = TYPE_ALIGN (TREE_TYPE (t)); 3059 return MAX (align0, align1); 3060 3061 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR: 3062 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR: 3063 case CLEANUP_POINT_EXPR: 3064 /* These don't change the alignment of an object. */ 3065 return expr_align (TREE_OPERAND (t, 0)); 3066 3067 case COND_EXPR: 3068 /* The best we can do is say that the alignment is the least aligned 3069 of the two arms. */ 3070 align0 = expr_align (TREE_OPERAND (t, 1)); 3071 align1 = expr_align (TREE_OPERAND (t, 2)); 3072 return MIN (align0, align1); 3073 3074 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set 3075 meaningfully, it's always 1. */ 3076 case LABEL_DECL: case CONST_DECL: 3077 case VAR_DECL: case PARM_DECL: case RESULT_DECL: 3078 case FUNCTION_DECL: 3079 gcc_assert (DECL_ALIGN (t) != 0); 3080 return DECL_ALIGN (t); 3081 3082 default: 3083 break; 3084 } 3085 3086 /* Otherwise take the alignment from that of the type. */ 3087 return TYPE_ALIGN (TREE_TYPE (t)); 3088 } 3089 3090 /* Return, as a tree node, the number of elements for TYPE (which is an 3091 ARRAY_TYPE) minus one. This counts only elements of the top array. */ 3092 3093 tree 3094 array_type_nelts (const_tree type) 3095 { 3096 tree index_type, min, max; 3097 3098 /* If they did it with unspecified bounds, then we should have already 3099 given an error about it before we got here. */ 3100 if (! TYPE_DOMAIN (type)) 3101 return error_mark_node; 3102 3103 index_type = TYPE_DOMAIN (type); 3104 min = TYPE_MIN_VALUE (index_type); 3105 max = TYPE_MAX_VALUE (index_type); 3106 3107 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */ 3108 if (!max) 3109 return error_mark_node; 3110 3111 return (integer_zerop (min) 3112 ? max 3113 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min)); 3114 } 3115 3116 /* If arg is static -- a reference to an object in static storage -- then 3117 return the object. This is not the same as the C meaning of `static'. 3118 If arg isn't static, return NULL. */ 3119 3120 tree 3121 staticp (tree arg) 3122 { 3123 switch (TREE_CODE (arg)) 3124 { 3125 case FUNCTION_DECL: 3126 /* Nested functions are static, even though taking their address will 3127 involve a trampoline as we unnest the nested function and create 3128 the trampoline on the tree level. */ 3129 return arg; 3130 3131 case VAR_DECL: 3132 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg)) 3133 && ! DECL_THREAD_LOCAL_P (arg) 3134 && ! DECL_DLLIMPORT_P (arg) 3135 ? arg : NULL); 3136 3137 case CONST_DECL: 3138 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg)) 3139 ?
arg : NULL); 3140 3141 case CONSTRUCTOR: 3142 return TREE_STATIC (arg) ? arg : NULL; 3143 3144 case LABEL_DECL: 3145 case STRING_CST: 3146 return arg; 3147 3148 case COMPONENT_REF: 3149 /* If the thing being referenced is not a field, then it is 3150 something language specific. */ 3151 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL); 3152 3153 /* If we are referencing a bitfield, we can't evaluate an 3154 ADDR_EXPR at compile time and so it isn't a constant. */ 3155 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1))) 3156 return NULL; 3157 3158 return staticp (TREE_OPERAND (arg, 0)); 3159 3160 case BIT_FIELD_REF: 3161 return NULL; 3162 3163 case INDIRECT_REF: 3164 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL; 3165 3166 case ARRAY_REF: 3167 case ARRAY_RANGE_REF: 3168 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST 3169 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST) 3170 return staticp (TREE_OPERAND (arg, 0)); 3171 else 3172 return NULL; 3173 3174 case COMPOUND_LITERAL_EXPR: 3175 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL; 3176 3177 default: 3178 return NULL; 3179 } 3180 } 3181 3182 3183 3184 3185 /* Return whether OP is a DECL whose address is function-invariant. */ 3186 3187 bool 3188 decl_address_invariant_p (const_tree op) 3189 { 3190 /* The conditions below are slightly less strict than the one in 3191 staticp. */ 3192 3193 switch (TREE_CODE (op)) 3194 { 3195 case PARM_DECL: 3196 case RESULT_DECL: 3197 case LABEL_DECL: 3198 case FUNCTION_DECL: 3199 return true; 3200 3201 case VAR_DECL: 3202 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)) 3203 || DECL_THREAD_LOCAL_P (op) 3204 || DECL_CONTEXT (op) == current_function_decl 3205 || decl_function_context (op) == current_function_decl) 3206 return true; 3207 break; 3208 3209 case CONST_DECL: 3210 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)) 3211 || decl_function_context (op) == current_function_decl) 3212 return true; 3213 break; 3214 3215 default: 3216 break; 3217 } 3218 3219 return false; 3220 } 3221 3222 /* Return whether OP is a DECL whose address is interprocedural-invariant. */ 3223 3224 bool 3225 decl_address_ip_invariant_p (const_tree op) 3226 { 3227 /* The conditions below are slightly less strict than the one in 3228 staticp. */ 3229 3230 switch (TREE_CODE (op)) 3231 { 3232 case LABEL_DECL: 3233 case FUNCTION_DECL: 3234 case STRING_CST: 3235 return true; 3236 3237 case VAR_DECL: 3238 if (((TREE_STATIC (op) || DECL_EXTERNAL (op)) 3239 && !DECL_DLLIMPORT_P (op)) 3240 || DECL_THREAD_LOCAL_P (op)) 3241 return true; 3242 break; 3243 3244 case CONST_DECL: 3245 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))) 3246 return true; 3247 break; 3248 3249 default: 3250 break; 3251 } 3252 3253 return false; 3254 } 3255 3256 3257 /* Return true if T is function-invariant (internal function, does 3258 not handle arithmetic; that's handled in skip_simple_arithmetic and 3259 tree_invariant_p). 
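   The split matters for SAVE_EXPR avoidance: the public entry point first
   strips simple arithmetic and then asks this routine about what remains,
   i.e.

     tree_invariant_p (t) == tree_invariant_p_1 (skip_simple_arithmetic (t))

   so an expression like invariant + 1 is still treated as invariant and
   save_expr need not wrap it.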
*/ 3260 3261 static bool 3262 tree_invariant_p_1 (tree t) 3263 { 3264 tree op; 3265 3266 if (TREE_CONSTANT (t) 3267 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t))) 3268 return true; 3269 3270 switch (TREE_CODE (t)) 3271 { 3272 case SAVE_EXPR: 3273 return true; 3274 3275 case ADDR_EXPR: 3276 op = TREE_OPERAND (t, 0); 3277 while (handled_component_p (op)) 3278 { 3279 switch (TREE_CODE (op)) 3280 { 3281 case ARRAY_REF: 3282 case ARRAY_RANGE_REF: 3283 if (!tree_invariant_p (TREE_OPERAND (op, 1)) 3284 || TREE_OPERAND (op, 2) != NULL_TREE 3285 || TREE_OPERAND (op, 3) != NULL_TREE) 3286 return false; 3287 break; 3288 3289 case COMPONENT_REF: 3290 if (TREE_OPERAND (op, 2) != NULL_TREE) 3291 return false; 3292 break; 3293 3294 default:; 3295 } 3296 op = TREE_OPERAND (op, 0); 3297 } 3298 3299 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op); 3300 3301 default: 3302 break; 3303 } 3304 3305 return false; 3306 } 3307 3308 /* Return true if T is function-invariant. */ 3309 3310 bool 3311 tree_invariant_p (tree t) 3312 { 3313 tree inner = skip_simple_arithmetic (t); 3314 return tree_invariant_p_1 (inner); 3315 } 3316 3317 /* Wrap a SAVE_EXPR around EXPR, if appropriate. 3318 Do this to any expression which may be used in more than one place, 3319 but must be evaluated only once. 3320 3321 Normally, expand_expr would reevaluate the expression each time. 3322 Calling save_expr produces something that is evaluated and recorded 3323 the first time expand_expr is called on it. Subsequent calls to 3324 expand_expr just reuse the recorded value. 3325 3326 The call to expand_expr that generates code that actually computes 3327 the value is the first call *at compile time*. Subsequent calls 3328 *at compile time* generate code to use the saved value. 3329 This produces correct result provided that *at run time* control 3330 always flows through the insns made by the first expand_expr 3331 before reaching the other places where the save_expr was evaluated. 3332 You, the caller of save_expr, must make sure this is so. 3333 3334 Constants, and certain read-only nodes, are returned with no 3335 SAVE_EXPR because that is safe. Expressions containing placeholders 3336 are not touched; see tree.def for an explanation of what these 3337 are used for. */ 3338 3339 tree 3340 save_expr (tree expr) 3341 { 3342 tree t = fold (expr); 3343 tree inner; 3344 3345 /* If the tree evaluates to a constant, then we don't want to hide that 3346 fact (i.e. this allows further folding, and direct checks for constants). 3347 However, a read-only object that has side effects cannot be bypassed. 3348 Since it is no problem to reevaluate literals, we just return the 3349 literal node. */ 3350 inner = skip_simple_arithmetic (t); 3351 if (TREE_CODE (inner) == ERROR_MARK) 3352 return inner; 3353 3354 if (tree_invariant_p_1 (inner)) 3355 return t; 3356 3357 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since 3358 it means that the size or offset of some field of an object depends on 3359 the value within another field. 3360 3361 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR 3362 and some variable since it would then need to be both evaluated once and 3363 evaluated more than once. Front-ends must assure this case cannot 3364 happen by surrounding any such subexpressions in their own SAVE_EXPR 3365 and forcing evaluation at the proper time. 
*/ 3366 if (contains_placeholder_p (inner)) 3367 return t; 3368 3369 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t); 3370 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr)); 3371 3372 /* This expression might be placed ahead of a jump to ensure that the 3373 value was computed on both sides of the jump. So make sure it isn't 3374 eliminated as dead. */ 3375 TREE_SIDE_EFFECTS (t) = 1; 3376 return t; 3377 } 3378 3379 /* Look inside EXPR into any simple arithmetic operations. Return the 3380 outermost non-arithmetic or non-invariant node. */ 3381 3382 tree 3383 skip_simple_arithmetic (tree expr) 3384 { 3385 /* We don't care about whether this can be used as an lvalue in this 3386 context. */ 3387 while (TREE_CODE (expr) == NON_LVALUE_EXPR) 3388 expr = TREE_OPERAND (expr, 0); 3389 3390 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and 3391 a constant, it will be more efficient to not make another SAVE_EXPR since 3392 it will allow better simplification and GCSE will be able to merge the 3393 computations if they actually occur. */ 3394 while (true) 3395 { 3396 if (UNARY_CLASS_P (expr)) 3397 expr = TREE_OPERAND (expr, 0); 3398 else if (BINARY_CLASS_P (expr)) 3399 { 3400 if (tree_invariant_p (TREE_OPERAND (expr, 1))) 3401 expr = TREE_OPERAND (expr, 0); 3402 else if (tree_invariant_p (TREE_OPERAND (expr, 0))) 3403 expr = TREE_OPERAND (expr, 1); 3404 else 3405 break; 3406 } 3407 else 3408 break; 3409 } 3410 3411 return expr; 3412 } 3413 3414 /* Look inside EXPR into simple arithmetic operations involving constants. 3415 Return the outermost non-arithmetic or non-constant node. */ 3416 3417 tree 3418 skip_simple_constant_arithmetic (tree expr) 3419 { 3420 while (TREE_CODE (expr) == NON_LVALUE_EXPR) 3421 expr = TREE_OPERAND (expr, 0); 3422 3423 while (true) 3424 { 3425 if (UNARY_CLASS_P (expr)) 3426 expr = TREE_OPERAND (expr, 0); 3427 else if (BINARY_CLASS_P (expr)) 3428 { 3429 if (TREE_CONSTANT (TREE_OPERAND (expr, 1))) 3430 expr = TREE_OPERAND (expr, 0); 3431 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0))) 3432 expr = TREE_OPERAND (expr, 1); 3433 else 3434 break; 3435 } 3436 else 3437 break; 3438 } 3439 3440 return expr; 3441 } 3442 3443 /* Return which tree structure is used by T. */ 3444 3445 enum tree_node_structure_enum 3446 tree_node_structure (const_tree t) 3447 { 3448 const enum tree_code code = TREE_CODE (t); 3449 return tree_node_structure_for_code (code); 3450 } 3451 3452 /* Set various status flags when building a CALL_EXPR object T. */ 3453 3454 static void 3455 process_call_operands (tree t) 3456 { 3457 bool side_effects = TREE_SIDE_EFFECTS (t); 3458 bool read_only = false; 3459 int i = call_expr_flags (t); 3460 3461 /* Calls have side-effects, except those to const or pure functions. */ 3462 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE))) 3463 side_effects = true; 3464 /* Propagate TREE_READONLY of arguments for const functions. */ 3465 if (i & ECF_CONST) 3466 read_only = true; 3467 3468 if (!side_effects || read_only) 3469 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++) 3470 { 3471 tree op = TREE_OPERAND (t, i); 3472 if (op && TREE_SIDE_EFFECTS (op)) 3473 side_effects = true; 3474 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op)) 3475 read_only = false; 3476 } 3477 3478 TREE_SIDE_EFFECTS (t) = side_effects; 3479 TREE_READONLY (t) = read_only; 3480 } 3481 3482 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a 3483 size or offset that depends on a field within a record. 
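   The usual source of such trees is a self-referential type, e.g. an Ada
   record whose array component is sized by another field of the same
   object; the reference back to the not-yet-known object is a
   PLACEHOLDER_EXPR, and it is resolved later by
   substitute_placeholder_in_expr below once a concrete object is at hand.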
*/ 3484 3485 bool 3486 contains_placeholder_p (const_tree exp) 3487 { 3488 enum tree_code code; 3489 3490 if (!exp) 3491 return 0; 3492 3493 code = TREE_CODE (exp); 3494 if (code == PLACEHOLDER_EXPR) 3495 return 1; 3496 3497 switch (TREE_CODE_CLASS (code)) 3498 { 3499 case tcc_reference: 3500 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit 3501 position computations since they will be converted into a 3502 WITH_RECORD_EXPR involving the reference, which will assume 3503 here will be valid. */ 3504 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)); 3505 3506 case tcc_exceptional: 3507 if (code == TREE_LIST) 3508 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp)) 3509 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp))); 3510 break; 3511 3512 case tcc_unary: 3513 case tcc_binary: 3514 case tcc_comparison: 3515 case tcc_expression: 3516 switch (code) 3517 { 3518 case COMPOUND_EXPR: 3519 /* Ignoring the first operand isn't quite right, but works best. */ 3520 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)); 3521 3522 case COND_EXPR: 3523 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)) 3524 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)) 3525 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2))); 3526 3527 case SAVE_EXPR: 3528 /* The save_expr function never wraps anything containing 3529 a PLACEHOLDER_EXPR. */ 3530 return 0; 3531 3532 default: 3533 break; 3534 } 3535 3536 switch (TREE_CODE_LENGTH (code)) 3537 { 3538 case 1: 3539 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)); 3540 case 2: 3541 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)) 3542 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))); 3543 default: 3544 return 0; 3545 } 3546 3547 case tcc_vl_exp: 3548 switch (code) 3549 { 3550 case CALL_EXPR: 3551 { 3552 const_tree arg; 3553 const_call_expr_arg_iterator iter; 3554 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp) 3555 if (CONTAINS_PLACEHOLDER_P (arg)) 3556 return 1; 3557 return 0; 3558 } 3559 default: 3560 return 0; 3561 } 3562 3563 default: 3564 return 0; 3565 } 3566 return 0; 3567 } 3568 3569 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR 3570 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and 3571 field positions. */ 3572 3573 static bool 3574 type_contains_placeholder_1 (const_tree type) 3575 { 3576 /* If the size contains a placeholder or the parent type (component type in 3577 the case of arrays) type involves a placeholder, this type does. */ 3578 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)) 3579 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type)) 3580 || (!POINTER_TYPE_P (type) 3581 && TREE_TYPE (type) 3582 && type_contains_placeholder_p (TREE_TYPE (type)))) 3583 return true; 3584 3585 /* Now do type-specific checks. Note that the last part of the check above 3586 greatly limits what we have to do below. */ 3587 switch (TREE_CODE (type)) 3588 { 3589 case VOID_TYPE: 3590 case POINTER_BOUNDS_TYPE: 3591 case COMPLEX_TYPE: 3592 case ENUMERAL_TYPE: 3593 case BOOLEAN_TYPE: 3594 case POINTER_TYPE: 3595 case OFFSET_TYPE: 3596 case REFERENCE_TYPE: 3597 case METHOD_TYPE: 3598 case FUNCTION_TYPE: 3599 case VECTOR_TYPE: 3600 case NULLPTR_TYPE: 3601 return false; 3602 3603 case INTEGER_TYPE: 3604 case REAL_TYPE: 3605 case FIXED_POINT_TYPE: 3606 /* Here we just check the bounds. 
*/ 3607 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type)) 3608 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type))); 3609 3610 case ARRAY_TYPE: 3611 /* We have already checked the component type above, so just check 3612 the domain type. Flexible array members have a null domain. */ 3613 return TYPE_DOMAIN (type) ? 3614 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false; 3615 3616 case RECORD_TYPE: 3617 case UNION_TYPE: 3618 case QUAL_UNION_TYPE: 3619 { 3620 tree field; 3621 3622 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) 3623 if (TREE_CODE (field) == FIELD_DECL 3624 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field)) 3625 || (TREE_CODE (type) == QUAL_UNION_TYPE 3626 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field))) 3627 || type_contains_placeholder_p (TREE_TYPE (field)))) 3628 return true; 3629 3630 return false; 3631 } 3632 3633 default: 3634 gcc_unreachable (); 3635 } 3636 } 3637 3638 /* Wrapper around above function used to cache its result. */ 3639 3640 bool 3641 type_contains_placeholder_p (tree type) 3642 { 3643 bool result; 3644 3645 /* If the contains_placeholder_bits field has been initialized, 3646 then we know the answer. */ 3647 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0) 3648 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1; 3649 3650 /* Indicate that we've seen this type node, and the answer is false. 3651 This is what we want to return if we run into recursion via fields. */ 3652 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1; 3653 3654 /* Compute the real value. */ 3655 result = type_contains_placeholder_1 (type); 3656 3657 /* Store the real value. */ 3658 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1; 3659 3660 return result; 3661 } 3662 3663 /* Push tree EXP onto vector QUEUE if it is not already present. */ 3664 3665 static void 3666 push_without_duplicates (tree exp, vec<tree> *queue) 3667 { 3668 unsigned int i; 3669 tree iter; 3670 3671 FOR_EACH_VEC_ELT (*queue, i, iter) 3672 if (simple_cst_equal (iter, exp) == 1) 3673 break; 3674 3675 if (!iter) 3676 queue->safe_push (exp); 3677 } 3678 3679 /* Given a tree EXP, find all occurrences of references to fields 3680 in a PLACEHOLDER_EXPR and place them in vector REFS without 3681 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that 3682 we assume here that EXP contains only arithmetic expressions 3683 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their 3684 argument list. */ 3685 3686 void 3687 find_placeholder_in_expr (tree exp, vec<tree> *refs) 3688 { 3689 enum tree_code code = TREE_CODE (exp); 3690 tree inner; 3691 int i; 3692 3693 /* We handle TREE_LIST and COMPONENT_REF separately. */ 3694 if (code == TREE_LIST) 3695 { 3696 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs); 3697 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs); 3698 } 3699 else if (code == COMPONENT_REF) 3700 { 3701 for (inner = TREE_OPERAND (exp, 0); 3702 REFERENCE_CLASS_P (inner); 3703 inner = TREE_OPERAND (inner, 0)) 3704 ; 3705 3706 if (TREE_CODE (inner) == PLACEHOLDER_EXPR) 3707 push_without_duplicates (exp, refs); 3708 else 3709 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs); 3710 } 3711 else 3712 switch (TREE_CODE_CLASS (code)) 3713 { 3714 case tcc_constant: 3715 break; 3716 3717 case tcc_declaration: 3718 /* Variables allocated to static storage can stay. */ 3719 if (!TREE_STATIC (exp)) 3720 push_without_duplicates (exp, refs); 3721 break; 3722 3723 case tcc_expression: 3724 /* This is the pattern built in ada/make_aligning_type. 
*/ 3725 if (code == ADDR_EXPR 3726 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR) 3727 { 3728 push_without_duplicates (exp, refs); 3729 break; 3730 } 3731 3732 /* Fall through. */ 3733 3734 case tcc_exceptional: 3735 case tcc_unary: 3736 case tcc_binary: 3737 case tcc_comparison: 3738 case tcc_reference: 3739 for (i = 0; i < TREE_CODE_LENGTH (code); i++) 3740 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs); 3741 break; 3742 3743 case tcc_vl_exp: 3744 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++) 3745 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs); 3746 break; 3747 3748 default: 3749 gcc_unreachable (); 3750 } 3751 } 3752 3753 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R, 3754 return a tree with all occurrences of references to F in a 3755 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and 3756 CONST_DECLs. Note that we assume here that EXP contains only 3757 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs 3758 occurring only in their argument list. */ 3759 3760 tree 3761 substitute_in_expr (tree exp, tree f, tree r) 3762 { 3763 enum tree_code code = TREE_CODE (exp); 3764 tree op0, op1, op2, op3; 3765 tree new_tree; 3766 3767 /* We handle TREE_LIST and COMPONENT_REF separately. */ 3768 if (code == TREE_LIST) 3769 { 3770 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r); 3771 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r); 3772 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp)) 3773 return exp; 3774 3775 return tree_cons (TREE_PURPOSE (exp), op1, op0); 3776 } 3777 else if (code == COMPONENT_REF) 3778 { 3779 tree inner; 3780 3781 /* If this expression is getting a value from a PLACEHOLDER_EXPR 3782 and it is the right field, replace it with R. */ 3783 for (inner = TREE_OPERAND (exp, 0); 3784 REFERENCE_CLASS_P (inner); 3785 inner = TREE_OPERAND (inner, 0)) 3786 ; 3787 3788 /* The field. */ 3789 op1 = TREE_OPERAND (exp, 1); 3790 3791 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f) 3792 return r; 3793 3794 /* If this expression hasn't been completed yet, leave it alone. */ 3795 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner)) 3796 return exp; 3797 3798 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); 3799 if (op0 == TREE_OPERAND (exp, 0)) 3800 return exp; 3801 3802 new_tree 3803 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE); 3804 } 3805 else 3806 switch (TREE_CODE_CLASS (code)) 3807 { 3808 case tcc_constant: 3809 return exp; 3810 3811 case tcc_declaration: 3812 if (exp == f) 3813 return r; 3814 else 3815 return exp; 3816 3817 case tcc_expression: 3818 if (exp == f) 3819 return r; 3820 3821 /* Fall through.
*/ 3822 3823 case tcc_exceptional: 3824 case tcc_unary: 3825 case tcc_binary: 3826 case tcc_comparison: 3827 case tcc_reference: 3828 switch (TREE_CODE_LENGTH (code)) 3829 { 3830 case 0: 3831 return exp; 3832 3833 case 1: 3834 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); 3835 if (op0 == TREE_OPERAND (exp, 0)) 3836 return exp; 3837 3838 new_tree = fold_build1 (code, TREE_TYPE (exp), op0); 3839 break; 3840 3841 case 2: 3842 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); 3843 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r); 3844 3845 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)) 3846 return exp; 3847 3848 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1); 3849 break; 3850 3851 case 3: 3852 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); 3853 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r); 3854 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r); 3855 3856 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) 3857 && op2 == TREE_OPERAND (exp, 2)) 3858 return exp; 3859 3860 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2); 3861 break; 3862 3863 case 4: 3864 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); 3865 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r); 3866 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r); 3867 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r); 3868 3869 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) 3870 && op2 == TREE_OPERAND (exp, 2) 3871 && op3 == TREE_OPERAND (exp, 3)) 3872 return exp; 3873 3874 new_tree 3875 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3)); 3876 break; 3877 3878 default: 3879 gcc_unreachable (); 3880 } 3881 break; 3882 3883 case tcc_vl_exp: 3884 { 3885 int i; 3886 3887 new_tree = NULL_TREE; 3888 3889 /* If we are trying to replace F with a constant, inline back 3890 functions which do nothing else than computing a value from 3891 the arguments they are passed. This makes it possible to 3892 fold partially or entirely the replacement expression. */ 3893 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR) 3894 { 3895 tree t = maybe_inline_call_in_expr (exp); 3896 if (t) 3897 return SUBSTITUTE_IN_EXPR (t, f, r); 3898 } 3899 3900 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++) 3901 { 3902 tree op = TREE_OPERAND (exp, i); 3903 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r); 3904 if (new_op != op) 3905 { 3906 if (!new_tree) 3907 new_tree = copy_node (exp); 3908 TREE_OPERAND (new_tree, i) = new_op; 3909 } 3910 } 3911 3912 if (new_tree) 3913 { 3914 new_tree = fold (new_tree); 3915 if (TREE_CODE (new_tree) == CALL_EXPR) 3916 process_call_operands (new_tree); 3917 } 3918 else 3919 return exp; 3920 } 3921 break; 3922 3923 default: 3924 gcc_unreachable (); 3925 } 3926 3927 TREE_READONLY (new_tree) |= TREE_READONLY (exp); 3928 3929 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF) 3930 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp); 3931 3932 return new_tree; 3933 } 3934 3935 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement 3936 for it within OBJ, a tree that is an object or a chain of references. */ 3937 3938 tree 3939 substitute_placeholder_in_expr (tree exp, tree obj) 3940 { 3941 enum tree_code code = TREE_CODE (exp); 3942 tree op0, op1, op2, op3; 3943 tree new_tree; 3944 3945 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type 3946 in the chain of OBJ. 
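   (Two passes below: the first walks the chain of references hanging off
   OBJ looking for an element whose main variant matches the placeholder's
   type and returns that element directly; the second settles for a pointer
   to that type and returns an INDIRECT_REF of it instead.)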
*/ 3947 if (code == PLACEHOLDER_EXPR) 3948 { 3949 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp)); 3950 tree elt; 3951 3952 for (elt = obj; elt != 0; 3953 elt = ((TREE_CODE (elt) == COMPOUND_EXPR 3954 || TREE_CODE (elt) == COND_EXPR) 3955 ? TREE_OPERAND (elt, 1) 3956 : (REFERENCE_CLASS_P (elt) 3957 || UNARY_CLASS_P (elt) 3958 || BINARY_CLASS_P (elt) 3959 || VL_EXP_CLASS_P (elt) 3960 || EXPRESSION_CLASS_P (elt)) 3961 ? TREE_OPERAND (elt, 0) : 0)) 3962 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type) 3963 return elt; 3964 3965 for (elt = obj; elt != 0; 3966 elt = ((TREE_CODE (elt) == COMPOUND_EXPR 3967 || TREE_CODE (elt) == COND_EXPR) 3968 ? TREE_OPERAND (elt, 1) 3969 : (REFERENCE_CLASS_P (elt) 3970 || UNARY_CLASS_P (elt) 3971 || BINARY_CLASS_P (elt) 3972 || VL_EXP_CLASS_P (elt) 3973 || EXPRESSION_CLASS_P (elt)) 3974 ? TREE_OPERAND (elt, 0) : 0)) 3975 if (POINTER_TYPE_P (TREE_TYPE (elt)) 3976 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt))) 3977 == need_type)) 3978 return fold_build1 (INDIRECT_REF, need_type, elt); 3979 3980 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it 3981 survives until RTL generation, there will be an error. */ 3982 return exp; 3983 } 3984 3985 /* TREE_LIST is special because we need to look at TREE_VALUE 3986 and TREE_CHAIN, not TREE_OPERANDS. */ 3987 else if (code == TREE_LIST) 3988 { 3989 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj); 3990 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj); 3991 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp)) 3992 return exp; 3993 3994 return tree_cons (TREE_PURPOSE (exp), op1, op0); 3995 } 3996 else 3997 switch (TREE_CODE_CLASS (code)) 3998 { 3999 case tcc_constant: 4000 case tcc_declaration: 4001 return exp; 4002 4003 case tcc_exceptional: 4004 case tcc_unary: 4005 case tcc_binary: 4006 case tcc_comparison: 4007 case tcc_expression: 4008 case tcc_reference: 4009 case tcc_statement: 4010 switch (TREE_CODE_LENGTH (code)) 4011 { 4012 case 0: 4013 return exp; 4014 4015 case 1: 4016 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); 4017 if (op0 == TREE_OPERAND (exp, 0)) 4018 return exp; 4019 4020 new_tree = fold_build1 (code, TREE_TYPE (exp), op0); 4021 break; 4022 4023 case 2: 4024 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); 4025 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj); 4026 4027 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)) 4028 return exp; 4029 4030 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1); 4031 break; 4032 4033 case 3: 4034 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); 4035 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj); 4036 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj); 4037 4038 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) 4039 && op2 == TREE_OPERAND (exp, 2)) 4040 return exp; 4041 4042 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2); 4043 break; 4044 4045 case 4: 4046 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); 4047 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj); 4048 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj); 4049 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj); 4050 4051 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) 4052 && op2 == TREE_OPERAND (exp, 2) 4053 && op3 == TREE_OPERAND (exp, 3)) 4054 return exp; 4055 4056 new_tree 4057 = fold (build4 (code, TREE_TYPE 
(exp), op0, op1, op2, op3)); 4058 break; 4059 4060 default: 4061 gcc_unreachable (); 4062 } 4063 break; 4064 4065 case tcc_vl_exp: 4066 { 4067 int i; 4068 4069 new_tree = NULL_TREE; 4070 4071 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++) 4072 { 4073 tree op = TREE_OPERAND (exp, i); 4074 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj); 4075 if (new_op != op) 4076 { 4077 if (!new_tree) 4078 new_tree = copy_node (exp); 4079 TREE_OPERAND (new_tree, i) = new_op; 4080 } 4081 } 4082 4083 if (new_tree) 4084 { 4085 new_tree = fold (new_tree); 4086 if (TREE_CODE (new_tree) == CALL_EXPR) 4087 process_call_operands (new_tree); 4088 } 4089 else 4090 return exp; 4091 } 4092 break; 4093 4094 default: 4095 gcc_unreachable (); 4096 } 4097 4098 TREE_READONLY (new_tree) |= TREE_READONLY (exp); 4099 4100 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF) 4101 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp); 4102 4103 return new_tree; 4104 } 4105 4106 4107 /* Subroutine of stabilize_reference; this is called for subtrees of 4108 references. Any expression with side-effects must be put in a SAVE_EXPR 4109 to ensure that it is only evaluated once. 4110 4111 We don't put SAVE_EXPR nodes around everything, because assigning very 4112 simple expressions to temporaries causes us to miss good opportunities 4113 for optimizations. Among other things, the opportunity to fold in the 4114 addition of a constant into an addressing mode often gets lost, e.g. 4115 "y[i+1] += x;". In general, we take the approach that we should not make 4116 an assignment unless we are forced into it - i.e., that any non-side effect 4117 operator should be allowed, and that cse should take care of coalescing 4118 multiple utterances of the same expression should that prove fruitful. */ 4119 4120 static tree 4121 stabilize_reference_1 (tree e) 4122 { 4123 tree result; 4124 enum tree_code code = TREE_CODE (e); 4125 4126 /* We cannot ignore const expressions because it might be a reference 4127 to a const array but whose index contains side-effects. But we can 4128 ignore things that are actual constant or that already have been 4129 handled by this function. */ 4130 4131 if (tree_invariant_p (e)) 4132 return e; 4133 4134 switch (TREE_CODE_CLASS (code)) 4135 { 4136 case tcc_exceptional: 4137 case tcc_type: 4138 case tcc_declaration: 4139 case tcc_comparison: 4140 case tcc_statement: 4141 case tcc_expression: 4142 case tcc_reference: 4143 case tcc_vl_exp: 4144 /* If the expression has side-effects, then encase it in a SAVE_EXPR 4145 so that it will only be evaluated once. */ 4146 /* The reference (r) and comparison (<) classes could be handled as 4147 below, but it is generally faster to only evaluate them once. */ 4148 if (TREE_SIDE_EFFECTS (e)) 4149 return save_expr (e); 4150 return e; 4151 4152 case tcc_constant: 4153 /* Constants need no processing. In fact, we should never reach 4154 here. */ 4155 return e; 4156 4157 case tcc_binary: 4158 /* Division is slow and tends to be compiled with jumps, 4159 especially the division by powers of 2 that is often 4160 found inside of an array reference. So do it just once. */ 4161 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR 4162 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR 4163 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR 4164 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR) 4165 return save_expr (e); 4166 /* Recursively stabilize each operand. 
*/ 4167 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)), 4168 stabilize_reference_1 (TREE_OPERAND (e, 1))); 4169 break; 4170 4171 case tcc_unary: 4172 /* Recursively stabilize each operand. */ 4173 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0))); 4174 break; 4175 4176 default: 4177 gcc_unreachable (); 4178 } 4179 4180 TREE_TYPE (result) = TREE_TYPE (e); 4181 TREE_READONLY (result) = TREE_READONLY (e); 4182 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e); 4183 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e); 4184 4185 return result; 4186 } 4187 4188 /* Stabilize a reference so that we can use it any number of times 4189 without causing its operands to be evaluated more than once. 4190 Returns the stabilized reference. This works by means of save_expr, 4191 so see the caveats in the comments about save_expr. 4192 4193 Also allows conversion expressions whose operands are references. 4194 Any other kind of expression is returned unchanged. */ 4195 4196 tree 4197 stabilize_reference (tree ref) 4198 { 4199 tree result; 4200 enum tree_code code = TREE_CODE (ref); 4201 4202 switch (code) 4203 { 4204 case VAR_DECL: 4205 case PARM_DECL: 4206 case RESULT_DECL: 4207 /* No action is needed in this case. */ 4208 return ref; 4209 4210 CASE_CONVERT: 4211 case FLOAT_EXPR: 4212 case FIX_TRUNC_EXPR: 4213 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0))); 4214 break; 4215 4216 case INDIRECT_REF: 4217 result = build_nt (INDIRECT_REF, 4218 stabilize_reference_1 (TREE_OPERAND (ref, 0))); 4219 break; 4220 4221 case COMPONENT_REF: 4222 result = build_nt (COMPONENT_REF, 4223 stabilize_reference (TREE_OPERAND (ref, 0)), 4224 TREE_OPERAND (ref, 1), NULL_TREE); 4225 break; 4226 4227 case BIT_FIELD_REF: 4228 result = build_nt (BIT_FIELD_REF, 4229 stabilize_reference (TREE_OPERAND (ref, 0)), 4230 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2)); 4231 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref); 4232 break; 4233 4234 case ARRAY_REF: 4235 result = build_nt (ARRAY_REF, 4236 stabilize_reference (TREE_OPERAND (ref, 0)), 4237 stabilize_reference_1 (TREE_OPERAND (ref, 1)), 4238 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3)); 4239 break; 4240 4241 case ARRAY_RANGE_REF: 4242 result = build_nt (ARRAY_RANGE_REF, 4243 stabilize_reference (TREE_OPERAND (ref, 0)), 4244 stabilize_reference_1 (TREE_OPERAND (ref, 1)), 4245 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3)); 4246 break; 4247 4248 case COMPOUND_EXPR: 4249 /* We cannot wrap the first expression in a SAVE_EXPR, as then 4250 it wouldn't be ignored. This matters when dealing with 4251 volatiles. */ 4252 return stabilize_reference_1 (ref); 4253 4254 /* If arg isn't a kind of lvalue we recognize, make no change. 4255 Caller should recognize the error for an invalid lvalue. */ 4256 default: 4257 return ref; 4258 4259 case ERROR_MARK: 4260 return error_mark_node; 4261 } 4262 4263 TREE_TYPE (result) = TREE_TYPE (ref); 4264 TREE_READONLY (result) = TREE_READONLY (ref); 4265 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref); 4266 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref); 4267 4268 return result; 4269 } 4270 4271 /* Low-level constructors for expressions. */ 4272 4273 /* A helper function for build1 and constant folders. Set TREE_CONSTANT, 4274 and TREE_SIDE_EFFECTS for an ADDR_EXPR. 
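   As a hedged usage sketch (NEW_BASE is a hypothetical replacement operand,
   not something defined in this file): a caller that rewrites the operand of
   an existing ADDR_EXPR in place is expected to refresh the flags afterwards,

     TREE_OPERAND (addr, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (addr);

   build1 performs this recomputation automatically for the ADDR_EXPRs it
   creates.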
*/ 4275 4276 void 4277 recompute_tree_invariant_for_addr_expr (tree t) 4278 { 4279 tree node; 4280 bool tc = true, se = false; 4281 4282 gcc_assert (TREE_CODE (t) == ADDR_EXPR); 4283 4284 /* We started out assuming this address is both invariant and constant, but 4285 does not have side effects. Now go down any handled components and see if 4286 any of them involve offsets that are either non-constant or non-invariant. 4287 Also check for side-effects. 4288 4289 ??? Note that this code makes no attempt to deal with the case where 4290 taking the address of something causes a copy due to misalignment. */ 4291 4292 #define UPDATE_FLAGS(NODE) \ 4293 do { tree _node = (NODE); \ 4294 if (_node && !TREE_CONSTANT (_node)) tc = false; \ 4295 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0) 4296 4297 for (node = TREE_OPERAND (t, 0); handled_component_p (node); 4298 node = TREE_OPERAND (node, 0)) 4299 { 4300 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus 4301 array reference (probably made temporarily by the G++ front end), 4302 so ignore all the operands. */ 4303 if ((TREE_CODE (node) == ARRAY_REF 4304 || TREE_CODE (node) == ARRAY_RANGE_REF) 4305 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE) 4306 { 4307 UPDATE_FLAGS (TREE_OPERAND (node, 1)); 4308 if (TREE_OPERAND (node, 2)) 4309 UPDATE_FLAGS (TREE_OPERAND (node, 2)); 4310 if (TREE_OPERAND (node, 3)) 4311 UPDATE_FLAGS (TREE_OPERAND (node, 3)); 4312 } 4313 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a 4314 FIELD_DECL, apparently. The G++ front end can put something else 4315 there, at least temporarily. */ 4316 else if (TREE_CODE (node) == COMPONENT_REF 4317 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL) 4318 { 4319 if (TREE_OPERAND (node, 2)) 4320 UPDATE_FLAGS (TREE_OPERAND (node, 2)); 4321 } 4322 } 4323 4324 node = lang_hooks.expr_to_decl (node, &tc, &se); 4325 4326 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from 4327 the address, since &(*a)->b is a form of addition. If it's a constant, the 4328 address is constant too. If it's a decl, its address is constant if the 4329 decl is static. Everything else is not constant and, furthermore, 4330 taking the address of a volatile variable is not volatile. */ 4331 if (TREE_CODE (node) == INDIRECT_REF 4332 || TREE_CODE (node) == MEM_REF) 4333 UPDATE_FLAGS (TREE_OPERAND (node, 0)); 4334 else if (CONSTANT_CLASS_P (node)) 4335 ; 4336 else if (DECL_P (node)) 4337 tc &= (staticp (node) != NULL_TREE); 4338 else 4339 { 4340 tc = false; 4341 se |= TREE_SIDE_EFFECTS (node); 4342 } 4343 4344 4345 TREE_CONSTANT (t) = tc; 4346 TREE_SIDE_EFFECTS (t) = se; 4347 #undef UPDATE_FLAGS 4348 } 4349 4350 /* Build an expression of code CODE, data type TYPE, and operands as 4351 specified. Expressions and reference nodes can be created this way. 4352 Constants, decls, types and misc nodes cannot be. 4353 4354 We define 5 non-variadic functions, from 0 to 4 arguments. This is 4355 enough for all extant tree codes. 
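   A minimal illustration, with a hypothetical integer-typed operand DECL:
   building the unfolded expression DECL + 1 with these constructors looks
   like

     tree one = build_int_cst (integer_type_node, 1);
     tree sum = build2 (PLUS_EXPR, integer_type_node, decl, one);

   callers that want constant operands folded use fold_build2 and friends
   instead.  (A build5 variant, used for instance by TARGET_MEM_REF, is also
   defined below.)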
*/ 4356 4357 tree 4358 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL) 4359 { 4360 tree t; 4361 4362 gcc_assert (TREE_CODE_LENGTH (code) == 0); 4363 4364 t = make_node_stat (code PASS_MEM_STAT); 4365 TREE_TYPE (t) = tt; 4366 4367 return t; 4368 } 4369 4370 tree 4371 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL) 4372 { 4373 int length = sizeof (struct tree_exp); 4374 tree t; 4375 4376 record_node_allocation_statistics (code, length); 4377 4378 gcc_assert (TREE_CODE_LENGTH (code) == 1); 4379 4380 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT); 4381 4382 memset (t, 0, sizeof (struct tree_common)); 4383 4384 TREE_SET_CODE (t, code); 4385 4386 TREE_TYPE (t) = type; 4387 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION); 4388 TREE_OPERAND (t, 0) = node; 4389 if (node && !TYPE_P (node)) 4390 { 4391 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node); 4392 TREE_READONLY (t) = TREE_READONLY (node); 4393 } 4394 4395 if (TREE_CODE_CLASS (code) == tcc_statement) 4396 TREE_SIDE_EFFECTS (t) = 1; 4397 else switch (code) 4398 { 4399 case VA_ARG_EXPR: 4400 /* All of these have side-effects, no matter what their 4401 operands are. */ 4402 TREE_SIDE_EFFECTS (t) = 1; 4403 TREE_READONLY (t) = 0; 4404 break; 4405 4406 case INDIRECT_REF: 4407 /* Whether a dereference is readonly has nothing to do with whether 4408 its operand is readonly. */ 4409 TREE_READONLY (t) = 0; 4410 break; 4411 4412 case ADDR_EXPR: 4413 if (node) 4414 recompute_tree_invariant_for_addr_expr (t); 4415 break; 4416 4417 default: 4418 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR) 4419 && node && !TYPE_P (node) 4420 && TREE_CONSTANT (node)) 4421 TREE_CONSTANT (t) = 1; 4422 if (TREE_CODE_CLASS (code) == tcc_reference 4423 && node && TREE_THIS_VOLATILE (node)) 4424 TREE_THIS_VOLATILE (t) = 1; 4425 break; 4426 } 4427 4428 return t; 4429 } 4430 4431 #define PROCESS_ARG(N) \ 4432 do { \ 4433 TREE_OPERAND (t, N) = arg##N; \ 4434 if (arg##N &&!TYPE_P (arg##N)) \ 4435 { \ 4436 if (TREE_SIDE_EFFECTS (arg##N)) \ 4437 side_effects = 1; \ 4438 if (!TREE_READONLY (arg##N) \ 4439 && !CONSTANT_CLASS_P (arg##N)) \ 4440 (void) (read_only = 0); \ 4441 if (!TREE_CONSTANT (arg##N)) \ 4442 (void) (constant = 0); \ 4443 } \ 4444 } while (0) 4445 4446 tree 4447 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL) 4448 { 4449 bool constant, read_only, side_effects; 4450 tree t; 4451 4452 gcc_assert (TREE_CODE_LENGTH (code) == 2); 4453 4454 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR) 4455 && arg0 && arg1 && tt && POINTER_TYPE_P (tt) 4456 /* When sizetype precision doesn't match that of pointers 4457 we need to be able to build explicit extensions or truncations 4458 of the offset argument. */ 4459 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt)) 4460 gcc_assert (TREE_CODE (arg0) == INTEGER_CST 4461 && TREE_CODE (arg1) == INTEGER_CST); 4462 4463 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt) 4464 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0)) 4465 && ptrofftype_p (TREE_TYPE (arg1))); 4466 4467 t = make_node_stat (code PASS_MEM_STAT); 4468 TREE_TYPE (t) = tt; 4469 4470 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the 4471 result based on those same flags for the arguments. But if the 4472 arguments aren't really even `tree' expressions, we shouldn't be trying 4473 to do this. */ 4474 4475 /* Expressions without side effects may be constant if their 4476 arguments are as well. 
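   For instance, build2 (PLUS_EXPR, integer_type_node, integer_one_node,
   integer_one_node) yields a PLUS_EXPR node with TREE_CONSTANT set rather
   than the folded constant 2; folding is the job of fold_build2.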
*/ 4477 constant = (TREE_CODE_CLASS (code) == tcc_comparison 4478 || TREE_CODE_CLASS (code) == tcc_binary); 4479 read_only = 1; 4480 side_effects = TREE_SIDE_EFFECTS (t); 4481 4482 PROCESS_ARG (0); 4483 PROCESS_ARG (1); 4484 4485 TREE_SIDE_EFFECTS (t) = side_effects; 4486 if (code == MEM_REF) 4487 { 4488 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR) 4489 { 4490 tree o = TREE_OPERAND (arg0, 0); 4491 TREE_READONLY (t) = TREE_READONLY (o); 4492 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o); 4493 } 4494 } 4495 else 4496 { 4497 TREE_READONLY (t) = read_only; 4498 TREE_CONSTANT (t) = constant; 4499 TREE_THIS_VOLATILE (t) 4500 = (TREE_CODE_CLASS (code) == tcc_reference 4501 && arg0 && TREE_THIS_VOLATILE (arg0)); 4502 } 4503 4504 return t; 4505 } 4506 4507 4508 tree 4509 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1, 4510 tree arg2 MEM_STAT_DECL) 4511 { 4512 bool constant, read_only, side_effects; 4513 tree t; 4514 4515 gcc_assert (TREE_CODE_LENGTH (code) == 3); 4516 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); 4517 4518 t = make_node_stat (code PASS_MEM_STAT); 4519 TREE_TYPE (t) = tt; 4520 4521 read_only = 1; 4522 4523 /* As a special exception, if COND_EXPR has NULL branches, we 4524 assume that it is a gimple statement and always consider 4525 it to have side effects. */ 4526 if (code == COND_EXPR 4527 && tt == void_type_node 4528 && arg1 == NULL_TREE 4529 && arg2 == NULL_TREE) 4530 side_effects = true; 4531 else 4532 side_effects = TREE_SIDE_EFFECTS (t); 4533 4534 PROCESS_ARG (0); 4535 PROCESS_ARG (1); 4536 PROCESS_ARG (2); 4537 4538 if (code == COND_EXPR) 4539 TREE_READONLY (t) = read_only; 4540 4541 TREE_SIDE_EFFECTS (t) = side_effects; 4542 TREE_THIS_VOLATILE (t) 4543 = (TREE_CODE_CLASS (code) == tcc_reference 4544 && arg0 && TREE_THIS_VOLATILE (arg0)); 4545 4546 return t; 4547 } 4548 4549 tree 4550 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1, 4551 tree arg2, tree arg3 MEM_STAT_DECL) 4552 { 4553 bool constant, read_only, side_effects; 4554 tree t; 4555 4556 gcc_assert (TREE_CODE_LENGTH (code) == 4); 4557 4558 t = make_node_stat (code PASS_MEM_STAT); 4559 TREE_TYPE (t) = tt; 4560 4561 side_effects = TREE_SIDE_EFFECTS (t); 4562 4563 PROCESS_ARG (0); 4564 PROCESS_ARG (1); 4565 PROCESS_ARG (2); 4566 PROCESS_ARG (3); 4567 4568 TREE_SIDE_EFFECTS (t) = side_effects; 4569 TREE_THIS_VOLATILE (t) 4570 = (TREE_CODE_CLASS (code) == tcc_reference 4571 && arg0 && TREE_THIS_VOLATILE (arg0)); 4572 4573 return t; 4574 } 4575 4576 tree 4577 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1, 4578 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL) 4579 { 4580 bool constant, read_only, side_effects; 4581 tree t; 4582 4583 gcc_assert (TREE_CODE_LENGTH (code) == 5); 4584 4585 t = make_node_stat (code PASS_MEM_STAT); 4586 TREE_TYPE (t) = tt; 4587 4588 side_effects = TREE_SIDE_EFFECTS (t); 4589 4590 PROCESS_ARG (0); 4591 PROCESS_ARG (1); 4592 PROCESS_ARG (2); 4593 PROCESS_ARG (3); 4594 PROCESS_ARG (4); 4595 4596 TREE_SIDE_EFFECTS (t) = side_effects; 4597 if (code == TARGET_MEM_REF) 4598 { 4599 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR) 4600 { 4601 tree o = TREE_OPERAND (arg0, 0); 4602 TREE_READONLY (t) = TREE_READONLY (o); 4603 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o); 4604 } 4605 } 4606 else 4607 TREE_THIS_VOLATILE (t) 4608 = (TREE_CODE_CLASS (code) == tcc_reference 4609 && arg0 && TREE_THIS_VOLATILE (arg0)); 4610 4611 return t; 4612 } 4613 4614 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF 4615 on the pointer PTR. 
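   For illustration, with a hypothetical pointer SSA name or decl PTR,

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   produces roughly MEM_REF <ptr, 0> of type TREE_TYPE (TREE_TYPE (ptr)),
   i.e. the value a plain dereference of PTR would yield, with LOC recorded
   as its expression location.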
*/ 4616 4617 tree 4618 build_simple_mem_ref_loc (location_t loc, tree ptr) 4619 { 4620 HOST_WIDE_INT offset = 0; 4621 tree ptype = TREE_TYPE (ptr); 4622 tree tem; 4623 /* For convenience allow addresses that collapse to a simple base 4624 and offset. */ 4625 if (TREE_CODE (ptr) == ADDR_EXPR 4626 && (handled_component_p (TREE_OPERAND (ptr, 0)) 4627 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF)) 4628 { 4629 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset); 4630 gcc_assert (ptr); 4631 ptr = build_fold_addr_expr (ptr); 4632 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr)); 4633 } 4634 tem = build2 (MEM_REF, TREE_TYPE (ptype), 4635 ptr, build_int_cst (ptype, offset)); 4636 SET_EXPR_LOCATION (tem, loc); 4637 return tem; 4638 } 4639 4640 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */ 4641 4642 offset_int 4643 mem_ref_offset (const_tree t) 4644 { 4645 return offset_int::from (TREE_OPERAND (t, 1), SIGNED); 4646 } 4647 4648 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE 4649 offsetted by OFFSET units. */ 4650 4651 tree 4652 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset) 4653 { 4654 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type), 4655 build_fold_addr_expr (base), 4656 build_int_cst (ptr_type_node, offset)); 4657 tree addr = build1 (ADDR_EXPR, type, ref); 4658 recompute_tree_invariant_for_addr_expr (addr); 4659 return addr; 4660 } 4661 4662 /* Similar except don't specify the TREE_TYPE 4663 and leave the TREE_SIDE_EFFECTS as 0. 4664 It is permissible for arguments to be null, 4665 or even garbage if their values do not matter. */ 4666 4667 tree 4668 build_nt (enum tree_code code, ...) 4669 { 4670 tree t; 4671 int length; 4672 int i; 4673 va_list p; 4674 4675 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); 4676 4677 va_start (p, code); 4678 4679 t = make_node (code); 4680 length = TREE_CODE_LENGTH (code); 4681 4682 for (i = 0; i < length; i++) 4683 TREE_OPERAND (t, i) = va_arg (p, tree); 4684 4685 va_end (p); 4686 return t; 4687 } 4688 4689 /* Similar to build_nt, but for creating a CALL_EXPR object with a 4690 tree vec. */ 4691 4692 tree 4693 build_nt_call_vec (tree fn, vec<tree, va_gc> *args) 4694 { 4695 tree ret, t; 4696 unsigned int ix; 4697 4698 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3); 4699 CALL_EXPR_FN (ret) = fn; 4700 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE; 4701 FOR_EACH_VEC_SAFE_ELT (args, ix, t) 4702 CALL_EXPR_ARG (ret, ix) = t; 4703 return ret; 4704 } 4705 4706 /* Create a DECL_... node of code CODE, name NAME and data type TYPE. 4707 We do NOT enter this node in any sort of symbol table. 4708 4709 LOC is the location of the decl. 4710 4711 layout_decl is used to set up the decl's storage layout. 4712 Other slots are initialized to 0 or null pointers. */ 4713 4714 tree 4715 build_decl_stat (location_t loc, enum tree_code code, tree name, 4716 tree type MEM_STAT_DECL) 4717 { 4718 tree t; 4719 4720 t = make_node_stat (code PASS_MEM_STAT); 4721 DECL_SOURCE_LOCATION (t) = loc; 4722 4723 /* if (type == error_mark_node) 4724 type = integer_type_node; */ 4725 /* That is not done, deliberately, so that having error_mark_node 4726 as the type can suppress useless errors in the use of this variable. */ 4727 4728 DECL_NAME (t) = name; 4729 TREE_TYPE (t) = type; 4730 4731 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL) 4732 layout_decl (t, 0); 4733 4734 return t; 4735 } 4736 4737 /* Builds and returns function declaration with NAME and TYPE. 
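   A hedged sketch of typical use; the function name below is made up and the
   signature is a function taking a void * and returning void:

     tree fntype = build_function_type_list (void_type_node,
                                             ptr_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__example_runtime_hook", fntype);

   The resulting declaration is marked external, public, artificial and
   nothrow below.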
*/ 4738 4739 tree 4740 build_fn_decl (const char *name, tree type) 4741 { 4742 tree id = get_identifier (name); 4743 tree decl = build_decl (input_location, FUNCTION_DECL, id, type); 4744 4745 DECL_EXTERNAL (decl) = 1; 4746 TREE_PUBLIC (decl) = 1; 4747 DECL_ARTIFICIAL (decl) = 1; 4748 TREE_NOTHROW (decl) = 1; 4749 4750 return decl; 4751 } 4752 4753 vec<tree, va_gc> *all_translation_units; 4754 4755 /* Builds a new translation-unit decl with name NAME, queues it in the 4756 global list of translation-unit decls and returns it. */ 4757 4758 tree 4759 build_translation_unit_decl (tree name) 4760 { 4761 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL, 4762 name, NULL_TREE); 4763 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name; 4764 vec_safe_push (all_translation_units, tu); 4765 return tu; 4766 } 4767 4768 4769 /* BLOCK nodes are used to represent the structure of binding contours 4770 and declarations, once those contours have been exited and their contents 4771 compiled. This information is used for outputting debugging info. */ 4772 4773 tree 4774 build_block (tree vars, tree subblocks, tree supercontext, tree chain) 4775 { 4776 tree block = make_node (BLOCK); 4777 4778 BLOCK_VARS (block) = vars; 4779 BLOCK_SUBBLOCKS (block) = subblocks; 4780 BLOCK_SUPERCONTEXT (block) = supercontext; 4781 BLOCK_CHAIN (block) = chain; 4782 return block; 4783 } 4784 4785 4786 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location. 4787 4788 LOC is the location to use in tree T. */ 4789 4790 void 4791 protected_set_expr_location (tree t, location_t loc) 4792 { 4793 if (CAN_HAVE_LOCATION_P (t)) 4794 SET_EXPR_LOCATION (t, loc); 4795 } 4796 4797 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES 4798 is ATTRIBUTE. */ 4799 4800 tree 4801 build_decl_attribute_variant (tree ddecl, tree attribute) 4802 { 4803 DECL_ATTRIBUTES (ddecl) = attribute; 4804 return ddecl; 4805 } 4806 4807 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE 4808 is ATTRIBUTE and its qualifiers are QUALS. 4809 4810 Record such modified types already made so we don't make duplicates. */ 4811 4812 tree 4813 build_type_attribute_qual_variant (tree otype, tree attribute, int quals) 4814 { 4815 tree ttype = otype; 4816 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute)) 4817 { 4818 inchash::hash hstate; 4819 tree ntype; 4820 int i; 4821 tree t; 4822 enum tree_code code = TREE_CODE (ttype); 4823 4824 /* Building a distinct copy of a tagged type is inappropriate; it 4825 causes breakage in code that expects there to be a one-to-one 4826 relationship between a struct and its fields. 4827 build_duplicate_type is another solution (as used in 4828 handle_transparent_union_attribute), but that doesn't play well 4829 with the stronger C++ type identity model. 
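   Hence, for tagged types the branch just below does not build an attribute
   variant at all: the new attributes are dropped with a -Wattributes warning
   and only the qualifiers in QUALS are applied.  A hypothetical source-level
   trigger would be attaching, say, an aligned attribute to a struct type
   only after the struct has been defined.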
*/ 4830 if (TREE_CODE (ttype) == RECORD_TYPE 4831 || TREE_CODE (ttype) == UNION_TYPE 4832 || TREE_CODE (ttype) == QUAL_UNION_TYPE 4833 || TREE_CODE (ttype) == ENUMERAL_TYPE) 4834 { 4835 warning (OPT_Wattributes, 4836 "ignoring attributes applied to %qT after definition", 4837 TYPE_MAIN_VARIANT (ttype)); 4838 return build_qualified_type (ttype, quals); 4839 } 4840 4841 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED); 4842 if (lang_hooks.types.copy_lang_qualifiers 4843 && otype != TYPE_MAIN_VARIANT (otype)) 4844 ttype = (lang_hooks.types.copy_lang_qualifiers 4845 (ttype, TYPE_MAIN_VARIANT (otype))); 4846 4847 ntype = build_distinct_type_copy (ttype); 4848 4849 TYPE_ATTRIBUTES (ntype) = attribute; 4850 4851 hstate.add_int (code); 4852 if (TREE_TYPE (ntype)) 4853 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype))); 4854 attribute_hash_list (attribute, hstate); 4855 4856 switch (TREE_CODE (ntype)) 4857 { 4858 case FUNCTION_TYPE: 4859 type_hash_list (TYPE_ARG_TYPES (ntype), hstate); 4860 break; 4861 case ARRAY_TYPE: 4862 if (TYPE_DOMAIN (ntype)) 4863 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype))); 4864 break; 4865 case INTEGER_TYPE: 4866 t = TYPE_MAX_VALUE (ntype); 4867 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++) 4868 hstate.add_object (TREE_INT_CST_ELT (t, i)); 4869 break; 4870 case REAL_TYPE: 4871 case FIXED_POINT_TYPE: 4872 { 4873 unsigned int precision = TYPE_PRECISION (ntype); 4874 hstate.add_object (precision); 4875 } 4876 break; 4877 default: 4878 break; 4879 } 4880 4881 ntype = type_hash_canon (hstate.end(), ntype); 4882 4883 /* If the target-dependent attributes make NTYPE different from 4884 its canonical type, we will need to use structural equality 4885 checks for this type. */ 4886 if (TYPE_STRUCTURAL_EQUALITY_P (ttype) 4887 || !comp_type_attributes (ntype, ttype)) 4888 SET_TYPE_STRUCTURAL_EQUALITY (ntype); 4889 else if (TYPE_CANONICAL (ntype) == ntype) 4890 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype); 4891 4892 ttype = build_qualified_type (ntype, quals); 4893 if (lang_hooks.types.copy_lang_qualifiers 4894 && otype != TYPE_MAIN_VARIANT (otype)) 4895 ttype = lang_hooks.types.copy_lang_qualifiers (ttype, otype); 4896 } 4897 else if (TYPE_QUALS (ttype) != quals) 4898 ttype = build_qualified_type (ttype, quals); 4899 4900 return ttype; 4901 } 4902 4903 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are 4904 the same. */ 4905 4906 static bool 4907 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2) 4908 { 4909 tree cl1, cl2; 4910 for (cl1 = clauses1, cl2 = clauses2; 4911 cl1 && cl2; 4912 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2)) 4913 { 4914 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2)) 4915 return false; 4916 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN) 4917 { 4918 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1), 4919 OMP_CLAUSE_DECL (cl2)) != 1) 4920 return false; 4921 } 4922 switch (OMP_CLAUSE_CODE (cl1)) 4923 { 4924 case OMP_CLAUSE_ALIGNED: 4925 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1), 4926 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1) 4927 return false; 4928 break; 4929 case OMP_CLAUSE_LINEAR: 4930 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1), 4931 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1) 4932 return false; 4933 break; 4934 case OMP_CLAUSE_SIMDLEN: 4935 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1), 4936 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1) 4937 return false; 4938 default: 4939 break; 4940 } 4941 } 4942 return true; 4943 } 4944 4945 /* Compare two constructor-element-type constants. 
Return 1 if the lists 4946 are known to be equal; otherwise return 0. */ 4947 4948 static bool 4949 simple_cst_list_equal (const_tree l1, const_tree l2) 4950 { 4951 while (l1 != NULL_TREE && l2 != NULL_TREE) 4952 { 4953 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1) 4954 return false; 4955 4956 l1 = TREE_CHAIN (l1); 4957 l2 = TREE_CHAIN (l2); 4958 } 4959 4960 return l1 == l2; 4961 } 4962 4963 /* Compare two identifier nodes representing attributes. Either one may 4964 be in wrapped __ATTR__ form. Return true if they are the same, false 4965 otherwise. */ 4966 4967 static bool 4968 cmp_attrib_identifiers (const_tree attr1, const_tree attr2) 4969 { 4970 /* Make sure we're dealing with IDENTIFIER_NODEs. */ 4971 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE 4972 && TREE_CODE (attr2) == IDENTIFIER_NODE); 4973 4974 /* Identifiers can be compared directly for equality. */ 4975 if (attr1 == attr2) 4976 return true; 4977 4978 /* If they are not equal, they may still be one in the form 4979 'text' while the other one is in the form '__text__'. TODO: 4980 If we were storing attributes in normalized 'text' form, then 4981 this could all go away and we could take full advantage of 4982 the fact that we're comparing identifiers. :-) */ 4983 const size_t attr1_len = IDENTIFIER_LENGTH (attr1); 4984 const size_t attr2_len = IDENTIFIER_LENGTH (attr2); 4985 4986 if (attr2_len == attr1_len + 4) 4987 { 4988 const char *p = IDENTIFIER_POINTER (attr2); 4989 const char *q = IDENTIFIER_POINTER (attr1); 4990 if (p[0] == '_' && p[1] == '_' 4991 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_' 4992 && strncmp (q, p + 2, attr1_len) == 0) 4993 return true;; 4994 } 4995 else if (attr2_len + 4 == attr1_len) 4996 { 4997 const char *p = IDENTIFIER_POINTER (attr2); 4998 const char *q = IDENTIFIER_POINTER (attr1); 4999 if (q[0] == '_' && q[1] == '_' 5000 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_' 5001 && strncmp (q + 2, p, attr2_len) == 0) 5002 return true; 5003 } 5004 5005 return false; 5006 } 5007 5008 /* Compare two attributes for their value identity. Return true if the 5009 attribute values are known to be equal; otherwise return false. */ 5010 5011 bool 5012 attribute_value_equal (const_tree attr1, const_tree attr2) 5013 { 5014 if (TREE_VALUE (attr1) == TREE_VALUE (attr2)) 5015 return true; 5016 5017 if (TREE_VALUE (attr1) != NULL_TREE 5018 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST 5019 && TREE_VALUE (attr2) != NULL_TREE 5020 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST) 5021 { 5022 /* Handle attribute format. */ 5023 if (is_attribute_p ("format", get_attribute_name (attr1))) 5024 { 5025 attr1 = TREE_VALUE (attr1); 5026 attr2 = TREE_VALUE (attr2); 5027 /* Compare the archetypes (printf/scanf/strftime/...). */ 5028 if (!cmp_attrib_identifiers (TREE_VALUE (attr1), 5029 TREE_VALUE (attr2))) 5030 return false; 5031 /* Archetypes are the same. Compare the rest. 
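   For example, hypothetical attribute values format (printf, 1, 2) and
   format (__printf__, 1, 2) compare equal here: cmp_attrib_identifiers
   accepts the underscore-wrapped spelling of the archetype, and the
   remaining integer arguments are then compared with simple_cst_list_equal
   just below.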
*/ 5032 return (simple_cst_list_equal (TREE_CHAIN (attr1), 5033 TREE_CHAIN (attr2)) == 1); 5034 } 5035 return (simple_cst_list_equal (TREE_VALUE (attr1), 5036 TREE_VALUE (attr2)) == 1); 5037 } 5038 5039 if ((flag_openmp || flag_openmp_simd) 5040 && TREE_VALUE (attr1) && TREE_VALUE (attr2) 5041 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE 5042 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE) 5043 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1), 5044 TREE_VALUE (attr2)); 5045 5046 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1); 5047 } 5048 5049 /* Return 0 if the attributes for two types are incompatible, 1 if they 5050 are compatible, and 2 if they are nearly compatible (which causes a 5051 warning to be generated). */ 5052 int 5053 comp_type_attributes (const_tree type1, const_tree type2) 5054 { 5055 const_tree a1 = TYPE_ATTRIBUTES (type1); 5056 const_tree a2 = TYPE_ATTRIBUTES (type2); 5057 const_tree a; 5058 5059 if (a1 == a2) 5060 return 1; 5061 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a)) 5062 { 5063 const struct attribute_spec *as; 5064 const_tree attr; 5065 5066 as = lookup_attribute_spec (get_attribute_name (a)); 5067 if (!as || as->affects_type_identity == false) 5068 continue; 5069 5070 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2)); 5071 if (!attr || !attribute_value_equal (a, attr)) 5072 break; 5073 } 5074 if (!a) 5075 { 5076 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a)) 5077 { 5078 const struct attribute_spec *as; 5079 5080 as = lookup_attribute_spec (get_attribute_name (a)); 5081 if (!as || as->affects_type_identity == false) 5082 continue; 5083 5084 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1))) 5085 break; 5086 /* We don't need to compare trees again, as we did this 5087 already in first loop. */ 5088 } 5089 /* All types - affecting identity - are equal, so 5090 there is no need to call target hook for comparison. */ 5091 if (!a) 5092 return 1; 5093 } 5094 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a))) 5095 return 0; 5096 /* As some type combinations - like default calling-convention - might 5097 be compatible, we have to call the target hook to get the final result. */ 5098 return targetm.comp_type_attributes (type1, type2); 5099 } 5100 5101 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE 5102 is ATTRIBUTE. 5103 5104 Record such modified types already made so we don't make duplicates. */ 5105 5106 tree 5107 build_type_attribute_variant (tree ttype, tree attribute) 5108 { 5109 return build_type_attribute_qual_variant (ttype, attribute, 5110 TYPE_QUALS (ttype)); 5111 } 5112 5113 5114 /* Reset the expression *EXPR_P, a size or position. 5115 5116 ??? We could reset all non-constant sizes or positions. But it's cheap 5117 enough to not do so and refrain from adding workarounds to dwarf2out.c. 5118 5119 We need to reset self-referential sizes or positions because they cannot 5120 be gimplified and thus can contain a CALL_EXPR after the gimplification 5121 is finished, which will run afoul of LTO streaming. And they need to be 5122 reset to something essentially dummy but not constant, so as to preserve 5123 the properties of the object they are attached to. */ 5124 5125 static inline void 5126 free_lang_data_in_one_sizepos (tree *expr_p) 5127 { 5128 tree expr = *expr_p; 5129 if (CONTAINS_PLACEHOLDER_P (expr)) 5130 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr)); 5131 } 5132 5133 5134 /* Reset all the fields in a binfo node BINFO. 
We only keep 5135 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */ 5136 5137 static void 5138 free_lang_data_in_binfo (tree binfo) 5139 { 5140 unsigned i; 5141 tree t; 5142 5143 gcc_assert (TREE_CODE (binfo) == TREE_BINFO); 5144 5145 BINFO_VIRTUALS (binfo) = NULL_TREE; 5146 BINFO_BASE_ACCESSES (binfo) = NULL; 5147 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE; 5148 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE; 5149 5150 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t) 5151 free_lang_data_in_binfo (t); 5152 } 5153 5154 5155 /* Reset all language specific information still present in TYPE. */ 5156 5157 static void 5158 free_lang_data_in_type (tree type) 5159 { 5160 gcc_assert (TYPE_P (type)); 5161 5162 /* Give the FE a chance to remove its own data first. */ 5163 lang_hooks.free_lang_data (type); 5164 5165 TREE_LANG_FLAG_0 (type) = 0; 5166 TREE_LANG_FLAG_1 (type) = 0; 5167 TREE_LANG_FLAG_2 (type) = 0; 5168 TREE_LANG_FLAG_3 (type) = 0; 5169 TREE_LANG_FLAG_4 (type) = 0; 5170 TREE_LANG_FLAG_5 (type) = 0; 5171 TREE_LANG_FLAG_6 (type) = 0; 5172 5173 if (TREE_CODE (type) == FUNCTION_TYPE) 5174 { 5175 /* Remove the const and volatile qualifiers from arguments. The 5176 C++ front end removes them, but the C front end does not, 5177 leading to false ODR violation errors when merging two 5178 instances of the same function signature compiled by 5179 different front ends. */ 5180 tree p; 5181 5182 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p)) 5183 { 5184 tree arg_type = TREE_VALUE (p); 5185 5186 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type)) 5187 { 5188 int quals = TYPE_QUALS (arg_type) 5189 & ~TYPE_QUAL_CONST 5190 & ~TYPE_QUAL_VOLATILE; 5191 TREE_VALUE (p) = build_qualified_type (arg_type, quals); 5192 free_lang_data_in_type (TREE_VALUE (p)); 5193 } 5194 /* C++ FE uses TREE_PURPOSE to store initial values. */ 5195 TREE_PURPOSE (p) = NULL; 5196 } 5197 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */ 5198 TYPE_MINVAL (type) = NULL; 5199 } 5200 if (TREE_CODE (type) == METHOD_TYPE) 5201 { 5202 tree p; 5203 5204 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p)) 5205 { 5206 /* C++ FE uses TREE_PURPOSE to store initial values. */ 5207 TREE_PURPOSE (p) = NULL; 5208 } 5209 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */ 5210 TYPE_MINVAL (type) = NULL; 5211 } 5212 5213 /* Remove members that are not actually FIELD_DECLs from the field 5214 list of an aggregate. These occur in C++. */ 5215 if (RECORD_OR_UNION_TYPE_P (type)) 5216 { 5217 tree prev, member; 5218 5219 /* Note that TYPE_FIELDS can be shared across distinct 5220 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is 5221 to be removed, we cannot set its TREE_CHAIN to NULL. 5222 Otherwise, we would not be able to find all the other fields 5223 in the other instances of this TREE_TYPE. 5224 5225 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. 
*/ 5226 prev = NULL_TREE; 5227 member = TYPE_FIELDS (type); 5228 while (member) 5229 { 5230 if (TREE_CODE (member) == FIELD_DECL 5231 || (TREE_CODE (member) == TYPE_DECL 5232 && !DECL_IGNORED_P (member) 5233 && debug_info_level > DINFO_LEVEL_TERSE 5234 && !is_redundant_typedef (member))) 5235 { 5236 if (prev) 5237 TREE_CHAIN (prev) = member; 5238 else 5239 TYPE_FIELDS (type) = member; 5240 prev = member; 5241 } 5242 5243 member = TREE_CHAIN (member); 5244 } 5245 5246 if (prev) 5247 TREE_CHAIN (prev) = NULL_TREE; 5248 else 5249 TYPE_FIELDS (type) = NULL_TREE; 5250 5251 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS 5252 and danagle the pointer from time to time. */ 5253 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL) 5254 TYPE_VFIELD (type) = NULL_TREE; 5255 5256 /* Remove TYPE_METHODS list. While it would be nice to keep it 5257 to enable ODR warnings about different method lists, doing so 5258 seems to impractically increase size of LTO data streamed. 5259 Keep the information if TYPE_METHODS was non-NULL. This is used 5260 by function.c and pretty printers. */ 5261 if (TYPE_METHODS (type)) 5262 TYPE_METHODS (type) = error_mark_node; 5263 if (TYPE_BINFO (type)) 5264 { 5265 free_lang_data_in_binfo (TYPE_BINFO (type)); 5266 /* We need to preserve link to bases and virtual table for all 5267 polymorphic types to make devirtualization machinery working. 5268 Debug output cares only about bases, but output also 5269 virtual table pointers so merging of -fdevirtualize and 5270 -fno-devirtualize units is easier. */ 5271 if ((!BINFO_VTABLE (TYPE_BINFO (type)) 5272 || !flag_devirtualize) 5273 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type)) 5274 && !BINFO_VTABLE (TYPE_BINFO (type))) 5275 || debug_info_level != DINFO_LEVEL_NONE)) 5276 TYPE_BINFO (type) = NULL; 5277 } 5278 } 5279 else 5280 { 5281 /* For non-aggregate types, clear out the language slot (which 5282 overloads TYPE_BINFO). */ 5283 TYPE_LANG_SLOT_1 (type) = NULL_TREE; 5284 5285 if (INTEGRAL_TYPE_P (type) 5286 || SCALAR_FLOAT_TYPE_P (type) 5287 || FIXED_POINT_TYPE_P (type)) 5288 { 5289 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type)); 5290 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type)); 5291 } 5292 } 5293 5294 free_lang_data_in_one_sizepos (&TYPE_SIZE (type)); 5295 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type)); 5296 5297 if (TYPE_CONTEXT (type) 5298 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK) 5299 { 5300 tree ctx = TYPE_CONTEXT (type); 5301 do 5302 { 5303 ctx = BLOCK_SUPERCONTEXT (ctx); 5304 } 5305 while (ctx && TREE_CODE (ctx) == BLOCK); 5306 TYPE_CONTEXT (type) = ctx; 5307 } 5308 } 5309 5310 5311 /* Return true if DECL may need an assembler name to be set. */ 5312 5313 static inline bool 5314 need_assembler_name_p (tree decl) 5315 { 5316 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition 5317 Rule merging. This makes type_odr_p to return true on those types during 5318 LTO and by comparing the mangled name, we can say what types are intended 5319 to be equivalent across compilation unit. 5320 5321 We do not store names of type_in_anonymous_namespace_p. 5322 5323 Record, union and enumeration type have linkage that allows use 5324 to check type_in_anonymous_namespace_p. We do not mangle compound types 5325 that always can be compared structurally. 5326 5327 Similarly for builtin types, we compare properties of their main variant. 5328 A special case are integer types where mangling do make differences 5329 between char/signed char/unsigned char etc. 
Storing name for these makes 5330 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well. 5331 See cp/mangle.c:write_builtin_type for details. */ 5332 5333 if (flag_lto_odr_type_mering 5334 && TREE_CODE (decl) == TYPE_DECL 5335 && DECL_NAME (decl) 5336 && decl == TYPE_NAME (TREE_TYPE (decl)) 5337 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl) 5338 && !TYPE_ARTIFICIAL (TREE_TYPE (decl)) 5339 && (type_with_linkage_p (TREE_TYPE (decl)) 5340 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE) 5341 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)) 5342 return !DECL_ASSEMBLER_NAME_SET_P (decl); 5343 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */ 5344 if (!VAR_OR_FUNCTION_DECL_P (decl)) 5345 return false; 5346 5347 /* If DECL already has its assembler name set, it does not need a 5348 new one. */ 5349 if (!HAS_DECL_ASSEMBLER_NAME_P (decl) 5350 || DECL_ASSEMBLER_NAME_SET_P (decl)) 5351 return false; 5352 5353 /* Abstract decls do not need an assembler name. */ 5354 if (DECL_ABSTRACT_P (decl)) 5355 return false; 5356 5357 /* For VAR_DECLs, only static, public and external symbols need an 5358 assembler name. */ 5359 if (VAR_P (decl) 5360 && !TREE_STATIC (decl) 5361 && !TREE_PUBLIC (decl) 5362 && !DECL_EXTERNAL (decl)) 5363 return false; 5364 5365 if (TREE_CODE (decl) == FUNCTION_DECL) 5366 { 5367 /* Do not set assembler name on builtins. Allow RTL expansion to 5368 decide whether to expand inline or via a regular call. */ 5369 if (DECL_BUILT_IN (decl) 5370 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND) 5371 return false; 5372 5373 /* Functions represented in the callgraph need an assembler name. */ 5374 if (cgraph_node::get (decl) != NULL) 5375 return true; 5376 5377 /* Unused and not public functions don't need an assembler name. */ 5378 if (!TREE_USED (decl) && !TREE_PUBLIC (decl)) 5379 return false; 5380 } 5381 5382 return true; 5383 } 5384 5385 5386 /* Reset all language specific information still present in symbol 5387 DECL. */ 5388 5389 static void 5390 free_lang_data_in_decl (tree decl) 5391 { 5392 gcc_assert (DECL_P (decl)); 5393 5394 /* Give the FE a chance to remove its own data first. */ 5395 lang_hooks.free_lang_data (decl); 5396 5397 TREE_LANG_FLAG_0 (decl) = 0; 5398 TREE_LANG_FLAG_1 (decl) = 0; 5399 TREE_LANG_FLAG_2 (decl) = 0; 5400 TREE_LANG_FLAG_3 (decl) = 0; 5401 TREE_LANG_FLAG_4 (decl) = 0; 5402 TREE_LANG_FLAG_5 (decl) = 0; 5403 TREE_LANG_FLAG_6 (decl) = 0; 5404 5405 free_lang_data_in_one_sizepos (&DECL_SIZE (decl)); 5406 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl)); 5407 if (TREE_CODE (decl) == FIELD_DECL) 5408 { 5409 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl)); 5410 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE) 5411 DECL_QUALIFIER (decl) = NULL_TREE; 5412 } 5413 5414 if (TREE_CODE (decl) == FUNCTION_DECL) 5415 { 5416 struct cgraph_node *node; 5417 if (!(node = cgraph_node::get (decl)) 5418 || (!node->definition && !node->clones)) 5419 { 5420 if (node) 5421 node->release_body (); 5422 else 5423 { 5424 release_function_body (decl); 5425 DECL_ARGUMENTS (decl) = NULL; 5426 DECL_RESULT (decl) = NULL; 5427 DECL_INITIAL (decl) = error_mark_node; 5428 } 5429 } 5430 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p)) 5431 { 5432 tree t; 5433 5434 /* If DECL has a gimple body, then the context for its 5435 arguments must be DECL. Otherwise, it doesn't really 5436 matter, as we will not be emitting any code for DECL. 
In 5437 general, there may be other instances of DECL created by 5438 the front end and since PARM_DECLs are generally shared, 5439 their DECL_CONTEXT changes as the replicas of DECL are 5440 created. The only time where DECL_CONTEXT is important 5441 is for the FUNCTION_DECLs that have a gimple body (since 5442 the PARM_DECL will be used in the function's body). */ 5443 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t)) 5444 DECL_CONTEXT (t) = decl; 5445 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl)) 5446 DECL_FUNCTION_SPECIFIC_TARGET (decl) 5447 = target_option_default_node; 5448 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)) 5449 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) 5450 = optimization_default_node; 5451 } 5452 5453 /* DECL_SAVED_TREE holds the GENERIC representation for DECL. 5454 At this point, it is not needed anymore. */ 5455 DECL_SAVED_TREE (decl) = NULL_TREE; 5456 5457 /* Clear the abstract origin if it refers to a method. 5458 Otherwise dwarf2out.c will ICE as we splice functions out of 5459 TYPE_FIELDS and thus the origin will not be output 5460 correctly. */ 5461 if (DECL_ABSTRACT_ORIGIN (decl) 5462 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl)) 5463 && RECORD_OR_UNION_TYPE_P 5464 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl)))) 5465 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE; 5466 5467 /* Sometimes the C++ frontend doesn't manage to transform a temporary 5468 DECL_VINDEX referring to itself into a vtable slot number as it 5469 should. Happens with functions that are copied and then forgotten 5470 about. Just clear it, it won't matter anymore. */ 5471 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl))) 5472 DECL_VINDEX (decl) = NULL_TREE; 5473 } 5474 else if (VAR_P (decl)) 5475 { 5476 if ((DECL_EXTERNAL (decl) 5477 && (!TREE_STATIC (decl) || !TREE_READONLY (decl))) 5478 || (decl_function_context (decl) && !TREE_STATIC (decl))) 5479 DECL_INITIAL (decl) = NULL_TREE; 5480 } 5481 else if (TREE_CODE (decl) == TYPE_DECL) 5482 { 5483 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT; 5484 DECL_VISIBILITY_SPECIFIED (decl) = 0; 5485 DECL_INITIAL (decl) = NULL_TREE; 5486 } 5487 else if (TREE_CODE (decl) == FIELD_DECL) 5488 DECL_INITIAL (decl) = NULL_TREE; 5489 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL 5490 && DECL_INITIAL (decl) 5491 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK) 5492 { 5493 /* Strip builtins from the translation-unit BLOCK. We still have targets 5494 without builtin_decl_explicit support and also builtins are shared 5495 nodes and thus we can't use TREE_CHAIN in multiple lists. */ 5496 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl)); 5497 while (*nextp) 5498 { 5499 tree var = *nextp; 5500 if (TREE_CODE (var) == FUNCTION_DECL 5501 && DECL_BUILT_IN (var)) 5502 *nextp = TREE_CHAIN (var); 5503 else 5504 nextp = &TREE_CHAIN (var); 5505 } 5506 } 5507 } 5508 5509 5510 /* Data used when collecting DECLs and TYPEs for language data removal. */ 5511 5512 struct free_lang_data_d 5513 { 5514 free_lang_data_d () : decls (100), types (100) {} 5515 5516 /* Worklist to avoid excessive recursion. */ 5517 auto_vec<tree> worklist; 5518 5519 /* Set of traversed objects. Used to avoid duplicate visits. */ 5520 hash_set<tree> pset; 5521 5522 /* Array of symbols to process with free_lang_data_in_decl. */ 5523 auto_vec<tree> decls; 5524 5525 /* Array of types to process with free_lang_data_in_type. */ 5526 auto_vec<tree> types; 5527 }; 5528 5529 5530 /* Save all language fields needed to generate proper debug information 5531 for DECL. 
This saves most fields cleared out by free_lang_data_in_decl. */ 5532 5533 static void 5534 save_debug_info_for_decl (tree t) 5535 { 5536 /*struct saved_debug_info_d *sdi;*/ 5537 5538 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t)); 5539 5540 /* FIXME. Partial implementation for saving debug info removed. */ 5541 } 5542 5543 5544 /* Save all language fields needed to generate proper debug information 5545 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */ 5546 5547 static void 5548 save_debug_info_for_type (tree t) 5549 { 5550 /*struct saved_debug_info_d *sdi;*/ 5551 5552 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t)); 5553 5554 /* FIXME. Partial implementation for saving debug info removed. */ 5555 } 5556 5557 5558 /* Add type or decl T to one of the list of tree nodes that need their 5559 language data removed. The lists are held inside FLD. */ 5560 5561 static void 5562 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld) 5563 { 5564 if (DECL_P (t)) 5565 { 5566 fld->decls.safe_push (t); 5567 if (debug_info_level > DINFO_LEVEL_TERSE) 5568 save_debug_info_for_decl (t); 5569 } 5570 else if (TYPE_P (t)) 5571 { 5572 fld->types.safe_push (t); 5573 if (debug_info_level > DINFO_LEVEL_TERSE) 5574 save_debug_info_for_type (t); 5575 } 5576 else 5577 gcc_unreachable (); 5578 } 5579 5580 /* Push tree node T into FLD->WORKLIST. */ 5581 5582 static inline void 5583 fld_worklist_push (tree t, struct free_lang_data_d *fld) 5584 { 5585 if (t && !is_lang_specific (t) && !fld->pset.contains (t)) 5586 fld->worklist.safe_push ((t)); 5587 } 5588 5589 5590 /* Operand callback helper for free_lang_data_in_node. *TP is the 5591 subtree operand being considered. */ 5592 5593 static tree 5594 find_decls_types_r (tree *tp, int *ws, void *data) 5595 { 5596 tree t = *tp; 5597 struct free_lang_data_d *fld = (struct free_lang_data_d *) data; 5598 5599 if (TREE_CODE (t) == TREE_LIST) 5600 return NULL_TREE; 5601 5602 /* Language specific nodes will be removed, so there is no need 5603 to gather anything under them. */ 5604 if (is_lang_specific (t)) 5605 { 5606 *ws = 0; 5607 return NULL_TREE; 5608 } 5609 5610 if (DECL_P (t)) 5611 { 5612 /* Note that walk_tree does not traverse every possible field in 5613 decls, so we have to do our own traversals here. */ 5614 add_tree_to_fld_list (t, fld); 5615 5616 fld_worklist_push (DECL_NAME (t), fld); 5617 fld_worklist_push (DECL_CONTEXT (t), fld); 5618 fld_worklist_push (DECL_SIZE (t), fld); 5619 fld_worklist_push (DECL_SIZE_UNIT (t), fld); 5620 5621 /* We are going to remove everything under DECL_INITIAL for 5622 TYPE_DECLs. No point walking them. 
*/ 5623 if (TREE_CODE (t) != TYPE_DECL) 5624 fld_worklist_push (DECL_INITIAL (t), fld); 5625 5626 fld_worklist_push (DECL_ATTRIBUTES (t), fld); 5627 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld); 5628 5629 if (TREE_CODE (t) == FUNCTION_DECL) 5630 { 5631 fld_worklist_push (DECL_ARGUMENTS (t), fld); 5632 fld_worklist_push (DECL_RESULT (t), fld); 5633 } 5634 else if (TREE_CODE (t) == TYPE_DECL) 5635 { 5636 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld); 5637 } 5638 else if (TREE_CODE (t) == FIELD_DECL) 5639 { 5640 fld_worklist_push (DECL_FIELD_OFFSET (t), fld); 5641 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld); 5642 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld); 5643 fld_worklist_push (DECL_FCONTEXT (t), fld); 5644 } 5645 5646 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL) 5647 && DECL_HAS_VALUE_EXPR_P (t)) 5648 fld_worklist_push (DECL_VALUE_EXPR (t), fld); 5649 5650 if (TREE_CODE (t) != FIELD_DECL 5651 && TREE_CODE (t) != TYPE_DECL) 5652 fld_worklist_push (TREE_CHAIN (t), fld); 5653 *ws = 0; 5654 } 5655 else if (TYPE_P (t)) 5656 { 5657 /* Note that walk_tree does not traverse every possible field in 5658 types, so we have to do our own traversals here. */ 5659 add_tree_to_fld_list (t, fld); 5660 5661 if (!RECORD_OR_UNION_TYPE_P (t)) 5662 fld_worklist_push (TYPE_CACHED_VALUES (t), fld); 5663 fld_worklist_push (TYPE_SIZE (t), fld); 5664 fld_worklist_push (TYPE_SIZE_UNIT (t), fld); 5665 fld_worklist_push (TYPE_ATTRIBUTES (t), fld); 5666 fld_worklist_push (TYPE_POINTER_TO (t), fld); 5667 fld_worklist_push (TYPE_REFERENCE_TO (t), fld); 5668 fld_worklist_push (TYPE_NAME (t), fld); 5669 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream 5670 them and thus do not and want not to reach unused pointer types 5671 this way. */ 5672 if (!POINTER_TYPE_P (t)) 5673 fld_worklist_push (TYPE_MINVAL (t), fld); 5674 if (!RECORD_OR_UNION_TYPE_P (t)) 5675 fld_worklist_push (TYPE_MAXVAL (t), fld); 5676 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld); 5677 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus 5678 do not and want not to reach unused variants this way. */ 5679 if (TYPE_CONTEXT (t)) 5680 { 5681 tree ctx = TYPE_CONTEXT (t); 5682 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one. 5683 So push that instead. */ 5684 while (ctx && TREE_CODE (ctx) == BLOCK) 5685 ctx = BLOCK_SUPERCONTEXT (ctx); 5686 fld_worklist_push (ctx, fld); 5687 } 5688 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not 5689 and want not to reach unused types this way. */ 5690 5691 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t)) 5692 { 5693 unsigned i; 5694 tree tem; 5695 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem) 5696 fld_worklist_push (TREE_TYPE (tem), fld); 5697 tem = BINFO_VIRTUALS (TYPE_BINFO (t)); 5698 if (tem 5699 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */ 5700 && TREE_CODE (tem) == TREE_LIST) 5701 do 5702 { 5703 fld_worklist_push (TREE_VALUE (tem), fld); 5704 tem = TREE_CHAIN (tem); 5705 } 5706 while (tem); 5707 } 5708 if (RECORD_OR_UNION_TYPE_P (t)) 5709 { 5710 tree tem; 5711 /* Push all TYPE_FIELDS - there can be interleaving interesting 5712 and non-interesting things. 
*/ 5713 tem = TYPE_FIELDS (t); 5714 while (tem) 5715 { 5716 if (TREE_CODE (tem) == FIELD_DECL 5717 || (TREE_CODE (tem) == TYPE_DECL 5718 && !DECL_IGNORED_P (tem) 5719 && debug_info_level > DINFO_LEVEL_TERSE 5720 && !is_redundant_typedef (tem))) 5721 fld_worklist_push (tem, fld); 5722 tem = TREE_CHAIN (tem); 5723 } 5724 } 5725 5726 fld_worklist_push (TYPE_STUB_DECL (t), fld); 5727 *ws = 0; 5728 } 5729 else if (TREE_CODE (t) == BLOCK) 5730 { 5731 tree tem; 5732 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem)) 5733 fld_worklist_push (tem, fld); 5734 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem)) 5735 fld_worklist_push (tem, fld); 5736 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld); 5737 } 5738 5739 if (TREE_CODE (t) != IDENTIFIER_NODE 5740 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED)) 5741 fld_worklist_push (TREE_TYPE (t), fld); 5742 5743 return NULL_TREE; 5744 } 5745 5746 5747 /* Find decls and types in T. */ 5748 5749 static void 5750 find_decls_types (tree t, struct free_lang_data_d *fld) 5751 { 5752 while (1) 5753 { 5754 if (!fld->pset.contains (t)) 5755 walk_tree (&t, find_decls_types_r, fld, &fld->pset); 5756 if (fld->worklist.is_empty ()) 5757 break; 5758 t = fld->worklist.pop (); 5759 } 5760 } 5761 5762 /* Translate all the types in LIST with the corresponding runtime 5763 types. */ 5764 5765 static tree 5766 get_eh_types_for_runtime (tree list) 5767 { 5768 tree head, prev; 5769 5770 if (list == NULL_TREE) 5771 return NULL_TREE; 5772 5773 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list))); 5774 prev = head; 5775 list = TREE_CHAIN (list); 5776 while (list) 5777 { 5778 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list))); 5779 TREE_CHAIN (prev) = n; 5780 prev = TREE_CHAIN (prev); 5781 list = TREE_CHAIN (list); 5782 } 5783 5784 return head; 5785 } 5786 5787 5788 /* Find decls and types referenced in EH region R and store them in 5789 FLD->DECLS and FLD->TYPES. */ 5790 5791 static void 5792 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld) 5793 { 5794 switch (r->type) 5795 { 5796 case ERT_CLEANUP: 5797 break; 5798 5799 case ERT_TRY: 5800 { 5801 eh_catch c; 5802 5803 /* The types referenced in each catch must first be changed to the 5804 EH types used at runtime. This removes references to FE types 5805 in the region. */ 5806 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) 5807 { 5808 c->type_list = get_eh_types_for_runtime (c->type_list); 5809 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset); 5810 } 5811 } 5812 break; 5813 5814 case ERT_ALLOWED_EXCEPTIONS: 5815 r->u.allowed.type_list 5816 = get_eh_types_for_runtime (r->u.allowed.type_list); 5817 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset); 5818 break; 5819 5820 case ERT_MUST_NOT_THROW: 5821 walk_tree (&r->u.must_not_throw.failure_decl, 5822 find_decls_types_r, fld, &fld->pset); 5823 break; 5824 } 5825 } 5826 5827 5828 /* Find decls and types referenced in cgraph node N and store them in 5829 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will 5830 look for *every* kind of DECL and TYPE node reachable from N, 5831 including those embedded inside types and decls (i.e,, TYPE_DECLs, 5832 NAMESPACE_DECLs, etc). 
*/ 5833 5834 static void 5835 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld) 5836 { 5837 basic_block bb; 5838 struct function *fn; 5839 unsigned ix; 5840 tree t; 5841 5842 find_decls_types (n->decl, fld); 5843 5844 if (!gimple_has_body_p (n->decl)) 5845 return; 5846 5847 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL); 5848 5849 fn = DECL_STRUCT_FUNCTION (n->decl); 5850 5851 /* Traverse locals. */ 5852 FOR_EACH_LOCAL_DECL (fn, ix, t) 5853 find_decls_types (t, fld); 5854 5855 /* Traverse EH regions in FN. */ 5856 { 5857 eh_region r; 5858 FOR_ALL_EH_REGION_FN (r, fn) 5859 find_decls_types_in_eh_region (r, fld); 5860 } 5861 5862 /* Traverse every statement in FN. */ 5863 FOR_EACH_BB_FN (bb, fn) 5864 { 5865 gphi_iterator psi; 5866 gimple_stmt_iterator si; 5867 unsigned i; 5868 5869 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi)) 5870 { 5871 gphi *phi = psi.phi (); 5872 5873 for (i = 0; i < gimple_phi_num_args (phi); i++) 5874 { 5875 tree *arg_p = gimple_phi_arg_def_ptr (phi, i); 5876 find_decls_types (*arg_p, fld); 5877 } 5878 } 5879 5880 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si)) 5881 { 5882 gimple *stmt = gsi_stmt (si); 5883 5884 if (is_gimple_call (stmt)) 5885 find_decls_types (gimple_call_fntype (stmt), fld); 5886 5887 for (i = 0; i < gimple_num_ops (stmt); i++) 5888 { 5889 tree arg = gimple_op (stmt, i); 5890 find_decls_types (arg, fld); 5891 } 5892 } 5893 } 5894 } 5895 5896 5897 /* Find decls and types referenced in varpool node N and store them in 5898 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will 5899 look for *every* kind of DECL and TYPE node reachable from N, 5900 including those embedded inside types and decls (i.e,, TYPE_DECLs, 5901 NAMESPACE_DECLs, etc). */ 5902 5903 static void 5904 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld) 5905 { 5906 find_decls_types (v->decl, fld); 5907 } 5908 5909 /* If T needs an assembler name, have one created for it. */ 5910 5911 void 5912 assign_assembler_name_if_needed (tree t) 5913 { 5914 if (need_assembler_name_p (t)) 5915 { 5916 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit 5917 diagnostics that use input_location to show locus 5918 information. The problem here is that, at this point, 5919 input_location is generally anchored to the end of the file 5920 (since the parser is long gone), so we don't have a good 5921 position to pin it to. 5922 5923 To alleviate this problem, this uses the location of T's 5924 declaration. Examples of this are 5925 testsuite/g++.dg/template/cond2.C and 5926 testsuite/g++.dg/template/pr35240.C. */ 5927 location_t saved_location = input_location; 5928 input_location = DECL_SOURCE_LOCATION (t); 5929 5930 decl_assembler_name (t); 5931 5932 input_location = saved_location; 5933 } 5934 } 5935 5936 5937 /* Free language specific information for every operand and expression 5938 in every node of the call graph. This process operates in three stages: 5939 5940 1- Every callgraph node and varpool node is traversed looking for 5941 decls and types embedded in them. This is a more exhaustive 5942 search than that done by find_referenced_vars, because it will 5943 also collect individual fields, decls embedded in types, etc. 5944 5945 2- All the decls found are sent to free_lang_data_in_decl. 5946 5947 3- All the types found are sent to free_lang_data_in_type. 
5948 5949 The ordering between decls and types is important because 5950 free_lang_data_in_decl sets assembler names, which includes 5951 mangling. So types cannot be freed up until assembler names have 5952 been set up. */ 5953 5954 static void 5955 free_lang_data_in_cgraph (void) 5956 { 5957 struct cgraph_node *n; 5958 varpool_node *v; 5959 struct free_lang_data_d fld; 5960 tree t; 5961 unsigned i; 5962 alias_pair *p; 5963 5964 /* Find decls and types in the body of every function in the callgraph. */ 5965 FOR_EACH_FUNCTION (n) 5966 find_decls_types_in_node (n, &fld); 5967 5968 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p) 5969 find_decls_types (p->decl, &fld); 5970 5971 /* Find decls and types in every varpool symbol. */ 5972 FOR_EACH_VARIABLE (v) 5973 find_decls_types_in_var (v, &fld); 5974 5975 /* Set the assembler name on every decl found. We need to do this 5976 now because free_lang_data_in_decl will invalidate data needed 5977 for mangling. This breaks mangling on interdependent decls. */ 5978 FOR_EACH_VEC_ELT (fld.decls, i, t) 5979 assign_assembler_name_if_needed (t); 5980 5981 /* Traverse every decl found freeing its language data. */ 5982 FOR_EACH_VEC_ELT (fld.decls, i, t) 5983 free_lang_data_in_decl (t); 5984 5985 /* Traverse every type found freeing its language data. */ 5986 FOR_EACH_VEC_ELT (fld.types, i, t) 5987 free_lang_data_in_type (t); 5988 if (flag_checking) 5989 { 5990 FOR_EACH_VEC_ELT (fld.types, i, t) 5991 verify_type (t); 5992 } 5993 } 5994 5995 5996 /* Free resources that are used by FE but are not needed once they are done. */ 5997 5998 static unsigned 5999 free_lang_data (void) 6000 { 6001 unsigned i; 6002 6003 /* If we are the LTO frontend we have freed lang-specific data already. */ 6004 if (in_lto_p 6005 || (!flag_generate_lto && !flag_generate_offload)) 6006 return 0; 6007 6008 /* Allocate and assign alias sets to the standard integer types 6009 while the slots are still in the way the frontends generated them. */ 6010 for (i = 0; i < itk_none; ++i) 6011 if (integer_types[i]) 6012 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]); 6013 6014 /* Traverse the IL resetting language specific information for 6015 operands, expressions, etc. */ 6016 free_lang_data_in_cgraph (); 6017 6018 /* Create gimple variants for common types. */ 6019 fileptr_type_node = ptr_type_node; 6020 const_tm_ptr_type_node = const_ptr_type_node; 6021 6022 /* Reset some langhooks. Do not reset types_compatible_p, it may 6023 still be used indirectly via the get_alias_set langhook. */ 6024 lang_hooks.dwarf_name = lhd_dwarf_name; 6025 lang_hooks.decl_printable_name = gimple_decl_printable_name; 6026 lang_hooks.gimplify_expr = lhd_gimplify_expr; 6027 6028 /* We do not want the default decl_assembler_name implementation, 6029 rather if we have fixed everything we want a wrapper around it 6030 asserting that all non-local symbols already got their assembler 6031 name and only produce assembler names for local symbols. Or rather 6032 make sure we never call decl_assembler_name on local symbols and 6033 devise a separate, middle-end private scheme for it. */ 6034 6035 /* Reset diagnostic machinery. 
*/ 6036 tree_diagnostics_defaults (global_dc); 6037 6038 return 0; 6039 } 6040 6041 6042 namespace { 6043 6044 const pass_data pass_data_ipa_free_lang_data = 6045 { 6046 SIMPLE_IPA_PASS, /* type */ 6047 "*free_lang_data", /* name */ 6048 OPTGROUP_NONE, /* optinfo_flags */ 6049 TV_IPA_FREE_LANG_DATA, /* tv_id */ 6050 0, /* properties_required */ 6051 0, /* properties_provided */ 6052 0, /* properties_destroyed */ 6053 0, /* todo_flags_start */ 6054 0, /* todo_flags_finish */ 6055 }; 6056 6057 class pass_ipa_free_lang_data : public simple_ipa_opt_pass 6058 { 6059 public: 6060 pass_ipa_free_lang_data (gcc::context *ctxt) 6061 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt) 6062 {} 6063 6064 /* opt_pass methods: */ 6065 virtual unsigned int execute (function *) { return free_lang_data (); } 6066 6067 }; // class pass_ipa_free_lang_data 6068 6069 } // anon namespace 6070 6071 simple_ipa_opt_pass * 6072 make_pass_ipa_free_lang_data (gcc::context *ctxt) 6073 { 6074 return new pass_ipa_free_lang_data (ctxt); 6075 } 6076 6077 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of 6078 ATTR_NAME. Also used internally by remove_attribute(). */ 6079 bool 6080 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident) 6081 { 6082 size_t ident_len = IDENTIFIER_LENGTH (ident); 6083 6084 if (ident_len == attr_len) 6085 { 6086 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0) 6087 return true; 6088 } 6089 else if (ident_len == attr_len + 4) 6090 { 6091 /* There is the possibility that ATTR is 'text' and IDENT is 6092 '__text__'. */ 6093 const char *p = IDENTIFIER_POINTER (ident); 6094 if (p[0] == '_' && p[1] == '_' 6095 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_' 6096 && strncmp (attr_name, p + 2, attr_len) == 0) 6097 return true; 6098 } 6099 6100 return false; 6101 } 6102 6103 /* The backbone of lookup_attribute(). ATTR_LEN is the string length 6104 of ATTR_NAME, and LIST is not NULL_TREE. */ 6105 tree 6106 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list) 6107 { 6108 while (list) 6109 { 6110 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list)); 6111 6112 if (ident_len == attr_len) 6113 { 6114 if (!strcmp (attr_name, 6115 IDENTIFIER_POINTER (get_attribute_name (list)))) 6116 break; 6117 } 6118 /* TODO: If we made sure that attributes were stored in the 6119 canonical form without '__...__' (ie, as in 'text' as opposed 6120 to '__text__') then we could avoid the following case. */ 6121 else if (ident_len == attr_len + 4) 6122 { 6123 const char *p = IDENTIFIER_POINTER (get_attribute_name (list)); 6124 if (p[0] == '_' && p[1] == '_' 6125 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_' 6126 && strncmp (attr_name, p + 2, attr_len) == 0) 6127 break; 6128 } 6129 list = TREE_CHAIN (list); 6130 } 6131 6132 return list; 6133 } 6134 6135 /* Given an attribute name ATTR_NAME and a list of attributes LIST, 6136 return a pointer to the attribute's list first element if the attribute 6137 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not 6138 '__text__'). 
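   For example (an illustrative sketch rather than a full description),
   a prefix of 'format' matches attributes spelled 'format', 'format_arg'
   or '__format__'; the first such attribute found in LIST is returned,
   or NULL_TREE if there is none.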
*/ 6139 6140 tree 6141 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len, 6142 tree list) 6143 { 6144 while (list) 6145 { 6146 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list)); 6147 6148 if (attr_len > ident_len) 6149 { 6150 list = TREE_CHAIN (list); 6151 continue; 6152 } 6153 6154 const char *p = IDENTIFIER_POINTER (get_attribute_name (list)); 6155 6156 if (strncmp (attr_name, p, attr_len) == 0) 6157 break; 6158 6159 /* TODO: If we made sure that attributes were stored in the 6160 canonical form without '__...__' (ie, as in 'text' as opposed 6161 to '__text__') then we could avoid the following case. */ 6162 if (p[0] == '_' && p[1] == '_' && 6163 strncmp (attr_name, p + 2, attr_len) == 0) 6164 break; 6165 6166 list = TREE_CHAIN (list); 6167 } 6168 6169 return list; 6170 } 6171 6172 6173 /* A variant of lookup_attribute() that can be used with an identifier 6174 as the first argument, and where the identifier can be either 6175 'text' or '__text__'. 6176 6177 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST, 6178 return a pointer to the attribute's list element if the attribute 6179 is part of the list, or NULL_TREE if not found. If the attribute 6180 appears more than once, this only returns the first occurrence; the 6181 TREE_CHAIN of the return value should be passed back in if further 6182 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but 6183 can be in the form 'text' or '__text__'. */ 6184 static tree 6185 lookup_ident_attribute (tree attr_identifier, tree list) 6186 { 6187 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE); 6188 6189 while (list) 6190 { 6191 gcc_checking_assert (TREE_CODE (get_attribute_name (list)) 6192 == IDENTIFIER_NODE); 6193 6194 if (cmp_attrib_identifiers (attr_identifier, 6195 get_attribute_name (list))) 6196 /* Found it. */ 6197 break; 6198 list = TREE_CHAIN (list); 6199 } 6200 6201 return list; 6202 } 6203 6204 /* Remove any instances of attribute ATTR_NAME in LIST and return the 6205 modified list. */ 6206 6207 tree 6208 remove_attribute (const char *attr_name, tree list) 6209 { 6210 tree *p; 6211 size_t attr_len = strlen (attr_name); 6212 6213 gcc_checking_assert (attr_name[0] != '_'); 6214 6215 for (p = &list; *p; ) 6216 { 6217 tree l = *p; 6218 /* TODO: If we were storing attributes in normalized form, here 6219 we could use a simple strcmp(). */ 6220 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l))) 6221 *p = TREE_CHAIN (l); 6222 else 6223 p = &TREE_CHAIN (l); 6224 } 6225 6226 return list; 6227 } 6228 6229 /* Return an attribute list that is the union of a1 and a2. */ 6230 6231 tree 6232 merge_attributes (tree a1, tree a2) 6233 { 6234 tree attributes; 6235 6236 /* Either one unset? Take the set one. */ 6237 6238 if ((attributes = a1) == 0) 6239 attributes = a2; 6240 6241 /* One that completely contains the other? Take it. */ 6242 6243 else if (a2 != 0 && ! attribute_list_contained (a1, a2)) 6244 { 6245 if (attribute_list_contained (a2, a1)) 6246 attributes = a2; 6247 else 6248 { 6249 /* Pick the longest list, and hang on the other list. 
*/ 6250 6251 if (list_length (a1) < list_length (a2)) 6252 attributes = a2, a2 = a1; 6253 6254 for (; a2 != 0; a2 = TREE_CHAIN (a2)) 6255 { 6256 tree a; 6257 for (a = lookup_ident_attribute (get_attribute_name (a2), 6258 attributes); 6259 a != NULL_TREE && !attribute_value_equal (a, a2); 6260 a = lookup_ident_attribute (get_attribute_name (a2), 6261 TREE_CHAIN (a))) 6262 ; 6263 if (a == NULL_TREE) 6264 { 6265 a1 = copy_node (a2); 6266 TREE_CHAIN (a1) = attributes; 6267 attributes = a1; 6268 } 6269 } 6270 } 6271 } 6272 return attributes; 6273 } 6274 6275 /* Given types T1 and T2, merge their attributes and return 6276 the result. */ 6277 6278 tree 6279 merge_type_attributes (tree t1, tree t2) 6280 { 6281 return merge_attributes (TYPE_ATTRIBUTES (t1), 6282 TYPE_ATTRIBUTES (t2)); 6283 } 6284 6285 /* Given decls OLDDECL and NEWDECL, merge their attributes and return 6286 the result. */ 6287 6288 tree 6289 merge_decl_attributes (tree olddecl, tree newdecl) 6290 { 6291 return merge_attributes (DECL_ATTRIBUTES (olddecl), 6292 DECL_ATTRIBUTES (newdecl)); 6293 } 6294 6295 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES 6296 6297 /* Specialization of merge_decl_attributes for various Windows targets. 6298 6299 This handles the following situation: 6300 6301 __declspec (dllimport) int foo; 6302 int foo; 6303 6304 The second instance of `foo' nullifies the dllimport. */ 6305 6306 tree 6307 merge_dllimport_decl_attributes (tree old, tree new_tree) 6308 { 6309 tree a; 6310 int delete_dllimport_p = 1; 6311 6312 /* What we need to do here is remove from `old' dllimport if it doesn't 6313 appear in `new'. dllimport behaves like extern: if a declaration is 6314 marked dllimport and a definition appears later, then the object 6315 is not dllimport'd. We also remove a `new' dllimport if the old list 6316 contains dllexport: dllexport always overrides dllimport, regardless 6317 of the order of declaration. */ 6318 if (!VAR_OR_FUNCTION_DECL_P (new_tree)) 6319 delete_dllimport_p = 0; 6320 else if (DECL_DLLIMPORT_P (new_tree) 6321 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old))) 6322 { 6323 DECL_DLLIMPORT_P (new_tree) = 0; 6324 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: " 6325 "dllimport ignored", new_tree); 6326 } 6327 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree)) 6328 { 6329 /* Warn about overriding a symbol that has already been used, e.g.: 6330 extern int __attribute__ ((dllimport)) foo; 6331 int* bar () {return &foo;} 6332 int foo; 6333 */ 6334 if (TREE_USED (old)) 6335 { 6336 warning (0, "%q+D redeclared without dllimport attribute " 6337 "after being referenced with dll linkage", new_tree); 6338 /* If we have used a variable's address with dllimport linkage, 6339 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the 6340 decl may already have had TREE_CONSTANT computed. 6341 We still remove the attribute so that assembler code refers 6342 to '&foo rather than '_imp__foo'. */ 6343 if (VAR_P (old) && TREE_ADDRESSABLE (old)) 6344 DECL_DLLIMPORT_P (new_tree) = 1; 6345 } 6346 6347 /* Let an inline definition silently override the external reference, 6348 but otherwise warn about attribute inconsistency. 
*/ 6349 else if (VAR_P (new_tree) || !DECL_DECLARED_INLINE_P (new_tree)) 6350 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: " 6351 "previous dllimport ignored", new_tree); 6352 } 6353 else 6354 delete_dllimport_p = 0; 6355 6356 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree)); 6357 6358 if (delete_dllimport_p) 6359 a = remove_attribute ("dllimport", a); 6360 6361 return a; 6362 } 6363 6364 /* Handle a "dllimport" or "dllexport" attribute; arguments as in 6365 struct attribute_spec.handler. */ 6366 6367 tree 6368 handle_dll_attribute (tree * pnode, tree name, tree args, int flags, 6369 bool *no_add_attrs) 6370 { 6371 tree node = *pnode; 6372 bool is_dllimport; 6373 6374 /* These attributes may apply to structure and union types being created, 6375 but otherwise should pass to the declaration involved. */ 6376 if (!DECL_P (node)) 6377 { 6378 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT 6379 | (int) ATTR_FLAG_ARRAY_NEXT)) 6380 { 6381 *no_add_attrs = true; 6382 return tree_cons (name, args, NULL_TREE); 6383 } 6384 if (TREE_CODE (node) == RECORD_TYPE 6385 || TREE_CODE (node) == UNION_TYPE) 6386 { 6387 node = TYPE_NAME (node); 6388 if (!node) 6389 return NULL_TREE; 6390 } 6391 else 6392 { 6393 warning (OPT_Wattributes, "%qE attribute ignored", 6394 name); 6395 *no_add_attrs = true; 6396 return NULL_TREE; 6397 } 6398 } 6399 6400 if (!VAR_OR_FUNCTION_DECL_P (node) && TREE_CODE (node) != TYPE_DECL) 6401 { 6402 *no_add_attrs = true; 6403 warning (OPT_Wattributes, "%qE attribute ignored", 6404 name); 6405 return NULL_TREE; 6406 } 6407 6408 if (TREE_CODE (node) == TYPE_DECL 6409 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE 6410 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE) 6411 { 6412 *no_add_attrs = true; 6413 warning (OPT_Wattributes, "%qE attribute ignored", 6414 name); 6415 return NULL_TREE; 6416 } 6417 6418 is_dllimport = is_attribute_p ("dllimport", name); 6419 6420 /* Report error on dllimport ambiguities seen now before they cause 6421 any damage. */ 6422 if (is_dllimport) 6423 { 6424 /* Honor any target-specific overrides. */ 6425 if (!targetm.valid_dllimport_attribute_p (node)) 6426 *no_add_attrs = true; 6427 6428 else if (TREE_CODE (node) == FUNCTION_DECL 6429 && DECL_DECLARED_INLINE_P (node)) 6430 { 6431 warning (OPT_Wattributes, "inline function %q+D declared as " 6432 " dllimport: attribute ignored", node); 6433 *no_add_attrs = true; 6434 } 6435 /* Like MS, treat definition of dllimported variables and 6436 non-inlined functions on declaration as syntax errors. */ 6437 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node)) 6438 { 6439 error ("function %q+D definition is marked dllimport", node); 6440 *no_add_attrs = true; 6441 } 6442 6443 else if (VAR_P (node)) 6444 { 6445 if (DECL_INITIAL (node)) 6446 { 6447 error ("variable %q+D definition is marked dllimport", 6448 node); 6449 *no_add_attrs = true; 6450 } 6451 6452 /* `extern' needn't be specified with dllimport. 6453 Specify `extern' now and hope for the best. Sigh. */ 6454 DECL_EXTERNAL (node) = 1; 6455 /* Also, implicitly give dllimport'd variables declared within 6456 a function global scope, unless declared static. 
*/ 6457 if (current_function_decl != NULL_TREE && !TREE_STATIC (node)) 6458 TREE_PUBLIC (node) = 1; 6459 } 6460 6461 if (*no_add_attrs == false) 6462 DECL_DLLIMPORT_P (node) = 1; 6463 } 6464 else if (TREE_CODE (node) == FUNCTION_DECL 6465 && DECL_DECLARED_INLINE_P (node) 6466 && flag_keep_inline_dllexport) 6467 /* An exported function, even if inline, must be emitted. */ 6468 DECL_EXTERNAL (node) = 0; 6469 6470 /* Report error if symbol is not accessible at global scope. */ 6471 if (!TREE_PUBLIC (node) && VAR_OR_FUNCTION_DECL_P (node)) 6472 { 6473 error ("external linkage required for symbol %q+D because of " 6474 "%qE attribute", node, name); 6475 *no_add_attrs = true; 6476 } 6477 6478 /* A dllexport'd entity must have default visibility so that other 6479 program units (shared libraries or the main executable) can see 6480 it. A dllimport'd entity must have default visibility so that 6481 the linker knows that undefined references within this program 6482 unit can be resolved by the dynamic linker. */ 6483 if (!*no_add_attrs) 6484 { 6485 if (DECL_VISIBILITY_SPECIFIED (node) 6486 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT) 6487 error ("%qE implies default visibility, but %qD has already " 6488 "been declared with a different visibility", 6489 name, node); 6490 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT; 6491 DECL_VISIBILITY_SPECIFIED (node) = 1; 6492 } 6493 6494 return NULL_TREE; 6495 } 6496 6497 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */ 6498 6499 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask 6500 of the various TYPE_QUAL values. */ 6501 6502 static void 6503 set_type_quals (tree type, int type_quals) 6504 { 6505 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0; 6506 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0; 6507 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0; 6508 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0; 6509 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals); 6510 } 6511 6512 /* Returns true iff CAND and BASE have equivalent language-specific 6513 qualifiers. */ 6514 6515 bool 6516 check_lang_type (const_tree cand, const_tree base) 6517 { 6518 if (lang_hooks.types.type_hash_eq == NULL) 6519 return true; 6520 /* type_hash_eq currently only applies to these types. */ 6521 if (TREE_CODE (cand) != FUNCTION_TYPE 6522 && TREE_CODE (cand) != METHOD_TYPE) 6523 return true; 6524 return lang_hooks.types.type_hash_eq (cand, base); 6525 } 6526 6527 /* Returns true iff unqualified CAND and BASE are equivalent. */ 6528 6529 bool 6530 check_base_type (const_tree cand, const_tree base) 6531 { 6532 return (TYPE_NAME (cand) == TYPE_NAME (base) 6533 /* Apparently this is needed for Objective-C. */ 6534 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base) 6535 /* Check alignment. */ 6536 && TYPE_ALIGN (cand) == TYPE_ALIGN (base) 6537 && attribute_list_equal (TYPE_ATTRIBUTES (cand), 6538 TYPE_ATTRIBUTES (base))); 6539 } 6540 6541 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */ 6542 6543 bool 6544 check_qualified_type (const_tree cand, const_tree base, int type_quals) 6545 { 6546 return (TYPE_QUALS (cand) == type_quals 6547 && check_base_type (cand, base) 6548 && check_lang_type (cand, base)); 6549 } 6550 6551 /* Returns true iff CAND is equivalent to BASE with ALIGN. 
*/ 6552 6553 static bool 6554 check_aligned_type (const_tree cand, const_tree base, unsigned int align) 6555 { 6556 return (TYPE_QUALS (cand) == TYPE_QUALS (base) 6557 && TYPE_NAME (cand) == TYPE_NAME (base) 6558 /* Apparently this is needed for Objective-C. */ 6559 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base) 6560 /* Check alignment. */ 6561 && TYPE_ALIGN (cand) == align 6562 && attribute_list_equal (TYPE_ATTRIBUTES (cand), 6563 TYPE_ATTRIBUTES (base)) 6564 && check_lang_type (cand, base)); 6565 } 6566 6567 /* This function checks to see if TYPE matches the size one of the built-in 6568 atomic types, and returns that core atomic type. */ 6569 6570 static tree 6571 find_atomic_core_type (tree type) 6572 { 6573 tree base_atomic_type; 6574 6575 /* Only handle complete types. */ 6576 if (TYPE_SIZE (type) == NULL_TREE) 6577 return NULL_TREE; 6578 6579 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type)); 6580 switch (type_size) 6581 { 6582 case 8: 6583 base_atomic_type = atomicQI_type_node; 6584 break; 6585 6586 case 16: 6587 base_atomic_type = atomicHI_type_node; 6588 break; 6589 6590 case 32: 6591 base_atomic_type = atomicSI_type_node; 6592 break; 6593 6594 case 64: 6595 base_atomic_type = atomicDI_type_node; 6596 break; 6597 6598 case 128: 6599 base_atomic_type = atomicTI_type_node; 6600 break; 6601 6602 default: 6603 base_atomic_type = NULL_TREE; 6604 } 6605 6606 return base_atomic_type; 6607 } 6608 6609 /* Return a version of the TYPE, qualified as indicated by the 6610 TYPE_QUALS, if one exists. If no qualified version exists yet, 6611 return NULL_TREE. */ 6612 6613 tree 6614 get_qualified_type (tree type, int type_quals) 6615 { 6616 tree t; 6617 6618 if (TYPE_QUALS (type) == type_quals) 6619 return type; 6620 6621 /* Search the chain of variants to see if there is already one there just 6622 like the one we need to have. If so, use that existing one. We must 6623 preserve the TYPE_NAME, since there is code that depends on this. */ 6624 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) 6625 if (check_qualified_type (t, type, type_quals)) 6626 return t; 6627 6628 return NULL_TREE; 6629 } 6630 6631 /* Like get_qualified_type, but creates the type if it does not 6632 exist. This function never returns NULL_TREE. */ 6633 6634 tree 6635 build_qualified_type (tree type, int type_quals) 6636 { 6637 tree t; 6638 6639 /* See if we already have the appropriate qualified variant. */ 6640 t = get_qualified_type (type, type_quals); 6641 6642 /* If not, build it. */ 6643 if (!t) 6644 { 6645 t = build_variant_type_copy (type); 6646 set_type_quals (t, type_quals); 6647 6648 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)) 6649 { 6650 /* See if this object can map to a basic atomic type. */ 6651 tree atomic_type = find_atomic_core_type (type); 6652 if (atomic_type) 6653 { 6654 /* Ensure the alignment of this type is compatible with 6655 the required alignment of the atomic type. */ 6656 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t)) 6657 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type)); 6658 } 6659 } 6660 6661 if (TYPE_STRUCTURAL_EQUALITY_P (type)) 6662 /* Propagate structural equality. */ 6663 SET_TYPE_STRUCTURAL_EQUALITY (t); 6664 else if (TYPE_CANONICAL (type) != type) 6665 /* Build the underlying canonical type, since it is different 6666 from TYPE. */ 6667 { 6668 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals); 6669 TYPE_CANONICAL (t) = TYPE_CANONICAL (c); 6670 } 6671 else 6672 /* T is its own canonical type. 
*/ 6673 TYPE_CANONICAL (t) = t; 6674 6675 } 6676 6677 return t; 6678 } 6679 6680 /* Create a variant of type T with alignment ALIGN. */ 6681 6682 tree 6683 build_aligned_type (tree type, unsigned int align) 6684 { 6685 tree t; 6686 6687 if (TYPE_PACKED (type) 6688 || TYPE_ALIGN (type) == align) 6689 return type; 6690 6691 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) 6692 if (check_aligned_type (t, type, align)) 6693 return t; 6694 6695 t = build_variant_type_copy (type); 6696 SET_TYPE_ALIGN (t, align); 6697 TYPE_USER_ALIGN (t) = 1; 6698 6699 return t; 6700 } 6701 6702 /* Create a new distinct copy of TYPE. The new type is made its own 6703 MAIN_VARIANT. If TYPE requires structural equality checks, the 6704 resulting type requires structural equality checks; otherwise, its 6705 TYPE_CANONICAL points to itself. */ 6706 6707 tree 6708 build_distinct_type_copy (tree type) 6709 { 6710 tree t = copy_node (type); 6711 6712 TYPE_POINTER_TO (t) = 0; 6713 TYPE_REFERENCE_TO (t) = 0; 6714 6715 /* Set the canonical type either to a new equivalence class, or 6716 propagate the need for structural equality checks. */ 6717 if (TYPE_STRUCTURAL_EQUALITY_P (type)) 6718 SET_TYPE_STRUCTURAL_EQUALITY (t); 6719 else 6720 TYPE_CANONICAL (t) = t; 6721 6722 /* Make it its own variant. */ 6723 TYPE_MAIN_VARIANT (t) = t; 6724 TYPE_NEXT_VARIANT (t) = 0; 6725 6726 /* We do not record methods in type copies nor variants 6727 so we do not need to keep them up to date when new method 6728 is inserted. */ 6729 if (RECORD_OR_UNION_TYPE_P (t)) 6730 TYPE_METHODS (t) = NULL_TREE; 6731 6732 /* Note that it is now possible for TYPE_MIN_VALUE to be a value 6733 whose TREE_TYPE is not t. This can also happen in the Ada 6734 frontend when using subtypes. */ 6735 6736 return t; 6737 } 6738 6739 /* Create a new variant of TYPE, equivalent but distinct. This is so 6740 the caller can modify it. TYPE_CANONICAL for the return type will 6741 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types 6742 are considered equal by the language itself (or that both types 6743 require structural equality checks). */ 6744 6745 tree 6746 build_variant_type_copy (tree type) 6747 { 6748 tree t, m = TYPE_MAIN_VARIANT (type); 6749 6750 t = build_distinct_type_copy (type); 6751 6752 /* Since we're building a variant, assume that it is a non-semantic 6753 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */ 6754 TYPE_CANONICAL (t) = TYPE_CANONICAL (type); 6755 /* Type variants have no alias set defined. */ 6756 TYPE_ALIAS_SET (t) = -1; 6757 6758 /* Add the new type to the chain of variants of TYPE. */ 6759 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m); 6760 TYPE_NEXT_VARIANT (m) = t; 6761 TYPE_MAIN_VARIANT (t) = m; 6762 6763 return t; 6764 } 6765 6766 /* Return true if the from tree in both tree maps are equal. */ 6767 6768 int 6769 tree_map_base_eq (const void *va, const void *vb) 6770 { 6771 const struct tree_map_base *const a = (const struct tree_map_base *) va, 6772 *const b = (const struct tree_map_base *) vb; 6773 return (a->from == b->from); 6774 } 6775 6776 /* Hash a from tree in a tree_base_map. */ 6777 6778 unsigned int 6779 tree_map_base_hash (const void *item) 6780 { 6781 return htab_hash_pointer (((const struct tree_map_base *)item)->from); 6782 } 6783 6784 /* Return true if this tree map structure is marked for garbage collection 6785 purposes. We simply return true if the from tree is marked, so that this 6786 structure goes away when the from tree goes away. 
*/ 6787 6788 int 6789 tree_map_base_marked_p (const void *p) 6790 { 6791 return ggc_marked_p (((const struct tree_map_base *) p)->from); 6792 } 6793 6794 /* Hash a from tree in a tree_map. */ 6795 6796 unsigned int 6797 tree_map_hash (const void *item) 6798 { 6799 return (((const struct tree_map *) item)->hash); 6800 } 6801 6802 /* Hash a from tree in a tree_decl_map. */ 6803 6804 unsigned int 6805 tree_decl_map_hash (const void *item) 6806 { 6807 return DECL_UID (((const struct tree_decl_map *) item)->base.from); 6808 } 6809 6810 /* Return the initialization priority for DECL. */ 6811 6812 priority_type 6813 decl_init_priority_lookup (tree decl) 6814 { 6815 symtab_node *snode = symtab_node::get (decl); 6816 6817 if (!snode) 6818 return DEFAULT_INIT_PRIORITY; 6819 return 6820 snode->get_init_priority (); 6821 } 6822 6823 /* Return the finalization priority for DECL. */ 6824 6825 priority_type 6826 decl_fini_priority_lookup (tree decl) 6827 { 6828 cgraph_node *node = cgraph_node::get (decl); 6829 6830 if (!node) 6831 return DEFAULT_INIT_PRIORITY; 6832 return 6833 node->get_fini_priority (); 6834 } 6835 6836 /* Set the initialization priority for DECL to PRIORITY. */ 6837 6838 void 6839 decl_init_priority_insert (tree decl, priority_type priority) 6840 { 6841 struct symtab_node *snode; 6842 6843 if (priority == DEFAULT_INIT_PRIORITY) 6844 { 6845 snode = symtab_node::get (decl); 6846 if (!snode) 6847 return; 6848 } 6849 else if (VAR_P (decl)) 6850 snode = varpool_node::get_create (decl); 6851 else 6852 snode = cgraph_node::get_create (decl); 6853 snode->set_init_priority (priority); 6854 } 6855 6856 /* Set the finalization priority for DECL to PRIORITY. */ 6857 6858 void 6859 decl_fini_priority_insert (tree decl, priority_type priority) 6860 { 6861 struct cgraph_node *node; 6862 6863 if (priority == DEFAULT_INIT_PRIORITY) 6864 { 6865 node = cgraph_node::get (decl); 6866 if (!node) 6867 return; 6868 } 6869 else 6870 node = cgraph_node::get_create (decl); 6871 node->set_fini_priority (priority); 6872 } 6873 6874 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */ 6875 6876 static void 6877 print_debug_expr_statistics (void) 6878 { 6879 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n", 6880 (long) debug_expr_for_decl->size (), 6881 (long) debug_expr_for_decl->elements (), 6882 debug_expr_for_decl->collisions ()); 6883 } 6884 6885 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */ 6886 6887 static void 6888 print_value_expr_statistics (void) 6889 { 6890 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n", 6891 (long) value_expr_for_decl->size (), 6892 (long) value_expr_for_decl->elements (), 6893 value_expr_for_decl->collisions ()); 6894 } 6895 6896 /* Lookup a debug expression for FROM, and return it if we find one. */ 6897 6898 tree 6899 decl_debug_expr_lookup (tree from) 6900 { 6901 struct tree_decl_map *h, in; 6902 in.base.from = from; 6903 6904 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from)); 6905 if (h) 6906 return h->to; 6907 return NULL_TREE; 6908 } 6909 6910 /* Insert a mapping FROM->TO in the debug expression hashtable. */ 6911 6912 void 6913 decl_debug_expr_insert (tree from, tree to) 6914 { 6915 struct tree_decl_map *h; 6916 6917 h = ggc_alloc<tree_decl_map> (); 6918 h->base.from = from; 6919 h->to = to; 6920 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h; 6921 } 6922 6923 /* Lookup a value expression for FROM, and return it if we find one. 
*/ 6924 6925 tree 6926 decl_value_expr_lookup (tree from) 6927 { 6928 struct tree_decl_map *h, in; 6929 in.base.from = from; 6930 6931 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from)); 6932 if (h) 6933 return h->to; 6934 return NULL_TREE; 6935 } 6936 6937 /* Insert a mapping FROM->TO in the value expression hashtable. */ 6938 6939 void 6940 decl_value_expr_insert (tree from, tree to) 6941 { 6942 struct tree_decl_map *h; 6943 6944 h = ggc_alloc<tree_decl_map> (); 6945 h->base.from = from; 6946 h->to = to; 6947 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h; 6948 } 6949 6950 /* Lookup a vector of debug arguments for FROM, and return it if we 6951 find one. */ 6952 6953 vec<tree, va_gc> ** 6954 decl_debug_args_lookup (tree from) 6955 { 6956 struct tree_vec_map *h, in; 6957 6958 if (!DECL_HAS_DEBUG_ARGS_P (from)) 6959 return NULL; 6960 gcc_checking_assert (debug_args_for_decl != NULL); 6961 in.base.from = from; 6962 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from)); 6963 if (h) 6964 return &h->to; 6965 return NULL; 6966 } 6967 6968 /* Insert a mapping FROM->empty vector of debug arguments in the value 6969 expression hashtable. */ 6970 6971 vec<tree, va_gc> ** 6972 decl_debug_args_insert (tree from) 6973 { 6974 struct tree_vec_map *h; 6975 tree_vec_map **loc; 6976 6977 if (DECL_HAS_DEBUG_ARGS_P (from)) 6978 return decl_debug_args_lookup (from); 6979 if (debug_args_for_decl == NULL) 6980 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64); 6981 h = ggc_alloc<tree_vec_map> (); 6982 h->base.from = from; 6983 h->to = NULL; 6984 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT); 6985 *loc = h; 6986 DECL_HAS_DEBUG_ARGS_P (from) = 1; 6987 return &h->to; 6988 } 6989 6990 /* Hashing of types so that we don't make duplicates. 6991 The entry point is `type_hash_canon'. */ 6992 6993 /* Compute a hash code for a list of types (chain of TREE_LIST nodes 6994 with types in the TREE_VALUE slots), by adding the hash codes 6995 of the individual types. */ 6996 6997 static void 6998 type_hash_list (const_tree list, inchash::hash &hstate) 6999 { 7000 const_tree tail; 7001 7002 for (tail = list; tail; tail = TREE_CHAIN (tail)) 7003 if (TREE_VALUE (tail) != error_mark_node) 7004 hstate.add_object (TYPE_HASH (TREE_VALUE (tail))); 7005 } 7006 7007 /* These are the Hashtable callback functions. */ 7008 7009 /* Returns true iff the types are equivalent. */ 7010 7011 bool 7012 type_cache_hasher::equal (type_hash *a, type_hash *b) 7013 { 7014 /* First test the things that are the same for all types. */ 7015 if (a->hash != b->hash 7016 || TREE_CODE (a->type) != TREE_CODE (b->type) 7017 || TREE_TYPE (a->type) != TREE_TYPE (b->type) 7018 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type), 7019 TYPE_ATTRIBUTES (b->type)) 7020 || (TREE_CODE (a->type) != COMPLEX_TYPE 7021 && TYPE_NAME (a->type) != TYPE_NAME (b->type))) 7022 return 0; 7023 7024 /* Be careful about comparing arrays before and after the element type 7025 has been completed; don't compare TYPE_ALIGN unless both types are 7026 complete. 
*/ 7027 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type) 7028 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type) 7029 || TYPE_MODE (a->type) != TYPE_MODE (b->type))) 7030 return 0; 7031 7032 switch (TREE_CODE (a->type)) 7033 { 7034 case VOID_TYPE: 7035 case COMPLEX_TYPE: 7036 case POINTER_TYPE: 7037 case REFERENCE_TYPE: 7038 case NULLPTR_TYPE: 7039 return 1; 7040 7041 case VECTOR_TYPE: 7042 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type); 7043 7044 case ENUMERAL_TYPE: 7045 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type) 7046 && !(TYPE_VALUES (a->type) 7047 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST 7048 && TYPE_VALUES (b->type) 7049 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST 7050 && type_list_equal (TYPE_VALUES (a->type), 7051 TYPE_VALUES (b->type)))) 7052 return 0; 7053 7054 /* fall through */ 7055 7056 case INTEGER_TYPE: 7057 case REAL_TYPE: 7058 case BOOLEAN_TYPE: 7059 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type)) 7060 return false; 7061 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type) 7062 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type), 7063 TYPE_MAX_VALUE (b->type))) 7064 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type) 7065 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type), 7066 TYPE_MIN_VALUE (b->type)))); 7067 7068 case FIXED_POINT_TYPE: 7069 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type); 7070 7071 case OFFSET_TYPE: 7072 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type); 7073 7074 case METHOD_TYPE: 7075 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type) 7076 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type) 7077 || (TYPE_ARG_TYPES (a->type) 7078 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST 7079 && TYPE_ARG_TYPES (b->type) 7080 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST 7081 && type_list_equal (TYPE_ARG_TYPES (a->type), 7082 TYPE_ARG_TYPES (b->type))))) 7083 break; 7084 return 0; 7085 case ARRAY_TYPE: 7086 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates, 7087 where the flag should be inherited from the element type 7088 and can change after ARRAY_TYPEs are created; on non-aggregates 7089 compare it and hash it, scalars will never have that flag set 7090 and we need to differentiate between arrays created by different 7091 front-ends or middle-end created arrays. 
*/ 7092 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type) 7093 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type)) 7094 || (TYPE_TYPELESS_STORAGE (a->type) 7095 == TYPE_TYPELESS_STORAGE (b->type)))); 7096 7097 case RECORD_TYPE: 7098 case UNION_TYPE: 7099 case QUAL_UNION_TYPE: 7100 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type) 7101 || (TYPE_FIELDS (a->type) 7102 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST 7103 && TYPE_FIELDS (b->type) 7104 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST 7105 && type_list_equal (TYPE_FIELDS (a->type), 7106 TYPE_FIELDS (b->type)))); 7107 7108 case FUNCTION_TYPE: 7109 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type) 7110 || (TYPE_ARG_TYPES (a->type) 7111 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST 7112 && TYPE_ARG_TYPES (b->type) 7113 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST 7114 && type_list_equal (TYPE_ARG_TYPES (a->type), 7115 TYPE_ARG_TYPES (b->type)))) 7116 break; 7117 return 0; 7118 7119 default: 7120 return 0; 7121 } 7122 7123 if (lang_hooks.types.type_hash_eq != NULL) 7124 return lang_hooks.types.type_hash_eq (a->type, b->type); 7125 7126 return 1; 7127 } 7128 7129 /* Given TYPE, and HASHCODE its hash code, return the canonical 7130 object for an identical type if one already exists. 7131 Otherwise, return TYPE, and record it as the canonical object. 7132 7133 To use this function, first create a type of the sort you want. 7134 Then compute its hash code from the fields of the type that 7135 make it different from other similar types. 7136 Then call this function and use the value. */ 7137 7138 tree 7139 type_hash_canon (unsigned int hashcode, tree type) 7140 { 7141 type_hash in; 7142 type_hash **loc; 7143 7144 /* The hash table only contains main variants, so ensure that's what we're 7145 being passed. */ 7146 gcc_assert (TYPE_MAIN_VARIANT (type) == type); 7147 7148 /* The TYPE_ALIGN field of a type is set by layout_type(), so we 7149 must call that routine before comparing TYPE_ALIGNs. */ 7150 layout_type (type); 7151 7152 in.hash = hashcode; 7153 in.type = type; 7154 7155 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT); 7156 if (*loc) 7157 { 7158 tree t1 = ((type_hash *) *loc)->type; 7159 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1); 7160 free_node (type); 7161 return t1; 7162 } 7163 else 7164 { 7165 struct type_hash *h; 7166 7167 h = ggc_alloc<type_hash> (); 7168 h->hash = hashcode; 7169 h->type = type; 7170 *loc = h; 7171 7172 return type; 7173 } 7174 } 7175 7176 static void 7177 print_type_hash_statistics (void) 7178 { 7179 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n", 7180 (long) type_hash_table->size (), 7181 (long) type_hash_table->elements (), 7182 type_hash_table->collisions ()); 7183 } 7184 7185 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes 7186 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots), 7187 by adding the hash codes of the individual attributes. */ 7188 7189 static void 7190 attribute_hash_list (const_tree list, inchash::hash &hstate) 7191 { 7192 const_tree tail; 7193 7194 for (tail = list; tail; tail = TREE_CHAIN (tail)) 7195 /* ??? Do we want to add in TREE_VALUE too? */ 7196 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail))); 7197 } 7198 7199 /* Given two lists of attributes, return true if list l2 is 7200 equivalent to l1. 
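   The comparison ignores the order of the attributes and treats the
   'text' and '__text__' spellings of a name as the same; e.g.
   { __unused__, aligned (8) } and { aligned (8), unused } compare equal.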
*/ 7201 7202 int 7203 attribute_list_equal (const_tree l1, const_tree l2) 7204 { 7205 if (l1 == l2) 7206 return 1; 7207 7208 return attribute_list_contained (l1, l2) 7209 && attribute_list_contained (l2, l1); 7210 } 7211 7212 /* Given two lists of attributes, return true if list L2 is 7213 completely contained within L1. */ 7214 /* ??? This would be faster if attribute names were stored in a canonicalized 7215 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method 7216 must be used to show these elements are equivalent (which they are). */ 7217 /* ??? It's not clear that attributes with arguments will always be handled 7218 correctly. */ 7219 7220 int 7221 attribute_list_contained (const_tree l1, const_tree l2) 7222 { 7223 const_tree t1, t2; 7224 7225 /* First check the obvious, maybe the lists are identical. */ 7226 if (l1 == l2) 7227 return 1; 7228 7229 /* Maybe the lists are similar. */ 7230 for (t1 = l1, t2 = l2; 7231 t1 != 0 && t2 != 0 7232 && get_attribute_name (t1) == get_attribute_name (t2) 7233 && TREE_VALUE (t1) == TREE_VALUE (t2); 7234 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2)) 7235 ; 7236 7237 /* Maybe the lists are equal. */ 7238 if (t1 == 0 && t2 == 0) 7239 return 1; 7240 7241 for (; t2 != 0; t2 = TREE_CHAIN (t2)) 7242 { 7243 const_tree attr; 7244 /* This CONST_CAST is okay because lookup_attribute does not 7245 modify its argument and the return value is assigned to a 7246 const_tree. */ 7247 for (attr = lookup_ident_attribute (get_attribute_name (t2), 7248 CONST_CAST_TREE (l1)); 7249 attr != NULL_TREE && !attribute_value_equal (t2, attr); 7250 attr = lookup_ident_attribute (get_attribute_name (t2), 7251 TREE_CHAIN (attr))) 7252 ; 7253 7254 if (attr == NULL_TREE) 7255 return 0; 7256 } 7257 7258 return 1; 7259 } 7260 7261 /* Given two lists of types 7262 (chains of TREE_LIST nodes with types in the TREE_VALUE slots) 7263 return 1 if the lists contain the same types in the same order. 7264 Also, the TREE_PURPOSEs must match. */ 7265 7266 int 7267 type_list_equal (const_tree l1, const_tree l2) 7268 { 7269 const_tree t1, t2; 7270 7271 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2)) 7272 if (TREE_VALUE (t1) != TREE_VALUE (t2) 7273 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2) 7274 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2)) 7275 && (TREE_TYPE (TREE_PURPOSE (t1)) 7276 == TREE_TYPE (TREE_PURPOSE (t2)))))) 7277 return 0; 7278 7279 return t1 == t2; 7280 } 7281 7282 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE 7283 given by TYPE. If the argument list accepts variable arguments, 7284 then this function counts only the ordinary arguments. */ 7285 7286 int 7287 type_num_arguments (const_tree type) 7288 { 7289 int i = 0; 7290 tree t; 7291 7292 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t)) 7293 /* If the function does not take a variable number of arguments, 7294 the last element in the list will have type `void'. */ 7295 if (VOID_TYPE_P (TREE_VALUE (t))) 7296 break; 7297 else 7298 ++i; 7299 7300 return i; 7301 } 7302 7303 /* Nonzero if integer constants T1 and T2 7304 represent the same constant value. 
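   The comparison is by value rather than by type; e.g.
   build_int_cst (integer_type_node, 7) and
   build_int_cst (long_integer_type_node, 7) compare equal here.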
*/ 7305 7306 int 7307 tree_int_cst_equal (const_tree t1, const_tree t2) 7308 { 7309 if (t1 == t2) 7310 return 1; 7311 7312 if (t1 == 0 || t2 == 0) 7313 return 0; 7314 7315 if (TREE_CODE (t1) == INTEGER_CST 7316 && TREE_CODE (t2) == INTEGER_CST 7317 && wi::to_widest (t1) == wi::to_widest (t2)) 7318 return 1; 7319 7320 return 0; 7321 } 7322 7323 /* Return true if T is an INTEGER_CST whose numerical value (extended 7324 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */ 7325 7326 bool 7327 tree_fits_shwi_p (const_tree t) 7328 { 7329 return (t != NULL_TREE 7330 && TREE_CODE (t) == INTEGER_CST 7331 && wi::fits_shwi_p (wi::to_widest (t))); 7332 } 7333 7334 /* Return true if T is an INTEGER_CST whose numerical value (extended 7335 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */ 7336 7337 bool 7338 tree_fits_uhwi_p (const_tree t) 7339 { 7340 return (t != NULL_TREE 7341 && TREE_CODE (t) == INTEGER_CST 7342 && wi::fits_uhwi_p (wi::to_widest (t))); 7343 } 7344 7345 /* T is an INTEGER_CST whose numerical value (extended according to 7346 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that 7347 HOST_WIDE_INT. */ 7348 7349 HOST_WIDE_INT 7350 tree_to_shwi (const_tree t) 7351 { 7352 gcc_assert (tree_fits_shwi_p (t)); 7353 return TREE_INT_CST_LOW (t); 7354 } 7355 7356 /* T is an INTEGER_CST whose numerical value (extended according to 7357 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that 7358 HOST_WIDE_INT. */ 7359 7360 unsigned HOST_WIDE_INT 7361 tree_to_uhwi (const_tree t) 7362 { 7363 gcc_assert (tree_fits_uhwi_p (t)); 7364 return TREE_INT_CST_LOW (t); 7365 } 7366 7367 /* Return the most significant (sign) bit of T. */ 7368 7369 int 7370 tree_int_cst_sign_bit (const_tree t) 7371 { 7372 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1; 7373 7374 return wi::extract_uhwi (t, bitno, 1); 7375 } 7376 7377 /* Return an indication of the sign of the integer constant T. 7378 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0. 7379 Note that -1 will never be returned if T's type is unsigned. */ 7380 7381 int 7382 tree_int_cst_sgn (const_tree t) 7383 { 7384 if (wi::eq_p (t, 0)) 7385 return 0; 7386 else if (TYPE_UNSIGNED (TREE_TYPE (t))) 7387 return 1; 7388 else if (wi::neg_p (t)) 7389 return -1; 7390 else 7391 return 1; 7392 } 7393 7394 /* Return the minimum number of bits needed to represent VALUE in a 7395 signed or unsigned type, UNSIGNEDP says which. */ 7396 7397 unsigned int 7398 tree_int_cst_min_precision (tree value, signop sgn) 7399 { 7400 /* If the value is negative, compute its negative minus 1. The latter 7401 adjustment is because the absolute value of the largest negative value 7402 is one larger than the largest positive value. This is equivalent to 7403 a bit-wise negation, so use that operation instead. */ 7404 7405 if (tree_int_cst_sgn (value) < 0) 7406 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value); 7407 7408 /* Return the number of bits needed, taking into account the fact 7409 that we need one more bit for a signed than unsigned type. 7410 If value is 0 or -1, the minimum precision is 1 no matter 7411 whether unsignedp is true or false. */ 7412 7413 if (integer_zerop (value)) 7414 return 1; 7415 else 7416 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ; 7417 } 7418 7419 /* Return truthvalue of whether T1 is the same tree structure as T2. 7420 Return 1 if they are the same. 7421 Return 0 if they are understandably different. 
7422 Return -1 if either contains tree structure not understood by 7423 this function. */ 7424 7425 int 7426 simple_cst_equal (const_tree t1, const_tree t2) 7427 { 7428 enum tree_code code1, code2; 7429 int cmp; 7430 int i; 7431 7432 if (t1 == t2) 7433 return 1; 7434 if (t1 == 0 || t2 == 0) 7435 return 0; 7436 7437 code1 = TREE_CODE (t1); 7438 code2 = TREE_CODE (t2); 7439 7440 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR) 7441 { 7442 if (CONVERT_EXPR_CODE_P (code2) 7443 || code2 == NON_LVALUE_EXPR) 7444 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7445 else 7446 return simple_cst_equal (TREE_OPERAND (t1, 0), t2); 7447 } 7448 7449 else if (CONVERT_EXPR_CODE_P (code2) 7450 || code2 == NON_LVALUE_EXPR) 7451 return simple_cst_equal (t1, TREE_OPERAND (t2, 0)); 7452 7453 if (code1 != code2) 7454 return 0; 7455 7456 switch (code1) 7457 { 7458 case INTEGER_CST: 7459 return wi::to_widest (t1) == wi::to_widest (t2); 7460 7461 case REAL_CST: 7462 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2)); 7463 7464 case FIXED_CST: 7465 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2)); 7466 7467 case STRING_CST: 7468 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2) 7469 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), 7470 TREE_STRING_LENGTH (t1))); 7471 7472 case CONSTRUCTOR: 7473 { 7474 unsigned HOST_WIDE_INT idx; 7475 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1); 7476 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2); 7477 7478 if (vec_safe_length (v1) != vec_safe_length (v2)) 7479 return false; 7480 7481 for (idx = 0; idx < vec_safe_length (v1); ++idx) 7482 /* ??? Should we handle also fields here? */ 7483 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value)) 7484 return false; 7485 return true; 7486 } 7487 7488 case SAVE_EXPR: 7489 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7490 7491 case CALL_EXPR: 7492 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2)); 7493 if (cmp <= 0) 7494 return cmp; 7495 if (call_expr_nargs (t1) != call_expr_nargs (t2)) 7496 return 0; 7497 { 7498 const_tree arg1, arg2; 7499 const_call_expr_arg_iterator iter1, iter2; 7500 for (arg1 = first_const_call_expr_arg (t1, &iter1), 7501 arg2 = first_const_call_expr_arg (t2, &iter2); 7502 arg1 && arg2; 7503 arg1 = next_const_call_expr_arg (&iter1), 7504 arg2 = next_const_call_expr_arg (&iter2)) 7505 { 7506 cmp = simple_cst_equal (arg1, arg2); 7507 if (cmp <= 0) 7508 return cmp; 7509 } 7510 return arg1 == arg2; 7511 } 7512 7513 case TARGET_EXPR: 7514 /* Special case: if either target is an unallocated VAR_DECL, 7515 it means that it's going to be unified with whatever the 7516 TARGET_EXPR is really supposed to initialize, so treat it 7517 as being equivalent to anything. 
*/ 7518 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL 7519 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE 7520 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0))) 7521 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL 7522 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE 7523 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0)))) 7524 cmp = 1; 7525 else 7526 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7527 7528 if (cmp <= 0) 7529 return cmp; 7530 7531 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); 7532 7533 case WITH_CLEANUP_EXPR: 7534 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7535 if (cmp <= 0) 7536 return cmp; 7537 7538 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1)); 7539 7540 case COMPONENT_REF: 7541 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1)) 7542 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7543 7544 return 0; 7545 7546 case VAR_DECL: 7547 case PARM_DECL: 7548 case CONST_DECL: 7549 case FUNCTION_DECL: 7550 return 0; 7551 7552 default: 7553 break; 7554 } 7555 7556 /* This general rule works for most tree codes. All exceptions should be 7557 handled above. If this is a language-specific tree code, we can't 7558 trust what might be in the operand, so say we don't know 7559 the situation. */ 7560 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE) 7561 return -1; 7562 7563 switch (TREE_CODE_CLASS (code1)) 7564 { 7565 case tcc_unary: 7566 case tcc_binary: 7567 case tcc_comparison: 7568 case tcc_expression: 7569 case tcc_reference: 7570 case tcc_statement: 7571 cmp = 1; 7572 for (i = 0; i < TREE_CODE_LENGTH (code1); i++) 7573 { 7574 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i)); 7575 if (cmp <= 0) 7576 return cmp; 7577 } 7578 7579 return cmp; 7580 7581 default: 7582 return -1; 7583 } 7584 } 7585 7586 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value. 7587 Return -1, 0, or 1 if the value of T is less than, equal to, or greater 7588 than U, respectively. */ 7589 7590 int 7591 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u) 7592 { 7593 if (tree_int_cst_sgn (t) < 0) 7594 return -1; 7595 else if (!tree_fits_uhwi_p (t)) 7596 return 1; 7597 else if (TREE_INT_CST_LOW (t) == u) 7598 return 0; 7599 else if (TREE_INT_CST_LOW (t) < u) 7600 return -1; 7601 else 7602 return 1; 7603 } 7604 7605 /* Return true if SIZE represents a constant size that is in bounds of 7606 what the middle-end and the backend accepts (covering not more than 7607 half of the address-space). */ 7608 7609 bool 7610 valid_constant_size_p (const_tree size) 7611 { 7612 if (! tree_fits_uhwi_p (size) 7613 || TREE_OVERFLOW (size) 7614 || tree_int_cst_sign_bit (size) != 0) 7615 return false; 7616 return true; 7617 } 7618 7619 /* Return the precision of the type, or for a complex or vector type the 7620 precision of the type of its elements. */ 7621 7622 unsigned int 7623 element_precision (const_tree type) 7624 { 7625 if (!TYPE_P (type)) 7626 type = TREE_TYPE (type); 7627 enum tree_code code = TREE_CODE (type); 7628 if (code == COMPLEX_TYPE || code == VECTOR_TYPE) 7629 type = TREE_TYPE (type); 7630 7631 return TYPE_PRECISION (type); 7632 } 7633 7634 /* Return true if CODE represents an associative tree code. Otherwise 7635 return false. 
*/ 7636 bool 7637 associative_tree_code (enum tree_code code) 7638 { 7639 switch (code) 7640 { 7641 case BIT_IOR_EXPR: 7642 case BIT_AND_EXPR: 7643 case BIT_XOR_EXPR: 7644 case PLUS_EXPR: 7645 case MULT_EXPR: 7646 case MIN_EXPR: 7647 case MAX_EXPR: 7648 return true; 7649 7650 default: 7651 break; 7652 } 7653 return false; 7654 } 7655 7656 /* Return true if CODE represents a commutative tree code. Otherwise 7657 return false. */ 7658 bool 7659 commutative_tree_code (enum tree_code code) 7660 { 7661 switch (code) 7662 { 7663 case PLUS_EXPR: 7664 case MULT_EXPR: 7665 case MULT_HIGHPART_EXPR: 7666 case MIN_EXPR: 7667 case MAX_EXPR: 7668 case BIT_IOR_EXPR: 7669 case BIT_XOR_EXPR: 7670 case BIT_AND_EXPR: 7671 case NE_EXPR: 7672 case EQ_EXPR: 7673 case UNORDERED_EXPR: 7674 case ORDERED_EXPR: 7675 case UNEQ_EXPR: 7676 case LTGT_EXPR: 7677 case TRUTH_AND_EXPR: 7678 case TRUTH_XOR_EXPR: 7679 case TRUTH_OR_EXPR: 7680 case WIDEN_MULT_EXPR: 7681 case VEC_WIDEN_MULT_HI_EXPR: 7682 case VEC_WIDEN_MULT_LO_EXPR: 7683 case VEC_WIDEN_MULT_EVEN_EXPR: 7684 case VEC_WIDEN_MULT_ODD_EXPR: 7685 return true; 7686 7687 default: 7688 break; 7689 } 7690 return false; 7691 } 7692 7693 /* Return true if CODE represents a ternary tree code for which the 7694 first two operands are commutative. Otherwise return false. */ 7695 bool 7696 commutative_ternary_tree_code (enum tree_code code) 7697 { 7698 switch (code) 7699 { 7700 case WIDEN_MULT_PLUS_EXPR: 7701 case WIDEN_MULT_MINUS_EXPR: 7702 case DOT_PROD_EXPR: 7703 case FMA_EXPR: 7704 return true; 7705 7706 default: 7707 break; 7708 } 7709 return false; 7710 } 7711 7712 /* Returns true if CODE can overflow. */ 7713 7714 bool 7715 operation_can_overflow (enum tree_code code) 7716 { 7717 switch (code) 7718 { 7719 case PLUS_EXPR: 7720 case MINUS_EXPR: 7721 case MULT_EXPR: 7722 case LSHIFT_EXPR: 7723 /* Can overflow in various ways. */ 7724 return true; 7725 case TRUNC_DIV_EXPR: 7726 case EXACT_DIV_EXPR: 7727 case FLOOR_DIV_EXPR: 7728 case CEIL_DIV_EXPR: 7729 /* For INT_MIN / -1. */ 7730 return true; 7731 case NEGATE_EXPR: 7732 case ABS_EXPR: 7733 /* For -INT_MIN. */ 7734 return true; 7735 default: 7736 /* These operators cannot overflow. */ 7737 return false; 7738 } 7739 } 7740 7741 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or 7742 ftrapv doesn't generate trapping insns for CODE. */ 7743 7744 bool 7745 operation_no_trapping_overflow (tree type, enum tree_code code) 7746 { 7747 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type)); 7748 7749 /* We don't generate instructions that trap on overflow for complex or vector 7750 types. */ 7751 if (!INTEGRAL_TYPE_P (type)) 7752 return true; 7753 7754 if (!TYPE_OVERFLOW_TRAPS (type)) 7755 return true; 7756 7757 switch (code) 7758 { 7759 case PLUS_EXPR: 7760 case MINUS_EXPR: 7761 case MULT_EXPR: 7762 case NEGATE_EXPR: 7763 case ABS_EXPR: 7764 /* These operators can overflow, and -ftrapv generates trapping code for 7765 these. */ 7766 return false; 7767 case TRUNC_DIV_EXPR: 7768 case EXACT_DIV_EXPR: 7769 case FLOOR_DIV_EXPR: 7770 case CEIL_DIV_EXPR: 7771 case LSHIFT_EXPR: 7772 /* These operators can overflow, but -ftrapv does not generate trapping 7773 code for these. */ 7774 return true; 7775 default: 7776 /* These operators cannot overflow. */ 7777 return true; 7778 } 7779 } 7780 7781 namespace inchash 7782 { 7783 7784 /* Generate a hash value for an expression. This can be used iteratively 7785 by passing a previous result as the HSTATE argument. 
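   A rough usage sketch (illustrative only; OP0 and OP1 stand for
   arbitrary operand trees):

     inchash::hash hstate;
     inchash::add_expr (op0, hstate, 0);
     inchash::add_expr (op1, hstate, 0);
     hashval_t val = hstate.end ();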
7786 7787 This function is intended to produce the same hash for expressions which 7788 would compare equal using operand_equal_p. */ 7789 void 7790 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags) 7791 { 7792 int i; 7793 enum tree_code code; 7794 enum tree_code_class tclass; 7795 7796 if (t == NULL_TREE || t == error_mark_node) 7797 { 7798 hstate.merge_hash (0); 7799 return; 7800 } 7801 7802 if (!(flags & OEP_ADDRESS_OF)) 7803 STRIP_NOPS (t); 7804 7805 code = TREE_CODE (t); 7806 7807 switch (code) 7808 { 7809 /* Alas, constants aren't shared, so we can't rely on pointer 7810 identity. */ 7811 case VOID_CST: 7812 hstate.merge_hash (0); 7813 return; 7814 case INTEGER_CST: 7815 gcc_checking_assert (!(flags & OEP_ADDRESS_OF)); 7816 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++) 7817 hstate.add_wide_int (TREE_INT_CST_ELT (t, i)); 7818 return; 7819 case REAL_CST: 7820 { 7821 unsigned int val2; 7822 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t)) 7823 val2 = rvc_zero; 7824 else 7825 val2 = real_hash (TREE_REAL_CST_PTR (t)); 7826 hstate.merge_hash (val2); 7827 return; 7828 } 7829 case FIXED_CST: 7830 { 7831 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t)); 7832 hstate.merge_hash (val2); 7833 return; 7834 } 7835 case STRING_CST: 7836 hstate.add ((const void *) TREE_STRING_POINTER (t), 7837 TREE_STRING_LENGTH (t)); 7838 return; 7839 case COMPLEX_CST: 7840 inchash::add_expr (TREE_REALPART (t), hstate, flags); 7841 inchash::add_expr (TREE_IMAGPART (t), hstate, flags); 7842 return; 7843 case VECTOR_CST: 7844 { 7845 unsigned i; 7846 for (i = 0; i < VECTOR_CST_NELTS (t); ++i) 7847 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags); 7848 return; 7849 } 7850 case SSA_NAME: 7851 /* We can just compare by pointer. */ 7852 hstate.add_wide_int (SSA_NAME_VERSION (t)); 7853 return; 7854 case PLACEHOLDER_EXPR: 7855 /* The node itself doesn't matter. */ 7856 return; 7857 case BLOCK: 7858 case OMP_CLAUSE: 7859 /* Ignore. */ 7860 return; 7861 case TREE_LIST: 7862 /* A list of expressions, for a CALL_EXPR or as the elements of a 7863 VECTOR_CST. */ 7864 for (; t; t = TREE_CHAIN (t)) 7865 inchash::add_expr (TREE_VALUE (t), hstate, flags); 7866 return; 7867 case CONSTRUCTOR: 7868 { 7869 unsigned HOST_WIDE_INT idx; 7870 tree field, value; 7871 flags &= ~OEP_ADDRESS_OF; 7872 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value) 7873 { 7874 inchash::add_expr (field, hstate, flags); 7875 inchash::add_expr (value, hstate, flags); 7876 } 7877 return; 7878 } 7879 case STATEMENT_LIST: 7880 { 7881 tree_stmt_iterator i; 7882 for (i = tsi_start (CONST_CAST_TREE (t)); 7883 !tsi_end_p (i); tsi_next (&i)) 7884 inchash::add_expr (tsi_stmt (i), hstate, flags); 7885 return; 7886 } 7887 case TREE_VEC: 7888 for (i = 0; i < TREE_VEC_LENGTH (t); ++i) 7889 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags); 7890 return; 7891 case IDENTIFIER_NODE: 7892 hstate.add_object (IDENTIFIER_HASH_VALUE (t)); 7893 return; 7894 case FUNCTION_DECL: 7895 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form. 7896 Otherwise nodes that compare equal according to operand_equal_p might 7897 get different hash codes. However, don't do this for machine specific 7898 or front end builtins, since the function code is overloaded in those 7899 cases. 
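	     E.g. a user declaration of 'sin' (recognized as BUILT_IN_SIN)
	     and a use of '__builtin_sin' both end up hashing the same
	     explicit builtin FUNCTION_DECL.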
*/ 7900 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL 7901 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t))) 7902 { 7903 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t)); 7904 code = TREE_CODE (t); 7905 } 7906 /* FALL THROUGH */ 7907 default: 7908 tclass = TREE_CODE_CLASS (code); 7909 7910 if (tclass == tcc_declaration) 7911 { 7912 /* DECL's have a unique ID */ 7913 hstate.add_wide_int (DECL_UID (t)); 7914 } 7915 else if (tclass == tcc_comparison && !commutative_tree_code (code)) 7916 { 7917 /* For comparisons that can be swapped, use the lower 7918 tree code. */ 7919 enum tree_code ccode = swap_tree_comparison (code); 7920 if (code < ccode) 7921 ccode = code; 7922 hstate.add_object (ccode); 7923 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags); 7924 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags); 7925 } 7926 else if (CONVERT_EXPR_CODE_P (code)) 7927 { 7928 /* NOP_EXPR and CONVERT_EXPR are considered equal by 7929 operand_equal_p. */ 7930 enum tree_code ccode = NOP_EXPR; 7931 hstate.add_object (ccode); 7932 7933 /* Don't hash the type, that can lead to having nodes which 7934 compare equal according to operand_equal_p, but which 7935 have different hash codes. Make sure to include signedness 7936 in the hash computation. */ 7937 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t))); 7938 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags); 7939 } 7940 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */ 7941 else if (code == MEM_REF 7942 && (flags & OEP_ADDRESS_OF) != 0 7943 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR 7944 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) 7945 && integer_zerop (TREE_OPERAND (t, 1))) 7946 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 7947 hstate, flags); 7948 /* Don't ICE on FE specific trees, or their arguments etc. 7949 during operand_equal_p hash verification. */ 7950 else if (!IS_EXPR_CODE_CLASS (tclass)) 7951 gcc_assert (flags & OEP_HASH_CHECK); 7952 else 7953 { 7954 unsigned int sflags = flags; 7955 7956 hstate.add_object (code); 7957 7958 switch (code) 7959 { 7960 case ADDR_EXPR: 7961 gcc_checking_assert (!(flags & OEP_ADDRESS_OF)); 7962 flags |= OEP_ADDRESS_OF; 7963 sflags = flags; 7964 break; 7965 7966 case INDIRECT_REF: 7967 case MEM_REF: 7968 case TARGET_MEM_REF: 7969 flags &= ~OEP_ADDRESS_OF; 7970 sflags = flags; 7971 break; 7972 7973 case ARRAY_REF: 7974 case ARRAY_RANGE_REF: 7975 case COMPONENT_REF: 7976 case BIT_FIELD_REF: 7977 sflags &= ~OEP_ADDRESS_OF; 7978 break; 7979 7980 case COND_EXPR: 7981 flags &= ~OEP_ADDRESS_OF; 7982 break; 7983 7984 case FMA_EXPR: 7985 case WIDEN_MULT_PLUS_EXPR: 7986 case WIDEN_MULT_MINUS_EXPR: 7987 { 7988 /* The multiplication operands are commutative. */ 7989 inchash::hash one, two; 7990 inchash::add_expr (TREE_OPERAND (t, 0), one, flags); 7991 inchash::add_expr (TREE_OPERAND (t, 1), two, flags); 7992 hstate.add_commutative (one, two); 7993 inchash::add_expr (TREE_OPERAND (t, 2), two, flags); 7994 return; 7995 } 7996 7997 case CALL_EXPR: 7998 if (CALL_EXPR_FN (t) == NULL_TREE) 7999 hstate.add_int (CALL_EXPR_IFN (t)); 8000 break; 8001 8002 case TARGET_EXPR: 8003 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT. 8004 Usually different TARGET_EXPRs just should use 8005 different temporaries in their slots. 
*/ 8006 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags); 8007 return; 8008 8009 default: 8010 break; 8011 } 8012 8013 /* Don't hash the type, that can lead to having nodes which 8014 compare equal according to operand_equal_p, but which 8015 have different hash codes. */ 8016 if (code == NON_LVALUE_EXPR) 8017 { 8018 /* Make sure to include signness in the hash computation. */ 8019 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t))); 8020 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags); 8021 } 8022 8023 else if (commutative_tree_code (code)) 8024 { 8025 /* It's a commutative expression. We want to hash it the same 8026 however it appears. We do this by first hashing both operands 8027 and then rehashing based on the order of their independent 8028 hashes. */ 8029 inchash::hash one, two; 8030 inchash::add_expr (TREE_OPERAND (t, 0), one, flags); 8031 inchash::add_expr (TREE_OPERAND (t, 1), two, flags); 8032 hstate.add_commutative (one, two); 8033 } 8034 else 8035 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i) 8036 inchash::add_expr (TREE_OPERAND (t, i), hstate, 8037 i == 0 ? flags : sflags); 8038 } 8039 return; 8040 } 8041 } 8042 8043 } 8044 8045 /* Constructors for pointer, array and function types. 8046 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are 8047 constructed by language-dependent code, not here.) */ 8048 8049 /* Construct, lay out and return the type of pointers to TO_TYPE with 8050 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can 8051 reference all of memory. If such a type has already been 8052 constructed, reuse it. */ 8053 8054 tree 8055 build_pointer_type_for_mode (tree to_type, machine_mode mode, 8056 bool can_alias_all) 8057 { 8058 tree t; 8059 bool could_alias = can_alias_all; 8060 8061 if (to_type == error_mark_node) 8062 return error_mark_node; 8063 8064 /* If the pointed-to type has the may_alias attribute set, force 8065 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */ 8066 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type))) 8067 can_alias_all = true; 8068 8069 /* In some cases, languages will have things that aren't a POINTER_TYPE 8070 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO. 8071 In that case, return that type without regard to the rest of our 8072 operands. 8073 8074 ??? This is a kludge, but consistent with the way this function has 8075 always operated and there doesn't seem to be a good way to avoid this 8076 at the moment. */ 8077 if (TYPE_POINTER_TO (to_type) != 0 8078 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE) 8079 return TYPE_POINTER_TO (to_type); 8080 8081 /* First, if we already have a type for pointers to TO_TYPE and it's 8082 the proper mode, use it. */ 8083 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t)) 8084 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all) 8085 return t; 8086 8087 t = make_node (POINTER_TYPE); 8088 8089 TREE_TYPE (t) = to_type; 8090 SET_TYPE_MODE (t, mode); 8091 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all; 8092 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type); 8093 TYPE_POINTER_TO (to_type) = t; 8094 8095 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */ 8096 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p) 8097 SET_TYPE_STRUCTURAL_EQUALITY (t); 8098 else if (TYPE_CANONICAL (to_type) != to_type || could_alias) 8099 TYPE_CANONICAL (t) 8100 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type), 8101 mode, false); 8102 8103 /* Lay out the type. 
This function has many callers that are concerned 8104 with expression-construction, and this simplifies them all. */ 8105 layout_type (t); 8106 8107 return t; 8108 } 8109 8110 /* By default build pointers in ptr_mode. */ 8111 8112 tree 8113 build_pointer_type (tree to_type) 8114 { 8115 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC 8116 : TYPE_ADDR_SPACE (to_type); 8117 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as); 8118 return build_pointer_type_for_mode (to_type, pointer_mode, false); 8119 } 8120 8121 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */ 8122 8123 tree 8124 build_reference_type_for_mode (tree to_type, machine_mode mode, 8125 bool can_alias_all) 8126 { 8127 tree t; 8128 bool could_alias = can_alias_all; 8129 8130 if (to_type == error_mark_node) 8131 return error_mark_node; 8132 8133 /* If the pointed-to type has the may_alias attribute set, force 8134 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */ 8135 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type))) 8136 can_alias_all = true; 8137 8138 /* In some cases, languages will have things that aren't a REFERENCE_TYPE 8139 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO. 8140 In that case, return that type without regard to the rest of our 8141 operands. 8142 8143 ??? This is a kludge, but consistent with the way this function has 8144 always operated and there doesn't seem to be a good way to avoid this 8145 at the moment. */ 8146 if (TYPE_REFERENCE_TO (to_type) != 0 8147 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE) 8148 return TYPE_REFERENCE_TO (to_type); 8149 8150 /* First, if we already have a type for pointers to TO_TYPE and it's 8151 the proper mode, use it. */ 8152 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t)) 8153 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all) 8154 return t; 8155 8156 t = make_node (REFERENCE_TYPE); 8157 8158 TREE_TYPE (t) = to_type; 8159 SET_TYPE_MODE (t, mode); 8160 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all; 8161 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type); 8162 TYPE_REFERENCE_TO (to_type) = t; 8163 8164 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */ 8165 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p) 8166 SET_TYPE_STRUCTURAL_EQUALITY (t); 8167 else if (TYPE_CANONICAL (to_type) != to_type || could_alias) 8168 TYPE_CANONICAL (t) 8169 = build_reference_type_for_mode (TYPE_CANONICAL (to_type), 8170 mode, false); 8171 8172 layout_type (t); 8173 8174 return t; 8175 } 8176 8177 8178 /* Build the node for the type of references-to-TO_TYPE by default 8179 in ptr_mode. */ 8180 8181 tree 8182 build_reference_type (tree to_type) 8183 { 8184 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC 8185 : TYPE_ADDR_SPACE (to_type); 8186 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as); 8187 return build_reference_type_for_mode (to_type, pointer_mode, false); 8188 } 8189 8190 #define MAX_INT_CACHED_PREC \ 8191 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64) 8192 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2]; 8193 8194 /* Builds a signed or unsigned integer type of precision PRECISION. 8195 Used for C bitfields whose precision does not match that of 8196 built-in target types. 
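     For example (an illustrative sketch only):

       tree u24 = build_nonstandard_integer_type (24, 1);

     returns an unsigned INTEGER_TYPE with TYPE_PRECISION 24, suitable for
     a bit-field declared as "unsigned int x : 24" whose width matches no
     built-in integer type; a second call with the same arguments returns
     the cached node rather than allocating a new one.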
*/ 8197 tree 8198 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision, 8199 int unsignedp) 8200 { 8201 tree itype, ret; 8202 8203 if (unsignedp) 8204 unsignedp = MAX_INT_CACHED_PREC + 1; 8205 8206 if (precision <= MAX_INT_CACHED_PREC) 8207 { 8208 itype = nonstandard_integer_type_cache[precision + unsignedp]; 8209 if (itype) 8210 return itype; 8211 } 8212 8213 itype = make_node (INTEGER_TYPE); 8214 TYPE_PRECISION (itype) = precision; 8215 8216 if (unsignedp) 8217 fixup_unsigned_type (itype); 8218 else 8219 fixup_signed_type (itype); 8220 8221 ret = itype; 8222 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype))) 8223 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype); 8224 if (precision <= MAX_INT_CACHED_PREC) 8225 nonstandard_integer_type_cache[precision + unsignedp] = ret; 8226 8227 return ret; 8228 } 8229 8230 #define MAX_BOOL_CACHED_PREC \ 8231 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64) 8232 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1]; 8233 8234 /* Builds a boolean type of precision PRECISION. 8235 Used for boolean vectors to choose proper vector element size. */ 8236 tree 8237 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision) 8238 { 8239 tree type; 8240 8241 if (precision <= MAX_BOOL_CACHED_PREC) 8242 { 8243 type = nonstandard_boolean_type_cache[precision]; 8244 if (type) 8245 return type; 8246 } 8247 8248 type = make_node (BOOLEAN_TYPE); 8249 TYPE_PRECISION (type) = precision; 8250 fixup_signed_type (type); 8251 8252 if (precision <= MAX_INT_CACHED_PREC) 8253 nonstandard_boolean_type_cache[precision] = type; 8254 8255 return type; 8256 } 8257 8258 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE 8259 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED 8260 is true, reuse such a type that has already been constructed. */ 8261 8262 static tree 8263 build_range_type_1 (tree type, tree lowval, tree highval, bool shared) 8264 { 8265 tree itype = make_node (INTEGER_TYPE); 8266 inchash::hash hstate; 8267 8268 TREE_TYPE (itype) = type; 8269 8270 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval); 8271 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL; 8272 8273 TYPE_PRECISION (itype) = TYPE_PRECISION (type); 8274 SET_TYPE_MODE (itype, TYPE_MODE (type)); 8275 TYPE_SIZE (itype) = TYPE_SIZE (type); 8276 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type); 8277 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type)); 8278 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type); 8279 8280 if (!shared) 8281 return itype; 8282 8283 if ((TYPE_MIN_VALUE (itype) 8284 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST) 8285 || (TYPE_MAX_VALUE (itype) 8286 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)) 8287 { 8288 /* Since we cannot reliably merge this type, we need to compare it using 8289 structural equality checks. */ 8290 SET_TYPE_STRUCTURAL_EQUALITY (itype); 8291 return itype; 8292 } 8293 8294 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate); 8295 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate); 8296 hstate.merge_hash (TYPE_HASH (type)); 8297 itype = type_hash_canon (hstate.end (), itype); 8298 8299 return itype; 8300 } 8301 8302 /* Wrapper around build_range_type_1 with SHARED set to true. */ 8303 8304 tree 8305 build_range_type (tree type, tree lowval, tree highval) 8306 { 8307 return build_range_type_1 (type, lowval, highval, true); 8308 } 8309 8310 /* Wrapper around build_range_type_1 with SHARED set to false. 
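     Unlike build_range_type, the result is not entered in the type hash
     table, so every call returns a freshly allocated INTEGER_TYPE node.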
*/ 8311 8312 tree 8313 build_nonshared_range_type (tree type, tree lowval, tree highval) 8314 { 8315 return build_range_type_1 (type, lowval, highval, false); 8316 } 8317 8318 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE. 8319 MAXVAL should be the maximum value in the domain 8320 (one less than the length of the array). 8321 8322 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT. 8323 We don't enforce this limit, that is up to caller (e.g. language front end). 8324 The limit exists because the result is a signed type and we don't handle 8325 sizes that use more than one HOST_WIDE_INT. */ 8326 8327 tree 8328 build_index_type (tree maxval) 8329 { 8330 return build_range_type (sizetype, size_zero_node, maxval); 8331 } 8332 8333 /* Return true if the debug information for TYPE, a subtype, should be emitted 8334 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the 8335 high bound, respectively. Sometimes doing so unnecessarily obfuscates the 8336 debug info and doesn't reflect the source code. */ 8337 8338 bool 8339 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval) 8340 { 8341 tree base_type = TREE_TYPE (type), low, high; 8342 8343 /* Subrange types have a base type which is an integral type. */ 8344 if (!INTEGRAL_TYPE_P (base_type)) 8345 return false; 8346 8347 /* Get the real bounds of the subtype. */ 8348 if (lang_hooks.types.get_subrange_bounds) 8349 lang_hooks.types.get_subrange_bounds (type, &low, &high); 8350 else 8351 { 8352 low = TYPE_MIN_VALUE (type); 8353 high = TYPE_MAX_VALUE (type); 8354 } 8355 8356 /* If the type and its base type have the same representation and the same 8357 name, then the type is not a subrange but a copy of the base type. */ 8358 if ((TREE_CODE (base_type) == INTEGER_TYPE 8359 || TREE_CODE (base_type) == BOOLEAN_TYPE) 8360 && int_size_in_bytes (type) == int_size_in_bytes (base_type) 8361 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type)) 8362 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type)) 8363 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type)) 8364 return false; 8365 8366 if (lowval) 8367 *lowval = low; 8368 if (highval) 8369 *highval = high; 8370 return true; 8371 } 8372 8373 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE 8374 and number of elements specified by the range of values of INDEX_TYPE. 8375 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type. 8376 If SHARED is true, reuse such a type that has already been constructed. */ 8377 8378 static tree 8379 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage, 8380 bool shared) 8381 { 8382 tree t; 8383 8384 if (TREE_CODE (elt_type) == FUNCTION_TYPE) 8385 { 8386 error ("arrays of functions are not meaningful"); 8387 elt_type = integer_type_node; 8388 } 8389 8390 t = make_node (ARRAY_TYPE); 8391 TREE_TYPE (t) = elt_type; 8392 TYPE_DOMAIN (t) = index_type; 8393 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type); 8394 TYPE_TYPELESS_STORAGE (t) = typeless_storage; 8395 layout_type (t); 8396 8397 /* If the element type is incomplete at this point we get marked for 8398 structural equality. Do not record these types in the canonical 8399 type hashtable. 
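     The early return below therefore skips both the hash-table lookup used
     for shared array types and the TYPE_CANONICAL computation.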
*/ 8400 if (TYPE_STRUCTURAL_EQUALITY_P (t)) 8401 return t; 8402 8403 if (shared) 8404 { 8405 inchash::hash hstate; 8406 hstate.add_object (TYPE_HASH (elt_type)); 8407 if (index_type) 8408 hstate.add_object (TYPE_HASH (index_type)); 8409 if (!AGGREGATE_TYPE_P (elt_type)) 8410 hstate.add_flag (TYPE_TYPELESS_STORAGE (t)); 8411 t = type_hash_canon (hstate.end (), t); 8412 } 8413 8414 if (TYPE_CANONICAL (t) == t) 8415 { 8416 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type) 8417 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)) 8418 || in_lto_p) 8419 SET_TYPE_STRUCTURAL_EQUALITY (t); 8420 else if (TYPE_CANONICAL (elt_type) != elt_type 8421 || (index_type && TYPE_CANONICAL (index_type) != index_type)) 8422 TYPE_CANONICAL (t) 8423 = build_array_type_1 (TYPE_CANONICAL (elt_type), 8424 index_type 8425 ? TYPE_CANONICAL (index_type) : NULL_TREE, 8426 typeless_storage, shared); 8427 } 8428 8429 return t; 8430 } 8431 8432 /* Wrapper around build_array_type_1 with SHARED set to true. */ 8433 8434 tree 8435 build_array_type (tree elt_type, tree index_type, bool typeless_storage) 8436 { 8437 return build_array_type_1 (elt_type, index_type, typeless_storage, true); 8438 } 8439 8440 /* Wrapper around build_array_type_1 with SHARED set to false. */ 8441 8442 tree 8443 build_nonshared_array_type (tree elt_type, tree index_type) 8444 { 8445 return build_array_type_1 (elt_type, index_type, false, false); 8446 } 8447 8448 /* Return a representation of ELT_TYPE[NELTS], using indices of type 8449 sizetype. */ 8450 8451 tree 8452 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts) 8453 { 8454 return build_array_type (elt_type, build_index_type (size_int (nelts - 1))); 8455 } 8456 8457 /* Recursively examines the array elements of TYPE, until a non-array 8458 element type is found. */ 8459 8460 tree 8461 strip_array_types (tree type) 8462 { 8463 while (TREE_CODE (type) == ARRAY_TYPE) 8464 type = TREE_TYPE (type); 8465 8466 return type; 8467 } 8468 8469 /* Computes the canonical argument types from the argument type list 8470 ARGTYPES. 8471 8472 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true 8473 on entry to this function, or if any of the ARGTYPES are 8474 structural. 8475 8476 Upon return, *ANY_NONCANONICAL_P will be true iff either it was 8477 true on entry to this function, or if any of the ARGTYPES are 8478 non-canonical. 8479 8480 Returns a canonical argument list, which may be ARGTYPES when the 8481 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is 8482 true) or would not differ from ARGTYPES. */ 8483 8484 static tree 8485 maybe_canonicalize_argtypes (tree argtypes, 8486 bool *any_structural_p, 8487 bool *any_noncanonical_p) 8488 { 8489 tree arg; 8490 bool any_noncanonical_argtypes_p = false; 8491 8492 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg)) 8493 { 8494 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node) 8495 /* Fail gracefully by stating that the type is structural. */ 8496 *any_structural_p = true; 8497 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg))) 8498 *any_structural_p = true; 8499 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg) 8500 || TREE_PURPOSE (arg)) 8501 /* If the argument has a default argument, we consider it 8502 non-canonical even though the type itself is canonical. 8503 That way, different variants of function and method types 8504 with default arguments will all point to the variant with 8505 no defaults as their canonical type. 
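     For instance (an illustrative C++ sketch): given

       int f (int, int);
       int g (int, int = 0);

     the front end records g's default in TREE_PURPOSE of its argument
     list, so that list is treated as non-canonical here and both function
     types end up using the defaults-free argument list for their
     canonical type.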
*/ 8506 any_noncanonical_argtypes_p = true; 8507 } 8508 8509 if (*any_structural_p) 8510 return argtypes; 8511 8512 if (any_noncanonical_argtypes_p) 8513 { 8514 /* Build the canonical list of argument types. */ 8515 tree canon_argtypes = NULL_TREE; 8516 bool is_void = false; 8517 8518 for (arg = argtypes; arg; arg = TREE_CHAIN (arg)) 8519 { 8520 if (arg == void_list_node) 8521 is_void = true; 8522 else 8523 canon_argtypes = tree_cons (NULL_TREE, 8524 TYPE_CANONICAL (TREE_VALUE (arg)), 8525 canon_argtypes); 8526 } 8527 8528 canon_argtypes = nreverse (canon_argtypes); 8529 if (is_void) 8530 canon_argtypes = chainon (canon_argtypes, void_list_node); 8531 8532 /* There is a non-canonical type. */ 8533 *any_noncanonical_p = true; 8534 return canon_argtypes; 8535 } 8536 8537 /* The canonical argument types are the same as ARGTYPES. */ 8538 return argtypes; 8539 } 8540 8541 /* Construct, lay out and return 8542 the type of functions returning type VALUE_TYPE 8543 given arguments of types ARG_TYPES. 8544 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs 8545 are data type nodes for the arguments of the function. 8546 If such a type has already been constructed, reuse it. */ 8547 8548 tree 8549 build_function_type (tree value_type, tree arg_types) 8550 { 8551 tree t; 8552 inchash::hash hstate; 8553 bool any_structural_p, any_noncanonical_p; 8554 tree canon_argtypes; 8555 8556 if (TREE_CODE (value_type) == FUNCTION_TYPE) 8557 { 8558 error ("function return type cannot be function"); 8559 value_type = integer_type_node; 8560 } 8561 8562 /* Make a node of the sort we want. */ 8563 t = make_node (FUNCTION_TYPE); 8564 TREE_TYPE (t) = value_type; 8565 TYPE_ARG_TYPES (t) = arg_types; 8566 8567 /* If we already have such a type, use the old one. */ 8568 hstate.add_object (TYPE_HASH (value_type)); 8569 type_hash_list (arg_types, hstate); 8570 t = type_hash_canon (hstate.end (), t); 8571 8572 /* Set up the canonical type. */ 8573 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type); 8574 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type; 8575 canon_argtypes = maybe_canonicalize_argtypes (arg_types, 8576 &any_structural_p, 8577 &any_noncanonical_p); 8578 if (any_structural_p) 8579 SET_TYPE_STRUCTURAL_EQUALITY (t); 8580 else if (any_noncanonical_p) 8581 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type), 8582 canon_argtypes); 8583 8584 if (!COMPLETE_TYPE_P (t)) 8585 layout_type (t); 8586 return t; 8587 } 8588 8589 /* Build a function type. The RETURN_TYPE is the type returned by the 8590 function. If VAARGS is set, no void_type_node is appended to the 8591 list. ARGP must be always be terminated be a NULL_TREE. */ 8592 8593 static tree 8594 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp) 8595 { 8596 tree t, args, last; 8597 8598 t = va_arg (argp, tree); 8599 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree)) 8600 args = tree_cons (NULL_TREE, t, args); 8601 8602 if (vaargs) 8603 { 8604 last = args; 8605 if (args != NULL_TREE) 8606 args = nreverse (args); 8607 gcc_assert (last != void_list_node); 8608 } 8609 else if (args == NULL_TREE) 8610 args = void_list_node; 8611 else 8612 { 8613 last = args; 8614 args = nreverse (args); 8615 TREE_CHAIN (last) = void_list_node; 8616 } 8617 args = build_function_type (return_type, args); 8618 8619 return args; 8620 } 8621 8622 /* Build a function type. The RETURN_TYPE is the type returned by the 8623 function. If additional arguments are provided, they are 8624 additional argument types. 
The list of argument types must always 8625 be terminated by NULL_TREE. */ 8626 8627 tree 8628 build_function_type_list (tree return_type, ...) 8629 { 8630 tree args; 8631 va_list p; 8632 8633 va_start (p, return_type); 8634 args = build_function_type_list_1 (false, return_type, p); 8635 va_end (p); 8636 return args; 8637 } 8638 8639 /* Build a variable argument function type. The RETURN_TYPE is the 8640 type returned by the function. If additional arguments are provided, 8641 they are additional argument types. The list of argument types must 8642 always be terminated by NULL_TREE. */ 8643 8644 tree 8645 build_varargs_function_type_list (tree return_type, ...) 8646 { 8647 tree args; 8648 va_list p; 8649 8650 va_start (p, return_type); 8651 args = build_function_type_list_1 (true, return_type, p); 8652 va_end (p); 8653 8654 return args; 8655 } 8656 8657 /* Build a function type. RETURN_TYPE is the type returned by the 8658 function; VAARGS indicates whether the function takes varargs. The 8659 function takes N named arguments, the types of which are provided in 8660 ARG_TYPES. */ 8661 8662 static tree 8663 build_function_type_array_1 (bool vaargs, tree return_type, int n, 8664 tree *arg_types) 8665 { 8666 int i; 8667 tree t = vaargs ? NULL_TREE : void_list_node; 8668 8669 for (i = n - 1; i >= 0; i--) 8670 t = tree_cons (NULL_TREE, arg_types[i], t); 8671 8672 return build_function_type (return_type, t); 8673 } 8674 8675 /* Build a function type. RETURN_TYPE is the type returned by the 8676 function. The function takes N named arguments, the types of which 8677 are provided in ARG_TYPES. */ 8678 8679 tree 8680 build_function_type_array (tree return_type, int n, tree *arg_types) 8681 { 8682 return build_function_type_array_1 (false, return_type, n, arg_types); 8683 } 8684 8685 /* Build a variable argument function type. RETURN_TYPE is the type 8686 returned by the function. The function takes N named arguments, the 8687 types of which are provided in ARG_TYPES. */ 8688 8689 tree 8690 build_varargs_function_type_array (tree return_type, int n, tree *arg_types) 8691 { 8692 return build_function_type_array_1 (true, return_type, n, arg_types); 8693 } 8694 8695 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE) 8696 and ARGTYPES (a TREE_LIST) are the return type and arguments types 8697 for the method. An implicit additional parameter (of type 8698 pointer-to-BASETYPE) is added to the ARGTYPES. */ 8699 8700 tree 8701 build_method_type_directly (tree basetype, 8702 tree rettype, 8703 tree argtypes) 8704 { 8705 tree t; 8706 tree ptype; 8707 inchash::hash hstate; 8708 bool any_structural_p, any_noncanonical_p; 8709 tree canon_argtypes; 8710 8711 /* Make a node of the sort we want. */ 8712 t = make_node (METHOD_TYPE); 8713 8714 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype); 8715 TREE_TYPE (t) = rettype; 8716 ptype = build_pointer_type (basetype); 8717 8718 /* The actual arglist for this function includes a "hidden" argument 8719 which is "this". Put it into the list of argument types. */ 8720 argtypes = tree_cons (NULL_TREE, ptype, argtypes); 8721 TYPE_ARG_TYPES (t) = argtypes; 8722 8723 /* If we already have such a type, use the old one. */ 8724 hstate.add_object (TYPE_HASH (basetype)); 8725 hstate.add_object (TYPE_HASH (rettype)); 8726 type_hash_list (argtypes, hstate); 8727 t = type_hash_canon (hstate.end (), t); 8728 8729 /* Set up the canonical type. 
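     (As in build_function_type above: structural equality in the base,
     return or argument types propagates to T, and otherwise TYPE_CANONICAL
     is built from their canonical counterparts.)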
*/ 8730 any_structural_p 8731 = (TYPE_STRUCTURAL_EQUALITY_P (basetype) 8732 || TYPE_STRUCTURAL_EQUALITY_P (rettype)); 8733 any_noncanonical_p 8734 = (TYPE_CANONICAL (basetype) != basetype 8735 || TYPE_CANONICAL (rettype) != rettype); 8736 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes), 8737 &any_structural_p, 8738 &any_noncanonical_p); 8739 if (any_structural_p) 8740 SET_TYPE_STRUCTURAL_EQUALITY (t); 8741 else if (any_noncanonical_p) 8742 TYPE_CANONICAL (t) 8743 = build_method_type_directly (TYPE_CANONICAL (basetype), 8744 TYPE_CANONICAL (rettype), 8745 canon_argtypes); 8746 if (!COMPLETE_TYPE_P (t)) 8747 layout_type (t); 8748 8749 return t; 8750 } 8751 8752 /* Construct, lay out and return the type of methods belonging to class 8753 BASETYPE and whose arguments and values are described by TYPE. 8754 If that type exists already, reuse it. 8755 TYPE must be a FUNCTION_TYPE node. */ 8756 8757 tree 8758 build_method_type (tree basetype, tree type) 8759 { 8760 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE); 8761 8762 return build_method_type_directly (basetype, 8763 TREE_TYPE (type), 8764 TYPE_ARG_TYPES (type)); 8765 } 8766 8767 /* Construct, lay out and return the type of offsets to a value 8768 of type TYPE, within an object of type BASETYPE. 8769 If a suitable offset type exists already, reuse it. */ 8770 8771 tree 8772 build_offset_type (tree basetype, tree type) 8773 { 8774 tree t; 8775 inchash::hash hstate; 8776 8777 /* Make a node of the sort we want. */ 8778 t = make_node (OFFSET_TYPE); 8779 8780 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype); 8781 TREE_TYPE (t) = type; 8782 8783 /* If we already have such a type, use the old one. */ 8784 hstate.add_object (TYPE_HASH (basetype)); 8785 hstate.add_object (TYPE_HASH (type)); 8786 t = type_hash_canon (hstate.end (), t); 8787 8788 if (!COMPLETE_TYPE_P (t)) 8789 layout_type (t); 8790 8791 if (TYPE_CANONICAL (t) == t) 8792 { 8793 if (TYPE_STRUCTURAL_EQUALITY_P (basetype) 8794 || TYPE_STRUCTURAL_EQUALITY_P (type)) 8795 SET_TYPE_STRUCTURAL_EQUALITY (t); 8796 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype 8797 || TYPE_CANONICAL (type) != type) 8798 TYPE_CANONICAL (t) 8799 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)), 8800 TYPE_CANONICAL (type)); 8801 } 8802 8803 return t; 8804 } 8805 8806 /* Create a complex type whose components are COMPONENT_TYPE. 8807 8808 If NAMED is true, the type is given a TYPE_NAME. We do not always 8809 do so because this creates a DECL node and thus make the DECL_UIDs 8810 dependent on the type canonicalization hashtable, which is GC-ed, 8811 so the DECL_UIDs would not be stable wrt garbage collection. */ 8812 8813 tree 8814 build_complex_type (tree component_type, bool named) 8815 { 8816 tree t; 8817 inchash::hash hstate; 8818 8819 gcc_assert (INTEGRAL_TYPE_P (component_type) 8820 || SCALAR_FLOAT_TYPE_P (component_type) 8821 || FIXED_POINT_TYPE_P (component_type)); 8822 8823 /* Make a node of the sort we want. */ 8824 t = make_node (COMPLEX_TYPE); 8825 8826 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type); 8827 8828 /* If we already have such a type, use the old one. 
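     For example (illustrative): once complex_double_type_node exists, a
     later build_complex_type (double_type_node, false) hashes to the same
     slot and returns that existing node instead of creating another one.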
*/ 8829 hstate.add_object (TYPE_HASH (component_type)); 8830 t = type_hash_canon (hstate.end (), t); 8831 8832 if (!COMPLETE_TYPE_P (t)) 8833 layout_type (t); 8834 8835 if (TYPE_CANONICAL (t) == t) 8836 { 8837 if (TYPE_STRUCTURAL_EQUALITY_P (component_type)) 8838 SET_TYPE_STRUCTURAL_EQUALITY (t); 8839 else if (TYPE_CANONICAL (component_type) != component_type) 8840 TYPE_CANONICAL (t) 8841 = build_complex_type (TYPE_CANONICAL (component_type), named); 8842 } 8843 8844 /* We need to create a name, since complex is a fundamental type. */ 8845 if (!TYPE_NAME (t) && named) 8846 { 8847 const char *name; 8848 if (component_type == char_type_node) 8849 name = "complex char"; 8850 else if (component_type == signed_char_type_node) 8851 name = "complex signed char"; 8852 else if (component_type == unsigned_char_type_node) 8853 name = "complex unsigned char"; 8854 else if (component_type == short_integer_type_node) 8855 name = "complex short int"; 8856 else if (component_type == short_unsigned_type_node) 8857 name = "complex short unsigned int"; 8858 else if (component_type == integer_type_node) 8859 name = "complex int"; 8860 else if (component_type == unsigned_type_node) 8861 name = "complex unsigned int"; 8862 else if (component_type == long_integer_type_node) 8863 name = "complex long int"; 8864 else if (component_type == long_unsigned_type_node) 8865 name = "complex long unsigned int"; 8866 else if (component_type == long_long_integer_type_node) 8867 name = "complex long long int"; 8868 else if (component_type == long_long_unsigned_type_node) 8869 name = "complex long long unsigned int"; 8870 else 8871 name = 0; 8872 8873 if (name != 0) 8874 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL, 8875 get_identifier (name), t); 8876 } 8877 8878 return build_qualified_type (t, TYPE_QUALS (component_type)); 8879 } 8880 8881 /* If TYPE is a real or complex floating-point type and the target 8882 does not directly support arithmetic on TYPE then return the wider 8883 type to be used for arithmetic on TYPE. Otherwise, return 8884 NULL_TREE. */ 8885 8886 tree 8887 excess_precision_type (tree type) 8888 { 8889 /* The target can give two different responses to the question of 8890 which excess precision mode it would like depending on whether we 8891 are in -fexcess-precision=standard or -fexcess-precision=fast. */ 8892 8893 enum excess_precision_type requested_type 8894 = (flag_excess_precision == EXCESS_PRECISION_FAST 8895 ? EXCESS_PRECISION_TYPE_FAST 8896 : EXCESS_PRECISION_TYPE_STANDARD); 8897 8898 enum flt_eval_method target_flt_eval_method 8899 = targetm.c.excess_precision (requested_type); 8900 8901 /* The target should not ask for unpredictable float evaluation (though 8902 it might advertise that implicitly the evaluation is unpredictable, 8903 but we don't care about that here, it will have been reported 8904 elsewhere). If it does ask for unpredictable evaluation, we have 8905 nothing to do here. */ 8906 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE); 8907 8908 /* Nothing to do. The target has asked for all types we know about 8909 to be computed with their native precision and range. */ 8910 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16) 8911 return NULL_TREE; 8912 8913 /* The target will promote this type in a target-dependent way, so excess 8914 precision ought to leave it alone. */ 8915 if (targetm.promoted_type (type) != NULL_TREE) 8916 return NULL_TREE; 8917 8918 machine_mode float16_type_mode = (float16_type_node 8919 ? 
TYPE_MODE (float16_type_node) 8920 : VOIDmode); 8921 machine_mode float_type_mode = TYPE_MODE (float_type_node); 8922 machine_mode double_type_mode = TYPE_MODE (double_type_node); 8923 8924 switch (TREE_CODE (type)) 8925 { 8926 case REAL_TYPE: 8927 { 8928 machine_mode type_mode = TYPE_MODE (type); 8929 switch (target_flt_eval_method) 8930 { 8931 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT: 8932 if (type_mode == float16_type_mode) 8933 return float_type_node; 8934 break; 8935 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE: 8936 if (type_mode == float16_type_mode 8937 || type_mode == float_type_mode) 8938 return double_type_node; 8939 break; 8940 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE: 8941 if (type_mode == float16_type_mode 8942 || type_mode == float_type_mode 8943 || type_mode == double_type_mode) 8944 return long_double_type_node; 8945 break; 8946 default: 8947 gcc_unreachable (); 8948 } 8949 break; 8950 } 8951 case COMPLEX_TYPE: 8952 { 8953 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE) 8954 return NULL_TREE; 8955 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type)); 8956 switch (target_flt_eval_method) 8957 { 8958 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT: 8959 if (type_mode == float16_type_mode) 8960 return complex_float_type_node; 8961 break; 8962 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE: 8963 if (type_mode == float16_type_mode 8964 || type_mode == float_type_mode) 8965 return complex_double_type_node; 8966 break; 8967 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE: 8968 if (type_mode == float16_type_mode 8969 || type_mode == float_type_mode 8970 || type_mode == double_type_mode) 8971 return complex_long_double_type_node; 8972 break; 8973 default: 8974 gcc_unreachable (); 8975 } 8976 break; 8977 } 8978 default: 8979 break; 8980 } 8981 8982 return NULL_TREE; 8983 } 8984 8985 /* Return OP, stripped of any conversions to wider types as much as is safe. 8986 Converting the value back to OP's type makes a value equivalent to OP. 8987 8988 If FOR_TYPE is nonzero, we return a value which, if converted to 8989 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE. 8990 8991 OP must have integer, real or enumeral type. Pointers are not allowed! 8992 8993 There are some cases where the obvious value we could return 8994 would regenerate to OP if converted to OP's type, 8995 but would not extend like OP to wider types. 8996 If FOR_TYPE indicates such extension is contemplated, we eschew such values. 8997 For example, if OP is (unsigned short)(signed char)-1, 8998 we avoid returning (signed char)-1 if FOR_TYPE is int, 8999 even though extending that to an unsigned short would regenerate OP, 9000 since the result of extending (signed char)-1 to (int) 9001 is different from (int) OP. */ 9002 9003 tree 9004 get_unwidened (tree op, tree for_type) 9005 { 9006 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */ 9007 tree type = TREE_TYPE (op); 9008 unsigned final_prec 9009 = TYPE_PRECISION (for_type != 0 ? for_type : type); 9010 int uns 9011 = (for_type != 0 && for_type != type 9012 && final_prec > TYPE_PRECISION (type) 9013 && TYPE_UNSIGNED (type)); 9014 tree win = op; 9015 9016 while (CONVERT_EXPR_P (op)) 9017 { 9018 int bitschange; 9019 9020 /* TYPE_PRECISION on vector types has different meaning 9021 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions, 9022 so avoid them here. 
*/ 9023 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE) 9024 break; 9025 9026 bitschange = TYPE_PRECISION (TREE_TYPE (op)) 9027 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))); 9028 9029 /* Truncations are many-one so cannot be removed. 9030 Unless we are later going to truncate down even farther. */ 9031 if (bitschange < 0 9032 && final_prec > TYPE_PRECISION (TREE_TYPE (op))) 9033 break; 9034 9035 /* See what's inside this conversion. If we decide to strip it, 9036 we will set WIN. */ 9037 op = TREE_OPERAND (op, 0); 9038 9039 /* If we have not stripped any zero-extensions (uns is 0), 9040 we can strip any kind of extension. 9041 If we have previously stripped a zero-extension, 9042 only zero-extensions can safely be stripped. 9043 Any extension can be stripped if the bits it would produce 9044 are all going to be discarded later by truncating to FOR_TYPE. */ 9045 9046 if (bitschange > 0) 9047 { 9048 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op))) 9049 win = op; 9050 /* TYPE_UNSIGNED says whether this is a zero-extension. 9051 Let's avoid computing it if it does not affect WIN 9052 and if UNS will not be needed again. */ 9053 if ((uns 9054 || CONVERT_EXPR_P (op)) 9055 && TYPE_UNSIGNED (TREE_TYPE (op))) 9056 { 9057 uns = 1; 9058 win = op; 9059 } 9060 } 9061 } 9062 9063 /* If we finally reach a constant see if it fits in sth smaller and 9064 in that case convert it. */ 9065 if (TREE_CODE (win) == INTEGER_CST) 9066 { 9067 tree wtype = TREE_TYPE (win); 9068 unsigned prec = wi::min_precision (win, TYPE_SIGN (wtype)); 9069 if (for_type) 9070 prec = MAX (prec, final_prec); 9071 if (prec < TYPE_PRECISION (wtype)) 9072 { 9073 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype)); 9074 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype)) 9075 win = fold_convert (t, win); 9076 } 9077 } 9078 9079 return win; 9080 } 9081 9082 /* Return OP or a simpler expression for a narrower value 9083 which can be sign-extended or zero-extended to give back OP. 9084 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended 9085 or 0 if the value should be sign-extended. */ 9086 9087 tree 9088 get_narrower (tree op, int *unsignedp_ptr) 9089 { 9090 int uns = 0; 9091 int first = 1; 9092 tree win = op; 9093 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op)); 9094 9095 while (TREE_CODE (op) == NOP_EXPR) 9096 { 9097 int bitschange 9098 = (TYPE_PRECISION (TREE_TYPE (op)) 9099 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)))); 9100 9101 /* Truncations are many-one so cannot be removed. */ 9102 if (bitschange < 0) 9103 break; 9104 9105 /* See what's inside this conversion. If we decide to strip it, 9106 we will set WIN. */ 9107 9108 if (bitschange > 0) 9109 { 9110 op = TREE_OPERAND (op, 0); 9111 /* An extension: the outermost one can be stripped, 9112 but remember whether it is zero or sign extension. */ 9113 if (first) 9114 uns = TYPE_UNSIGNED (TREE_TYPE (op)); 9115 /* Otherwise, if a sign extension has been stripped, 9116 only sign extensions can now be stripped; 9117 if a zero extension has been stripped, only zero-extensions. */ 9118 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op))) 9119 break; 9120 first = 0; 9121 } 9122 else /* bitschange == 0 */ 9123 { 9124 /* A change in nominal type can always be stripped, but we must 9125 preserve the unsignedness. 
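     For example (illustrative): if OP is (unsigned int) i with i of type
     int, the conversion changes only the nominal type; it is stripped, WIN
     becomes i, and UNS records 1 so the caller knows the stripped value
     was being treated as unsigned.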
*/ 9126 if (first) 9127 uns = TYPE_UNSIGNED (TREE_TYPE (op)); 9128 first = 0; 9129 op = TREE_OPERAND (op, 0); 9130 /* Keep trying to narrow, but don't assign op to win if it 9131 would turn an integral type into something else. */ 9132 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p) 9133 continue; 9134 } 9135 9136 win = op; 9137 } 9138 9139 if (TREE_CODE (op) == COMPONENT_REF 9140 /* Since type_for_size always gives an integer type. */ 9141 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE 9142 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE 9143 /* Ensure field is laid out already. */ 9144 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0 9145 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1)))) 9146 { 9147 unsigned HOST_WIDE_INT innerprec 9148 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1))); 9149 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1)) 9150 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1)))); 9151 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp); 9152 9153 /* We can get this structure field in a narrower type that fits it, 9154 but the resulting extension to its nominal type (a fullword type) 9155 must satisfy the same conditions as for other extensions. 9156 9157 Do this only for fields that are aligned (not bit-fields), 9158 because when bit-field insns will be used there is no 9159 advantage in doing this. */ 9160 9161 if (innerprec < TYPE_PRECISION (TREE_TYPE (op)) 9162 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1)) 9163 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1))) 9164 && type != 0) 9165 { 9166 if (first) 9167 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1)); 9168 win = fold_convert (type, op); 9169 } 9170 } 9171 9172 *unsignedp_ptr = uns; 9173 return win; 9174 } 9175 9176 /* Return true if integer constant C has a value that is permissible 9177 for TYPE, an integral type. */ 9178 9179 bool 9180 int_fits_type_p (const_tree c, const_tree type) 9181 { 9182 tree type_low_bound, type_high_bound; 9183 bool ok_for_low_bound, ok_for_high_bound; 9184 signop sgn_c = TYPE_SIGN (TREE_TYPE (c)); 9185 9186 /* Non-standard boolean types can have arbitrary precision but various 9187 transformations assume that they can only take values 0 and +/-1. */ 9188 if (TREE_CODE (type) == BOOLEAN_TYPE) 9189 return wi::fits_to_boolean_p (c, type); 9190 9191 retry: 9192 type_low_bound = TYPE_MIN_VALUE (type); 9193 type_high_bound = TYPE_MAX_VALUE (type); 9194 9195 /* If at least one bound of the type is a constant integer, we can check 9196 ourselves and maybe make a decision. If no such decision is possible, but 9197 this type is a subtype, try checking against that. Otherwise, use 9198 fits_to_tree_p, which checks against the precision. 9199 9200 Compute the status for each possibly constant bound, and return if we see 9201 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1 9202 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1 9203 for "constant known to fit". */ 9204 9205 /* Check if c >= type_low_bound. */ 9206 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST) 9207 { 9208 if (tree_int_cst_lt (c, type_low_bound)) 9209 return false; 9210 ok_for_low_bound = true; 9211 } 9212 else 9213 ok_for_low_bound = false; 9214 9215 /* Check if c <= type_high_bound. 
*/ 9216 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST) 9217 { 9218 if (tree_int_cst_lt (type_high_bound, c)) 9219 return false; 9220 ok_for_high_bound = true; 9221 } 9222 else 9223 ok_for_high_bound = false; 9224 9225 /* If the constant fits both bounds, the result is known. */ 9226 if (ok_for_low_bound && ok_for_high_bound) 9227 return true; 9228 9229 /* Perform some generic filtering which may allow making a decision 9230 even if the bounds are not constant. First, negative integers 9231 never fit in unsigned types, */ 9232 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c)) 9233 return false; 9234 9235 /* Second, narrower types always fit in wider ones. */ 9236 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c))) 9237 return true; 9238 9239 /* Third, unsigned integers with top bit set never fit signed types. */ 9240 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED) 9241 { 9242 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1; 9243 if (prec < TYPE_PRECISION (TREE_TYPE (c))) 9244 { 9245 /* When a tree_cst is converted to a wide-int, the precision 9246 is taken from the type. However, if the precision of the 9247 mode underneath the type is smaller than that, it is 9248 possible that the value will not fit. The test below 9249 fails if any bit is set between the sign bit of the 9250 underlying mode and the top bit of the type. */ 9251 if (wi::ne_p (wi::zext (c, prec - 1), c)) 9252 return false; 9253 } 9254 else if (wi::neg_p (c)) 9255 return false; 9256 } 9257 9258 /* If we haven't been able to decide at this point, there nothing more we 9259 can check ourselves here. Look at the base type if we have one and it 9260 has the same precision. */ 9261 if (TREE_CODE (type) == INTEGER_TYPE 9262 && TREE_TYPE (type) != 0 9263 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type))) 9264 { 9265 type = TREE_TYPE (type); 9266 goto retry; 9267 } 9268 9269 /* Or to fits_to_tree_p, if nothing else. */ 9270 return wi::fits_to_tree_p (c, type); 9271 } 9272 9273 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant 9274 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be 9275 represented (assuming two's-complement arithmetic) within the bit 9276 precision of the type are returned instead. */ 9277 9278 void 9279 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max) 9280 { 9281 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type) 9282 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST) 9283 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type)); 9284 else 9285 { 9286 if (TYPE_UNSIGNED (type)) 9287 mpz_set_ui (min, 0); 9288 else 9289 { 9290 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED); 9291 wi::to_mpz (mn, min, SIGNED); 9292 } 9293 } 9294 9295 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type) 9296 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST) 9297 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type)); 9298 else 9299 { 9300 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type)); 9301 wi::to_mpz (mn, max, TYPE_SIGN (type)); 9302 } 9303 } 9304 9305 /* Return true if VAR is an automatic variable defined in function FN. */ 9306 9307 bool 9308 auto_var_in_fn_p (const_tree var, const_tree fn) 9309 { 9310 return (DECL_P (var) && DECL_CONTEXT (var) == fn 9311 && ((((VAR_P (var) && ! DECL_EXTERNAL (var)) 9312 || TREE_CODE (var) == PARM_DECL) 9313 && ! 
TREE_STATIC (var)) 9314 || TREE_CODE (var) == LABEL_DECL 9315 || TREE_CODE (var) == RESULT_DECL)); 9316 } 9317 9318 /* Subprogram of following function. Called by walk_tree. 9319 9320 Return *TP if it is an automatic variable or parameter of the 9321 function passed in as DATA. */ 9322 9323 static tree 9324 find_var_from_fn (tree *tp, int *walk_subtrees, void *data) 9325 { 9326 tree fn = (tree) data; 9327 9328 if (TYPE_P (*tp)) 9329 *walk_subtrees = 0; 9330 9331 else if (DECL_P (*tp) 9332 && auto_var_in_fn_p (*tp, fn)) 9333 return *tp; 9334 9335 return NULL_TREE; 9336 } 9337 9338 /* Returns true if T is, contains, or refers to a type with variable 9339 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the 9340 arguments, but not the return type. If FN is nonzero, only return 9341 true if a modifier of the type or position of FN is a variable or 9342 parameter inside FN. 9343 9344 This concept is more general than that of C99 'variably modified types': 9345 in C99, a struct type is never variably modified because a VLA may not 9346 appear as a structure member. However, in GNU C code like: 9347 9348 struct S { int i[f()]; }; 9349 9350 is valid, and other languages may define similar constructs. */ 9351 9352 bool 9353 variably_modified_type_p (tree type, tree fn) 9354 { 9355 tree t; 9356 9357 /* Test if T is either variable (if FN is zero) or an expression containing 9358 a variable in FN. If TYPE isn't gimplified, return true also if 9359 gimplify_one_sizepos would gimplify the expression into a local 9360 variable. */ 9361 #define RETURN_TRUE_IF_VAR(T) \ 9362 do { tree _t = (T); \ 9363 if (_t != NULL_TREE \ 9364 && _t != error_mark_node \ 9365 && TREE_CODE (_t) != INTEGER_CST \ 9366 && TREE_CODE (_t) != PLACEHOLDER_EXPR \ 9367 && (!fn \ 9368 || (!TYPE_SIZES_GIMPLIFIED (type) \ 9369 && !is_gimple_sizepos (_t)) \ 9370 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \ 9371 return true; } while (0) 9372 9373 if (type == error_mark_node) 9374 return false; 9375 9376 /* If TYPE itself has variable size, it is variably modified. */ 9377 RETURN_TRUE_IF_VAR (TYPE_SIZE (type)); 9378 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type)); 9379 9380 switch (TREE_CODE (type)) 9381 { 9382 case POINTER_TYPE: 9383 case REFERENCE_TYPE: 9384 case VECTOR_TYPE: 9385 if (variably_modified_type_p (TREE_TYPE (type), fn)) 9386 return true; 9387 break; 9388 9389 case FUNCTION_TYPE: 9390 case METHOD_TYPE: 9391 /* If TYPE is a function type, it is variably modified if the 9392 return type is variably modified. */ 9393 if (variably_modified_type_p (TREE_TYPE (type), fn)) 9394 return true; 9395 break; 9396 9397 case INTEGER_TYPE: 9398 case REAL_TYPE: 9399 case FIXED_POINT_TYPE: 9400 case ENUMERAL_TYPE: 9401 case BOOLEAN_TYPE: 9402 /* Scalar types are variably modified if their end points 9403 aren't constant. */ 9404 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type)); 9405 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type)); 9406 break; 9407 9408 case RECORD_TYPE: 9409 case UNION_TYPE: 9410 case QUAL_UNION_TYPE: 9411 /* We can't see if any of the fields are variably-modified by the 9412 definition we normally use, since that would produce infinite 9413 recursion via pointers. */ 9414 /* This is variably modified if some field's type is. 
*/ 9415 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t)) 9416 if (TREE_CODE (t) == FIELD_DECL) 9417 { 9418 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t)); 9419 RETURN_TRUE_IF_VAR (DECL_SIZE (t)); 9420 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t)); 9421 9422 if (TREE_CODE (type) == QUAL_UNION_TYPE) 9423 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t)); 9424 } 9425 break; 9426 9427 case ARRAY_TYPE: 9428 /* Do not call ourselves to avoid infinite recursion. This is 9429 variably modified if the element type is. */ 9430 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type))); 9431 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type))); 9432 break; 9433 9434 default: 9435 break; 9436 } 9437 9438 /* The current language may have other cases to check, but in general, 9439 all other types are not variably modified. */ 9440 return lang_hooks.tree_inlining.var_mod_type_p (type, fn); 9441 9442 #undef RETURN_TRUE_IF_VAR 9443 } 9444 9445 /* Given a DECL or TYPE, return the scope in which it was declared, or 9446 NULL_TREE if there is no containing scope. */ 9447 9448 tree 9449 get_containing_scope (const_tree t) 9450 { 9451 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t)); 9452 } 9453 9454 /* Return the innermost context enclosing DECL that is 9455 a FUNCTION_DECL, or zero if none. */ 9456 9457 tree 9458 decl_function_context (const_tree decl) 9459 { 9460 tree context; 9461 9462 if (TREE_CODE (decl) == ERROR_MARK) 9463 return 0; 9464 9465 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable 9466 where we look up the function at runtime. Such functions always take 9467 a first argument of type 'pointer to real context'. 9468 9469 C++ should really be fixed to use DECL_CONTEXT for the real context, 9470 and use something else for the "virtual context". */ 9471 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl)) 9472 context 9473 = TYPE_MAIN_VARIANT 9474 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))))); 9475 else 9476 context = DECL_CONTEXT (decl); 9477 9478 while (context && TREE_CODE (context) != FUNCTION_DECL) 9479 { 9480 if (TREE_CODE (context) == BLOCK) 9481 context = BLOCK_SUPERCONTEXT (context); 9482 else 9483 context = get_containing_scope (context); 9484 } 9485 9486 return context; 9487 } 9488 9489 /* Return the innermost context enclosing DECL that is 9490 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none. 9491 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */ 9492 9493 tree 9494 decl_type_context (const_tree decl) 9495 { 9496 tree context = DECL_CONTEXT (decl); 9497 9498 while (context) 9499 switch (TREE_CODE (context)) 9500 { 9501 case NAMESPACE_DECL: 9502 case TRANSLATION_UNIT_DECL: 9503 return NULL_TREE; 9504 9505 case RECORD_TYPE: 9506 case UNION_TYPE: 9507 case QUAL_UNION_TYPE: 9508 return context; 9509 9510 case TYPE_DECL: 9511 case FUNCTION_DECL: 9512 context = DECL_CONTEXT (context); 9513 break; 9514 9515 case BLOCK: 9516 context = BLOCK_SUPERCONTEXT (context); 9517 break; 9518 9519 default: 9520 gcc_unreachable (); 9521 } 9522 9523 return NULL_TREE; 9524 } 9525 9526 /* CALL is a CALL_EXPR. Return the declaration for the function 9527 called, or NULL_TREE if the called function cannot be 9528 determined. */ 9529 9530 tree 9531 get_callee_fndecl (const_tree call) 9532 { 9533 tree addr; 9534 9535 if (call == error_mark_node) 9536 return error_mark_node; 9537 9538 /* It's invalid to call this function with anything but a 9539 CALL_EXPR. 
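     For example (illustrative): for a direct call whose CALL_EXPR_FN is an
     ADDR_EXPR of the FUNCTION_DECL for foo, that decl is returned; for an
     indirect call through an arbitrary function pointer the callee cannot
     be determined and NULL_TREE is returned.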
*/ 9540 gcc_assert (TREE_CODE (call) == CALL_EXPR); 9541 9542 /* The first operand to the CALL is the address of the function 9543 called. */ 9544 addr = CALL_EXPR_FN (call); 9545 9546 /* If there is no function, return early. */ 9547 if (addr == NULL_TREE) 9548 return NULL_TREE; 9549 9550 STRIP_NOPS (addr); 9551 9552 /* If this is a readonly function pointer, extract its initial value. */ 9553 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL 9554 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr) 9555 && DECL_INITIAL (addr)) 9556 addr = DECL_INITIAL (addr); 9557 9558 /* If the address is just `&f' for some function `f', then we know 9559 that `f' is being called. */ 9560 if (TREE_CODE (addr) == ADDR_EXPR 9561 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL) 9562 return TREE_OPERAND (addr, 0); 9563 9564 /* We couldn't figure out what was being called. */ 9565 return NULL_TREE; 9566 } 9567 9568 /* If CALL_EXPR CALL calls a normal built-in function or an internal function, 9569 return the associated function code, otherwise return CFN_LAST. */ 9570 9571 combined_fn 9572 get_call_combined_fn (const_tree call) 9573 { 9574 /* It's invalid to call this function with anything but a CALL_EXPR. */ 9575 gcc_assert (TREE_CODE (call) == CALL_EXPR); 9576 9577 if (!CALL_EXPR_FN (call)) 9578 return as_combined_fn (CALL_EXPR_IFN (call)); 9579 9580 tree fndecl = get_callee_fndecl (call); 9581 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 9582 return as_combined_fn (DECL_FUNCTION_CODE (fndecl)); 9583 9584 return CFN_LAST; 9585 } 9586 9587 #define TREE_MEM_USAGE_SPACES 40 9588 9589 /* Print debugging information about tree nodes generated during the compile, 9590 and any language-specific information. */ 9591 9592 void 9593 dump_tree_statistics (void) 9594 { 9595 if (GATHER_STATISTICS) 9596 { 9597 int i; 9598 int total_nodes, total_bytes; 9599 fprintf (stderr, "\nKind Nodes Bytes\n"); 9600 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9601 total_nodes = total_bytes = 0; 9602 for (i = 0; i < (int) all_kinds; i++) 9603 { 9604 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i], 9605 tree_node_counts[i], tree_node_sizes[i]); 9606 total_nodes += tree_node_counts[i]; 9607 total_bytes += tree_node_sizes[i]; 9608 } 9609 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9610 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes); 9611 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9612 fprintf (stderr, "Code Nodes\n"); 9613 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9614 for (i = 0; i < (int) MAX_TREE_CODES; i++) 9615 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i), 9616 tree_code_counts[i]); 9617 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES); 9618 fprintf (stderr, "\n"); 9619 ssanames_print_statistics (); 9620 fprintf (stderr, "\n"); 9621 phinodes_print_statistics (); 9622 fprintf (stderr, "\n"); 9623 } 9624 else 9625 fprintf (stderr, "(No per-node statistics)\n"); 9626 9627 print_type_hash_statistics (); 9628 print_debug_expr_statistics (); 9629 print_value_expr_statistics (); 9630 lang_hooks.print_statistics (); 9631 } 9632 9633 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s" 9634 9635 /* Generate a crc32 of a byte. */ 9636 9637 static unsigned 9638 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits) 9639 { 9640 unsigned ix; 9641 9642 for (ix = bits; ix--; value <<= 1) 9643 { 9644 unsigned feedback; 9645 9646 feedback = (value ^ chksum) & 0x80000000 ? 
0x04c11db7 : 0; 9647 chksum <<= 1; 9648 chksum ^= feedback; 9649 } 9650 return chksum; 9651 } 9652 9653 /* Generate a crc32 of a 32-bit unsigned. */ 9654 9655 unsigned 9656 crc32_unsigned (unsigned chksum, unsigned value) 9657 { 9658 return crc32_unsigned_bits (chksum, value, 32); 9659 } 9660 9661 /* Generate a crc32 of a byte. */ 9662 9663 unsigned 9664 crc32_byte (unsigned chksum, char byte) 9665 { 9666 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8); 9667 } 9668 9669 /* Generate a crc32 of a string. */ 9670 9671 unsigned 9672 crc32_string (unsigned chksum, const char *string) 9673 { 9674 do 9675 { 9676 chksum = crc32_byte (chksum, *string); 9677 } 9678 while (*string++); 9679 return chksum; 9680 } 9681 9682 /* P is a string that will be used in a symbol. Mask out any characters 9683 that are not valid in that context. */ 9684 9685 void 9686 clean_symbol_name (char *p) 9687 { 9688 for (; *p; p++) 9689 if (! (ISALNUM (*p) 9690 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */ 9691 || *p == '$' 9692 #endif 9693 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */ 9694 || *p == '.' 9695 #endif 9696 )) 9697 *p = '_'; 9698 } 9699 9700 /* For anonymous aggregate types, we need some sort of name to 9701 hold on to. In practice, this should not appear, but it should 9702 not be harmful if it does. */ 9703 bool 9704 anon_aggrname_p(const_tree id_node) 9705 { 9706 #ifndef NO_DOT_IN_LABEL 9707 return (IDENTIFIER_POINTER (id_node)[0] == '.' 9708 && IDENTIFIER_POINTER (id_node)[1] == '_'); 9709 #else /* NO_DOT_IN_LABEL */ 9710 #ifndef NO_DOLLAR_IN_LABEL 9711 return (IDENTIFIER_POINTER (id_node)[0] == '$' \ 9712 && IDENTIFIER_POINTER (id_node)[1] == '_'); 9713 #else /* NO_DOLLAR_IN_LABEL */ 9714 #define ANON_AGGRNAME_PREFIX "__anon_" 9715 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX, 9716 sizeof (ANON_AGGRNAME_PREFIX) - 1)); 9717 #endif /* NO_DOLLAR_IN_LABEL */ 9718 #endif /* NO_DOT_IN_LABEL */ 9719 } 9720 9721 /* Return a format for an anonymous aggregate name. */ 9722 const char * 9723 anon_aggrname_format() 9724 { 9725 #ifndef NO_DOT_IN_LABEL 9726 return "._%d"; 9727 #else /* NO_DOT_IN_LABEL */ 9728 #ifndef NO_DOLLAR_IN_LABEL 9729 return "$_%d"; 9730 #else /* NO_DOLLAR_IN_LABEL */ 9731 return "__anon_%d"; 9732 #endif /* NO_DOLLAR_IN_LABEL */ 9733 #endif /* NO_DOT_IN_LABEL */ 9734 } 9735 9736 /* Generate a name for a special-purpose function. 9737 The generated name may need to be unique across the whole link. 9738 Changes to this function may also require corresponding changes to 9739 xstrdup_mask_random. 9740 TYPE is some string to identify the purpose of this function to the 9741 linker or collect2; it must start with an uppercase letter, 9742 one of: 9743 I - for constructors 9744 D - for destructors 9745 N - for C++ anonymous namespaces 9746 F - for DWARF unwind frame information. */ 9747 9748 tree 9749 get_file_function_name (const char *type) 9750 { 9751 char *buf; 9752 const char *p; 9753 char *q; 9754 9755 /* If we already have a name we know to be unique, just use that. */ 9756 if (first_global_object_name) 9757 p = q = ASTRDUP (first_global_object_name); 9758 /* If the target is handling the constructors/destructors, they 9759 will be local to this file and the name is only necessary for 9760 debugging purposes. 9761 We also assign sub_I and sub_D sufixes to constructors called from 9762 the global static constructors. These are always local. 
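     Illustrative example: with TYPE "I" and a first_global_object_name of
     "foo", the code below yields the identifier "_GLOBAL__I_foo"; when no
     such unique global name exists (and the local ctor/dtor case above
     does not apply), the main input file name combined with a CRC of the
     weak global object name and a random seed is used instead.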
*/ 9763 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors) 9764 || (strncmp (type, "sub_", 4) == 0 9765 && (type[4] == 'I' || type[4] == 'D'))) 9766 { 9767 const char *file = main_input_filename; 9768 if (! file) 9769 file = LOCATION_FILE (input_location); 9770 /* Just use the file's basename, because the full pathname 9771 might be quite long. */ 9772 p = q = ASTRDUP (lbasename (file)); 9773 } 9774 else 9775 { 9776 /* Otherwise, the name must be unique across the entire link. 9777 We don't have anything that we know to be unique to this translation 9778 unit, so use what we do have and throw in some randomness. */ 9779 unsigned len; 9780 const char *name = weak_global_object_name; 9781 const char *file = main_input_filename; 9782 9783 if (! name) 9784 name = ""; 9785 if (! file) 9786 file = LOCATION_FILE (input_location); 9787 9788 len = strlen (file); 9789 q = (char *) alloca (9 + 19 + len + 1); 9790 memcpy (q, file, len + 1); 9791 9792 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX, 9793 crc32_string (0, name), get_random_seed (false)); 9794 9795 p = q; 9796 } 9797 9798 clean_symbol_name (q); 9799 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p) 9800 + strlen (type)); 9801 9802 /* Set up the name of the file-level functions we may need. 9803 Use a global object (which is already required to be unique over 9804 the program) rather than the file name (which imposes extra 9805 constraints). */ 9806 sprintf (buf, FILE_FUNCTION_FORMAT, type, p); 9807 9808 return get_identifier (buf); 9809 } 9810 9811 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007) 9812 9813 /* Complain that the tree code of NODE does not match the expected 0 9814 terminated list of trailing codes. The trailing code list can be 9815 empty, for a more vague error message. FILE, LINE, and FUNCTION 9816 are of the caller. */ 9817 9818 void 9819 tree_check_failed (const_tree node, const char *file, 9820 int line, const char *function, ...) 9821 { 9822 va_list args; 9823 const char *buffer; 9824 unsigned length = 0; 9825 enum tree_code code; 9826 9827 va_start (args, function); 9828 while ((code = (enum tree_code) va_arg (args, int))) 9829 length += 4 + strlen (get_tree_code_name (code)); 9830 va_end (args); 9831 if (length) 9832 { 9833 char *tmp; 9834 va_start (args, function); 9835 length += strlen ("expected "); 9836 buffer = tmp = (char *) alloca (length); 9837 length = 0; 9838 while ((code = (enum tree_code) va_arg (args, int))) 9839 { 9840 const char *prefix = length ? " or " : "expected "; 9841 9842 strcpy (tmp + length, prefix); 9843 length += strlen (prefix); 9844 strcpy (tmp + length, get_tree_code_name (code)); 9845 length += strlen (get_tree_code_name (code)); 9846 } 9847 va_end (args); 9848 } 9849 else 9850 buffer = "unexpected node"; 9851 9852 internal_error ("tree check: %s, have %s in %s, at %s:%d", 9853 buffer, get_tree_code_name (TREE_CODE (node)), 9854 function, trim_filename (file), line); 9855 } 9856 9857 /* Complain that the tree code of NODE does match the expected 0 9858 terminated list of trailing codes. FILE, LINE, and FUNCTION are of 9859 the caller. */ 9860 9861 void 9862 tree_not_check_failed (const_tree node, const char *file, 9863 int line, const char *function, ...) 
9864 { 9865 va_list args; 9866 char *buffer; 9867 unsigned length = 0; 9868 enum tree_code code; 9869 9870 va_start (args, function); 9871 while ((code = (enum tree_code) va_arg (args, int))) 9872 length += 4 + strlen (get_tree_code_name (code)); 9873 va_end (args); 9874 va_start (args, function); 9875 buffer = (char *) alloca (length); 9876 length = 0; 9877 while ((code = (enum tree_code) va_arg (args, int))) 9878 { 9879 if (length) 9880 { 9881 strcpy (buffer + length, " or "); 9882 length += 4; 9883 } 9884 strcpy (buffer + length, get_tree_code_name (code)); 9885 length += strlen (get_tree_code_name (code)); 9886 } 9887 va_end (args); 9888 9889 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d", 9890 buffer, get_tree_code_name (TREE_CODE (node)), 9891 function, trim_filename (file), line); 9892 } 9893 9894 /* Similar to tree_check_failed, except that we check for a class of tree 9895 code, given in CL. */ 9896 9897 void 9898 tree_class_check_failed (const_tree node, const enum tree_code_class cl, 9899 const char *file, int line, const char *function) 9900 { 9901 internal_error 9902 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d", 9903 TREE_CODE_CLASS_STRING (cl), 9904 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))), 9905 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line); 9906 } 9907 9908 /* Similar to tree_check_failed, except that instead of specifying a 9909 dozen codes, use the knowledge that they're all sequential. */ 9910 9911 void 9912 tree_range_check_failed (const_tree node, const char *file, int line, 9913 const char *function, enum tree_code c1, 9914 enum tree_code c2) 9915 { 9916 char *buffer; 9917 unsigned length = 0; 9918 unsigned int c; 9919 9920 for (c = c1; c <= c2; ++c) 9921 length += 4 + strlen (get_tree_code_name ((enum tree_code) c)); 9922 9923 length += strlen ("expected "); 9924 buffer = (char *) alloca (length); 9925 length = 0; 9926 9927 for (c = c1; c <= c2; ++c) 9928 { 9929 const char *prefix = length ? " or " : "expected "; 9930 9931 strcpy (buffer + length, prefix); 9932 length += strlen (prefix); 9933 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c)); 9934 length += strlen (get_tree_code_name ((enum tree_code) c)); 9935 } 9936 9937 internal_error ("tree check: %s, have %s in %s, at %s:%d", 9938 buffer, get_tree_code_name (TREE_CODE (node)), 9939 function, trim_filename (file), line); 9940 } 9941 9942 9943 /* Similar to tree_check_failed, except that we check that a tree does 9944 not have the specified code, given in CL. */ 9945 9946 void 9947 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl, 9948 const char *file, int line, const char *function) 9949 { 9950 internal_error 9951 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d", 9952 TREE_CODE_CLASS_STRING (cl), 9953 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))), 9954 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line); 9955 } 9956 9957 9958 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. 
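   With checking enabled this is reached, for instance, when a
   clause-specific accessor such as OMP_CLAUSE_IF_EXPR is applied to a
   clause of some other kind.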
*/ 9959 9960 void 9961 omp_clause_check_failed (const_tree node, const char *file, int line, 9962 const char *function, enum omp_clause_code code) 9963 { 9964 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d", 9965 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)), 9966 function, trim_filename (file), line); 9967 } 9968 9969 9970 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */ 9971 9972 void 9973 omp_clause_range_check_failed (const_tree node, const char *file, int line, 9974 const char *function, enum omp_clause_code c1, 9975 enum omp_clause_code c2) 9976 { 9977 char *buffer; 9978 unsigned length = 0; 9979 unsigned int c; 9980 9981 for (c = c1; c <= c2; ++c) 9982 length += 4 + strlen (omp_clause_code_name[c]); 9983 9984 length += strlen ("expected "); 9985 buffer = (char *) alloca (length); 9986 length = 0; 9987 9988 for (c = c1; c <= c2; ++c) 9989 { 9990 const char *prefix = length ? " or " : "expected "; 9991 9992 strcpy (buffer + length, prefix); 9993 length += strlen (prefix); 9994 strcpy (buffer + length, omp_clause_code_name[c]); 9995 length += strlen (omp_clause_code_name[c]); 9996 } 9997 9998 internal_error ("tree check: %s, have %s in %s, at %s:%d", 9999 buffer, omp_clause_code_name[TREE_CODE (node)], 10000 function, trim_filename (file), line); 10001 } 10002 10003 10004 #undef DEFTREESTRUCT 10005 #define DEFTREESTRUCT(VAL, NAME) NAME, 10006 10007 static const char *ts_enum_names[] = { 10008 #include "treestruct.def" 10009 }; 10010 #undef DEFTREESTRUCT 10011 10012 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)]) 10013 10014 /* Similar to tree_class_check_failed, except that we check for 10015 whether CODE contains the tree structure identified by EN. */ 10016 10017 void 10018 tree_contains_struct_check_failed (const_tree node, 10019 const enum tree_node_structure_enum en, 10020 const char *file, int line, 10021 const char *function) 10022 { 10023 internal_error 10024 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d", 10025 TS_ENUM_NAME (en), 10026 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line); 10027 } 10028 10029 10030 /* Similar to above, except that the check is for the bounds of a TREE_VEC's 10031 (dynamically sized) vector. */ 10032 10033 void 10034 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line, 10035 const char *function) 10036 { 10037 internal_error 10038 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d", 10039 idx + 1, len, function, trim_filename (file), line); 10040 } 10041 10042 /* Similar to above, except that the check is for the bounds of a TREE_VEC's 10043 (dynamically sized) vector. */ 10044 10045 void 10046 tree_vec_elt_check_failed (int idx, int len, const char *file, int line, 10047 const char *function) 10048 { 10049 internal_error 10050 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d", 10051 idx + 1, len, function, trim_filename (file), line); 10052 } 10053 10054 /* Similar to above, except that the check is for the bounds of the operand 10055 vector of an expression node EXP. 
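   For example, with checking enabled, TREE_OPERAND (t, 2) on a
   MODIFY_EXPR (which has only two operands) ends up here and is
   reported as having accessed operand 3 of modify_expr with 2
   operands.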
*/ 10056 10057 void 10058 tree_operand_check_failed (int idx, const_tree exp, const char *file, 10059 int line, const char *function) 10060 { 10061 enum tree_code code = TREE_CODE (exp); 10062 internal_error 10063 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d", 10064 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp), 10065 function, trim_filename (file), line); 10066 } 10067 10068 /* Similar to above, except that the check is for the number of 10069 operands of an OMP_CLAUSE node. */ 10070 10071 void 10072 omp_clause_operand_check_failed (int idx, const_tree t, const char *file, 10073 int line, const char *function) 10074 { 10075 internal_error 10076 ("tree check: accessed operand %d of omp_clause %s with %d operands " 10077 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)], 10078 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function, 10079 trim_filename (file), line); 10080 } 10081 #endif /* ENABLE_TREE_CHECKING */ 10082 10083 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE, 10084 and mapped to the machine mode MODE. Initialize its fields and build 10085 the information necessary for debugging output. */ 10086 10087 static tree 10088 make_vector_type (tree innertype, int nunits, machine_mode mode) 10089 { 10090 tree t; 10091 inchash::hash hstate; 10092 tree mv_innertype = TYPE_MAIN_VARIANT (innertype); 10093 10094 t = make_node (VECTOR_TYPE); 10095 TREE_TYPE (t) = mv_innertype; 10096 SET_TYPE_VECTOR_SUBPARTS (t, nunits); 10097 SET_TYPE_MODE (t, mode); 10098 10099 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p) 10100 SET_TYPE_STRUCTURAL_EQUALITY (t); 10101 else if ((TYPE_CANONICAL (mv_innertype) != innertype 10102 || mode != VOIDmode) 10103 && !VECTOR_BOOLEAN_TYPE_P (t)) 10104 TYPE_CANONICAL (t) 10105 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode); 10106 10107 layout_type (t); 10108 10109 hstate.add_wide_int (VECTOR_TYPE); 10110 hstate.add_wide_int (nunits); 10111 hstate.add_wide_int (mode); 10112 hstate.add_object (TYPE_HASH (TREE_TYPE (t))); 10113 t = type_hash_canon (hstate.end (), t); 10114 10115 /* We have built a main variant, based on the main variant of the 10116 inner type. Use it to build the variant we return. */ 10117 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype)) 10118 && TREE_TYPE (t) != innertype) 10119 return build_type_attribute_qual_variant (t, 10120 TYPE_ATTRIBUTES (innertype), 10121 TYPE_QUALS (innertype)); 10122 10123 return t; 10124 } 10125 10126 static tree 10127 make_or_reuse_type (unsigned size, int unsignedp) 10128 { 10129 int i; 10130 10131 if (size == INT_TYPE_SIZE) 10132 return unsignedp ? unsigned_type_node : integer_type_node; 10133 if (size == CHAR_TYPE_SIZE) 10134 return unsignedp ? unsigned_char_type_node : signed_char_type_node; 10135 if (size == SHORT_TYPE_SIZE) 10136 return unsignedp ? short_unsigned_type_node : short_integer_type_node; 10137 if (size == LONG_TYPE_SIZE) 10138 return unsignedp ? long_unsigned_type_node : long_integer_type_node; 10139 if (size == LONG_LONG_TYPE_SIZE) 10140 return (unsignedp ? long_long_unsigned_type_node 10141 : long_long_integer_type_node); 10142 10143 for (i = 0; i < NUM_INT_N_ENTS; i ++) 10144 if (size == int_n_data[i].bitsize 10145 && int_n_enabled_p[i]) 10146 return (unsignedp ? 
int_n_trees[i].unsigned_type 10147 : int_n_trees[i].signed_type); 10148 10149 if (unsignedp) 10150 return make_unsigned_type (size); 10151 else 10152 return make_signed_type (size); 10153 } 10154 10155 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */ 10156 10157 static tree 10158 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp) 10159 { 10160 if (satp) 10161 { 10162 if (size == SHORT_FRACT_TYPE_SIZE) 10163 return unsignedp ? sat_unsigned_short_fract_type_node 10164 : sat_short_fract_type_node; 10165 if (size == FRACT_TYPE_SIZE) 10166 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node; 10167 if (size == LONG_FRACT_TYPE_SIZE) 10168 return unsignedp ? sat_unsigned_long_fract_type_node 10169 : sat_long_fract_type_node; 10170 if (size == LONG_LONG_FRACT_TYPE_SIZE) 10171 return unsignedp ? sat_unsigned_long_long_fract_type_node 10172 : sat_long_long_fract_type_node; 10173 } 10174 else 10175 { 10176 if (size == SHORT_FRACT_TYPE_SIZE) 10177 return unsignedp ? unsigned_short_fract_type_node 10178 : short_fract_type_node; 10179 if (size == FRACT_TYPE_SIZE) 10180 return unsignedp ? unsigned_fract_type_node : fract_type_node; 10181 if (size == LONG_FRACT_TYPE_SIZE) 10182 return unsignedp ? unsigned_long_fract_type_node 10183 : long_fract_type_node; 10184 if (size == LONG_LONG_FRACT_TYPE_SIZE) 10185 return unsignedp ? unsigned_long_long_fract_type_node 10186 : long_long_fract_type_node; 10187 } 10188 10189 return make_fract_type (size, unsignedp, satp); 10190 } 10191 10192 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */ 10193 10194 static tree 10195 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp) 10196 { 10197 if (satp) 10198 { 10199 if (size == SHORT_ACCUM_TYPE_SIZE) 10200 return unsignedp ? sat_unsigned_short_accum_type_node 10201 : sat_short_accum_type_node; 10202 if (size == ACCUM_TYPE_SIZE) 10203 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node; 10204 if (size == LONG_ACCUM_TYPE_SIZE) 10205 return unsignedp ? sat_unsigned_long_accum_type_node 10206 : sat_long_accum_type_node; 10207 if (size == LONG_LONG_ACCUM_TYPE_SIZE) 10208 return unsignedp ? sat_unsigned_long_long_accum_type_node 10209 : sat_long_long_accum_type_node; 10210 } 10211 else 10212 { 10213 if (size == SHORT_ACCUM_TYPE_SIZE) 10214 return unsignedp ? unsigned_short_accum_type_node 10215 : short_accum_type_node; 10216 if (size == ACCUM_TYPE_SIZE) 10217 return unsignedp ? unsigned_accum_type_node : accum_type_node; 10218 if (size == LONG_ACCUM_TYPE_SIZE) 10219 return unsignedp ? unsigned_long_accum_type_node 10220 : long_accum_type_node; 10221 if (size == LONG_LONG_ACCUM_TYPE_SIZE) 10222 return unsignedp ? unsigned_long_long_accum_type_node 10223 : long_long_accum_type_node; 10224 } 10225 10226 return make_accum_type (size, unsignedp, satp); 10227 } 10228 10229 10230 /* Create an atomic variant node for TYPE. This routine is called 10231 during initialization of data types to create the 5 basic atomic 10232 types. The generic build_variant_type function requires these to 10233 already be set up in order to function properly, so cannot be 10234 called from there. If ALIGN is non-zero, then ensure alignment is 10235 overridden to this value. */ 10236 10237 static tree 10238 build_atomic_base (tree type, unsigned int align) 10239 { 10240 tree t; 10241 10242 /* Make sure its not already registered. 
*/ 10243 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC))) 10244 return t; 10245 10246 t = build_variant_type_copy (type); 10247 set_type_quals (t, TYPE_QUAL_ATOMIC); 10248 10249 if (align) 10250 SET_TYPE_ALIGN (t, align); 10251 10252 return t; 10253 } 10254 10255 /* Information about the _FloatN and _FloatNx types. This must be in 10256 the same order as the corresponding TI_* enum values. */ 10257 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] = 10258 { 10259 { 16, false }, 10260 { 32, false }, 10261 { 64, false }, 10262 { 128, false }, 10263 { 32, true }, 10264 { 64, true }, 10265 { 128, true }, 10266 }; 10267 10268 10269 /* Create nodes for all integer types (and error_mark_node) using the sizes 10270 of C datatypes. SIGNED_CHAR specifies whether char is signed. */ 10271 10272 void 10273 build_common_tree_nodes (bool signed_char) 10274 { 10275 int i; 10276 10277 error_mark_node = make_node (ERROR_MARK); 10278 TREE_TYPE (error_mark_node) = error_mark_node; 10279 10280 initialize_sizetypes (); 10281 10282 /* Define both `signed char' and `unsigned char'. */ 10283 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE); 10284 TYPE_STRING_FLAG (signed_char_type_node) = 1; 10285 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE); 10286 TYPE_STRING_FLAG (unsigned_char_type_node) = 1; 10287 10288 /* Define `char', which is like either `signed char' or `unsigned char' 10289 but not the same as either. */ 10290 char_type_node 10291 = (signed_char 10292 ? make_signed_type (CHAR_TYPE_SIZE) 10293 : make_unsigned_type (CHAR_TYPE_SIZE)); 10294 TYPE_STRING_FLAG (char_type_node) = 1; 10295 10296 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE); 10297 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE); 10298 integer_type_node = make_signed_type (INT_TYPE_SIZE); 10299 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE); 10300 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE); 10301 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE); 10302 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE); 10303 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE); 10304 10305 for (i = 0; i < NUM_INT_N_ENTS; i ++) 10306 { 10307 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize); 10308 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize); 10309 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize); 10310 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize); 10311 10312 if (int_n_enabled_p[i]) 10313 { 10314 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type; 10315 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type; 10316 } 10317 } 10318 10319 /* Define a boolean type. This type only represents boolean values but 10320 may be larger than char depending on the value of BOOL_TYPE_SIZE. */ 10321 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE); 10322 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE); 10323 TYPE_PRECISION (boolean_type_node) = 1; 10324 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1); 10325 10326 /* Define what type to use for size_t. 
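     For example, a target whose SIZE_TYPE is "long unsigned int" gets
     long_unsigned_type_node here, while a target that defines size_t
     in terms of one of the __intN types (say a SIZE_TYPE of
     "__int20 unsigned") falls through to the loop below and picks up
     the matching int_n_trees entry.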
*/ 10327 if (strcmp (SIZE_TYPE, "unsigned int") == 0) 10328 size_type_node = unsigned_type_node; 10329 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0) 10330 size_type_node = long_unsigned_type_node; 10331 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0) 10332 size_type_node = long_long_unsigned_type_node; 10333 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0) 10334 size_type_node = short_unsigned_type_node; 10335 else 10336 { 10337 int i; 10338 10339 size_type_node = NULL_TREE; 10340 for (i = 0; i < NUM_INT_N_ENTS; i++) 10341 if (int_n_enabled_p[i]) 10342 { 10343 char name[50]; 10344 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize); 10345 10346 if (strcmp (name, SIZE_TYPE) == 0) 10347 { 10348 size_type_node = int_n_trees[i].unsigned_type; 10349 } 10350 } 10351 if (size_type_node == NULL_TREE) 10352 gcc_unreachable (); 10353 } 10354 10355 /* Define what type to use for ptrdiff_t. */ 10356 if (strcmp (PTRDIFF_TYPE, "int") == 0) 10357 ptrdiff_type_node = integer_type_node; 10358 else if (strcmp (PTRDIFF_TYPE, "long int") == 0) 10359 ptrdiff_type_node = long_integer_type_node; 10360 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0) 10361 ptrdiff_type_node = long_long_integer_type_node; 10362 else if (strcmp (PTRDIFF_TYPE, "short int") == 0) 10363 ptrdiff_type_node = short_integer_type_node; 10364 else 10365 { 10366 ptrdiff_type_node = NULL_TREE; 10367 for (int i = 0; i < NUM_INT_N_ENTS; i++) 10368 if (int_n_enabled_p[i]) 10369 { 10370 char name[50]; 10371 sprintf (name, "__int%d", int_n_data[i].bitsize); 10372 if (strcmp (name, PTRDIFF_TYPE) == 0) 10373 ptrdiff_type_node = int_n_trees[i].signed_type; 10374 } 10375 if (ptrdiff_type_node == NULL_TREE) 10376 gcc_unreachable (); 10377 } 10378 10379 /* Fill in the rest of the sized types. Reuse existing type nodes 10380 when possible. */ 10381 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0); 10382 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0); 10383 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0); 10384 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0); 10385 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0); 10386 10387 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1); 10388 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1); 10389 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1); 10390 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1); 10391 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1); 10392 10393 /* Don't call build_qualified type for atomics. That routine does 10394 special processing for atomics, and until they are initialized 10395 it's better not to make that call. 10396 10397 Check to see if there is a target override for atomic types. 
*/ 10398 10399 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node, 10400 targetm.atomic_align_for_mode (QImode)); 10401 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node, 10402 targetm.atomic_align_for_mode (HImode)); 10403 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node, 10404 targetm.atomic_align_for_mode (SImode)); 10405 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node, 10406 targetm.atomic_align_for_mode (DImode)); 10407 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node, 10408 targetm.atomic_align_for_mode (TImode)); 10409 10410 access_public_node = get_identifier ("public"); 10411 access_protected_node = get_identifier ("protected"); 10412 access_private_node = get_identifier ("private"); 10413 10414 /* Define these next since types below may used them. */ 10415 integer_zero_node = build_int_cst (integer_type_node, 0); 10416 integer_one_node = build_int_cst (integer_type_node, 1); 10417 integer_three_node = build_int_cst (integer_type_node, 3); 10418 integer_minus_one_node = build_int_cst (integer_type_node, -1); 10419 10420 size_zero_node = size_int (0); 10421 size_one_node = size_int (1); 10422 bitsize_zero_node = bitsize_int (0); 10423 bitsize_one_node = bitsize_int (1); 10424 bitsize_unit_node = bitsize_int (BITS_PER_UNIT); 10425 10426 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node); 10427 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node); 10428 10429 void_type_node = make_node (VOID_TYPE); 10430 layout_type (void_type_node); 10431 10432 pointer_bounds_type_node = targetm.chkp_bound_type (); 10433 10434 /* We are not going to have real types in C with less than byte alignment, 10435 so we might as well not have any types that claim to have it. */ 10436 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT); 10437 TYPE_USER_ALIGN (void_type_node) = 0; 10438 10439 void_node = make_node (VOID_CST); 10440 TREE_TYPE (void_node) = void_type_node; 10441 10442 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0); 10443 layout_type (TREE_TYPE (null_pointer_node)); 10444 10445 ptr_type_node = build_pointer_type (void_type_node); 10446 const_ptr_type_node 10447 = build_pointer_type (build_type_variant (void_type_node, 1, 0)); 10448 fileptr_type_node = ptr_type_node; 10449 const_tm_ptr_type_node = const_ptr_type_node; 10450 10451 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1); 10452 10453 float_type_node = make_node (REAL_TYPE); 10454 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE; 10455 layout_type (float_type_node); 10456 10457 double_type_node = make_node (REAL_TYPE); 10458 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE; 10459 layout_type (double_type_node); 10460 10461 long_double_type_node = make_node (REAL_TYPE); 10462 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE; 10463 layout_type (long_double_type_node); 10464 10465 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) 10466 { 10467 int n = floatn_nx_types[i].n; 10468 bool extended = floatn_nx_types[i].extended; 10469 machine_mode mode = targetm.floatn_mode (n, extended); 10470 if (mode == VOIDmode) 10471 continue; 10472 int precision = GET_MODE_PRECISION (mode); 10473 /* Work around the rs6000 KFmode having precision 113 not 10474 128. 
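	 For IEEE binary128, for instance, the format below has p = 113,
	 emin = -16381 and emax = 16384, so min_precision comes out as
	 113 + ceil_log2 (32765) = 113 + 15 = 128, and _Float128 keeps a
	 TYPE_PRECISION of 128 even when the mode itself reports 113.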
*/ 10475 const struct real_format *fmt = REAL_MODE_FORMAT (mode); 10476 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3); 10477 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin); 10478 if (!extended) 10479 gcc_assert (min_precision == n); 10480 if (precision < min_precision) 10481 precision = min_precision; 10482 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE); 10483 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision; 10484 layout_type (FLOATN_NX_TYPE_NODE (i)); 10485 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode); 10486 } 10487 10488 float_ptr_type_node = build_pointer_type (float_type_node); 10489 double_ptr_type_node = build_pointer_type (double_type_node); 10490 long_double_ptr_type_node = build_pointer_type (long_double_type_node); 10491 integer_ptr_type_node = build_pointer_type (integer_type_node); 10492 10493 /* Fixed size integer types. */ 10494 uint16_type_node = make_or_reuse_type (16, 1); 10495 uint32_type_node = make_or_reuse_type (32, 1); 10496 uint64_type_node = make_or_reuse_type (64, 1); 10497 10498 /* Decimal float types. */ 10499 dfloat32_type_node = make_node (REAL_TYPE); 10500 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE; 10501 SET_TYPE_MODE (dfloat32_type_node, SDmode); 10502 layout_type (dfloat32_type_node); 10503 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node); 10504 10505 dfloat64_type_node = make_node (REAL_TYPE); 10506 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE; 10507 SET_TYPE_MODE (dfloat64_type_node, DDmode); 10508 layout_type (dfloat64_type_node); 10509 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node); 10510 10511 dfloat128_type_node = make_node (REAL_TYPE); 10512 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE; 10513 SET_TYPE_MODE (dfloat128_type_node, TDmode); 10514 layout_type (dfloat128_type_node); 10515 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node); 10516 10517 complex_integer_type_node = build_complex_type (integer_type_node, true); 10518 complex_float_type_node = build_complex_type (float_type_node, true); 10519 complex_double_type_node = build_complex_type (double_type_node, true); 10520 complex_long_double_type_node = build_complex_type (long_double_type_node, 10521 true); 10522 10523 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) 10524 { 10525 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE) 10526 COMPLEX_FLOATN_NX_TYPE_NODE (i) 10527 = build_complex_type (FLOATN_NX_TYPE_NODE (i)); 10528 } 10529 10530 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */ 10531 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \ 10532 sat_ ## KIND ## _type_node = \ 10533 make_sat_signed_ ## KIND ## _type (SIZE); \ 10534 sat_unsigned_ ## KIND ## _type_node = \ 10535 make_sat_unsigned_ ## KIND ## _type (SIZE); \ 10536 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \ 10537 unsigned_ ## KIND ## _type_node = \ 10538 make_unsigned_ ## KIND ## _type (SIZE); 10539 10540 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \ 10541 sat_ ## WIDTH ## KIND ## _type_node = \ 10542 make_sat_signed_ ## KIND ## _type (SIZE); \ 10543 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \ 10544 make_sat_unsigned_ ## KIND ## _type (SIZE); \ 10545 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \ 10546 unsigned_ ## WIDTH ## KIND ## _type_node = \ 10547 make_unsigned_ ## KIND ## _type (SIZE); 10548 10549 /* Make fixed-point type nodes based on four different widths. 
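     Each MAKE_FIXED_TYPE_NODE_FAMILY use below therefore creates
     sixteen nodes; MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT), for
     example, defines short_fract_type_node, fract_type_node,
     long_fract_type_node and long_long_fract_type_node together with
     their sat_, unsigned_ and sat_unsigned_ variants.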
*/ 10550 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \ 10551 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \ 10552 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \ 10553 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \ 10554 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE) 10555 10556 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */ 10557 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \ 10558 NAME ## _type_node = \ 10559 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \ 10560 u ## NAME ## _type_node = \ 10561 make_or_reuse_unsigned_ ## KIND ## _type \ 10562 (GET_MODE_BITSIZE (U ## MODE ## mode)); \ 10563 sat_ ## NAME ## _type_node = \ 10564 make_or_reuse_sat_signed_ ## KIND ## _type \ 10565 (GET_MODE_BITSIZE (MODE ## mode)); \ 10566 sat_u ## NAME ## _type_node = \ 10567 make_or_reuse_sat_unsigned_ ## KIND ## _type \ 10568 (GET_MODE_BITSIZE (U ## MODE ## mode)); 10569 10570 /* Fixed-point type and mode nodes. */ 10571 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT) 10572 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM) 10573 MAKE_FIXED_MODE_NODE (fract, qq, QQ) 10574 MAKE_FIXED_MODE_NODE (fract, hq, HQ) 10575 MAKE_FIXED_MODE_NODE (fract, sq, SQ) 10576 MAKE_FIXED_MODE_NODE (fract, dq, DQ) 10577 MAKE_FIXED_MODE_NODE (fract, tq, TQ) 10578 MAKE_FIXED_MODE_NODE (accum, ha, HA) 10579 MAKE_FIXED_MODE_NODE (accum, sa, SA) 10580 MAKE_FIXED_MODE_NODE (accum, da, DA) 10581 MAKE_FIXED_MODE_NODE (accum, ta, TA) 10582 10583 { 10584 tree t = targetm.build_builtin_va_list (); 10585 10586 /* Many back-ends define record types without setting TYPE_NAME. 10587 If we copied the record type here, we'd keep the original 10588 record type without a name. This breaks name mangling. So, 10589 don't copy record types and let c_common_nodes_and_builtins() 10590 declare the type to be __builtin_va_list. */ 10591 if (TREE_CODE (t) != RECORD_TYPE) 10592 t = build_variant_type_copy (t); 10593 10594 va_list_type_node = t; 10595 } 10596 } 10597 10598 /* Modify DECL for given flags. 10599 TM_PURE attribute is set only on types, so the function will modify 10600 DECL's type when ECF_TM_PURE is used. */ 10601 10602 void 10603 set_call_expr_flags (tree decl, int flags) 10604 { 10605 if (flags & ECF_NOTHROW) 10606 TREE_NOTHROW (decl) = 1; 10607 if (flags & ECF_CONST) 10608 TREE_READONLY (decl) = 1; 10609 if (flags & ECF_PURE) 10610 DECL_PURE_P (decl) = 1; 10611 if (flags & ECF_LOOPING_CONST_OR_PURE) 10612 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1; 10613 if (flags & ECF_NOVOPS) 10614 DECL_IS_NOVOPS (decl) = 1; 10615 if (flags & ECF_NORETURN) 10616 TREE_THIS_VOLATILE (decl) = 1; 10617 if (flags & ECF_MALLOC) 10618 DECL_IS_MALLOC (decl) = 1; 10619 if (flags & ECF_RETURNS_TWICE) 10620 DECL_IS_RETURNS_TWICE (decl) = 1; 10621 if (flags & ECF_LEAF) 10622 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"), 10623 NULL, DECL_ATTRIBUTES (decl)); 10624 if (flags & ECF_RET1) 10625 DECL_ATTRIBUTES (decl) 10626 = tree_cons (get_identifier ("fn spec"), 10627 build_tree_list (NULL_TREE, build_string (1, "1")), 10628 DECL_ATTRIBUTES (decl)); 10629 if ((flags & ECF_TM_PURE) && flag_tm) 10630 apply_tm_attr (decl, get_identifier ("transaction_pure")); 10631 /* Looping const or pure is implied by noreturn. 10632 There is currently no way to declare looping const or looping pure alone. 
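     So, for example, ECF_NORETURN | ECF_CONST | ECF_LOOPING_CONST_OR_PURE
     is a valid combination, whereas passing ECF_LOOPING_CONST_OR_PURE
     on its own would trip the assertion below.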
*/ 10633 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE) 10634 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE)))); 10635 } 10636 10637 10638 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */ 10639 10640 static void 10641 local_define_builtin (const char *name, tree type, enum built_in_function code, 10642 const char *library_name, int ecf_flags) 10643 { 10644 tree decl; 10645 10646 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL, 10647 library_name, NULL_TREE); 10648 set_call_expr_flags (decl, ecf_flags); 10649 10650 set_builtin_decl (code, decl, true); 10651 } 10652 10653 /* Call this function after instantiating all builtins that the language 10654 front end cares about. This will build the rest of the builtins 10655 and internal functions that are relied upon by the tree optimizers and 10656 the middle-end. */ 10657 10658 void 10659 build_common_builtin_nodes (void) 10660 { 10661 tree tmp, ftype; 10662 int ecf_flags; 10663 10664 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE) 10665 || !builtin_decl_explicit_p (BUILT_IN_ABORT)) 10666 { 10667 ftype = build_function_type (void_type_node, void_list_node); 10668 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)) 10669 local_define_builtin ("__builtin_unreachable", ftype, 10670 BUILT_IN_UNREACHABLE, 10671 "__builtin_unreachable", 10672 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN 10673 | ECF_CONST); 10674 if (!builtin_decl_explicit_p (BUILT_IN_ABORT)) 10675 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT, 10676 "abort", 10677 ECF_LEAF | ECF_NORETURN | ECF_CONST); 10678 } 10679 10680 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY) 10681 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE)) 10682 { 10683 ftype = build_function_type_list (ptr_type_node, 10684 ptr_type_node, const_ptr_type_node, 10685 size_type_node, NULL_TREE); 10686 10687 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)) 10688 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY, 10689 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1); 10690 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE)) 10691 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE, 10692 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1); 10693 } 10694 10695 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP)) 10696 { 10697 ftype = build_function_type_list (integer_type_node, const_ptr_type_node, 10698 const_ptr_type_node, size_type_node, 10699 NULL_TREE); 10700 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP, 10701 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF); 10702 } 10703 10704 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET)) 10705 { 10706 ftype = build_function_type_list (ptr_type_node, 10707 ptr_type_node, integer_type_node, 10708 size_type_node, NULL_TREE); 10709 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET, 10710 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1); 10711 } 10712 10713 /* If we're checking the stack, `alloca' can throw. */ 10714 const int alloca_flags 10715 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 
0 : ECF_NOTHROW); 10716 10717 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA)) 10718 { 10719 ftype = build_function_type_list (ptr_type_node, 10720 size_type_node, NULL_TREE); 10721 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA, 10722 "alloca", alloca_flags); 10723 } 10724 10725 ftype = build_function_type_list (ptr_type_node, size_type_node, 10726 size_type_node, NULL_TREE); 10727 local_define_builtin ("__builtin_alloca_with_align", ftype, 10728 BUILT_IN_ALLOCA_WITH_ALIGN, 10729 "__builtin_alloca_with_align", 10730 alloca_flags); 10731 10732 ftype = build_function_type_list (void_type_node, 10733 ptr_type_node, ptr_type_node, 10734 ptr_type_node, NULL_TREE); 10735 local_define_builtin ("__builtin_init_trampoline", ftype, 10736 BUILT_IN_INIT_TRAMPOLINE, 10737 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF); 10738 local_define_builtin ("__builtin_init_heap_trampoline", ftype, 10739 BUILT_IN_INIT_HEAP_TRAMPOLINE, 10740 "__builtin_init_heap_trampoline", 10741 ECF_NOTHROW | ECF_LEAF); 10742 local_define_builtin ("__builtin_init_descriptor", ftype, 10743 BUILT_IN_INIT_DESCRIPTOR, 10744 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF); 10745 10746 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE); 10747 local_define_builtin ("__builtin_adjust_trampoline", ftype, 10748 BUILT_IN_ADJUST_TRAMPOLINE, 10749 "__builtin_adjust_trampoline", 10750 ECF_CONST | ECF_NOTHROW); 10751 local_define_builtin ("__builtin_adjust_descriptor", ftype, 10752 BUILT_IN_ADJUST_DESCRIPTOR, 10753 "__builtin_adjust_descriptor", 10754 ECF_CONST | ECF_NOTHROW); 10755 10756 ftype = build_function_type_list (void_type_node, 10757 ptr_type_node, ptr_type_node, NULL_TREE); 10758 local_define_builtin ("__builtin_nonlocal_goto", ftype, 10759 BUILT_IN_NONLOCAL_GOTO, 10760 "__builtin_nonlocal_goto", 10761 ECF_NORETURN | ECF_NOTHROW); 10762 10763 ftype = build_function_type_list (void_type_node, 10764 ptr_type_node, ptr_type_node, NULL_TREE); 10765 local_define_builtin ("__builtin_setjmp_setup", ftype, 10766 BUILT_IN_SETJMP_SETUP, 10767 "__builtin_setjmp_setup", ECF_NOTHROW); 10768 10769 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE); 10770 local_define_builtin ("__builtin_setjmp_receiver", ftype, 10771 BUILT_IN_SETJMP_RECEIVER, 10772 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF); 10773 10774 ftype = build_function_type_list (ptr_type_node, NULL_TREE); 10775 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE, 10776 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF); 10777 10778 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE); 10779 local_define_builtin ("__builtin_stack_restore", ftype, 10780 BUILT_IN_STACK_RESTORE, 10781 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF); 10782 10783 ftype = build_function_type_list (integer_type_node, const_ptr_type_node, 10784 const_ptr_type_node, size_type_node, 10785 NULL_TREE); 10786 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ, 10787 "__builtin_memcmp_eq", 10788 ECF_PURE | ECF_NOTHROW | ECF_LEAF); 10789 10790 /* If there's a possibility that we might use the ARM EABI, build the 10791 alternate __cxa_end_cleanup node used to resume from C++ and Java. 
*/ 10792 if (targetm.arm_eabi_unwinder) 10793 { 10794 ftype = build_function_type_list (void_type_node, NULL_TREE); 10795 local_define_builtin ("__builtin_cxa_end_cleanup", ftype, 10796 BUILT_IN_CXA_END_CLEANUP, 10797 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF); 10798 } 10799 10800 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE); 10801 local_define_builtin ("__builtin_unwind_resume", ftype, 10802 BUILT_IN_UNWIND_RESUME, 10803 ((targetm_common.except_unwind_info (&global_options) 10804 == UI_SJLJ) 10805 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"), 10806 ECF_NORETURN); 10807 10808 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE) 10809 { 10810 ftype = build_function_type_list (ptr_type_node, integer_type_node, 10811 NULL_TREE); 10812 local_define_builtin ("__builtin_return_address", ftype, 10813 BUILT_IN_RETURN_ADDRESS, 10814 "__builtin_return_address", 10815 ECF_NOTHROW); 10816 } 10817 10818 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER) 10819 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT)) 10820 { 10821 ftype = build_function_type_list (void_type_node, ptr_type_node, 10822 ptr_type_node, NULL_TREE); 10823 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)) 10824 local_define_builtin ("__cyg_profile_func_enter", ftype, 10825 BUILT_IN_PROFILE_FUNC_ENTER, 10826 "__cyg_profile_func_enter", 0); 10827 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT)) 10828 local_define_builtin ("__cyg_profile_func_exit", ftype, 10829 BUILT_IN_PROFILE_FUNC_EXIT, 10830 "__cyg_profile_func_exit", 0); 10831 } 10832 10833 /* The exception object and filter values from the runtime. The argument 10834 must be zero before exception lowering, i.e. from the front end. After 10835 exception lowering, it will be the region number for the exception 10836 landing pad. These functions are PURE instead of CONST to prevent 10837 them from being hoisted past the exception edge that will initialize 10838 its value in the landing pad. */ 10839 ftype = build_function_type_list (ptr_type_node, 10840 integer_type_node, NULL_TREE); 10841 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF; 10842 /* Only use TM_PURE if we have TM language support. */ 10843 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1)) 10844 ecf_flags |= ECF_TM_PURE; 10845 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER, 10846 "__builtin_eh_pointer", ecf_flags); 10847 10848 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0); 10849 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE); 10850 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER, 10851 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF); 10852 10853 ftype = build_function_type_list (void_type_node, 10854 integer_type_node, integer_type_node, 10855 NULL_TREE); 10856 local_define_builtin ("__builtin_eh_copy_values", ftype, 10857 BUILT_IN_EH_COPY_VALUES, 10858 "__builtin_eh_copy_values", ECF_NOTHROW); 10859 10860 /* Complex multiplication and division. These are handled as builtins 10861 rather than optabs because emit_library_call_value doesn't support 10862 complex. Further, we can do slightly better with folding these 10863 beasties if the real and complex parts of the arguments are separate. 
*/ 10864 { 10865 int mode; 10866 10867 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode) 10868 { 10869 char mode_name_buf[4], *q; 10870 const char *p; 10871 enum built_in_function mcode, dcode; 10872 tree type, inner_type; 10873 const char *prefix = "__"; 10874 10875 if (targetm.libfunc_gnu_prefix) 10876 prefix = "__gnu_"; 10877 10878 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0); 10879 if (type == NULL) 10880 continue; 10881 inner_type = TREE_TYPE (type); 10882 10883 ftype = build_function_type_list (type, inner_type, inner_type, 10884 inner_type, inner_type, NULL_TREE); 10885 10886 mcode = ((enum built_in_function) 10887 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT)); 10888 dcode = ((enum built_in_function) 10889 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT)); 10890 10891 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++) 10892 *q = TOLOWER (*p); 10893 *q = '\0'; 10894 10895 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3", 10896 NULL); 10897 local_define_builtin (built_in_names[mcode], ftype, mcode, 10898 built_in_names[mcode], 10899 ECF_CONST | ECF_NOTHROW | ECF_LEAF); 10900 10901 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3", 10902 NULL); 10903 local_define_builtin (built_in_names[dcode], ftype, dcode, 10904 built_in_names[dcode], 10905 ECF_CONST | ECF_NOTHROW | ECF_LEAF); 10906 } 10907 } 10908 10909 init_internal_fns (); 10910 } 10911 10912 /* HACK. GROSS. This is absolutely disgusting. I wish there was a 10913 better way. 10914 10915 If we requested a pointer to a vector, build up the pointers that 10916 we stripped off while looking for the inner type. Similarly for 10917 return values from functions. 10918 10919 The argument TYPE is the top of the chain, and BOTTOM is the 10920 new type which we will point to. */ 10921 10922 tree 10923 reconstruct_complex_type (tree type, tree bottom) 10924 { 10925 tree inner, outer; 10926 10927 if (TREE_CODE (type) == POINTER_TYPE) 10928 { 10929 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10930 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type), 10931 TYPE_REF_CAN_ALIAS_ALL (type)); 10932 } 10933 else if (TREE_CODE (type) == REFERENCE_TYPE) 10934 { 10935 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10936 outer = build_reference_type_for_mode (inner, TYPE_MODE (type), 10937 TYPE_REF_CAN_ALIAS_ALL (type)); 10938 } 10939 else if (TREE_CODE (type) == ARRAY_TYPE) 10940 { 10941 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10942 outer = build_array_type (inner, TYPE_DOMAIN (type)); 10943 } 10944 else if (TREE_CODE (type) == FUNCTION_TYPE) 10945 { 10946 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10947 outer = build_function_type (inner, TYPE_ARG_TYPES (type)); 10948 } 10949 else if (TREE_CODE (type) == METHOD_TYPE) 10950 { 10951 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10952 /* The build_method_type_directly() routine prepends 'this' to argument list, 10953 so we must compensate by getting rid of it. 
*/ 10954 outer 10955 = build_method_type_directly 10956 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))), 10957 inner, 10958 TREE_CHAIN (TYPE_ARG_TYPES (type))); 10959 } 10960 else if (TREE_CODE (type) == OFFSET_TYPE) 10961 { 10962 inner = reconstruct_complex_type (TREE_TYPE (type), bottom); 10963 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner); 10964 } 10965 else 10966 return bottom; 10967 10968 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type), 10969 TYPE_QUALS (type)); 10970 } 10971 10972 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and 10973 the inner type. */ 10974 tree 10975 build_vector_type_for_mode (tree innertype, machine_mode mode) 10976 { 10977 int nunits; 10978 10979 switch (GET_MODE_CLASS (mode)) 10980 { 10981 case MODE_VECTOR_INT: 10982 case MODE_VECTOR_FLOAT: 10983 case MODE_VECTOR_FRACT: 10984 case MODE_VECTOR_UFRACT: 10985 case MODE_VECTOR_ACCUM: 10986 case MODE_VECTOR_UACCUM: 10987 nunits = GET_MODE_NUNITS (mode); 10988 break; 10989 10990 case MODE_INT: 10991 /* Check that there are no leftover bits. */ 10992 gcc_assert (GET_MODE_BITSIZE (mode) 10993 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0); 10994 10995 nunits = GET_MODE_BITSIZE (mode) 10996 / TREE_INT_CST_LOW (TYPE_SIZE (innertype)); 10997 break; 10998 10999 default: 11000 gcc_unreachable (); 11001 } 11002 11003 return make_vector_type (innertype, nunits, mode); 11004 } 11005 11006 /* Similarly, but takes the inner type and number of units, which must be 11007 a power of two. */ 11008 11009 tree 11010 build_vector_type (tree innertype, int nunits) 11011 { 11012 return make_vector_type (innertype, nunits, VOIDmode); 11013 } 11014 11015 /* Build truth vector with specified length and number of units. */ 11016 11017 tree 11018 build_truth_vector_type (unsigned nunits, unsigned vector_size) 11019 { 11020 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits, 11021 vector_size); 11022 11023 gcc_assert (mask_mode != VOIDmode); 11024 11025 unsigned HOST_WIDE_INT vsize; 11026 if (mask_mode == BLKmode) 11027 vsize = vector_size * BITS_PER_UNIT; 11028 else 11029 vsize = GET_MODE_BITSIZE (mask_mode); 11030 11031 unsigned HOST_WIDE_INT esize = vsize / nunits; 11032 gcc_assert (esize * nunits == vsize); 11033 11034 tree bool_type = build_nonstandard_boolean_type (esize); 11035 11036 return make_vector_type (bool_type, nunits, mask_mode); 11037 } 11038 11039 /* Returns a vector type corresponding to a comparison of VECTYPE. */ 11040 11041 tree 11042 build_same_sized_truth_vector_type (tree vectype) 11043 { 11044 if (VECTOR_BOOLEAN_TYPE_P (vectype)) 11045 return vectype; 11046 11047 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype)); 11048 11049 if (!size) 11050 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype)); 11051 11052 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size); 11053 } 11054 11055 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */ 11056 11057 tree 11058 build_opaque_vector_type (tree innertype, int nunits) 11059 { 11060 tree t = make_vector_type (innertype, nunits, VOIDmode); 11061 tree cand; 11062 /* We always build the non-opaque variant before the opaque one, 11063 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */ 11064 cand = TYPE_NEXT_VARIANT (t); 11065 if (cand 11066 && TYPE_VECTOR_OPAQUE (cand) 11067 && check_qualified_type (cand, t, TYPE_QUALS (t))) 11068 return cand; 11069 /* Othewise build a variant type and make sure to queue it after 11070 the non-opaque type. 
*/ 11071 cand = build_distinct_type_copy (t); 11072 TYPE_VECTOR_OPAQUE (cand) = true; 11073 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t); 11074 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t); 11075 TYPE_NEXT_VARIANT (t) = cand; 11076 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t); 11077 return cand; 11078 } 11079 11080 11081 /* Given an initializer INIT, return TRUE if INIT is zero or some 11082 aggregate of zeros. Otherwise return FALSE. */ 11083 bool 11084 initializer_zerop (const_tree init) 11085 { 11086 tree elt; 11087 11088 STRIP_NOPS (init); 11089 11090 switch (TREE_CODE (init)) 11091 { 11092 case INTEGER_CST: 11093 return integer_zerop (init); 11094 11095 case REAL_CST: 11096 /* ??? Note that this is not correct for C4X float formats. There, 11097 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most 11098 negative exponent. */ 11099 return real_zerop (init) 11100 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)); 11101 11102 case FIXED_CST: 11103 return fixed_zerop (init); 11104 11105 case COMPLEX_CST: 11106 return integer_zerop (init) 11107 || (real_zerop (init) 11108 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init))) 11109 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))); 11110 11111 case VECTOR_CST: 11112 { 11113 unsigned i; 11114 for (i = 0; i < VECTOR_CST_NELTS (init); ++i) 11115 if (!initializer_zerop (VECTOR_CST_ELT (init, i))) 11116 return false; 11117 return true; 11118 } 11119 11120 case CONSTRUCTOR: 11121 { 11122 unsigned HOST_WIDE_INT idx; 11123 11124 if (TREE_CLOBBER_P (init)) 11125 return false; 11126 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt) 11127 if (!initializer_zerop (elt)) 11128 return false; 11129 return true; 11130 } 11131 11132 case STRING_CST: 11133 { 11134 int i; 11135 11136 /* We need to loop through all elements to handle cases like 11137 "\0" and "\0foobar". */ 11138 for (i = 0; i < TREE_STRING_LENGTH (init); ++i) 11139 if (TREE_STRING_POINTER (init)[i] != '\0') 11140 return false; 11141 11142 return true; 11143 } 11144 11145 default: 11146 return false; 11147 } 11148 } 11149 11150 /* Check if vector VEC consists of all the equal elements and 11151 that the number of elements corresponds to the type of VEC. 11152 The function returns first element of the vector 11153 or NULL_TREE if the vector is not uniform. */ 11154 tree 11155 uniform_vector_p (const_tree vec) 11156 { 11157 tree first, t; 11158 unsigned i; 11159 11160 if (vec == NULL_TREE) 11161 return NULL_TREE; 11162 11163 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec))); 11164 11165 if (TREE_CODE (vec) == VECTOR_CST) 11166 { 11167 first = VECTOR_CST_ELT (vec, 0); 11168 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i) 11169 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0)) 11170 return NULL_TREE; 11171 11172 return first; 11173 } 11174 11175 else if (TREE_CODE (vec) == CONSTRUCTOR) 11176 { 11177 first = error_mark_node; 11178 11179 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t) 11180 { 11181 if (i == 0) 11182 { 11183 first = t; 11184 continue; 11185 } 11186 if (!operand_equal_p (first, t, 0)) 11187 return NULL_TREE; 11188 } 11189 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec))) 11190 return NULL_TREE; 11191 11192 return first; 11193 } 11194 11195 return NULL_TREE; 11196 } 11197 11198 /* Build an empty statement at location LOC. 
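   A minimal usage sketch (purely illustrative; STMTS stands for
   whatever statement list the caller is accumulating):

     tree nop = build_empty_stmt (input_location);
     append_to_statement_list (nop, &stmts);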
*/ 11199 11200 tree 11201 build_empty_stmt (location_t loc) 11202 { 11203 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node); 11204 SET_EXPR_LOCATION (t, loc); 11205 return t; 11206 } 11207 11208 11209 /* Build an OpenMP clause with code CODE. LOC is the location of the 11210 clause. */ 11211 11212 tree 11213 build_omp_clause (location_t loc, enum omp_clause_code code) 11214 { 11215 tree t; 11216 int size, length; 11217 11218 length = omp_clause_num_ops[code]; 11219 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree)); 11220 11221 record_node_allocation_statistics (OMP_CLAUSE, size); 11222 11223 t = (tree) ggc_internal_alloc (size); 11224 memset (t, 0, size); 11225 TREE_SET_CODE (t, OMP_CLAUSE); 11226 OMP_CLAUSE_SET_CODE (t, code); 11227 OMP_CLAUSE_LOCATION (t) = loc; 11228 11229 return t; 11230 } 11231 11232 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN 11233 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1. 11234 Except for the CODE and operand count field, other storage for the 11235 object is initialized to zeros. */ 11236 11237 tree 11238 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL) 11239 { 11240 tree t; 11241 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp); 11242 11243 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp); 11244 gcc_assert (len >= 1); 11245 11246 record_node_allocation_statistics (code, length); 11247 11248 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); 11249 11250 TREE_SET_CODE (t, code); 11251 11252 /* Can't use TREE_OPERAND to store the length because if checking is 11253 enabled, it will try to check the length before we store it. :-P */ 11254 t->exp.operands[0] = build_int_cst (sizetype, len); 11255 11256 return t; 11257 } 11258 11259 /* Helper function for build_call_* functions; build a CALL_EXPR with 11260 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of 11261 the argument slots. */ 11262 11263 static tree 11264 build_call_1 (tree return_type, tree fn, int nargs) 11265 { 11266 tree t; 11267 11268 t = build_vl_exp (CALL_EXPR, nargs + 3); 11269 TREE_TYPE (t) = return_type; 11270 CALL_EXPR_FN (t) = fn; 11271 CALL_EXPR_STATIC_CHAIN (t) = NULL; 11272 11273 return t; 11274 } 11275 11276 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and 11277 FN and a null static chain slot. NARGS is the number of call arguments 11278 which are specified as "..." arguments. */ 11279 11280 tree 11281 build_call_nary (tree return_type, tree fn, int nargs, ...) 11282 { 11283 tree ret; 11284 va_list args; 11285 va_start (args, nargs); 11286 ret = build_call_valist (return_type, fn, nargs, args); 11287 va_end (args); 11288 return ret; 11289 } 11290 11291 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and 11292 FN and a null static chain slot. NARGS is the number of call arguments 11293 which are specified as a va_list ARGS. */ 11294 11295 tree 11296 build_call_valist (tree return_type, tree fn, int nargs, va_list args) 11297 { 11298 tree t; 11299 int i; 11300 11301 t = build_call_1 (return_type, fn, nargs); 11302 for (i = 0; i < nargs; i++) 11303 CALL_EXPR_ARG (t, i) = va_arg (args, tree); 11304 process_call_operands (t); 11305 return t; 11306 } 11307 11308 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and 11309 FN and a null static chain slot. NARGS is the number of call arguments 11310 which are specified as a tree array ARGS. 
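   An illustrative sketch (ARG0 and ARG1 stand for any trees of the
   expected argument types, FN for the address of the callee):

     tree args[2] = { arg0, arg1 };
     tree call = build_call_array_loc (loc, integer_type_node, fn, 2, args);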
*/ 11311 11312 tree 11313 build_call_array_loc (location_t loc, tree return_type, tree fn, 11314 int nargs, const tree *args) 11315 { 11316 tree t; 11317 int i; 11318 11319 t = build_call_1 (return_type, fn, nargs); 11320 for (i = 0; i < nargs; i++) 11321 CALL_EXPR_ARG (t, i) = args[i]; 11322 process_call_operands (t); 11323 SET_EXPR_LOCATION (t, loc); 11324 return t; 11325 } 11326 11327 /* Like build_call_array, but takes a vec. */ 11328 11329 tree 11330 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args) 11331 { 11332 tree ret, t; 11333 unsigned int ix; 11334 11335 ret = build_call_1 (return_type, fn, vec_safe_length (args)); 11336 FOR_EACH_VEC_SAFE_ELT (args, ix, t) 11337 CALL_EXPR_ARG (ret, ix) = t; 11338 process_call_operands (ret); 11339 return ret; 11340 } 11341 11342 /* Conveniently construct a function call expression. FNDECL names the 11343 function to be called and N arguments are passed in the array 11344 ARGARRAY. */ 11345 11346 tree 11347 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray) 11348 { 11349 tree fntype = TREE_TYPE (fndecl); 11350 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl); 11351 11352 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray); 11353 } 11354 11355 /* Conveniently construct a function call expression. FNDECL names the 11356 function to be called and the arguments are passed in the vector 11357 VEC. */ 11358 11359 tree 11360 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec) 11361 { 11362 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec), 11363 vec_safe_address (vec)); 11364 } 11365 11366 11367 /* Conveniently construct a function call expression. FNDECL names the 11368 function to be called, N is the number of arguments, and the "..." 11369 parameters are the argument expressions. */ 11370 11371 tree 11372 build_call_expr_loc (location_t loc, tree fndecl, int n, ...) 11373 { 11374 va_list ap; 11375 tree *argarray = XALLOCAVEC (tree, n); 11376 int i; 11377 11378 va_start (ap, n); 11379 for (i = 0; i < n; i++) 11380 argarray[i] = va_arg (ap, tree); 11381 va_end (ap); 11382 return build_call_expr_loc_array (loc, fndecl, n, argarray); 11383 } 11384 11385 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because 11386 varargs macros aren't supported by all bootstrap compilers. */ 11387 11388 tree 11389 build_call_expr (tree fndecl, int n, ...) 11390 { 11391 va_list ap; 11392 tree *argarray = XALLOCAVEC (tree, n); 11393 int i; 11394 11395 va_start (ap, n); 11396 for (i = 0; i < n; i++) 11397 argarray[i] = va_arg (ap, tree); 11398 va_end (ap); 11399 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray); 11400 } 11401 11402 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return 11403 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL. 11404 It will get gimplified later into an ordinary internal function. */ 11405 11406 tree 11407 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn, 11408 tree type, int n, const tree *args) 11409 { 11410 tree t = build_call_1 (type, NULL_TREE, n); 11411 for (int i = 0; i < n; ++i) 11412 CALL_EXPR_ARG (t, i) = args[i]; 11413 SET_EXPR_LOCATION (t, loc); 11414 CALL_EXPR_IFN (t) = ifn; 11415 return t; 11416 } 11417 11418 /* Build internal call expression. This is just like CALL_EXPR, except 11419 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary 11420 internal function. 
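   For instance (an illustrative sketch; ARG is assumed to be a tree of
   type double):

     tree t = build_call_expr_internal_loc (loc, IFN_SQRT,
                                            double_type_node, 1, arg);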
*/ 11421 11422 tree 11423 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn, 11424 tree type, int n, ...) 11425 { 11426 va_list ap; 11427 tree *argarray = XALLOCAVEC (tree, n); 11428 int i; 11429 11430 va_start (ap, n); 11431 for (i = 0; i < n; i++) 11432 argarray[i] = va_arg (ap, tree); 11433 va_end (ap); 11434 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray); 11435 } 11436 11437 /* Return a function call to FN, if the target is guaranteed to support it, 11438 or null otherwise. 11439 11440 N is the number of arguments, passed in the "...", and TYPE is the 11441 type of the return value. */ 11442 11443 tree 11444 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type, 11445 int n, ...) 11446 { 11447 va_list ap; 11448 tree *argarray = XALLOCAVEC (tree, n); 11449 int i; 11450 11451 va_start (ap, n); 11452 for (i = 0; i < n; i++) 11453 argarray[i] = va_arg (ap, tree); 11454 va_end (ap); 11455 if (internal_fn_p (fn)) 11456 { 11457 internal_fn ifn = as_internal_fn (fn); 11458 if (direct_internal_fn_p (ifn)) 11459 { 11460 tree_pair types = direct_internal_fn_types (ifn, type, argarray); 11461 if (!direct_internal_fn_supported_p (ifn, types, 11462 OPTIMIZE_FOR_BOTH)) 11463 return NULL_TREE; 11464 } 11465 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray); 11466 } 11467 else 11468 { 11469 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn)); 11470 if (!fndecl) 11471 return NULL_TREE; 11472 return build_call_expr_loc_array (loc, fndecl, n, argarray); 11473 } 11474 } 11475 11476 /* Create a new constant string literal and return a char* pointer to it. 11477 The STRING_CST value is the LEN characters at STR. */ 11478 tree 11479 build_string_literal (int len, const char *str) 11480 { 11481 tree t, elem, index, type; 11482 11483 t = build_string (len, str); 11484 elem = build_type_variant (char_type_node, 1, 0); 11485 index = build_index_type (size_int (len - 1)); 11486 type = build_array_type (elem, index); 11487 TREE_TYPE (t) = type; 11488 TREE_CONSTANT (t) = 1; 11489 TREE_READONLY (t) = 1; 11490 TREE_STATIC (t) = 1; 11491 11492 type = build_pointer_type (elem); 11493 t = build1 (ADDR_EXPR, type, 11494 build4 (ARRAY_REF, elem, 11495 t, integer_zero_node, NULL_TREE, NULL_TREE)); 11496 return t; 11497 } 11498 11499 11500 11501 /* Return true if T (assumed to be a DECL) must be assigned a memory 11502 location. */ 11503 11504 bool 11505 needs_to_live_in_memory (const_tree t) 11506 { 11507 return (TREE_ADDRESSABLE (t) 11508 || is_global_var (t) 11509 || (TREE_CODE (t) == RESULT_DECL 11510 && !DECL_BY_REFERENCE (t) 11511 && aggregate_value_p (t, current_function_decl))); 11512 } 11513 11514 /* Return value of a constant X and sign-extend it. */ 11515 11516 HOST_WIDE_INT 11517 int_cst_value (const_tree x) 11518 { 11519 unsigned bits = TYPE_PRECISION (TREE_TYPE (x)); 11520 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x); 11521 11522 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. 
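     For instance, if the precision is 8 and the low word of X reads 0xf0,
     the code below sets every bit above bit 7, so the function returns
     -16 rather than 240.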
*/ 11523 gcc_assert (cst_and_fits_in_hwi (x)); 11524 11525 if (bits < HOST_BITS_PER_WIDE_INT) 11526 { 11527 bool negative = ((val >> (bits - 1)) & 1) != 0; 11528 if (negative) 11529 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1; 11530 else 11531 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1); 11532 } 11533 11534 return val; 11535 } 11536 11537 /* If TYPE is an integral or pointer type, return an integer type with 11538 the same precision which is unsigned iff UNSIGNEDP is true, or itself 11539 if TYPE is already an integer type of signedness UNSIGNEDP. */ 11540 11541 tree 11542 signed_or_unsigned_type_for (int unsignedp, tree type) 11543 { 11544 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp) 11545 return type; 11546 11547 if (TREE_CODE (type) == VECTOR_TYPE) 11548 { 11549 tree inner = TREE_TYPE (type); 11550 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner); 11551 if (!inner2) 11552 return NULL_TREE; 11553 if (inner == inner2) 11554 return type; 11555 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type)); 11556 } 11557 11558 if (!INTEGRAL_TYPE_P (type) 11559 && !POINTER_TYPE_P (type) 11560 && TREE_CODE (type) != OFFSET_TYPE) 11561 return NULL_TREE; 11562 11563 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp); 11564 } 11565 11566 /* If TYPE is an integral or pointer type, return an integer type with 11567 the same precision which is unsigned, or itself if TYPE is already an 11568 unsigned integer type. */ 11569 11570 tree 11571 unsigned_type_for (tree type) 11572 { 11573 return signed_or_unsigned_type_for (1, type); 11574 } 11575 11576 /* If TYPE is an integral or pointer type, return an integer type with 11577 the same precision which is signed, or itself if TYPE is already a 11578 signed integer type. */ 11579 11580 tree 11581 signed_type_for (tree type) 11582 { 11583 return signed_or_unsigned_type_for (0, type); 11584 } 11585 11586 /* If TYPE is a vector type, return a signed integer vector type with the 11587 same width and number of subparts. Otherwise return boolean_type_node. */ 11588 11589 tree 11590 truth_type_for (tree type) 11591 { 11592 if (TREE_CODE (type) == VECTOR_TYPE) 11593 { 11594 if (VECTOR_BOOLEAN_TYPE_P (type)) 11595 return type; 11596 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type), 11597 GET_MODE_SIZE (TYPE_MODE (type))); 11598 } 11599 else 11600 return boolean_type_node; 11601 } 11602 11603 /* Returns the largest value obtainable by casting something in INNER type to 11604 OUTER type. */ 11605 11606 tree 11607 upper_bound_in_type (tree outer, tree inner) 11608 { 11609 unsigned int det = 0; 11610 unsigned oprec = TYPE_PRECISION (outer); 11611 unsigned iprec = TYPE_PRECISION (inner); 11612 unsigned prec; 11613 11614 /* Compute a unique number for every combination. */ 11615 det |= (oprec > iprec) ? 4 : 0; 11616 det |= TYPE_UNSIGNED (outer) ? 2 : 0; 11617 det |= TYPE_UNSIGNED (inner) ? 1 : 0; 11618 11619 /* Determine the exponent to use. */ 11620 switch (det) 11621 { 11622 case 0: 11623 case 1: 11624 /* oprec <= iprec, outer: signed, inner: don't care. */ 11625 prec = oprec - 1; 11626 break; 11627 case 2: 11628 case 3: 11629 /* oprec <= iprec, outer: unsigned, inner: don't care. */ 11630 prec = oprec; 11631 break; 11632 case 4: 11633 /* oprec > iprec, outer: signed, inner: signed. */ 11634 prec = iprec - 1; 11635 break; 11636 case 5: 11637 /* oprec > iprec, outer: signed, inner: unsigned. 
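         For instance, when casting an unsigned 16-bit value to a 32-bit
         signed type, every source value up to 2^16 - 1 is representable,
         so the full inner precision is used.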
*/ 11638 prec = iprec; 11639 break; 11640 case 6: 11641 /* oprec > iprec, outer: unsigned, inner: signed. */ 11642 prec = oprec; 11643 break; 11644 case 7: 11645 /* oprec > iprec, outer: unsigned, inner: unsigned. */ 11646 prec = iprec; 11647 break; 11648 default: 11649 gcc_unreachable (); 11650 } 11651 11652 return wide_int_to_tree (outer, 11653 wi::mask (prec, false, TYPE_PRECISION (outer))); 11654 } 11655 11656 /* Returns the smallest value obtainable by casting something in INNER type to 11657 OUTER type. */ 11658 11659 tree 11660 lower_bound_in_type (tree outer, tree inner) 11661 { 11662 unsigned oprec = TYPE_PRECISION (outer); 11663 unsigned iprec = TYPE_PRECISION (inner); 11664 11665 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type 11666 and obtain 0. */ 11667 if (TYPE_UNSIGNED (outer) 11668 /* If we are widening something of an unsigned type, OUTER type 11669 contains all values of INNER type. In particular, both INNER 11670 and OUTER types have zero in common. */ 11671 || (oprec > iprec && TYPE_UNSIGNED (inner))) 11672 return build_int_cst (outer, 0); 11673 else 11674 { 11675 /* If we are widening a signed type to another signed type, we 11676 want to obtain -2^^(iprec-1). If we are keeping the 11677 precision or narrowing to a signed type, we want to obtain 11678 -2^(oprec-1). */ 11679 unsigned prec = oprec > iprec ? iprec : oprec; 11680 return wide_int_to_tree (outer, 11681 wi::mask (prec - 1, true, 11682 TYPE_PRECISION (outer))); 11683 } 11684 } 11685 11686 /* Return nonzero if two operands that are suitable for PHI nodes are 11687 necessarily equal. Specifically, both ARG0 and ARG1 must be either 11688 SSA_NAME or invariant. Note that this is strictly an optimization. 11689 That is, callers of this function can directly call operand_equal_p 11690 and get the same result, only slower. */ 11691 11692 int 11693 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1) 11694 { 11695 if (arg0 == arg1) 11696 return 1; 11697 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME) 11698 return 0; 11699 return operand_equal_p (arg0, arg1, 0); 11700 } 11701 11702 /* Returns number of zeros at the end of binary representation of X. */ 11703 11704 tree 11705 num_ending_zeros (const_tree x) 11706 { 11707 return build_int_cst (TREE_TYPE (x), wi::ctz (x)); 11708 } 11709 11710 11711 #define WALK_SUBTREE(NODE) \ 11712 do \ 11713 { \ 11714 result = walk_tree_1 (&(NODE), func, data, pset, lh); \ 11715 if (result) \ 11716 return result; \ 11717 } \ 11718 while (0) 11719 11720 /* This is a subroutine of walk_tree that walks field of TYPE that are to 11721 be walked whenever a type is seen in the tree. Rest of operands and return 11722 value are as for walk_tree. */ 11723 11724 static tree 11725 walk_type_fields (tree type, walk_tree_fn func, void *data, 11726 hash_set<tree> *pset, walk_tree_lh lh) 11727 { 11728 tree result = NULL_TREE; 11729 11730 switch (TREE_CODE (type)) 11731 { 11732 case POINTER_TYPE: 11733 case REFERENCE_TYPE: 11734 case VECTOR_TYPE: 11735 /* We have to worry about mutually recursive pointers. These can't 11736 be written in C. They can in Ada. It's pathological, but 11737 there's an ACATS test (c38102a) that checks it. Deal with this 11738 by checking if we're pointing to another pointer, that one 11739 points to another pointer, that one does too, and we have no htab. 11740 If so, get a hash table. We check three levels deep to avoid 11741 the cost of the hash table if we don't need one. 
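         So a type such as int *** is still walked directly, whereas
         int **** (or a genuinely cyclic chain of pointer types) takes the
         walk_tree_without_duplicates path below.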
*/ 11742 if (POINTER_TYPE_P (TREE_TYPE (type)) 11743 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type))) 11744 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type)))) 11745 && !pset) 11746 { 11747 result = walk_tree_without_duplicates (&TREE_TYPE (type), 11748 func, data); 11749 if (result) 11750 return result; 11751 11752 break; 11753 } 11754 11755 /* fall through */ 11756 11757 case COMPLEX_TYPE: 11758 WALK_SUBTREE (TREE_TYPE (type)); 11759 break; 11760 11761 case METHOD_TYPE: 11762 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type)); 11763 11764 /* Fall through. */ 11765 11766 case FUNCTION_TYPE: 11767 WALK_SUBTREE (TREE_TYPE (type)); 11768 { 11769 tree arg; 11770 11771 /* We never want to walk into default arguments. */ 11772 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg)) 11773 WALK_SUBTREE (TREE_VALUE (arg)); 11774 } 11775 break; 11776 11777 case ARRAY_TYPE: 11778 /* Don't follow this nodes's type if a pointer for fear that 11779 we'll have infinite recursion. If we have a PSET, then we 11780 need not fear. */ 11781 if (pset 11782 || (!POINTER_TYPE_P (TREE_TYPE (type)) 11783 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)) 11784 WALK_SUBTREE (TREE_TYPE (type)); 11785 WALK_SUBTREE (TYPE_DOMAIN (type)); 11786 break; 11787 11788 case OFFSET_TYPE: 11789 WALK_SUBTREE (TREE_TYPE (type)); 11790 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type)); 11791 break; 11792 11793 default: 11794 break; 11795 } 11796 11797 return NULL_TREE; 11798 } 11799 11800 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is 11801 called with the DATA and the address of each sub-tree. If FUNC returns a 11802 non-NULL value, the traversal is stopped, and the value returned by FUNC 11803 is returned. If PSET is non-NULL it is used to record the nodes visited, 11804 and to avoid visiting a node more than once. */ 11805 11806 tree 11807 walk_tree_1 (tree *tp, walk_tree_fn func, void *data, 11808 hash_set<tree> *pset, walk_tree_lh lh) 11809 { 11810 enum tree_code code; 11811 int walk_subtrees; 11812 tree result; 11813 11814 #define WALK_SUBTREE_TAIL(NODE) \ 11815 do \ 11816 { \ 11817 tp = & (NODE); \ 11818 goto tail_recurse; \ 11819 } \ 11820 while (0) 11821 11822 tail_recurse: 11823 /* Skip empty subtrees. */ 11824 if (!*tp) 11825 return NULL_TREE; 11826 11827 /* Don't walk the same tree twice, if the user has requested 11828 that we avoid doing so. */ 11829 if (pset && pset->add (*tp)) 11830 return NULL_TREE; 11831 11832 /* Call the function. */ 11833 walk_subtrees = 1; 11834 result = (*func) (tp, &walk_subtrees, data); 11835 11836 /* If we found something, return it. */ 11837 if (result) 11838 return result; 11839 11840 code = TREE_CODE (*tp); 11841 11842 /* Even if we didn't, FUNC may have decided that there was nothing 11843 interesting below this point in the tree. */ 11844 if (!walk_subtrees) 11845 { 11846 /* But we still need to check our siblings. 
*/ 11847 if (code == TREE_LIST) 11848 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp)); 11849 else if (code == OMP_CLAUSE) 11850 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 11851 else 11852 return NULL_TREE; 11853 } 11854 11855 if (lh) 11856 { 11857 result = (*lh) (tp, &walk_subtrees, func, data, pset); 11858 if (result || !walk_subtrees) 11859 return result; 11860 } 11861 11862 switch (code) 11863 { 11864 case ERROR_MARK: 11865 case IDENTIFIER_NODE: 11866 case INTEGER_CST: 11867 case REAL_CST: 11868 case FIXED_CST: 11869 case VECTOR_CST: 11870 case STRING_CST: 11871 case BLOCK: 11872 case PLACEHOLDER_EXPR: 11873 case SSA_NAME: 11874 case FIELD_DECL: 11875 case RESULT_DECL: 11876 /* None of these have subtrees other than those already walked 11877 above. */ 11878 break; 11879 11880 case TREE_LIST: 11881 WALK_SUBTREE (TREE_VALUE (*tp)); 11882 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp)); 11883 break; 11884 11885 case TREE_VEC: 11886 { 11887 int len = TREE_VEC_LENGTH (*tp); 11888 11889 if (len == 0) 11890 break; 11891 11892 /* Walk all elements but the first. */ 11893 while (--len) 11894 WALK_SUBTREE (TREE_VEC_ELT (*tp, len)); 11895 11896 /* Now walk the first one as a tail call. */ 11897 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0)); 11898 } 11899 11900 case COMPLEX_CST: 11901 WALK_SUBTREE (TREE_REALPART (*tp)); 11902 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp)); 11903 11904 case CONSTRUCTOR: 11905 { 11906 unsigned HOST_WIDE_INT idx; 11907 constructor_elt *ce; 11908 11909 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce); 11910 idx++) 11911 WALK_SUBTREE (ce->value); 11912 } 11913 break; 11914 11915 case SAVE_EXPR: 11916 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0)); 11917 11918 case BIND_EXPR: 11919 { 11920 tree decl; 11921 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl)) 11922 { 11923 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk 11924 into declarations that are just mentioned, rather than 11925 declared; they don't really belong to this part of the tree. 11926 And, we can see cycles: the initializer for a declaration 11927 can refer to the declaration itself. 
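             A C declaration such as

               void *p = &p;

             is one such cycle: the DECL_INITIAL of P refers back to the
             very decl being walked.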
*/ 11928 WALK_SUBTREE (DECL_INITIAL (decl)); 11929 WALK_SUBTREE (DECL_SIZE (decl)); 11930 WALK_SUBTREE (DECL_SIZE_UNIT (decl)); 11931 } 11932 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp)); 11933 } 11934 11935 case STATEMENT_LIST: 11936 { 11937 tree_stmt_iterator i; 11938 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i)) 11939 WALK_SUBTREE (*tsi_stmt_ptr (i)); 11940 } 11941 break; 11942 11943 case OMP_CLAUSE: 11944 switch (OMP_CLAUSE_CODE (*tp)) 11945 { 11946 case OMP_CLAUSE_GANG: 11947 case OMP_CLAUSE__GRIDDIM_: 11948 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1)); 11949 /* FALLTHRU */ 11950 11951 case OMP_CLAUSE_ASYNC: 11952 case OMP_CLAUSE_WAIT: 11953 case OMP_CLAUSE_WORKER: 11954 case OMP_CLAUSE_VECTOR: 11955 case OMP_CLAUSE_NUM_GANGS: 11956 case OMP_CLAUSE_NUM_WORKERS: 11957 case OMP_CLAUSE_VECTOR_LENGTH: 11958 case OMP_CLAUSE_PRIVATE: 11959 case OMP_CLAUSE_SHARED: 11960 case OMP_CLAUSE_FIRSTPRIVATE: 11961 case OMP_CLAUSE_COPYIN: 11962 case OMP_CLAUSE_COPYPRIVATE: 11963 case OMP_CLAUSE_FINAL: 11964 case OMP_CLAUSE_IF: 11965 case OMP_CLAUSE_NUM_THREADS: 11966 case OMP_CLAUSE_SCHEDULE: 11967 case OMP_CLAUSE_UNIFORM: 11968 case OMP_CLAUSE_DEPEND: 11969 case OMP_CLAUSE_NUM_TEAMS: 11970 case OMP_CLAUSE_THREAD_LIMIT: 11971 case OMP_CLAUSE_DEVICE: 11972 case OMP_CLAUSE_DIST_SCHEDULE: 11973 case OMP_CLAUSE_SAFELEN: 11974 case OMP_CLAUSE_SIMDLEN: 11975 case OMP_CLAUSE_ORDERED: 11976 case OMP_CLAUSE_PRIORITY: 11977 case OMP_CLAUSE_GRAINSIZE: 11978 case OMP_CLAUSE_NUM_TASKS: 11979 case OMP_CLAUSE_HINT: 11980 case OMP_CLAUSE_TO_DECLARE: 11981 case OMP_CLAUSE_LINK: 11982 case OMP_CLAUSE_USE_DEVICE_PTR: 11983 case OMP_CLAUSE_IS_DEVICE_PTR: 11984 case OMP_CLAUSE__LOOPTEMP_: 11985 case OMP_CLAUSE__SIMDUID_: 11986 case OMP_CLAUSE__CILK_FOR_COUNT_: 11987 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0)); 11988 /* FALLTHRU */ 11989 11990 case OMP_CLAUSE_INDEPENDENT: 11991 case OMP_CLAUSE_NOWAIT: 11992 case OMP_CLAUSE_DEFAULT: 11993 case OMP_CLAUSE_UNTIED: 11994 case OMP_CLAUSE_MERGEABLE: 11995 case OMP_CLAUSE_PROC_BIND: 11996 case OMP_CLAUSE_INBRANCH: 11997 case OMP_CLAUSE_NOTINBRANCH: 11998 case OMP_CLAUSE_FOR: 11999 case OMP_CLAUSE_PARALLEL: 12000 case OMP_CLAUSE_SECTIONS: 12001 case OMP_CLAUSE_TASKGROUP: 12002 case OMP_CLAUSE_NOGROUP: 12003 case OMP_CLAUSE_THREADS: 12004 case OMP_CLAUSE_SIMD: 12005 case OMP_CLAUSE_DEFAULTMAP: 12006 case OMP_CLAUSE_AUTO: 12007 case OMP_CLAUSE_SEQ: 12008 case OMP_CLAUSE_TILE: 12009 case OMP_CLAUSE__SIMT_: 12010 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12011 12012 case OMP_CLAUSE_LASTPRIVATE: 12013 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp)); 12014 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp)); 12015 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12016 12017 case OMP_CLAUSE_COLLAPSE: 12018 { 12019 int i; 12020 for (i = 0; i < 3; i++) 12021 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i)); 12022 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12023 } 12024 12025 case OMP_CLAUSE_LINEAR: 12026 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp)); 12027 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp)); 12028 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp)); 12029 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12030 12031 case OMP_CLAUSE_ALIGNED: 12032 case OMP_CLAUSE_FROM: 12033 case OMP_CLAUSE_TO: 12034 case OMP_CLAUSE_MAP: 12035 case OMP_CLAUSE__CACHE_: 12036 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp)); 12037 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1)); 12038 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12039 12040 case OMP_CLAUSE_REDUCTION: 12041 { 12042 int i; 12043 for (i = 0; i < 5; i++) 12044 WALK_SUBTREE (OMP_CLAUSE_OPERAND 
(*tp, i)); 12045 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); 12046 } 12047 12048 default: 12049 gcc_unreachable (); 12050 } 12051 break; 12052 12053 case TARGET_EXPR: 12054 { 12055 int i, len; 12056 12057 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same. 12058 But, we only want to walk once. */ 12059 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3; 12060 for (i = 0; i < len; ++i) 12061 WALK_SUBTREE (TREE_OPERAND (*tp, i)); 12062 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len)); 12063 } 12064 12065 case DECL_EXPR: 12066 /* If this is a TYPE_DECL, walk into the fields of the type that it's 12067 defining. We only want to walk into these fields of a type in this 12068 case and not in the general case of a mere reference to the type. 12069 12070 The criterion is as follows: if the field can be an expression, it 12071 must be walked only here. This should be in keeping with the fields 12072 that are directly gimplified in gimplify_type_sizes in order for the 12073 mark/copy-if-shared/unmark machinery of the gimplifier to work with 12074 variable-sized types. 12075 12076 Note that DECLs get walked as part of processing the BIND_EXPR. */ 12077 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL) 12078 { 12079 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp)); 12080 if (TREE_CODE (*type_p) == ERROR_MARK) 12081 return NULL_TREE; 12082 12083 /* Call the function for the type. See if it returns anything or 12084 doesn't want us to continue. If we are to continue, walk both 12085 the normal fields and those for the declaration case. */ 12086 result = (*func) (type_p, &walk_subtrees, data); 12087 if (result || !walk_subtrees) 12088 return result; 12089 12090 /* But do not walk a pointed-to type since it may itself need to 12091 be walked in the declaration case if it isn't anonymous. */ 12092 if (!POINTER_TYPE_P (*type_p)) 12093 { 12094 result = walk_type_fields (*type_p, func, data, pset, lh); 12095 if (result) 12096 return result; 12097 } 12098 12099 /* If this is a record type, also walk the fields. */ 12100 if (RECORD_OR_UNION_TYPE_P (*type_p)) 12101 { 12102 tree field; 12103 12104 for (field = TYPE_FIELDS (*type_p); field; 12105 field = DECL_CHAIN (field)) 12106 { 12107 /* We'd like to look at the type of the field, but we can 12108 easily get infinite recursion. So assume it's pointed 12109 to elsewhere in the tree. Also, ignore things that 12110 aren't fields. */ 12111 if (TREE_CODE (field) != FIELD_DECL) 12112 continue; 12113 12114 WALK_SUBTREE (DECL_FIELD_OFFSET (field)); 12115 WALK_SUBTREE (DECL_SIZE (field)); 12116 WALK_SUBTREE (DECL_SIZE_UNIT (field)); 12117 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE) 12118 WALK_SUBTREE (DECL_QUALIFIER (field)); 12119 } 12120 } 12121 12122 /* Same for scalar types. */ 12123 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE 12124 || TREE_CODE (*type_p) == ENUMERAL_TYPE 12125 || TREE_CODE (*type_p) == INTEGER_TYPE 12126 || TREE_CODE (*type_p) == FIXED_POINT_TYPE 12127 || TREE_CODE (*type_p) == REAL_TYPE) 12128 { 12129 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p)); 12130 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p)); 12131 } 12132 12133 WALK_SUBTREE (TYPE_SIZE (*type_p)); 12134 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p)); 12135 } 12136 /* FALLTHRU */ 12137 12138 default: 12139 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) 12140 { 12141 int i, len; 12142 12143 /* Walk over all the sub-trees of this operand. */ 12144 len = TREE_OPERAND_LENGTH (*tp); 12145 12146 /* Go through the subtrees. 
We need to do this in forward order so 12147 that the scope of a FOR_EXPR is handled properly. */ 12148 if (len) 12149 { 12150 for (i = 0; i < len - 1; ++i) 12151 WALK_SUBTREE (TREE_OPERAND (*tp, i)); 12152 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1)); 12153 } 12154 } 12155 /* If this is a type, walk the needed fields in the type. */ 12156 else if (TYPE_P (*tp)) 12157 return walk_type_fields (*tp, func, data, pset, lh); 12158 break; 12159 } 12160 12161 /* We didn't find what we were looking for. */ 12162 return NULL_TREE; 12163 12164 #undef WALK_SUBTREE_TAIL 12165 } 12166 #undef WALK_SUBTREE 12167 12168 /* Like walk_tree, but does not walk duplicate nodes more than once. */ 12169 12170 tree 12171 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data, 12172 walk_tree_lh lh) 12173 { 12174 tree result; 12175 12176 hash_set<tree> pset; 12177 result = walk_tree_1 (tp, func, data, &pset, lh); 12178 return result; 12179 } 12180 12181 12182 tree 12183 tree_block (tree t) 12184 { 12185 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t)); 12186 12187 if (IS_EXPR_CODE_CLASS (c)) 12188 return LOCATION_BLOCK (t->exp.locus); 12189 gcc_unreachable (); 12190 return NULL; 12191 } 12192 12193 void 12194 tree_set_block (tree t, tree b) 12195 { 12196 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t)); 12197 12198 if (IS_EXPR_CODE_CLASS (c)) 12199 { 12200 t->exp.locus = set_block (t->exp.locus, b); 12201 } 12202 else 12203 gcc_unreachable (); 12204 } 12205 12206 /* Create a nameless artificial label and put it in the current 12207 function context. The label has a location of LOC. Returns the 12208 newly created label. */ 12209 12210 tree 12211 create_artificial_label (location_t loc) 12212 { 12213 tree lab = build_decl (loc, 12214 LABEL_DECL, NULL_TREE, void_type_node); 12215 12216 DECL_ARTIFICIAL (lab) = 1; 12217 DECL_IGNORED_P (lab) = 1; 12218 DECL_CONTEXT (lab) = current_function_decl; 12219 return lab; 12220 } 12221 12222 /* Given a tree, try to return a useful variable name that we can use 12223 to prefix a temporary that is being assigned the value of the tree. 12224 I.E. given <temp> = &A, return A. */ 12225 12226 const char * 12227 get_name (tree t) 12228 { 12229 tree stripped_decl; 12230 12231 stripped_decl = t; 12232 STRIP_NOPS (stripped_decl); 12233 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl)) 12234 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl)); 12235 else if (TREE_CODE (stripped_decl) == SSA_NAME) 12236 { 12237 tree name = SSA_NAME_IDENTIFIER (stripped_decl); 12238 if (!name) 12239 return NULL; 12240 return IDENTIFIER_POINTER (name); 12241 } 12242 else 12243 { 12244 switch (TREE_CODE (stripped_decl)) 12245 { 12246 case ADDR_EXPR: 12247 return get_name (TREE_OPERAND (stripped_decl, 0)); 12248 default: 12249 return NULL; 12250 } 12251 } 12252 } 12253 12254 /* Return true if TYPE has a variable argument list. */ 12255 12256 bool 12257 stdarg_p (const_tree fntype) 12258 { 12259 function_args_iterator args_iter; 12260 tree n = NULL_TREE, t; 12261 12262 if (!fntype) 12263 return false; 12264 12265 FOREACH_FUNCTION_ARGS (fntype, t, args_iter) 12266 { 12267 n = t; 12268 } 12269 12270 return n != NULL_TREE && n != void_type_node; 12271 } 12272 12273 /* Return true if TYPE has a prototype. 
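   In C terms, the type of "int f (void)" counts as prototyped while the
   type of an old-style declaration "int f ()" does not, since only the
   former carries a TYPE_ARG_TYPES list.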
*/ 12274 12275 bool 12276 prototype_p (const_tree fntype) 12277 { 12278 tree t; 12279 12280 gcc_assert (fntype != NULL_TREE); 12281 12282 t = TYPE_ARG_TYPES (fntype); 12283 return (t != NULL_TREE); 12284 } 12285 12286 /* If BLOCK is inlined from an __attribute__((__artificial__)) 12287 routine, return pointer to location from where it has been 12288 called. */ 12289 location_t * 12290 block_nonartificial_location (tree block) 12291 { 12292 location_t *ret = NULL; 12293 12294 while (block && TREE_CODE (block) == BLOCK 12295 && BLOCK_ABSTRACT_ORIGIN (block)) 12296 { 12297 tree ao = BLOCK_ABSTRACT_ORIGIN (block); 12298 12299 while (TREE_CODE (ao) == BLOCK 12300 && BLOCK_ABSTRACT_ORIGIN (ao) 12301 && BLOCK_ABSTRACT_ORIGIN (ao) != ao) 12302 ao = BLOCK_ABSTRACT_ORIGIN (ao); 12303 12304 if (TREE_CODE (ao) == FUNCTION_DECL) 12305 { 12306 /* If AO is an artificial inline, point RET to the 12307 call site locus at which it has been inlined and continue 12308 the loop, in case AO's caller is also an artificial 12309 inline. */ 12310 if (DECL_DECLARED_INLINE_P (ao) 12311 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao))) 12312 ret = &BLOCK_SOURCE_LOCATION (block); 12313 else 12314 break; 12315 } 12316 else if (TREE_CODE (ao) != BLOCK) 12317 break; 12318 12319 block = BLOCK_SUPERCONTEXT (block); 12320 } 12321 return ret; 12322 } 12323 12324 12325 /* If EXP is inlined from an __attribute__((__artificial__)) 12326 function, return the location of the original call expression. */ 12327 12328 location_t 12329 tree_nonartificial_location (tree exp) 12330 { 12331 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp)); 12332 12333 if (loc) 12334 return *loc; 12335 else 12336 return EXPR_LOCATION (exp); 12337 } 12338 12339 12340 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq 12341 nodes. */ 12342 12343 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */ 12344 12345 hashval_t 12346 cl_option_hasher::hash (tree x) 12347 { 12348 const_tree const t = x; 12349 const char *p; 12350 size_t i; 12351 size_t len = 0; 12352 hashval_t hash = 0; 12353 12354 if (TREE_CODE (t) == OPTIMIZATION_NODE) 12355 { 12356 p = (const char *)TREE_OPTIMIZATION (t); 12357 len = sizeof (struct cl_optimization); 12358 } 12359 12360 else if (TREE_CODE (t) == TARGET_OPTION_NODE) 12361 return cl_target_option_hash (TREE_TARGET_OPTION (t)); 12362 12363 else 12364 gcc_unreachable (); 12365 12366 /* assume most opt flags are just 0/1, some are 2-3, and a few might be 12367 something else. */ 12368 for (i = 0; i < len; i++) 12369 if (p[i]) 12370 hash = (hash << 4) ^ ((i << 2) | p[i]); 12371 12372 return hash; 12373 } 12374 12375 /* Return nonzero if the value represented by *X (an OPTIMIZATION or 12376 TARGET_OPTION tree node) is the same as that given by *Y, which is the 12377 same. 
*/ 12378 12379 bool 12380 cl_option_hasher::equal (tree x, tree y) 12381 { 12382 const_tree const xt = x; 12383 const_tree const yt = y; 12384 const char *xp; 12385 const char *yp; 12386 size_t len; 12387 12388 if (TREE_CODE (xt) != TREE_CODE (yt)) 12389 return 0; 12390 12391 if (TREE_CODE (xt) == OPTIMIZATION_NODE) 12392 { 12393 xp = (const char *)TREE_OPTIMIZATION (xt); 12394 yp = (const char *)TREE_OPTIMIZATION (yt); 12395 len = sizeof (struct cl_optimization); 12396 } 12397 12398 else if (TREE_CODE (xt) == TARGET_OPTION_NODE) 12399 { 12400 return cl_target_option_eq (TREE_TARGET_OPTION (xt), 12401 TREE_TARGET_OPTION (yt)); 12402 } 12403 12404 else 12405 gcc_unreachable (); 12406 12407 return (memcmp (xp, yp, len) == 0); 12408 } 12409 12410 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */ 12411 12412 tree 12413 build_optimization_node (struct gcc_options *opts) 12414 { 12415 tree t; 12416 12417 /* Use the cache of optimization nodes. */ 12418 12419 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node), 12420 opts); 12421 12422 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT); 12423 t = *slot; 12424 if (!t) 12425 { 12426 /* Insert this one into the hash table. */ 12427 t = cl_optimization_node; 12428 *slot = t; 12429 12430 /* Make a new node for next time round. */ 12431 cl_optimization_node = make_node (OPTIMIZATION_NODE); 12432 } 12433 12434 return t; 12435 } 12436 12437 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */ 12438 12439 tree 12440 build_target_option_node (struct gcc_options *opts) 12441 { 12442 tree t; 12443 12444 /* Use the cache of optimization nodes. */ 12445 12446 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node), 12447 opts); 12448 12449 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT); 12450 t = *slot; 12451 if (!t) 12452 { 12453 /* Insert this one into the hash table. */ 12454 t = cl_target_option_node; 12455 *slot = t; 12456 12457 /* Make a new node for next time round. */ 12458 cl_target_option_node = make_node (TARGET_OPTION_NODE); 12459 } 12460 12461 return t; 12462 } 12463 12464 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees, 12465 so that they aren't saved during PCH writing. */ 12466 12467 void 12468 prepare_target_option_nodes_for_pch (void) 12469 { 12470 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin (); 12471 for (; iter != cl_option_hash_table->end (); ++iter) 12472 if (TREE_CODE (*iter) == TARGET_OPTION_NODE) 12473 TREE_TARGET_GLOBALS (*iter) = NULL; 12474 } 12475 12476 /* Determine the "ultimate origin" of a block. The block may be an inlined 12477 instance of an inlined instance of a block which is local to an inline 12478 function, so we have to trace all of the way back through the origin chain 12479 to find out what sort of node actually served as the original seed for the 12480 given block. */ 12481 12482 tree 12483 block_ultimate_origin (const_tree block) 12484 { 12485 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block); 12486 12487 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if 12488 we're trying to output the abstract instance of this function. */ 12489 if (BLOCK_ABSTRACT (block) && immediate_origin == block) 12490 return NULL_TREE; 12491 12492 if (immediate_origin == NULL_TREE) 12493 return NULL_TREE; 12494 else 12495 { 12496 tree ret_val; 12497 tree lookahead = immediate_origin; 12498 12499 do 12500 { 12501 ret_val = lookahead; 12502 lookahead = (TREE_CODE (ret_val) == BLOCK 12503 ? 
BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL); 12504 } 12505 while (lookahead != NULL && lookahead != ret_val); 12506 12507 /* The block's abstract origin chain may not be the *ultimate* origin of 12508 the block. It could lead to a DECL that has an abstract origin set. 12509 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN 12510 will give us if it has one). Note that DECL's abstract origins are 12511 supposed to be the most distant ancestor (or so decl_ultimate_origin 12512 claims), so we don't need to loop following the DECL origins. */ 12513 if (DECL_P (ret_val)) 12514 return DECL_ORIGIN (ret_val); 12515 12516 return ret_val; 12517 } 12518 } 12519 12520 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates 12521 no instruction. */ 12522 12523 bool 12524 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type) 12525 { 12526 /* Do not strip casts into or out of differing address spaces. */ 12527 if (POINTER_TYPE_P (outer_type) 12528 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC) 12529 { 12530 if (!POINTER_TYPE_P (inner_type) 12531 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) 12532 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))) 12533 return false; 12534 } 12535 else if (POINTER_TYPE_P (inner_type) 12536 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC) 12537 { 12538 /* We already know that outer_type is not a pointer with 12539 a non-generic address space. */ 12540 return false; 12541 } 12542 12543 /* Use precision rather then machine mode when we can, which gives 12544 the correct answer even for submode (bit-field) types. */ 12545 if ((INTEGRAL_TYPE_P (outer_type) 12546 || POINTER_TYPE_P (outer_type) 12547 || TREE_CODE (outer_type) == OFFSET_TYPE) 12548 && (INTEGRAL_TYPE_P (inner_type) 12549 || POINTER_TYPE_P (inner_type) 12550 || TREE_CODE (inner_type) == OFFSET_TYPE)) 12551 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type); 12552 12553 /* Otherwise fall back on comparing machine modes (e.g. for 12554 aggregate types, floats). */ 12555 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type); 12556 } 12557 12558 /* Return true iff conversion in EXP generates no instruction. Mark 12559 it inline so that we fully inline into the stripping functions even 12560 though we have two uses of this function. */ 12561 12562 static inline bool 12563 tree_nop_conversion (const_tree exp) 12564 { 12565 tree outer_type, inner_type; 12566 12567 if (!CONVERT_EXPR_P (exp) 12568 && TREE_CODE (exp) != NON_LVALUE_EXPR) 12569 return false; 12570 if (TREE_OPERAND (exp, 0) == error_mark_node) 12571 return false; 12572 12573 outer_type = TREE_TYPE (exp); 12574 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 12575 12576 if (!inner_type) 12577 return false; 12578 12579 return tree_nop_conversion_p (outer_type, inner_type); 12580 } 12581 12582 /* Return true iff conversion in EXP generates no instruction. Don't 12583 consider conversions changing the signedness. */ 12584 12585 static bool 12586 tree_sign_nop_conversion (const_tree exp) 12587 { 12588 tree outer_type, inner_type; 12589 12590 if (!tree_nop_conversion (exp)) 12591 return false; 12592 12593 outer_type = TREE_TYPE (exp); 12594 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); 12595 12596 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type) 12597 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type)); 12598 } 12599 12600 /* Strip conversions from EXP according to tree_nop_conversion and 12601 return the resulting expression. 
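   For instance, a NOP_EXPR that merely converts an int value to unsigned
   int has the same precision on both sides, so it is stripped here; the
   sign-preserving variant below would keep that particular conversion.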
*/ 12602 12603 tree 12604 tree_strip_nop_conversions (tree exp) 12605 { 12606 while (tree_nop_conversion (exp)) 12607 exp = TREE_OPERAND (exp, 0); 12608 return exp; 12609 } 12610 12611 /* Strip conversions from EXP according to tree_sign_nop_conversion 12612 and return the resulting expression. */ 12613 12614 tree 12615 tree_strip_sign_nop_conversions (tree exp) 12616 { 12617 while (tree_sign_nop_conversion (exp)) 12618 exp = TREE_OPERAND (exp, 0); 12619 return exp; 12620 } 12621 12622 /* Avoid any floating point extensions from EXP. */ 12623 tree 12624 strip_float_extensions (tree exp) 12625 { 12626 tree sub, expt, subt; 12627 12628 /* For floating point constant look up the narrowest type that can hold 12629 it properly and handle it like (type)(narrowest_type)constant. 12630 This way we can optimize for instance a=a*2.0 where "a" is float 12631 but 2.0 is double constant. */ 12632 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp))) 12633 { 12634 REAL_VALUE_TYPE orig; 12635 tree type = NULL; 12636 12637 orig = TREE_REAL_CST (exp); 12638 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node) 12639 && exact_real_truncate (TYPE_MODE (float_type_node), &orig)) 12640 type = float_type_node; 12641 else if (TYPE_PRECISION (TREE_TYPE (exp)) 12642 > TYPE_PRECISION (double_type_node) 12643 && exact_real_truncate (TYPE_MODE (double_type_node), &orig)) 12644 type = double_type_node; 12645 if (type) 12646 return build_real_truncate (type, orig); 12647 } 12648 12649 if (!CONVERT_EXPR_P (exp)) 12650 return exp; 12651 12652 sub = TREE_OPERAND (exp, 0); 12653 subt = TREE_TYPE (sub); 12654 expt = TREE_TYPE (exp); 12655 12656 if (!FLOAT_TYPE_P (subt)) 12657 return exp; 12658 12659 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt)) 12660 return exp; 12661 12662 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt)) 12663 return exp; 12664 12665 return strip_float_extensions (sub); 12666 } 12667 12668 /* Strip out all handled components that produce invariant 12669 offsets. */ 12670 12671 const_tree 12672 strip_invariant_refs (const_tree op) 12673 { 12674 while (handled_component_p (op)) 12675 { 12676 switch (TREE_CODE (op)) 12677 { 12678 case ARRAY_REF: 12679 case ARRAY_RANGE_REF: 12680 if (!is_gimple_constant (TREE_OPERAND (op, 1)) 12681 || TREE_OPERAND (op, 2) != NULL_TREE 12682 || TREE_OPERAND (op, 3) != NULL_TREE) 12683 return NULL; 12684 break; 12685 12686 case COMPONENT_REF: 12687 if (TREE_OPERAND (op, 2) != NULL_TREE) 12688 return NULL; 12689 break; 12690 12691 default:; 12692 } 12693 op = TREE_OPERAND (op, 0); 12694 } 12695 12696 return op; 12697 } 12698 12699 static GTY(()) tree gcc_eh_personality_decl; 12700 12701 /* Return the GCC personality function decl. */ 12702 12703 tree 12704 lhd_gcc_personality (void) 12705 { 12706 if (!gcc_eh_personality_decl) 12707 gcc_eh_personality_decl = build_personality_function ("gcc"); 12708 return gcc_eh_personality_decl; 12709 } 12710 12711 /* TARGET is a call target of GIMPLE call statement 12712 (obtained by gimple_call_fn). Return true if it is 12713 OBJ_TYPE_REF representing an virtual call of C++ method. 12714 (As opposed to OBJ_TYPE_REF representing objc calls 12715 through a cast where middle-end devirtualization machinery 12716 can't apply.) 
*/ 12717 12718 bool 12719 virtual_method_call_p (const_tree target) 12720 { 12721 if (TREE_CODE (target) != OBJ_TYPE_REF) 12722 return false; 12723 tree t = TREE_TYPE (target); 12724 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE); 12725 t = TREE_TYPE (t); 12726 if (TREE_CODE (t) == FUNCTION_TYPE) 12727 return false; 12728 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE); 12729 /* If we do not have BINFO associated, it means that type was built 12730 without devirtualization enabled. Do not consider this a virtual 12731 call. */ 12732 if (!TYPE_BINFO (obj_type_ref_class (target))) 12733 return false; 12734 return true; 12735 } 12736 12737 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */ 12738 12739 tree 12740 obj_type_ref_class (const_tree ref) 12741 { 12742 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF); 12743 ref = TREE_TYPE (ref); 12744 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE); 12745 ref = TREE_TYPE (ref); 12746 /* We look for type THIS points to. ObjC also builds 12747 OBJ_TYPE_REF with non-method calls, Their first parameter 12748 ID however also corresponds to class type. */ 12749 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE 12750 || TREE_CODE (ref) == FUNCTION_TYPE); 12751 ref = TREE_VALUE (TYPE_ARG_TYPES (ref)); 12752 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE); 12753 return TREE_TYPE (ref); 12754 } 12755 12756 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */ 12757 12758 static tree 12759 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos) 12760 { 12761 unsigned int i; 12762 tree base_binfo, b; 12763 12764 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) 12765 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo)) 12766 && types_same_for_odr (TREE_TYPE (base_binfo), type)) 12767 return base_binfo; 12768 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL) 12769 return b; 12770 return NULL; 12771 } 12772 12773 /* Try to find a base info of BINFO that would have its field decl at offset 12774 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be 12775 found, return, otherwise return NULL_TREE. */ 12776 12777 tree 12778 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type) 12779 { 12780 tree type = BINFO_TYPE (binfo); 12781 12782 while (true) 12783 { 12784 HOST_WIDE_INT pos, size; 12785 tree fld; 12786 int i; 12787 12788 if (types_same_for_odr (type, expected_type)) 12789 return binfo; 12790 if (offset < 0) 12791 return NULL_TREE; 12792 12793 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld)) 12794 { 12795 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld)) 12796 continue; 12797 12798 pos = int_bit_position (fld); 12799 size = tree_to_uhwi (DECL_SIZE (fld)); 12800 if (pos <= offset && (pos + size) > offset) 12801 break; 12802 } 12803 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE) 12804 return NULL_TREE; 12805 12806 /* Offset 0 indicates the primary base, whose vtable contents are 12807 represented in the binfo for the derived class. */ 12808 else if (offset != 0) 12809 { 12810 tree found_binfo = NULL, base_binfo; 12811 /* Offsets in BINFO are in bytes relative to the whole structure 12812 while POS is in bits relative to the containing field. 
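             For instance, with a BINFO_OFFSET of 4 bytes and a field at
             bit position 64, the lookup below is done at byte offset
             4 + 64 / 8 = 12 (assuming the usual 8-bit BITS_PER_UNIT).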
*/ 12813 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos 12814 / BITS_PER_UNIT); 12815 12816 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) 12817 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset 12818 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld))) 12819 { 12820 found_binfo = base_binfo; 12821 break; 12822 } 12823 if (found_binfo) 12824 binfo = found_binfo; 12825 else 12826 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld), 12827 binfo_offset); 12828 } 12829 12830 type = TREE_TYPE (fld); 12831 offset -= pos; 12832 } 12833 } 12834 12835 /* Returns true if X is a typedef decl. */ 12836 12837 bool 12838 is_typedef_decl (const_tree x) 12839 { 12840 return (x && TREE_CODE (x) == TYPE_DECL 12841 && DECL_ORIGINAL_TYPE (x) != NULL_TREE); 12842 } 12843 12844 /* Returns true iff TYPE is a type variant created for a typedef. */ 12845 12846 bool 12847 typedef_variant_p (const_tree type) 12848 { 12849 return is_typedef_decl (TYPE_NAME (type)); 12850 } 12851 12852 /* Warn about a use of an identifier which was marked deprecated. */ 12853 void 12854 warn_deprecated_use (tree node, tree attr) 12855 { 12856 const char *msg; 12857 12858 if (node == 0 || !warn_deprecated_decl) 12859 return; 12860 12861 if (!attr) 12862 { 12863 if (DECL_P (node)) 12864 attr = DECL_ATTRIBUTES (node); 12865 else if (TYPE_P (node)) 12866 { 12867 tree decl = TYPE_STUB_DECL (node); 12868 if (decl) 12869 attr = lookup_attribute ("deprecated", 12870 TYPE_ATTRIBUTES (TREE_TYPE (decl))); 12871 } 12872 } 12873 12874 if (attr) 12875 attr = lookup_attribute ("deprecated", attr); 12876 12877 if (attr) 12878 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))); 12879 else 12880 msg = NULL; 12881 12882 bool w; 12883 if (DECL_P (node)) 12884 { 12885 if (msg) 12886 w = warning (OPT_Wdeprecated_declarations, 12887 "%qD is deprecated: %s", node, msg); 12888 else 12889 w = warning (OPT_Wdeprecated_declarations, 12890 "%qD is deprecated", node); 12891 if (w) 12892 inform (DECL_SOURCE_LOCATION (node), "declared here"); 12893 } 12894 else if (TYPE_P (node)) 12895 { 12896 tree what = NULL_TREE; 12897 tree decl = TYPE_STUB_DECL (node); 12898 12899 if (TYPE_NAME (node)) 12900 { 12901 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE) 12902 what = TYPE_NAME (node); 12903 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL 12904 && DECL_NAME (TYPE_NAME (node))) 12905 what = DECL_NAME (TYPE_NAME (node)); 12906 } 12907 12908 if (decl) 12909 { 12910 if (what) 12911 { 12912 if (msg) 12913 w = warning (OPT_Wdeprecated_declarations, 12914 "%qE is deprecated: %s", what, msg); 12915 else 12916 w = warning (OPT_Wdeprecated_declarations, 12917 "%qE is deprecated", what); 12918 } 12919 else 12920 { 12921 if (msg) 12922 w = warning (OPT_Wdeprecated_declarations, 12923 "type is deprecated: %s", msg); 12924 else 12925 w = warning (OPT_Wdeprecated_declarations, 12926 "type is deprecated"); 12927 } 12928 if (w) 12929 inform (DECL_SOURCE_LOCATION (decl), "declared here"); 12930 } 12931 else 12932 { 12933 if (what) 12934 { 12935 if (msg) 12936 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s", 12937 what, msg); 12938 else 12939 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what); 12940 } 12941 else 12942 { 12943 if (msg) 12944 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s", 12945 msg); 12946 else 12947 warning (OPT_Wdeprecated_declarations, "type is deprecated"); 12948 } 12949 } 12950 } 12951 } 12952 12953 /* Return true if REF has a COMPONENT_REF with a 
bit-field field declaration 12954 somewhere in it. */ 12955 12956 bool 12957 contains_bitfld_component_ref_p (const_tree ref) 12958 { 12959 while (handled_component_p (ref)) 12960 { 12961 if (TREE_CODE (ref) == COMPONENT_REF 12962 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))) 12963 return true; 12964 ref = TREE_OPERAND (ref, 0); 12965 } 12966 12967 return false; 12968 } 12969 12970 /* Try to determine whether a TRY_CATCH expression can fall through. 12971 This is a subroutine of block_may_fallthru. */ 12972 12973 static bool 12974 try_catch_may_fallthru (const_tree stmt) 12975 { 12976 tree_stmt_iterator i; 12977 12978 /* If the TRY block can fall through, the whole TRY_CATCH can 12979 fall through. */ 12980 if (block_may_fallthru (TREE_OPERAND (stmt, 0))) 12981 return true; 12982 12983 i = tsi_start (TREE_OPERAND (stmt, 1)); 12984 switch (TREE_CODE (tsi_stmt (i))) 12985 { 12986 case CATCH_EXPR: 12987 /* We expect to see a sequence of CATCH_EXPR trees, each with a 12988 catch expression and a body. The whole TRY_CATCH may fall 12989 through iff any of the catch bodies falls through. */ 12990 for (; !tsi_end_p (i); tsi_next (&i)) 12991 { 12992 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i)))) 12993 return true; 12994 } 12995 return false; 12996 12997 case EH_FILTER_EXPR: 12998 /* The exception filter expression only matters if there is an 12999 exception. If the exception does not match EH_FILTER_TYPES, 13000 we will execute EH_FILTER_FAILURE, and we will fall through 13001 if that falls through. If the exception does match 13002 EH_FILTER_TYPES, the stack unwinder will continue up the 13003 stack, so we will not fall through. We don't know whether we 13004 will throw an exception which matches EH_FILTER_TYPES or not, 13005 so we just ignore EH_FILTER_TYPES and assume that we might 13006 throw an exception which doesn't match. */ 13007 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i))); 13008 13009 default: 13010 /* This case represents statements to be executed when an 13011 exception occurs. Those statements are implicitly followed 13012 by a RESX statement to resume execution after the exception. 13013 So in this case the TRY_CATCH never falls through. */ 13014 return false; 13015 } 13016 } 13017 13018 /* Try to determine if we can fall out of the bottom of BLOCK. This guess 13019 need not be 100% accurate; simply be conservative and return true if we 13020 don't know. This is used only to avoid stupidly generating extra code. 13021 If we're wrong, we'll just delete the extra code later. */ 13022 13023 bool 13024 block_may_fallthru (const_tree block) 13025 { 13026 /* This CONST_CAST is okay because expr_last returns its argument 13027 unmodified and we assign it to a const_tree. */ 13028 const_tree stmt = expr_last (CONST_CAST_TREE (block)); 13029 13030 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK) 13031 { 13032 case GOTO_EXPR: 13033 case RETURN_EXPR: 13034 /* Easy cases. If the last statement of the block implies 13035 control transfer, then we can't fall through. */ 13036 return false; 13037 13038 case SWITCH_EXPR: 13039 /* If SWITCH_LABELS is set, this is lowered, and represents a 13040 branch to a selected label and hence can not fall through. 13041 Otherwise SWITCH_BODY is set, and the switch can fall 13042 through. 
*/ 13043 return SWITCH_LABELS (stmt) == NULL_TREE; 13044 13045 case COND_EXPR: 13046 if (block_may_fallthru (COND_EXPR_THEN (stmt))) 13047 return true; 13048 return block_may_fallthru (COND_EXPR_ELSE (stmt)); 13049 13050 case BIND_EXPR: 13051 return block_may_fallthru (BIND_EXPR_BODY (stmt)); 13052 13053 case TRY_CATCH_EXPR: 13054 return try_catch_may_fallthru (stmt); 13055 13056 case TRY_FINALLY_EXPR: 13057 /* The finally clause is always executed after the try clause, 13058 so if it does not fall through, then the try-finally will not 13059 fall through. Otherwise, if the try clause does not fall 13060 through, then when the finally clause falls through it will 13061 resume execution wherever the try clause was going. So the 13062 whole try-finally will only fall through if both the try 13063 clause and the finally clause fall through. */ 13064 return (block_may_fallthru (TREE_OPERAND (stmt, 0)) 13065 && block_may_fallthru (TREE_OPERAND (stmt, 1))); 13066 13067 case MODIFY_EXPR: 13068 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR) 13069 stmt = TREE_OPERAND (stmt, 1); 13070 else 13071 return true; 13072 /* FALLTHRU */ 13073 13074 case CALL_EXPR: 13075 /* Functions that do not return do not fall through. */ 13076 return (call_expr_flags (stmt) & ECF_NORETURN) == 0; 13077 13078 case CLEANUP_POINT_EXPR: 13079 return block_may_fallthru (TREE_OPERAND (stmt, 0)); 13080 13081 case TARGET_EXPR: 13082 return block_may_fallthru (TREE_OPERAND (stmt, 1)); 13083 13084 case ERROR_MARK: 13085 return true; 13086 13087 default: 13088 return lang_hooks.block_may_fallthru (stmt); 13089 } 13090 } 13091 13092 /* True if we are using EH to handle cleanups. */ 13093 static bool using_eh_for_cleanups_flag = false; 13094 13095 /* This routine is called from front ends to indicate eh should be used for 13096 cleanups. */ 13097 void 13098 using_eh_for_cleanups (void) 13099 { 13100 using_eh_for_cleanups_flag = true; 13101 } 13102 13103 /* Query whether EH is used for cleanups. */ 13104 bool 13105 using_eh_for_cleanups_p (void) 13106 { 13107 return using_eh_for_cleanups_flag; 13108 } 13109 13110 /* Wrapper for tree_code_name to ensure that tree code is valid */ 13111 const char * 13112 get_tree_code_name (enum tree_code code) 13113 { 13114 const char *invalid = "<invalid tree code>"; 13115 13116 if (code >= MAX_TREE_CODES) 13117 return invalid; 13118 13119 return tree_code_name[code]; 13120 } 13121 13122 /* Drops the TREE_OVERFLOW flag from T. */ 13123 13124 tree 13125 drop_tree_overflow (tree t) 13126 { 13127 gcc_checking_assert (TREE_OVERFLOW (t)); 13128 13129 /* For tree codes with a sharing machinery re-build the result. */ 13130 if (TREE_CODE (t) == INTEGER_CST) 13131 return wide_int_to_tree (TREE_TYPE (t), t); 13132 13133 /* Otherwise, as all tcc_constants are possibly shared, copy the node 13134 and drop the flag. */ 13135 t = copy_node (t); 13136 TREE_OVERFLOW (t) = 0; 13137 return t; 13138 } 13139 13140 /* Given a memory reference expression T, return its base address. 13141 The base address of a memory reference expression is the main 13142 object being referenced. For instance, the base address for 13143 'array[i].fld[j]' is 'array'. You can think of this as stripping 13144 away the offset part from a memory address. 13145 13146 This function calls handled_component_p to strip away all the inner 13147 parts of the memory reference until it reaches the base object. 
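   For instance, for a dereference such as (*p).x, with P an SSA name, the
   result is the innermost MEM_REF itself, whereas a reference like
   ((struct s *) &buf)->x is usually represented as a MEM_REF of an
   ADDR_EXPR, in which case the underlying declaration BUF is returned.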
*/ 13148 13149 tree 13150 get_base_address (tree t) 13151 { 13152 while (handled_component_p (t)) 13153 t = TREE_OPERAND (t, 0); 13154 13155 if ((TREE_CODE (t) == MEM_REF 13156 || TREE_CODE (t) == TARGET_MEM_REF) 13157 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR) 13158 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0); 13159 13160 /* ??? Either the alias oracle or all callers need to properly deal 13161 with WITH_SIZE_EXPRs before we can look through those. */ 13162 if (TREE_CODE (t) == WITH_SIZE_EXPR) 13163 return NULL_TREE; 13164 13165 return t; 13166 } 13167 13168 /* Return a tree of sizetype representing the size, in bytes, of the element 13169 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ 13170 13171 tree 13172 array_ref_element_size (tree exp) 13173 { 13174 tree aligned_size = TREE_OPERAND (exp, 3); 13175 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))); 13176 location_t loc = EXPR_LOCATION (exp); 13177 13178 /* If a size was specified in the ARRAY_REF, it's the size measured 13179 in alignment units of the element type. So multiply by that value. */ 13180 if (aligned_size) 13181 { 13182 /* ??? tree_ssa_useless_type_conversion will eliminate casts to 13183 sizetype from another type of the same width and signedness. */ 13184 if (TREE_TYPE (aligned_size) != sizetype) 13185 aligned_size = fold_convert_loc (loc, sizetype, aligned_size); 13186 return size_binop_loc (loc, MULT_EXPR, aligned_size, 13187 size_int (TYPE_ALIGN_UNIT (elmt_type))); 13188 } 13189 13190 /* Otherwise, take the size from that of the element type. Substitute 13191 any PLACEHOLDER_EXPR that we have. */ 13192 else 13193 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp); 13194 } 13195 13196 /* Return a tree representing the lower bound of the array mentioned in 13197 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ 13198 13199 tree 13200 array_ref_low_bound (tree exp) 13201 { 13202 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); 13203 13204 /* If a lower bound is specified in EXP, use it. */ 13205 if (TREE_OPERAND (exp, 2)) 13206 return TREE_OPERAND (exp, 2); 13207 13208 /* Otherwise, if there is a domain type and it has a lower bound, use it, 13209 substituting for a PLACEHOLDER_EXPR as needed. */ 13210 if (domain_type && TYPE_MIN_VALUE (domain_type)) 13211 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp); 13212 13213 /* Otherwise, return a zero of the appropriate type. */ 13214 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0); 13215 } 13216 13217 /* Return a tree representing the upper bound of the array mentioned in 13218 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ 13219 13220 tree 13221 array_ref_up_bound (tree exp) 13222 { 13223 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); 13224 13225 /* If there is a domain type and it has an upper bound, use it, substituting 13226 for a PLACEHOLDER_EXPR as needed. */ 13227 if (domain_type && TYPE_MAX_VALUE (domain_type)) 13228 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp); 13229 13230 /* Otherwise fail. */ 13231 return NULL_TREE; 13232 } 13233 13234 /* Returns true if REF is an array reference or a component reference 13235 to an array at the end of a structure. 13236 If this is the case, the array may be allocated larger 13237 than its upper bound implies. 
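   The classical example is a trailing array member such as

     struct s { int n; char data[1]; };

   where objects are commonly over-allocated so that an access like
   ptr->data[5] runs past the declared bound of the DATA field.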
*/ 13238 13239 bool 13240 array_at_struct_end_p (tree ref) 13241 { 13242 tree atype; 13243 13244 if (TREE_CODE (ref) == ARRAY_REF 13245 || TREE_CODE (ref) == ARRAY_RANGE_REF) 13246 { 13247 atype = TREE_TYPE (TREE_OPERAND (ref, 0)); 13248 ref = TREE_OPERAND (ref, 0); 13249 } 13250 else if (TREE_CODE (ref) == COMPONENT_REF 13251 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE) 13252 atype = TREE_TYPE (TREE_OPERAND (ref, 1)); 13253 else 13254 return false; 13255 13256 while (handled_component_p (ref)) 13257 { 13258 /* If the reference chain contains a component reference to a 13259 non-union type and there follows another field the reference 13260 is not at the end of a structure. */ 13261 if (TREE_CODE (ref) == COMPONENT_REF) 13262 { 13263 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE) 13264 { 13265 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1)); 13266 while (nextf && TREE_CODE (nextf) != FIELD_DECL) 13267 nextf = DECL_CHAIN (nextf); 13268 if (nextf) 13269 return false; 13270 } 13271 } 13272 /* If we have a multi-dimensional array we do not consider 13273 a non-innermost dimension as flex array if the whole 13274 multi-dimensional array is at struct end. 13275 Same for an array of aggregates with a trailing array 13276 member. */ 13277 else if (TREE_CODE (ref) == ARRAY_REF) 13278 return false; 13279 else if (TREE_CODE (ref) == ARRAY_RANGE_REF) 13280 ; 13281 /* If we view an underlying object as sth else then what we 13282 gathered up to now is what we have to rely on. */ 13283 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR) 13284 break; 13285 else 13286 gcc_unreachable (); 13287 13288 ref = TREE_OPERAND (ref, 0); 13289 } 13290 13291 /* The array now is at struct end. Treat flexible arrays as 13292 always subject to extend, even into just padding constrained by 13293 an underlying decl. */ 13294 if (! TYPE_SIZE (atype)) 13295 return true; 13296 13297 tree size = NULL; 13298 13299 if (TREE_CODE (ref) == MEM_REF 13300 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR) 13301 { 13302 size = TYPE_SIZE (TREE_TYPE (ref)); 13303 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0); 13304 } 13305 13306 /* If the reference is based on a declared entity, the size of the array 13307 is constrained by its given domain. (Do not trust commons PR/69368). */ 13308 if (DECL_P (ref) 13309 /* Be sure the size of MEM_REF target match. For example: 13310 13311 char buf[10]; 13312 struct foo *str = (struct foo *)&buf; 13313 13314 str->trailin_array[2] = 1; 13315 13316 is valid because BUF allocate enough space. */ 13317 13318 && (!size || (DECL_SIZE (ref) != NULL 13319 && operand_equal_p (DECL_SIZE (ref), size, 0))) 13320 && !(flag_unconstrained_commons 13321 && VAR_P (ref) && DECL_COMMON (ref))) 13322 return false; 13323 13324 return true; 13325 } 13326 13327 /* Return a tree representing the offset, in bytes, of the field referenced 13328 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */ 13329 13330 tree 13331 component_ref_field_offset (tree exp) 13332 { 13333 tree aligned_offset = TREE_OPERAND (exp, 2); 13334 tree field = TREE_OPERAND (exp, 1); 13335 location_t loc = EXPR_LOCATION (exp); 13336 13337 /* If an offset was specified in the COMPONENT_REF, it's the offset measured 13338 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that 13339 value. */ 13340 if (aligned_offset) 13341 { 13342 /* ??? tree_ssa_useless_type_conversion will eliminate casts to 13343 sizetype from another type of the same width and signedness. 
         */
      if (TREE_TYPE (aligned_offset) != sizetype)
        aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
                             size_int (DECL_OFFSET_ALIGN (field)
                                       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}

/* Return the machine mode of T.  For vectors, returns the mode of the
   inner type.  The main use case is to feed the result to HONOR_NANS,
   avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */

machine_mode
element_mode (const_tree t)
{
  if (!TYPE_P (t))
    t = TREE_TYPE (t);
  if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    t = TREE_TYPE (t);
  return TYPE_MODE (t);
}


/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specific variant (i.e. the main variant).  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variants can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - the main variant may be TYPE_COMPLETE_P and variant types
       !TYPE_COMPLETE_P; in this case some values may not be set in the
       variant types (see the TYPE_COMPLETE_P checks).
     - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial
       type
     - TYPE_NAME and attributes (i.e. when the variant originates from a
       typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO, TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P; this is
       necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have a new TYPE_FIELDS list that lists variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
     - TYPE_METHODS is always NULL for variant types and maintained for
       the main variant only.  */

  /* Convenience macro for matching individual fields.  */
#define verify_variant_match(flag)			\
  do {							\
    if (flag (tv) != flag (t))				\
      {							\
	error ("type variant differs by " #flag ".");	\
	debug_tree (tv);				\
	return false;					\
      }							\
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
  if (TYPE_ARTIFICIAL (tv) && 0)
    verify_variant_match (TYPE_ARTIFICIAL);
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during the libstdc++ build.  */
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
    verify_variant_match (TYPE_FINAL_P);

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
          && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
        verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
          && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
          && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
        {
          gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
                                        TYPE_SIZE_UNIT (tv), 0));
          error ("type variant has different TYPE_SIZE_UNIT");
          debug_tree (tv);
          error ("type variant's TYPE_SIZE_UNIT");
          debug_tree (TYPE_SIZE_UNIT (tv));
          error ("type's TYPE_SIZE_UNIT");
          debug_tree (TYPE_SIZE_UNIT (t));
          return false;
        }
    }
  verify_variant_match (TYPE_PRECISION);
  verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ by TYPE_CONTEXT, which in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.  */
  if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
    verify_variant_match (TYPE_CONTEXT);
  verify_variant_match (TYPE_STRING_FLAG);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangles the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
          || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different TYPE_VFIELD");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
       || TREE_CODE (t) == INTEGER_TYPE
       || TREE_CODE (t) == BOOLEAN_TYPE
       || TREE_CODE (t) == REAL_TYPE
       || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
    {
      error ("type variant has TYPE_METHODS");
      debug_tree (tv);
      return false;
    }
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even a type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     The C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with the main variant.

     Also disable the check for Java for now because of a parser hack that
     first builds a dummy BINFO and then sometimes replaces it with a real
     BINFO in some of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
         Since there is no cheap way to tell a C++ type from a Java type
         without LTO, do the checking at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different TYPE_BINFO");
      debug_tree (tv);
      error ("type variant's TYPE_BINFO");
      debug_tree (TYPE_BINFO (tv));
      error ("type's TYPE_BINFO");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE)
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of a complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
           && COMPLETE_TYPE_P (t)
           && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
         qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
           f1 && f2;
           f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
        if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
            || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
                 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
                /* FIXME: gfc_nonrestricted_type builds all types as variants
                   with the exception of pointer types.  It deeply copies the
                   type, which means that we may end up with a variant type
                   referring to a non-variant pointer.  We may change it to
                   produce types as variants, too, like
                   objc_get_protocol_qualified_type does.  */
                && !POINTER_TYPE_P (TREE_TYPE (f1)))
            || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
            || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
          break;
      if (f1 || f2)
        {
          error ("type variant has different TYPE_FIELDS");
          debug_tree (tv);
          error ("first mismatch is field");
          debug_tree (f1);
          error ("and field");
          debug_tree (f2);
          return false;
        }
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of an array type is really an array type
     of the qualified TREE_TYPE.
     objc builds variants of pointer types where the pointed-to type is a
     variant, too, in objc_get_protocol_qualified_type.
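
     As an illustrative example, in C++ the type of 'const int a[3]' is a
     variant of 'int[3]' whose TREE_TYPE is the 'const int' variant of
     'int'; the check below therefore only requires the main variants of
     the element types to agree for arrays and pointers.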
     */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
           && !POINTER_TYPE_P (t))
          || TYPE_MAIN_VARIANT (TREE_TYPE (t))
             != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different TREE_TYPE");
      debug_tree (tv);
      error ("type variant's TREE_TYPE");
      debug_tree (TREE_TYPE (tv));
      error ("type's TREE_TYPE");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant's TREE_TYPE");
      debug_tree (TREE_TYPE (tv));
      error ("type's TREE_TYPE");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}


/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate which does not handle all type kinds itself but falls
   back to pointer-comparison of TYPE_CANONICAL for aggregates
   for example.  */

/* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
   type calculation because we need to allow interoperability between signed
   and unsigned variants.  */

bool
type_with_interoperable_signedness (const_tree type)
{
  /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with
     both signed char and unsigned char.  Similarly the Fortran FE builds
     C_SIZE_T as a signed type, while C defines it as unsigned.  */

  return tree_code_for_canonical_type_merging (TREE_CODE (type))
         == INTEGER_TYPE
         && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
             || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
}

/* Return true iff T1 and T2 are structurally identical as far as TBAA
   is concerned.
   This function is used both by lto.c canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into the structure of
   types that have TYPE_CANONICAL defined and assume them equivalent.  This
   is useful only for LTO because only in that case does TYPE_CANONICAL
   equivalence correspond to the one defined by
   gimple_canonical_types_compatible_p.  */

bool
gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
                                     bool trust_type_canonical)
{
  /* Type variants should be the same as the main variant.  When not doing
     sanity checking to verify this fact, go to the main variants and save
     some work.  */
  if (trust_type_canonical)
    {
      t1 = TYPE_MAIN_VARIANT (t1);
      t2 = TYPE_MAIN_VARIANT (t2);
    }

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* We consider complete types always compatible with incomplete types.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
        1) a mode assuming that the types are complete and matching their
           structure
        2) a mode allowing incomplete types but producing equivalence classes
           and thus ignoring all info from complete types
        3) a mode allowing incomplete types to match complete ones, but
           checking compatibility between complete types.

     1 and 2 can be used for canonical type calculation.  3 is the real
     definition of type compatibility that can be used e.g. for warnings
     during declaration merging.  */

  gcc_assert (!trust_type_canonical
              || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
  /* If the types have been previously registered and found equal
     they still are.  */

  if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
      && trust_type_canonical)
    {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
         they are always NULL, but they are set to non-NULL for types
         constructed by build_pointer_type and variants.  In this case the
         TYPE_CANONICAL is more fine-grained than the equivalence we test
         (where all pointers are considered equal).  Be sure not to return
         false negatives.  */
      gcc_checking_assert (canonical_type_used_p (t1)
                           && canonical_type_used_p (t2));
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
    }

  /* Can't be the same type if the types don't have the same code.  */
  enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
  if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different modes.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
        return false;

      /* In some cases the signed and unsigned types are required to be
         interoperable.  */
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
          && !type_with_interoperable_signedness (t1))
        return false;

      /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
         interoperable with "signed char".  Unless all frontends are revisited
         to agree on these types, we must ignore the flag completely.  */

      /* The Fortran standard defines the C_PTR type as compatible with every
         C pointer.  For this reason we need to glob all pointers into one.
         Still, pointers in different address spaces are not compatible.  */
      if (POINTER_TYPE_P (t1))
        {
          if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
              != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
            return false;
        }

      /* Tail-recurse to components.
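         For instance (an illustrative note), two vector types are treated
         as compatible here exactly when their element types are; the
         recursion below is what implements this.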
         */
      if (TREE_CODE (t1) == VECTOR_TYPE
          || TREE_CODE (t1) == COMPLEX_TYPE)
        return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
                                                    TREE_TYPE (t2),
                                                    trust_type_canonical);

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
         the number of elements is the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                                trust_type_canonical)
          || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
          || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
          || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
        return false;
      else
        {
          tree i1 = TYPE_DOMAIN (t1);
          tree i2 = TYPE_DOMAIN (t2);

          /* For an incomplete external array, the type domain can be
             NULL_TREE.  Check this condition also.  */
          if (i1 == NULL_TREE && i2 == NULL_TREE)
            return true;
          else if (i1 == NULL_TREE || i2 == NULL_TREE)
            return false;
          else
            {
              tree min1 = TYPE_MIN_VALUE (i1);
              tree min2 = TYPE_MIN_VALUE (i2);
              tree max1 = TYPE_MAX_VALUE (i1);
              tree max2 = TYPE_MAX_VALUE (i2);

              /* The minimum/maximum values have to be the same.  */
              if ((min1 == min2
                   || (min1 && min2
                       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
                            && TREE_CODE (min2) == PLACEHOLDER_EXPR)
                           || operand_equal_p (min1, min2, 0))))
                  && (max1 == max2
                      || (max1 && max2
                          && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
                               && TREE_CODE (max2) == PLACEHOLDER_EXPR)
                              || operand_equal_p (max1, max2, 0)))))
                return true;
              else
                return false;
            }
        }

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and argument types
         are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                                trust_type_canonical))
        return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        return true;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!gimple_canonical_types_compatible_p
                     (TREE_VALUE (parms1), TREE_VALUE (parms2),
                      trust_type_canonical))
                return false;
            }

          if (parms1 || parms2)
            return false;

          return true;
        }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* Don't try to compare variants of an incomplete type, before
           TYPE_FIELDS has been copied around.  */
        if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
          return true;

        if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
          return false;

        /* For aggregate types, all the fields must be the same.  */
        for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
             f1 || f2;
             f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
          {
            /* Skip non-fields and zero-sized fields.  */
            while (f1 && (TREE_CODE (f1) != FIELD_DECL
                          || (DECL_SIZE (f1)
                              && integer_zerop (DECL_SIZE (f1)))))
              f1 = TREE_CHAIN (f1);
            while (f2 && (TREE_CODE (f2) != FIELD_DECL
                          || (DECL_SIZE (f2)
                              && integer_zerop (DECL_SIZE (f2)))))
              f2 = TREE_CHAIN (f2);
            if (!f1 || !f2)
              break;
            /* The fields must have the same name, offset and type.  */
            if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
                || !gimple_compare_field_offset (f1, f2)
                || !gimple_canonical_types_compatible_p
                      (TREE_TYPE (f1), TREE_TYPE (f2),
                       trust_type_canonical))
              return false;
          }

        /* If one aggregate has more fields than the other, they
           are not the same.  */
        if (f1 || f2)
          return false;

        return true;
      }

    default:
      /* Consider all types with language specific trees in them mutually
         compatible.  This is executed only from verify_type and false
         positives can be tolerated.  */
      gcc_assert (!in_lto_p);
      return true;
    }
}

/* Verify type T.  */

void
verify_type (const_tree t)
{
  bool error_found = false;
  tree mv = TYPE_MAIN_VARIANT (t);
  if (!mv)
    {
      error ("Main variant is not defined");
      error_found = true;
    }
  else if (mv != TYPE_MAIN_VARIANT (mv))
    {
      error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
      debug_tree (mv);
      error_found = true;
    }
  else if (t != mv && !verify_type_variant (t, mv))
    error_found = true;

  tree ct = TYPE_CANONICAL (t);
  if (!ct)
    ;
  else if (TYPE_CANONICAL (t) != ct)
    {
      error ("TYPE_CANONICAL has different TYPE_CANONICAL");
      debug_tree (ct);
      error_found = true;
    }
  /* Method and function types cannot be used to address memory and thus
     TYPE_CANONICAL really matters only for determining useless conversions.

     FIXME: The C++ FE produces declarations of builtin functions that are
     not compatible with main variants.  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    ;
  else if (t != ct
           /* FIXME: gimple_canonical_types_compatible_p cannot compare types
              with variably sized arrays because their sizes were possibly
              gimplified to different variables.  */
           && !variably_modified_type_p (ct, NULL)
           && !gimple_canonical_types_compatible_p (t, ct, false))
    {
      error ("TYPE_CANONICAL is not compatible");
      debug_tree (ct);
      error_found = true;
    }

  if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
      && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
    {
      error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
      debug_tree (ct);
      error_found = true;
    }
  /* FIXME: this is violated by the C++ FE as discussed in PR70029, when
     FUNCTION_*_QUALIFIED flags are set.  */
  if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
    {
      error ("TYPE_CANONICAL of main variant is not main variant");
      debug_tree (ct);
      debug_tree (TYPE_MAIN_VARIANT (ct));
      error_found = true;
    }

  /* Check various uses of TYPE_MINVAL.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
         and dangles the pointer from time to time.
*/ 13965 if (TYPE_VFIELD (t) 13966 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL 13967 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST) 13968 { 13969 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST"); 13970 debug_tree (TYPE_VFIELD (t)); 13971 error_found = true; 13972 } 13973 } 13974 else if (TREE_CODE (t) == POINTER_TYPE) 13975 { 13976 if (TYPE_NEXT_PTR_TO (t) 13977 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE) 13978 { 13979 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE"); 13980 debug_tree (TYPE_NEXT_PTR_TO (t)); 13981 error_found = true; 13982 } 13983 } 13984 else if (TREE_CODE (t) == REFERENCE_TYPE) 13985 { 13986 if (TYPE_NEXT_REF_TO (t) 13987 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE) 13988 { 13989 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE"); 13990 debug_tree (TYPE_NEXT_REF_TO (t)); 13991 error_found = true; 13992 } 13993 } 13994 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE 13995 || TREE_CODE (t) == FIXED_POINT_TYPE) 13996 { 13997 /* FIXME: The following check should pass: 13998 useless_type_conversion_p (const_cast <tree> (t), 13999 TREE_TYPE (TYPE_MIN_VALUE (t)) 14000 but does not for C sizetypes in LTO. */ 14001 } 14002 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */ 14003 else if (TYPE_MINVAL (t) 14004 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE) 14005 || in_lto_p)) 14006 { 14007 error ("TYPE_MINVAL non-NULL"); 14008 debug_tree (TYPE_MINVAL (t)); 14009 error_found = true; 14010 } 14011 14012 /* Check various uses of TYPE_MAXVAL. */ 14013 if (RECORD_OR_UNION_TYPE_P (t)) 14014 { 14015 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL 14016 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL 14017 && TYPE_METHODS (t) != error_mark_node) 14018 { 14019 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node"); 14020 debug_tree (TYPE_METHODS (t)); 14021 error_found = true; 14022 } 14023 } 14024 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE) 14025 { 14026 if (TYPE_METHOD_BASETYPE (t) 14027 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE 14028 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE) 14029 { 14030 error ("TYPE_METHOD_BASETYPE is not record nor union"); 14031 debug_tree (TYPE_METHOD_BASETYPE (t)); 14032 error_found = true; 14033 } 14034 } 14035 else if (TREE_CODE (t) == OFFSET_TYPE) 14036 { 14037 if (TYPE_OFFSET_BASETYPE (t) 14038 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE 14039 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE) 14040 { 14041 error ("TYPE_OFFSET_BASETYPE is not record nor union"); 14042 debug_tree (TYPE_OFFSET_BASETYPE (t)); 14043 error_found = true; 14044 } 14045 } 14046 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE 14047 || TREE_CODE (t) == FIXED_POINT_TYPE) 14048 { 14049 /* FIXME: The following check should pass: 14050 useless_type_conversion_p (const_cast <tree> (t), 14051 TREE_TYPE (TYPE_MAX_VALUE (t)) 14052 but does not for C sizetypes in LTO. */ 14053 } 14054 else if (TREE_CODE (t) == ARRAY_TYPE) 14055 { 14056 if (TYPE_ARRAY_MAX_SIZE (t) 14057 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST) 14058 { 14059 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST"); 14060 debug_tree (TYPE_ARRAY_MAX_SIZE (t)); 14061 error_found = true; 14062 } 14063 } 14064 else if (TYPE_MAXVAL (t)) 14065 { 14066 error ("TYPE_MAXVAL non-NULL"); 14067 debug_tree (TYPE_MAXVAL (t)); 14068 error_found = true; 14069 } 14070 14071 /* Check various uses of TYPE_BINFO. 
*/ 14072 if (RECORD_OR_UNION_TYPE_P (t)) 14073 { 14074 if (!TYPE_BINFO (t)) 14075 ; 14076 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO) 14077 { 14078 error ("TYPE_BINFO is not TREE_BINFO"); 14079 debug_tree (TYPE_BINFO (t)); 14080 error_found = true; 14081 } 14082 /* FIXME: Java builds invalid empty binfos that do not have 14083 TREE_TYPE set. */ 14084 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0) 14085 { 14086 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT"); 14087 debug_tree (TREE_TYPE (TYPE_BINFO (t))); 14088 error_found = true; 14089 } 14090 } 14091 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p) 14092 { 14093 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL"); 14094 debug_tree (TYPE_LANG_SLOT_1 (t)); 14095 error_found = true; 14096 } 14097 14098 /* Check various uses of TYPE_VALUES_RAW. */ 14099 if (TREE_CODE (t) == ENUMERAL_TYPE) 14100 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l)) 14101 { 14102 tree value = TREE_VALUE (l); 14103 tree name = TREE_PURPOSE (l); 14104 14105 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses 14106 CONST_DECL of ENUMERAL TYPE. */ 14107 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL) 14108 { 14109 error ("Enum value is not CONST_DECL or INTEGER_CST"); 14110 debug_tree (value); 14111 debug_tree (name); 14112 error_found = true; 14113 } 14114 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE 14115 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value))) 14116 { 14117 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum"); 14118 debug_tree (value); 14119 debug_tree (name); 14120 error_found = true; 14121 } 14122 if (TREE_CODE (name) != IDENTIFIER_NODE) 14123 { 14124 error ("Enum value name is not IDENTIFIER_NODE"); 14125 debug_tree (value); 14126 debug_tree (name); 14127 error_found = true; 14128 } 14129 } 14130 else if (TREE_CODE (t) == ARRAY_TYPE) 14131 { 14132 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE) 14133 { 14134 error ("Array TYPE_DOMAIN is not integer type"); 14135 debug_tree (TYPE_DOMAIN (t)); 14136 error_found = true; 14137 } 14138 } 14139 else if (RECORD_OR_UNION_TYPE_P (t)) 14140 { 14141 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p) 14142 { 14143 error ("TYPE_FIELDS defined in incomplete type"); 14144 error_found = true; 14145 } 14146 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld)) 14147 { 14148 /* TODO: verify properties of decls. 
*/ 14149 if (TREE_CODE (fld) == FIELD_DECL) 14150 ; 14151 else if (TREE_CODE (fld) == TYPE_DECL) 14152 ; 14153 else if (TREE_CODE (fld) == CONST_DECL) 14154 ; 14155 else if (VAR_P (fld)) 14156 ; 14157 else if (TREE_CODE (fld) == TEMPLATE_DECL) 14158 ; 14159 else if (TREE_CODE (fld) == USING_DECL) 14160 ; 14161 else 14162 { 14163 error ("Wrong tree in TYPE_FIELDS list"); 14164 debug_tree (fld); 14165 error_found = true; 14166 } 14167 } 14168 } 14169 else if (TREE_CODE (t) == INTEGER_TYPE 14170 || TREE_CODE (t) == BOOLEAN_TYPE 14171 || TREE_CODE (t) == OFFSET_TYPE 14172 || TREE_CODE (t) == REFERENCE_TYPE 14173 || TREE_CODE (t) == NULLPTR_TYPE 14174 || TREE_CODE (t) == POINTER_TYPE) 14175 { 14176 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL)) 14177 { 14178 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p", 14179 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t)); 14180 error_found = true; 14181 } 14182 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC) 14183 { 14184 error ("TYPE_CACHED_VALUES is not TREE_VEC"); 14185 debug_tree (TYPE_CACHED_VALUES (t)); 14186 error_found = true; 14187 } 14188 /* Verify just enough of cache to ensure that no one copied it to new type. 14189 All copying should go by copy_node that should clear it. */ 14190 else if (TYPE_CACHED_VALUES_P (t)) 14191 { 14192 int i; 14193 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++) 14194 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i) 14195 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t) 14196 { 14197 error ("wrong TYPE_CACHED_VALUES entry"); 14198 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)); 14199 error_found = true; 14200 break; 14201 } 14202 } 14203 } 14204 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE) 14205 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l)) 14206 { 14207 /* C++ FE uses TREE_PURPOSE to store initial values. */ 14208 if (TREE_PURPOSE (l) && in_lto_p) 14209 { 14210 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list"); 14211 debug_tree (l); 14212 error_found = true; 14213 } 14214 if (!TYPE_P (TREE_VALUE (l))) 14215 { 14216 error ("Wrong entry in TYPE_ARG_TYPES list"); 14217 debug_tree (l); 14218 error_found = true; 14219 } 14220 } 14221 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t)) 14222 { 14223 error ("TYPE_VALUES_RAW field is non-NULL"); 14224 debug_tree (TYPE_VALUES_RAW (t)); 14225 error_found = true; 14226 } 14227 if (TREE_CODE (t) != INTEGER_TYPE 14228 && TREE_CODE (t) != BOOLEAN_TYPE 14229 && TREE_CODE (t) != OFFSET_TYPE 14230 && TREE_CODE (t) != REFERENCE_TYPE 14231 && TREE_CODE (t) != NULLPTR_TYPE 14232 && TREE_CODE (t) != POINTER_TYPE 14233 && TYPE_CACHED_VALUES_P (t)) 14234 { 14235 error ("TYPE_CACHED_VALUES_P is set while it should not"); 14236 error_found = true; 14237 } 14238 if (TYPE_STRING_FLAG (t) 14239 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE) 14240 { 14241 error ("TYPE_STRING_FLAG is set on wrong type code"); 14242 error_found = true; 14243 } 14244 else if (TYPE_STRING_FLAG (t)) 14245 { 14246 const_tree b = t; 14247 if (TREE_CODE (b) == ARRAY_TYPE) 14248 b = TREE_TYPE (t); 14249 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type 14250 that is 32bits. 
*/ 14251 if (TREE_CODE (b) != INTEGER_TYPE) 14252 { 14253 error ("TYPE_STRING_FLAG is set on type that does not look like " 14254 "char nor array of chars"); 14255 error_found = true; 14256 } 14257 } 14258 14259 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always 14260 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns 14261 of a type. */ 14262 if (TREE_CODE (t) == METHOD_TYPE 14263 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t)) 14264 { 14265 error ("TYPE_METHOD_BASETYPE is not main variant"); 14266 error_found = true; 14267 } 14268 14269 if (error_found) 14270 { 14271 debug_tree (const_cast <tree> (t)); 14272 internal_error ("verify_type failed"); 14273 } 14274 } 14275 14276 14277 /* Return 1 if ARG interpreted as signed in its precision is known to be 14278 always positive or 2 if ARG is known to be always negative, or 3 if 14279 ARG may be positive or negative. */ 14280 14281 int 14282 get_range_pos_neg (tree arg) 14283 { 14284 if (arg == error_mark_node) 14285 return 3; 14286 14287 int prec = TYPE_PRECISION (TREE_TYPE (arg)); 14288 int cnt = 0; 14289 if (TREE_CODE (arg) == INTEGER_CST) 14290 { 14291 wide_int w = wi::sext (arg, prec); 14292 if (wi::neg_p (w)) 14293 return 2; 14294 else 14295 return 1; 14296 } 14297 while (CONVERT_EXPR_P (arg) 14298 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0))) 14299 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec) 14300 { 14301 arg = TREE_OPERAND (arg, 0); 14302 /* Narrower value zero extended into wider type 14303 will always result in positive values. */ 14304 if (TYPE_UNSIGNED (TREE_TYPE (arg)) 14305 && TYPE_PRECISION (TREE_TYPE (arg)) < prec) 14306 return 1; 14307 prec = TYPE_PRECISION (TREE_TYPE (arg)); 14308 if (++cnt > 30) 14309 return 3; 14310 } 14311 14312 if (TREE_CODE (arg) != SSA_NAME) 14313 return 3; 14314 wide_int arg_min, arg_max; 14315 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE) 14316 { 14317 gimple *g = SSA_NAME_DEF_STMT (arg); 14318 if (is_gimple_assign (g) 14319 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g))) 14320 { 14321 tree t = gimple_assign_rhs1 (g); 14322 if (INTEGRAL_TYPE_P (TREE_TYPE (t)) 14323 && TYPE_PRECISION (TREE_TYPE (t)) <= prec) 14324 { 14325 if (TYPE_UNSIGNED (TREE_TYPE (t)) 14326 && TYPE_PRECISION (TREE_TYPE (t)) < prec) 14327 return 1; 14328 prec = TYPE_PRECISION (TREE_TYPE (t)); 14329 arg = t; 14330 if (++cnt > 30) 14331 return 3; 14332 continue; 14333 } 14334 } 14335 return 3; 14336 } 14337 if (TYPE_UNSIGNED (TREE_TYPE (arg))) 14338 { 14339 /* For unsigned values, the "positive" range comes 14340 below the "negative" range. */ 14341 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED)) 14342 return 1; 14343 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED)) 14344 return 2; 14345 } 14346 else 14347 { 14348 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED)) 14349 return 1; 14350 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED)) 14351 return 2; 14352 } 14353 return 3; 14354 } 14355 14356 14357 14358 14359 /* Return true if ARG is marked with the nonnull attribute in the 14360 current function signature. */ 14361 14362 bool 14363 nonnull_arg_p (const_tree arg) 14364 { 14365 tree t, attrs, fntype; 14366 unsigned HOST_WIDE_INT arg_num; 14367 14368 gcc_assert (TREE_CODE (arg) == PARM_DECL 14369 && (POINTER_TYPE_P (TREE_TYPE (arg)) 14370 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE)); 14371 14372 /* The static chain decl is always non null. 
*/ 14373 if (arg == cfun->static_chain_decl) 14374 return true; 14375 14376 /* THIS argument of method is always non-NULL. */ 14377 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE 14378 && arg == DECL_ARGUMENTS (cfun->decl) 14379 && flag_delete_null_pointer_checks) 14380 return true; 14381 14382 /* Values passed by reference are always non-NULL. */ 14383 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE 14384 && flag_delete_null_pointer_checks) 14385 return true; 14386 14387 fntype = TREE_TYPE (cfun->decl); 14388 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs)) 14389 { 14390 attrs = lookup_attribute ("nonnull", attrs); 14391 14392 /* If "nonnull" wasn't specified, we know nothing about the argument. */ 14393 if (attrs == NULL_TREE) 14394 return false; 14395 14396 /* If "nonnull" applies to all the arguments, then ARG is non-null. */ 14397 if (TREE_VALUE (attrs) == NULL_TREE) 14398 return true; 14399 14400 /* Get the position number for ARG in the function signature. */ 14401 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl); 14402 t; 14403 t = DECL_CHAIN (t), arg_num++) 14404 { 14405 if (t == arg) 14406 break; 14407 } 14408 14409 gcc_assert (t == arg); 14410 14411 /* Now see if ARG_NUM is mentioned in the nonnull list. */ 14412 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t)) 14413 { 14414 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0) 14415 return true; 14416 } 14417 } 14418 14419 return false; 14420 } 14421 14422 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range 14423 information. */ 14424 14425 location_t 14426 set_block (location_t loc, tree block) 14427 { 14428 location_t pure_loc = get_pure_location (loc); 14429 source_range src_range = get_range_from_loc (line_table, loc); 14430 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block); 14431 } 14432 14433 location_t 14434 set_source_range (tree expr, location_t start, location_t finish) 14435 { 14436 source_range src_range; 14437 src_range.m_start = start; 14438 src_range.m_finish = finish; 14439 return set_source_range (expr, src_range); 14440 } 14441 14442 location_t 14443 set_source_range (tree expr, source_range src_range) 14444 { 14445 if (!EXPR_P (expr)) 14446 return UNKNOWN_LOCATION; 14447 14448 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr)); 14449 location_t adhoc = COMBINE_LOCATION_DATA (line_table, 14450 pure_loc, 14451 src_range, 14452 NULL); 14453 SET_EXPR_LOCATION (expr, adhoc); 14454 return adhoc; 14455 } 14456 14457 /* Return the name of combined function FN, for debugging purposes. */ 14458 14459 const char * 14460 combined_fn_name (combined_fn fn) 14461 { 14462 if (builtin_fn_p (fn)) 14463 { 14464 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn)); 14465 return IDENTIFIER_POINTER (DECL_NAME (fndecl)); 14466 } 14467 else 14468 return internal_fn_name (as_internal_fn (fn)); 14469 } 14470 14471 /* Return a bitmap with a bit set corresponding to each argument in 14472 a function call type FNTYPE declared with attribute nonnull, 14473 or null if none of the function's argument are nonnull. The caller 14474 must free the bitmap. */ 14475 14476 bitmap 14477 get_nonnull_args (const_tree fntype) 14478 { 14479 if (fntype == NULL_TREE) 14480 return NULL; 14481 14482 tree attrs = TYPE_ATTRIBUTES (fntype); 14483 if (!attrs) 14484 return NULL; 14485 14486 bitmap argmap = NULL; 14487 14488 /* A function declaration can specify multiple attribute nonnull, 14489 each with zero or more arguments. 
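     For example (an illustrative declaration, not taken from any particular
     header), both attributes below contribute to the same bitmap:

       extern void f (void *, void *)
         __attribute__ ((nonnull (1))) __attribute__ ((nonnull (2)));
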
     The loop below creates a bitmap representing a union of all the
     arguments.  An empty (but non-null) bitmap means that all arguments
     have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
        break;

      if (!argmap)
        argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
        {
          /* Clear the bitmap in case a previous attribute nonnull
             set it and this one overrides it for all arguments.  */
          bitmap_clear (argmap);
          return argmap;
        }

      /* Iterate over the indices of the format arguments declared nonnull
         and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
        {
          unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
          bitmap_set_bit (argmap, val);
        }
    }

  return argmap;
}

/* Return true if an expression with CODE has to have the same result type as
   its first operand.  */

bool
expr_type_first_operand_type_p (tree_code code)
{
  switch (code)
    {
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      return false;
    }
}

#if CHECKING_P

namespace selftest {

/* Selftests for tree.  */

/* Verify that integer constants are sane.  */

static void
test_integer_constants ()
{
  ASSERT_TRUE (integer_type_node != NULL);
  ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);

  tree type = integer_type_node;

  tree zero = build_zero_cst (type);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
  ASSERT_EQ (type, TREE_TYPE (zero));

  tree one = build_int_cst (type, 1);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
  ASSERT_EQ (type, TREE_TYPE (one));
}

/* Verify identifiers.  */

static void
test_identifiers ()
{
  tree identifier = get_identifier ("foo");
  ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
  ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
}

/* Verify LABEL_DECL.  */

static void
test_labels ()
{
  tree identifier = get_identifier ("err");
  tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
                                identifier, void_type_node);
  ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
  ASSERT_FALSE (FORCED_LABEL (label_decl));
}

/* Run all of the selftests within this file.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
}

} // namespace selftest

#endif /* CHECKING_P */

#include "gt-tree.h"