/* Read the GIMPLE representation from a file stream.

   Copyright (C) 2009-2020 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
   Re-implemented by Diego Novillo <dnovillo@google.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-streamer.h"
#include "toplev.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cgraph.h"
#include "cfgloop.h"
#include "debug.h"
#include "alloc-pool.h"
#include "toplev.h"

/* Allocator used to hold string slot entries for line map streaming.  */
static struct object_allocator<struct string_slot> *string_slot_allocator;

/* The table to hold the file names.  */
static hash_table<string_slot_hasher> *file_name_hash_table;

/* The table to hold the relative pathname prefixes.  */

/* This obstack holds file names used in locators.  Line map data structures
   point here and thus it needs to be kept allocated as long as linemaps
   exist.
*/ 59 static struct obstack file_name_obstack; 60 61 /* Map a pair of nul terminated strings where the first one can be 62 pointer compared, but the second can't, to another string. */ 63 struct string_pair_map 64 { 65 const char *str1; 66 const char *str2; 67 const char *str3; 68 hashval_t hash; 69 bool prefix; 70 }; 71 72 /* Allocator used to hold string pair map entries for line map streaming. */ 73 static struct object_allocator<struct string_pair_map> 74 *string_pair_map_allocator; 75 76 struct string_pair_map_hasher : nofree_ptr_hash <string_pair_map> 77 { 78 static inline hashval_t hash (const string_pair_map *); 79 static inline bool equal (const string_pair_map *, const string_pair_map *); 80 }; 81 82 inline hashval_t 83 string_pair_map_hasher::hash (const string_pair_map *spm) 84 { 85 return spm->hash; 86 } 87 88 inline bool 89 string_pair_map_hasher::equal (const string_pair_map *spm1, 90 const string_pair_map *spm2) 91 { 92 return (spm1->hash == spm2->hash 93 && spm1->str1 == spm2->str1 94 && spm1->prefix == spm2->prefix 95 && strcmp (spm1->str2, spm2->str2) == 0); 96 } 97 98 /* The table to hold the pairs of pathnames and corresponding 99 resulting pathname. Used for both mapping of get_src_pwd () 100 and recorded source working directory to relative path prefix 101 from current working directory to the recorded one, and for 102 mapping of that relative path prefix and some relative path 103 to those concatenated. */ 104 static hash_table<string_pair_map_hasher> *path_name_pair_hash_table; 105 106 107 /* Check that tag ACTUAL has one of the given values. NUM_TAGS is the 108 number of valid tag values to check. */ 109 110 void 111 lto_tag_check_set (enum LTO_tags actual, int ntags, ...) 
112 { 113 va_list ap; 114 int i; 115 116 va_start (ap, ntags); 117 for (i = 0; i < ntags; i++) 118 if ((unsigned) actual == va_arg (ap, unsigned)) 119 { 120 va_end (ap); 121 return; 122 } 123 124 va_end (ap); 125 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual)); 126 } 127 128 129 /* Read LENGTH bytes from STREAM to ADDR. */ 130 131 void 132 lto_input_data_block (class lto_input_block *ib, void *addr, size_t length) 133 { 134 size_t i; 135 unsigned char *const buffer = (unsigned char *) addr; 136 137 for (i = 0; i < length; i++) 138 buffer[i] = streamer_read_uchar (ib); 139 } 140 141 /* Compute the relative path to get to DATA_WD (absolute directory name) 142 from CWD (another absolute directory name). E.g. for 143 DATA_WD of "/tmp/foo/bar" and CWD of "/tmp/baz/qux" return 144 "../../foo/bar". Returned string should be freed by the caller. 145 Return NULL if absolute file name needs to be used. */ 146 147 static char * 148 relative_path_prefix (const char *data_wd, const char *cwd) 149 { 150 const char *d = data_wd; 151 const char *c = cwd; 152 #ifdef HAVE_DOS_BASED_FILE_SYSTEM 153 if (d[1] == ':') 154 { 155 if (!IS_DIR_SEPARATOR (d[2])) 156 return NULL; 157 if (c[0] == d[0] && c[1] == ':' && IS_DIR_SEPARATOR (c[2])) 158 { 159 c += 3; 160 d += 3; 161 } 162 else 163 return NULL; 164 } 165 else if (c[1] == ':') 166 return NULL; 167 #endif 168 do 169 { 170 while (IS_DIR_SEPARATOR (*d)) 171 d++; 172 while (IS_DIR_SEPARATOR (*c)) 173 c++; 174 size_t i; 175 for (i = 0; c[i] && !IS_DIR_SEPARATOR (c[i]) && c[i] == d[i]; i++) 176 ; 177 if ((c[i] == '\0' || IS_DIR_SEPARATOR (c[i])) 178 && (d[i] == '\0' || IS_DIR_SEPARATOR (d[i]))) 179 { 180 c += i; 181 d += i; 182 if (*c == '\0' || *d == '\0') 183 break; 184 } 185 else 186 break; 187 } 188 while (1); 189 size_t num_up = 0; 190 do 191 { 192 while (IS_DIR_SEPARATOR (*c)) 193 c++; 194 if (*c == '\0') 195 break; 196 num_up++; 197 while (*c && !IS_DIR_SEPARATOR (*c)) 198 c++; 199 } 200 while (1); 201 
while (IS_DIR_SEPARATOR (*d)) 202 d++; 203 size_t len = strlen (d); 204 if (len == 0 && num_up == 0) 205 return xstrdup ("."); 206 char *ret = XNEWVEC (char, num_up * 3 + len + 1); 207 char *p = ret; 208 for (; num_up; num_up--) 209 { 210 const char dir_up[3] = { '.', '.', DIR_SEPARATOR }; 211 memcpy (p, dir_up, 3); 212 p += 3; 213 } 214 memcpy (p, d, len + 1); 215 return ret; 216 } 217 218 /* Look up DATA_WD in hash table of relative prefixes. If found, 219 return relative path from CWD to DATA_WD from the hash table, 220 otherwise create it. */ 221 222 static const char * 223 canon_relative_path_prefix (const char *data_wd, const char *cwd) 224 { 225 if (!IS_ABSOLUTE_PATH (data_wd) || !IS_ABSOLUTE_PATH (cwd)) 226 return NULL; 227 228 if (!path_name_pair_hash_table) 229 { 230 path_name_pair_hash_table 231 = new hash_table<string_pair_map_hasher> (37); 232 string_pair_map_allocator 233 = new object_allocator <struct string_pair_map> 234 ("line map string pair map hash"); 235 } 236 237 inchash::hash h; 238 h.add_ptr (cwd); 239 h.merge_hash (htab_hash_string (data_wd)); 240 h.add_int (true); 241 242 string_pair_map s_slot; 243 s_slot.str1 = cwd; 244 s_slot.str2 = data_wd; 245 s_slot.str3 = NULL; 246 s_slot.hash = h.end (); 247 s_slot.prefix = true; 248 249 string_pair_map **slot 250 = path_name_pair_hash_table->find_slot (&s_slot, INSERT); 251 if (*slot == NULL) 252 { 253 /* Compute relative path from cwd directory to data_wd directory. 254 E.g. if cwd is /tmp/foo/bar and data_wd is /tmp/baz/qux , 255 it will return ../../baz/qux . */ 256 char *relative_path = relative_path_prefix (data_wd, cwd); 257 const char *relative = relative_path ? 
relative_path : data_wd; 258 size_t relative_len = strlen (relative); 259 gcc_assert (relative_len); 260 261 size_t data_wd_len = strlen (data_wd); 262 bool add_separator = false; 263 if (!IS_DIR_SEPARATOR (relative[relative_len - 1])) 264 add_separator = true; 265 266 size_t len = relative_len + 1 + data_wd_len + 1 + add_separator; 267 268 char *saved_string = XOBNEWVEC (&file_name_obstack, char, len); 269 struct string_pair_map *new_slot 270 = string_pair_map_allocator->allocate (); 271 memcpy (saved_string, data_wd, data_wd_len + 1); 272 memcpy (saved_string + data_wd_len + 1, relative, relative_len); 273 if (add_separator) 274 saved_string[len - 2] = DIR_SEPARATOR; 275 saved_string[len - 1] = '\0'; 276 new_slot->str1 = cwd; 277 new_slot->str2 = saved_string; 278 new_slot->str3 = saved_string + data_wd_len + 1; 279 if (relative_len == 1 && relative[0] == '.') 280 new_slot->str3 = NULL; 281 new_slot->hash = s_slot.hash; 282 new_slot->prefix = true; 283 *slot = new_slot; 284 free (relative_path); 285 return new_slot->str3; 286 } 287 else 288 { 289 string_pair_map *old_slot = *slot; 290 return old_slot->str3; 291 } 292 } 293 294 /* Look up the pair of RELATIVE_PREFIX and STRING strings in a hash table. 295 If found, return the concatenation of those from the hash table, 296 otherwise concatenate them. 
*/ 297 298 static const char * 299 canon_relative_file_name (const char *relative_prefix, const char *string) 300 { 301 inchash::hash h; 302 h.add_ptr (relative_prefix); 303 h.merge_hash (htab_hash_string (string)); 304 305 string_pair_map s_slot; 306 s_slot.str1 = relative_prefix; 307 s_slot.str2 = string; 308 s_slot.str3 = NULL; 309 s_slot.hash = h.end (); 310 s_slot.prefix = false; 311 312 string_pair_map **slot 313 = path_name_pair_hash_table->find_slot (&s_slot, INSERT); 314 if (*slot == NULL) 315 { 316 size_t relative_prefix_len = strlen (relative_prefix); 317 size_t string_len = strlen (string); 318 size_t len = relative_prefix_len + string_len + 1; 319 320 char *saved_string = XOBNEWVEC (&file_name_obstack, char, len); 321 struct string_pair_map *new_slot 322 = string_pair_map_allocator->allocate (); 323 memcpy (saved_string, relative_prefix, relative_prefix_len); 324 memcpy (saved_string + relative_prefix_len, string, string_len + 1); 325 new_slot->str1 = relative_prefix; 326 new_slot->str2 = saved_string + relative_prefix_len; 327 new_slot->str3 = saved_string; 328 new_slot->hash = s_slot.hash; 329 new_slot->prefix = false; 330 *slot = new_slot; 331 return new_slot->str3; 332 } 333 else 334 { 335 string_pair_map *old_slot = *slot; 336 return old_slot->str3; 337 } 338 } 339 340 /* Lookup STRING in file_name_hash_table. If found, return the existing 341 string, otherwise insert STRING as the canonical version. 342 If STRING is a relative pathname and RELATIVE_PREFIX is non-NULL, use 343 canon_relative_file_name instead. 
*/ 344 345 static const char * 346 canon_file_name (const char *relative_prefix, const char *string) 347 { 348 if (relative_prefix && !IS_ABSOLUTE_PATH (string)) 349 return canon_relative_file_name (relative_prefix, string); 350 351 string_slot **slot; 352 struct string_slot s_slot; 353 size_t len = strlen (string); 354 355 s_slot.s = string; 356 s_slot.len = len; 357 358 slot = file_name_hash_table->find_slot (&s_slot, INSERT); 359 if (*slot == NULL) 360 { 361 char *saved_string; 362 struct string_slot *new_slot; 363 364 saved_string = XOBNEWVEC (&file_name_obstack, char, len + 1); 365 new_slot = string_slot_allocator->allocate (); 366 memcpy (saved_string, string, len + 1); 367 new_slot->s = saved_string; 368 new_slot->len = len; 369 *slot = new_slot; 370 return saved_string; 371 } 372 else 373 { 374 struct string_slot *old_slot = *slot; 375 return old_slot->s; 376 } 377 } 378 379 /* Pointer to currently alive instance of lto_location_cache. */ 380 381 lto_location_cache *lto_location_cache::current_cache; 382 383 /* Sort locations in source order. Start with file from last application. */ 384 385 int 386 lto_location_cache::cmp_loc (const void *pa, const void *pb) 387 { 388 const cached_location *a = ((const cached_location *)pa); 389 const cached_location *b = ((const cached_location *)pb); 390 const char *current_file = current_cache->current_file; 391 int current_line = current_cache->current_line; 392 393 if (a->file == current_file && b->file != current_file) 394 return -1; 395 if (a->file != current_file && b->file == current_file) 396 return 1; 397 if (a->file == current_file && b->file == current_file) 398 { 399 if (a->line == current_line && b->line != current_line) 400 return -1; 401 if (a->line != current_line && b->line == current_line) 402 return 1; 403 } 404 if (a->file != b->file) 405 return strcmp (a->file, b->file); 406 if (a->sysp != b->sysp) 407 return a->sysp ? 
1 : -1; 408 if (a->line != b->line) 409 return a->line - b->line; 410 if (a->col != b->col) 411 return a->col - b->col; 412 if ((a->block == NULL_TREE) != (b->block == NULL_TREE)) 413 return a->block ? 1 : -1; 414 if (a->block) 415 { 416 if (BLOCK_NUMBER (a->block) < BLOCK_NUMBER (b->block)) 417 return -1; 418 if (BLOCK_NUMBER (a->block) > BLOCK_NUMBER (b->block)) 419 return 1; 420 } 421 return 0; 422 } 423 424 /* Apply all changes in location cache. Add locations into linemap and patch 425 trees. */ 426 427 bool 428 lto_location_cache::apply_location_cache () 429 { 430 static const char *prev_file; 431 if (!loc_cache.length ()) 432 return false; 433 if (loc_cache.length () > 1) 434 loc_cache.qsort (cmp_loc); 435 436 for (unsigned int i = 0; i < loc_cache.length (); i++) 437 { 438 struct cached_location loc = loc_cache[i]; 439 440 if (current_file != loc.file) 441 linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER, 442 loc.sysp, loc.file, loc.line); 443 else if (current_line != loc.line) 444 { 445 int max = loc.col; 446 447 for (unsigned int j = i + 1; j < loc_cache.length (); j++) 448 if (loc.file != loc_cache[j].file 449 || loc.line != loc_cache[j].line) 450 break; 451 else if (max < loc_cache[j].col) 452 max = loc_cache[j].col; 453 linemap_line_start (line_table, loc.line, max + 1); 454 } 455 gcc_assert (*loc.loc == BUILTINS_LOCATION + 1); 456 if (current_file != loc.file 457 || current_line != loc.line 458 || current_col != loc.col) 459 { 460 current_loc = linemap_position_for_column (line_table, loc.col); 461 if (loc.block) 462 current_loc = set_block (current_loc, loc.block); 463 } 464 else if (current_block != loc.block) 465 { 466 if (loc.block) 467 current_loc = set_block (current_loc, loc.block); 468 else 469 current_loc = LOCATION_LOCUS (current_loc); 470 } 471 *loc.loc = current_loc; 472 current_line = loc.line; 473 prev_file = current_file = loc.file; 474 current_col = loc.col; 475 current_block = loc.block; 476 } 477 loc_cache.truncate (0); 478 
accepted_length = 0; 479 return true; 480 } 481 482 /* Tree merging did not succeed; mark all changes in the cache as accepted. */ 483 484 void 485 lto_location_cache::accept_location_cache () 486 { 487 gcc_assert (current_cache == this); 488 accepted_length = loc_cache.length (); 489 } 490 491 /* Tree merging did succeed; throw away recent changes. */ 492 493 void 494 lto_location_cache::revert_location_cache () 495 { 496 loc_cache.truncate (accepted_length); 497 } 498 499 /* Read a location bitpack from bit pack BP and either update *LOC directly 500 or add it to the location cache. If IB is non-NULL, stream in a block 501 afterwards. 502 It is neccesary to call apply_location_cache to get *LOC updated. */ 503 504 void 505 lto_location_cache::input_location_and_block (location_t *loc, 506 struct bitpack_d *bp, 507 class lto_input_block *ib, 508 class data_in *data_in) 509 { 510 static const char *stream_file; 511 static int stream_line; 512 static int stream_col; 513 static bool stream_sysp; 514 static tree stream_block; 515 static const char *stream_relative_path_prefix; 516 517 gcc_assert (current_cache == this); 518 519 *loc = bp_unpack_int_in_range (bp, "location", 0, 520 RESERVED_LOCATION_COUNT + 1); 521 522 if (*loc < RESERVED_LOCATION_COUNT) 523 { 524 if (ib) 525 { 526 bool block_change = bp_unpack_value (bp, 1); 527 if (block_change) 528 stream_block = stream_read_tree (ib, data_in); 529 if (stream_block) 530 *loc = set_block (*loc, stream_block); 531 } 532 return; 533 } 534 535 bool file_change = (*loc == RESERVED_LOCATION_COUNT + 1); 536 /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will 537 ICE on it. 
*/ 538 *loc = RESERVED_LOCATION_COUNT; 539 bool line_change = bp_unpack_value (bp, 1); 540 bool column_change = bp_unpack_value (bp, 1); 541 542 if (file_change) 543 { 544 bool pwd_change = bp_unpack_value (bp, 1); 545 if (pwd_change) 546 { 547 const char *pwd = bp_unpack_string (data_in, bp); 548 const char *src_pwd = get_src_pwd (); 549 if (strcmp (pwd, src_pwd) == 0) 550 stream_relative_path_prefix = NULL; 551 else 552 stream_relative_path_prefix 553 = canon_relative_path_prefix (pwd, src_pwd); 554 } 555 stream_file = canon_file_name (stream_relative_path_prefix, 556 bp_unpack_string (data_in, bp)); 557 stream_sysp = bp_unpack_value (bp, 1); 558 } 559 560 if (line_change) 561 stream_line = bp_unpack_var_len_unsigned (bp); 562 563 if (column_change) 564 stream_col = bp_unpack_var_len_unsigned (bp); 565 566 tree block = NULL_TREE; 567 if (ib) 568 { 569 bool block_change = bp_unpack_value (bp, 1); 570 if (block_change) 571 stream_block = stream_read_tree (ib, data_in); 572 block = stream_block; 573 } 574 575 /* This optimization saves location cache operations during gimple 576 streaming. */ 577 578 if (current_file == stream_file 579 && current_line == stream_line 580 && current_col == stream_col 581 && current_sysp == stream_sysp) 582 { 583 if (current_block == block) 584 *loc = current_loc; 585 else if (block) 586 *loc = set_block (current_loc, block); 587 else 588 *loc = LOCATION_LOCUS (current_loc); 589 return; 590 } 591 592 struct cached_location entry 593 = {stream_file, loc, stream_line, stream_col, stream_sysp, block}; 594 loc_cache.safe_push (entry); 595 } 596 597 /* Read a location bitpack from bit pack BP and either update *LOC directly 598 or add it to the location cache. 599 It is neccesary to call apply_location_cache to get *LOC updated. 
*/ 600 601 void 602 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp, 603 class data_in *data_in) 604 { 605 return input_location_and_block (loc, bp, NULL, data_in); 606 } 607 608 /* Read a location bitpack from input block IB and either update *LOC directly 609 or add it to the location cache. 610 It is neccesary to call apply_location_cache to get *LOC updated. */ 611 612 void 613 lto_input_location (location_t *loc, struct bitpack_d *bp, 614 class data_in *data_in) 615 { 616 data_in->location_cache.input_location (loc, bp, data_in); 617 } 618 619 /* Read a reference to a tree node from DATA_IN using input block IB. 620 TAG is the expected node that should be found in IB, if TAG belongs 621 to one of the indexable trees, expect to read a reference index to 622 be looked up in one of the symbol tables, otherwise read the pysical 623 representation of the tree using stream_read_tree. FN is the 624 function scope for the read tree. */ 625 626 tree 627 lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in, 628 struct function *fn, enum LTO_tags tag) 629 { 630 unsigned HOST_WIDE_INT ix_u; 631 tree result = NULL_TREE; 632 633 lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref); 634 635 switch (tag) 636 { 637 case LTO_type_ref: 638 ix_u = streamer_read_uhwi (ib); 639 result = lto_file_decl_data_get_type (data_in->file_data, ix_u); 640 break; 641 642 case LTO_ssa_name_ref: 643 ix_u = streamer_read_uhwi (ib); 644 result = (*SSANAMES (fn))[ix_u]; 645 break; 646 647 case LTO_field_decl_ref: 648 ix_u = streamer_read_uhwi (ib); 649 result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u); 650 break; 651 652 case LTO_function_decl_ref: 653 ix_u = streamer_read_uhwi (ib); 654 result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u); 655 break; 656 657 case LTO_type_decl_ref: 658 ix_u = streamer_read_uhwi (ib); 659 result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u); 660 break; 661 
662 case LTO_namespace_decl_ref: 663 ix_u = streamer_read_uhwi (ib); 664 result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u); 665 break; 666 667 case LTO_global_decl_ref: 668 case LTO_result_decl_ref: 669 case LTO_const_decl_ref: 670 case LTO_imported_decl_ref: 671 case LTO_label_decl_ref: 672 case LTO_translation_unit_decl_ref: 673 case LTO_namelist_decl_ref: 674 ix_u = streamer_read_uhwi (ib); 675 result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u); 676 break; 677 678 default: 679 gcc_unreachable (); 680 } 681 682 gcc_assert (result); 683 684 return result; 685 } 686 687 688 /* Read and return a double-linked list of catch handlers from input 689 block IB, using descriptors in DATA_IN. */ 690 691 static struct eh_catch_d * 692 lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in, 693 eh_catch *last_p) 694 { 695 eh_catch first; 696 enum LTO_tags tag; 697 698 *last_p = first = NULL; 699 tag = streamer_read_record_start (ib); 700 while (tag) 701 { 702 tree list; 703 eh_catch n; 704 705 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch); 706 707 /* Read the catch node. */ 708 n = ggc_cleared_alloc<eh_catch_d> (); 709 n->type_list = stream_read_tree (ib, data_in); 710 n->filter_list = stream_read_tree (ib, data_in); 711 n->label = stream_read_tree (ib, data_in); 712 713 /* Register all the types in N->FILTER_LIST. */ 714 for (list = n->filter_list; list; list = TREE_CHAIN (list)) 715 add_type_for_runtime (TREE_VALUE (list)); 716 717 /* Chain N to the end of the list. */ 718 if (*last_p) 719 (*last_p)->next_catch = n; 720 n->prev_catch = *last_p; 721 *last_p = n; 722 723 /* Set the head of the list the first time through the loop. */ 724 if (first == NULL) 725 first = n; 726 727 tag = streamer_read_record_start (ib); 728 } 729 730 return first; 731 } 732 733 734 /* Read and return EH region IX from input block IB, using descriptors 735 in DATA_IN. 
*/ 736 737 static eh_region 738 input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix) 739 { 740 enum LTO_tags tag; 741 eh_region r; 742 743 /* Read the region header. */ 744 tag = streamer_read_record_start (ib); 745 if (tag == LTO_null) 746 return NULL; 747 748 r = ggc_cleared_alloc<eh_region_d> (); 749 r->index = streamer_read_hwi (ib); 750 751 gcc_assert (r->index == ix); 752 753 /* Read all the region pointers as region numbers. We'll fix up 754 the pointers once the whole array has been read. */ 755 r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib); 756 r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib); 757 r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib); 758 759 switch (tag) 760 { 761 case LTO_ert_cleanup: 762 r->type = ERT_CLEANUP; 763 break; 764 765 case LTO_ert_try: 766 { 767 struct eh_catch_d *last_catch; 768 r->type = ERT_TRY; 769 r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in, 770 &last_catch); 771 r->u.eh_try.last_catch = last_catch; 772 break; 773 } 774 775 case LTO_ert_allowed_exceptions: 776 { 777 tree l; 778 779 r->type = ERT_ALLOWED_EXCEPTIONS; 780 r->u.allowed.type_list = stream_read_tree (ib, data_in); 781 r->u.allowed.label = stream_read_tree (ib, data_in); 782 r->u.allowed.filter = streamer_read_uhwi (ib); 783 784 for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l)) 785 add_type_for_runtime (TREE_VALUE (l)); 786 } 787 break; 788 789 case LTO_ert_must_not_throw: 790 { 791 r->type = ERT_MUST_NOT_THROW; 792 r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in); 793 bitpack_d bp = streamer_read_bitpack (ib); 794 stream_input_location (&r->u.must_not_throw.failure_loc, 795 &bp, data_in); 796 } 797 break; 798 799 default: 800 gcc_unreachable (); 801 } 802 803 r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib); 804 805 return r; 806 } 807 808 809 /* Read and return EH landing pad IX from input block IB, using descriptors 810 in DATA_IN. 
*/ 811 812 static eh_landing_pad 813 input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix) 814 { 815 enum LTO_tags tag; 816 eh_landing_pad lp; 817 818 /* Read the landing pad header. */ 819 tag = streamer_read_record_start (ib); 820 if (tag == LTO_null) 821 return NULL; 822 823 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad); 824 825 lp = ggc_cleared_alloc<eh_landing_pad_d> (); 826 lp->index = streamer_read_hwi (ib); 827 gcc_assert (lp->index == ix); 828 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib); 829 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib); 830 lp->post_landing_pad = stream_read_tree (ib, data_in); 831 832 return lp; 833 } 834 835 836 /* After reading the EH regions, pointers to peer and children regions 837 are region numbers. This converts all these region numbers into 838 real pointers into the rematerialized regions for FN. ROOT_REGION 839 is the region number for the root EH region in FN. */ 840 841 static void 842 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region) 843 { 844 unsigned i; 845 vec<eh_region, va_gc> *eh_array = fn->eh->region_array; 846 vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array; 847 eh_region r; 848 eh_landing_pad lp; 849 850 gcc_assert (eh_array && lp_array); 851 852 gcc_assert (root_region >= 0); 853 fn->eh->region_tree = (*eh_array)[root_region]; 854 855 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)] 856 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)] 857 858 /* Convert all the index numbers stored in pointer fields into 859 pointers to the corresponding slots in the EH region array. */ 860 FOR_EACH_VEC_ELT (*eh_array, i, r) 861 { 862 /* The array may contain NULL regions. 
*/ 863 if (r == NULL) 864 continue; 865 866 gcc_assert (i == (unsigned) r->index); 867 FIXUP_EH_REGION (r->outer); 868 FIXUP_EH_REGION (r->inner); 869 FIXUP_EH_REGION (r->next_peer); 870 FIXUP_EH_LP (r->landing_pads); 871 } 872 873 /* Convert all the index numbers stored in pointer fields into 874 pointers to the corresponding slots in the EH landing pad array. */ 875 FOR_EACH_VEC_ELT (*lp_array, i, lp) 876 { 877 /* The array may contain NULL landing pads. */ 878 if (lp == NULL) 879 continue; 880 881 gcc_assert (i == (unsigned) lp->index); 882 FIXUP_EH_LP (lp->next_lp); 883 FIXUP_EH_REGION (lp->region); 884 } 885 886 #undef FIXUP_EH_REGION 887 #undef FIXUP_EH_LP 888 } 889 890 891 /* Initialize EH support. */ 892 893 void 894 lto_init_eh (void) 895 { 896 static bool eh_initialized_p = false; 897 898 if (eh_initialized_p) 899 return; 900 901 /* Contrary to most other FEs, we only initialize EH support when at 902 least one of the files in the set contains exception regions in 903 it. Since this happens much later than the call to init_eh in 904 lang_dependent_init, we have to set flag_exceptions and call 905 init_eh again to initialize the EH tables. */ 906 flag_exceptions = 1; 907 init_eh (); 908 909 eh_initialized_p = true; 910 } 911 912 913 /* Read the exception table for FN from IB using the data descriptors 914 in DATA_IN. */ 915 916 static void 917 input_eh_regions (class lto_input_block *ib, class data_in *data_in, 918 struct function *fn) 919 { 920 HOST_WIDE_INT i, root_region, len; 921 enum LTO_tags tag; 922 923 tag = streamer_read_record_start (ib); 924 if (tag == LTO_null) 925 return; 926 927 lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table); 928 929 gcc_assert (fn->eh); 930 931 root_region = streamer_read_hwi (ib); 932 gcc_assert (root_region == (int) root_region); 933 934 /* Read the EH region array. 
*/ 935 len = streamer_read_hwi (ib); 936 gcc_assert (len == (int) len); 937 if (len > 0) 938 { 939 vec_safe_grow_cleared (fn->eh->region_array, len); 940 for (i = 0; i < len; i++) 941 { 942 eh_region r = input_eh_region (ib, data_in, i); 943 (*fn->eh->region_array)[i] = r; 944 } 945 } 946 947 /* Read the landing pads. */ 948 len = streamer_read_hwi (ib); 949 gcc_assert (len == (int) len); 950 if (len > 0) 951 { 952 vec_safe_grow_cleared (fn->eh->lp_array, len); 953 for (i = 0; i < len; i++) 954 { 955 eh_landing_pad lp = input_eh_lp (ib, data_in, i); 956 (*fn->eh->lp_array)[i] = lp; 957 } 958 } 959 960 /* Read the runtime type data. */ 961 len = streamer_read_hwi (ib); 962 gcc_assert (len == (int) len); 963 if (len > 0) 964 { 965 vec_safe_grow_cleared (fn->eh->ttype_data, len); 966 for (i = 0; i < len; i++) 967 { 968 tree ttype = stream_read_tree (ib, data_in); 969 (*fn->eh->ttype_data)[i] = ttype; 970 } 971 } 972 973 /* Read the table of action chains. */ 974 len = streamer_read_hwi (ib); 975 gcc_assert (len == (int) len); 976 if (len > 0) 977 { 978 if (targetm.arm_eabi_unwinder) 979 { 980 vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len); 981 for (i = 0; i < len; i++) 982 { 983 tree t = stream_read_tree (ib, data_in); 984 (*fn->eh->ehspec_data.arm_eabi)[i] = t; 985 } 986 } 987 else 988 { 989 vec_safe_grow_cleared (fn->eh->ehspec_data.other, len); 990 for (i = 0; i < len; i++) 991 { 992 uchar c = streamer_read_uchar (ib); 993 (*fn->eh->ehspec_data.other)[i] = c; 994 } 995 } 996 } 997 998 /* Reconstruct the EH region tree by fixing up the peer/children 999 pointers. */ 1000 fixup_eh_region_pointers (fn, root_region); 1001 1002 tag = streamer_read_record_start (ib); 1003 lto_tag_check_range (tag, LTO_null, LTO_null); 1004 } 1005 1006 1007 /* Make a new basic block with index INDEX in function FN. 
*/

static basic_block
make_new_block (struct function *fn, unsigned int index)
{
  basic_block bb = alloc_block ();
  bb->index = index;
  /* Register the new block in FN's block array and bump the count so
     BASIC_BLOCK_FOR_FN finds it on subsequent stream references.  */
  SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
  n_basic_blocks_for_fn (fn)++;
  return bb;
}


/* Read the CFG for function FN from input block IB.  The read order
   must exactly mirror the writer: profile status, block count, a -1
   terminated list of (block index, outgoing edges) records, a -1
   terminated next_bb chain, and finally the loop tree.  */

static void
input_cfg (class lto_input_block *ib, class data_in *data_in,
	   struct function *fn)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
						   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  /* Pre-size the block and label maps so indices streamed below can be
     used directly.  */
  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  /* Blocks are streamed as (index, edge list) pairs, terminated by an
     index of -1.  Blocks may be referenced (as an edge destination)
     before their own record appears, hence the lazy creation.  */
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      if (bb == NULL)
	bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
	{
	  bitpack_d bp = streamer_read_bitpack (ib);
	  unsigned int dest_index = bp_unpack_var_len_unsigned (&bp);
	  unsigned int edge_flags = bp_unpack_var_len_unsigned (&bp);
	  basic_block dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

	  if (dest == NULL)
	    dest = make_new_block (fn, dest_index);

	  edge e = make_edge (bb, dest, edge_flags);
	  /* goto_locus is streamed through the location cache and only
	     becomes final once apply_location_cache runs.  */
	  data_in->location_cache.input_location_and_block (&e->goto_locus,
							    &bp, ib, data_in);
	  e->probability = profile_probability::stream_in (ib);

	}

      index = streamer_read_hwi (ib);
    }

  /* Rebuild the linear prev_bb/next_bb chain, again terminated by -1,
     starting from the entry block.  */
  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      int header_index = streamer_read_hwi (ib);
      /* A header index of -1 marks a loop slot that is unused on the
	 writer side; keep the array indices in sync with a NULL entry.  */
      if (header_index == -1)
	{
	  loops->larray->quick_push (NULL);
	  continue;
	}

      class loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
	loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->unroll = streamer_read_hwi (ib);
      loop->owned_clique = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->finite_p = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
	 all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}


/* Read the SSA names array for function FN from DATA_IN using input
   block IB.  Names are streamed as (version, default-def flag, var)
   records terminated by a zero version number.  */

static void
input_ssa_names (class lto_input_block *ib, class data_in *data_in,
		 struct function *fn)
{
  unsigned int i, size;

  size = streamer_read_uhwi (ib);
  init_ssanames (fn, size);

  i = streamer_read_uhwi (ib);
  while (i)
    {
      tree ssa_name, name;
      bool is_default_def;

      /* Skip over the elements that had been freed.  Padding with
	 NULL_TREE keeps make_ssa_name_fn allocating version I next.  */
      while (SSANAMES (fn)->length () < i)
	SSANAMES (fn)->quick_push (NULL_TREE);

      is_default_def = (streamer_read_uchar (ib) != 0);
      name = stream_read_tree (ib, data_in);
      ssa_name = make_ssa_name_fn (fn, name, NULL);

      if (is_default_def)
	{
	  set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
	  /* Default definitions have no real defining statement.  */
	  SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
	}

      i = streamer_read_uhwi (ib);
    }
}


/* Go through all NODE edges and fixup call_stmt pointers
   so they point to STMTS.  Each edge's lto_stmt_uid is a 1-based
   index into STMTS assigned before the body was streamed out; it is
   cleared here once resolved.  */

static void
fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
			 struct function *fn)
{
#define STMT_UID_NOT_IN_RANGE(uid) \
  (gimple_stmt_max_uid (fn) < uid || uid == 0)

  struct cgraph_edge *cedge;
  struct ipa_ref *ref = NULL;
  unsigned int i;

  for (cedge = node->callees; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
	fatal_error (input_location,
		     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      cedge->lto_stmt_uid = 0;
      if (!cedge->call_stmt)
	fatal_error (input_location,
		     "Cgraph edge statement index not found");
    }
  for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
	fatal_error (input_location,
		     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      cedge->lto_stmt_uid = 0;
      if (!cedge->call_stmt)
	fatal_error (input_location, "Cgraph edge statement index not found");
    }
  /* IPA references may or may not be associated with a statement;
     a zero uid means "no statement" and is skipped.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->lto_stmt_uid)
      {
	if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
	  fatal_error (input_location,
		       "Reference statement index out of range");
	ref->stmt = stmts[ref->lto_stmt_uid - 1];
	ref->lto_stmt_uid = 0;
	if (!ref->stmt)
	  fatal_error (input_location, "Reference statement index not found");
      }
}


/* Fixup call_stmt pointers in NODE and all clones.  Walks up to the
   root of the clone tree first (clones share the master's body), then
   visits the whole clone tree in pre-order, skipping thunks which have
   no gimple body of their own.  */

static void
fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
  struct cgraph_node *node;
  struct function *fn;

  while (orig->clone_of)
    orig = orig->clone_of;
  fn = DECL_STRUCT_FUNCTION (orig->decl);

  if (!orig->thunk.thunk_p)
    fixup_call_stmt_edges_1 (orig, stmts, fn);
  if (orig->clones)
    for (node = orig->clones; node != orig;)
      {
	if (!node->thunk.thunk_p)
	  fixup_call_stmt_edges_1 (node, stmts, fn);
	/* Pre-order traversal: descend into children first, then
	   siblings, then climb back toward ORIG.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}


/* Input the base body of struct function FN from DATA_IN
   using input block IB: static chain, local decls, IL properties,
   the bitpacked flag set, source locations and (optionally) the
   instance discriminator.  */

static void
input_struct_function_base (struct function *fn, class data_in *data_in,
			    class lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
	{
	  tree t = stream_read_tree (ib, data_in);
	  (*fn->local_decls)[i] = t;
	}
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN.  The unpack order must match the
     pack order on the writer side exactly.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->calls_eh_return = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  stream_input_location (&fn->function_start_locus, &bp, data_in);
  stream_input_location (&fn->function_end_locus, &bp, data_in);

  /* Restore the instance discriminators if present.  */
  int instance_number = bp_unpack_value (&bp, 1);
  if (instance_number)
    {
      instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
      maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
    }
}


/* Read the body of function FN_DECL from DATA_IN using input block IB.
*/

static void
input_function (tree fn_decl, class data_in *data_in,
		class lto_input_block *ib, class lto_input_block *ib_cfg,
		cgraph_node *node)
{
  struct function *fn;
  enum LTO_tags tag;
  gimple **stmts;
  basic_block bb;

  tag = streamer_read_record_start (ib);
  lto_tag_check (tag, LTO_function);

  /* Read decls for parameters and args.  */
  DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
  DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);

  /* Read debug args if available.  */
  unsigned n_debugargs = streamer_read_uhwi (ib);
  if (n_debugargs)
    {
      vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
      vec_safe_grow (*debugargs, n_debugargs);
      for (unsigned i = 0; i < n_debugargs; ++i)
	(**debugargs)[i] = stream_read_tree (ib, data_in);
    }

  /* Read the tree of lexical scopes for the function.  The leaf
     BLOCKs that follow are read only for their side effect of being
     entered into the reader cache; their values are discarded.  */
  DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
  unsigned block_leaf_count = streamer_read_uhwi (ib);
  while (block_leaf_count--)
    stream_read_tree (ib, data_in);

  /* A zero flag means the section carries no body to materialize.  */
  if (!streamer_read_uhwi (ib))
    return;

  push_struct_function (fn_decl);
  fn = DECL_STRUCT_FUNCTION (fn_decl);
  init_tree_ssa (fn);
  /* We input IL in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;

  gimple_register_cfg_hooks ();

  input_struct_function_base (fn, data_in, ib);
  input_cfg (ib_cfg, data_in, fn);

  /* Read all the SSA names.  */
  input_ssa_names (ib, data_in, fn);

  /* Read the exception handling regions in the function.  */
  input_eh_regions (ib, data_in, fn);

  gcc_assert (DECL_INITIAL (fn_decl));
  DECL_SAVED_TREE (fn_decl) = NULL_TREE;

  /* Read all the basic blocks.  */
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      input_bb (ib, tag, data_in, fn,
		node->count_materialization_scale);
      tag = streamer_read_record_start (ib);
    }

  /* Finalize gimple_location/gimple_block of stmts and phis.  */
  data_in->location_cache.apply_location_cache ();

  /* Fix up the call statements that are mentioned in the callgraph
     edges.  First renumber every phi and statement with a fresh uid
     in walk order, matching the uids the writer assigned.  */
  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
  /* Then build the uid -> stmt table used by fixup_call_stmt_edges and
     the IPA stmt fixup hooks.  */
  stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi = gsi_start_phis (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gsi_next (&bsi);
	  stmts[gimple_uid (stmt)] = stmt;
	}
      bsi = gsi_start_bb (bb);
      while (!gsi_end_p (bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  bool remove = false;
	  /* If we're recompiling LTO objects with debug stmts but
	     we're not supposed to have debug stmts, remove them now.
	     We can't remove them earlier because this would cause uid
	     mismatches in fixups, but we can do it at this point, as
	     long as debug stmts don't require fixups.
	     Similarly remove all IFN_*SAN_* internal calls   */
	  if (!flag_wpa)
	    {
	      if (is_gimple_debug (stmt)
		  && (gimple_debug_nonbind_marker_p (stmt)
		      ? !MAY_HAVE_DEBUG_MARKER_STMTS
		      : !MAY_HAVE_DEBUG_BIND_STMTS))
		remove = true;
	      /* In case the linemap overflows locations can be dropped
		 to zero.  Thus do not keep nonsensical inline entry markers
		 we'd later ICE on.  */
	      tree block;
	      if (gimple_debug_inline_entry_p (stmt)
		  && (((block = gimple_block (stmt))
		       && !inlined_function_outer_scope_p (block))
		      || !debug_inline_points))
		remove = true;
	      if (is_gimple_call (stmt)
		  && gimple_call_internal_p (stmt))
		{
		  /* Sanitizer calls whose corresponding -fsanitize=
		     option is off in this compilation are neutralized
		     to IFN_NOP rather than removed, preserving uids.  */
		  bool replace = false;
		  switch (gimple_call_internal_fn (stmt))
		    {
		    case IFN_UBSAN_NULL:
		      if ((flag_sanitize
			   & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_BOUNDS:
		      if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_VPTR:
		      if ((flag_sanitize & SANITIZE_VPTR) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_OBJECT_SIZE:
		      if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
			replace = true;
		      break;
		    case IFN_UBSAN_PTR:
		      if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
			replace = true;
		      break;
		    case IFN_ASAN_MARK:
		      if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
			replace = true;
		      break;
		    case IFN_TSAN_FUNC_EXIT:
		      if ((flag_sanitize & SANITIZE_THREAD) == 0)
			replace = true;
		      break;
		    default:
		      break;
		    }
		  if (replace)
		    {
		      gimple_call_set_internal_fn (as_a <gcall *> (stmt),
						   IFN_NOP);
		      update_stmt (stmt);
		    }
		}
	    }
	  if (remove)
	    {
	      gimple_stmt_iterator gsi = bsi;
	      gsi_next (&bsi);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      gsi_remove (&gsi, true);
	    }
	  else
	    {
	      gsi_next (&bsi);
	      stmts[gimple_uid (stmt)] = stmt;

	      /* Remember that the input function has begin stmt
		 markers, so that we know to expect them when emitting
		 debug info.  */
	      if (!cfun->debug_nonbind_markers
		  && gimple_debug_nonbind_marker_p (stmt))
		cfun->debug_nonbind_markers = true;
	    }
	}
    }

  /* Set the gimple body to the statement sequence in the entry
     basic block.  FIXME lto, this is fairly hacky.  The existence
     of a gimple body is used by the cgraph routines, but we should
     really use the presence of the CFG.  */
  {
    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
    gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
  }

  update_max_bb_count ();
  fixup_call_stmt_edges (node, stmts);
  execute_all_ipa_stmt_fixups (node, stmts);

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  free (stmts);
  pop_cfun ();
}

/* Read the initializer of variable VAR from DATA_IN using input
   block IB and store it as DECL_INITIAL.  */

static void
input_constructor (tree var, class data_in *data_in,
		   class lto_input_block *ib)
{
  DECL_INITIAL (var) = stream_read_tree (ib, data_in);
}


/* Read the body from DATA for function NODE and fill it in.
   FILE_DATA are the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   section type is LTO_section_function_body, FN must be the decl for
   that function.
*/

static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
			      const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  class data_in *data_in;
  int cfg_offset;
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  /* The section layout is: header, [CFG stream (functions only)],
     main stream, string table.  Compute the offsets accordingly.  */
  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state.  */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;


      /* Set up the struct function.  Remember FROM, the cache position
	 before reading, so only locally streamed trees are fixed up
	 below.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
			       file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
				  file_data->mode_table);
	  input_function (fn_decl, data_in, &ib_main, &ib_cfg,
			  dyn_cast <cgraph_node *>(node));
	}
      else
	input_constructor (fn_decl, data_in, &ib_main);
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  */
      {
	struct streamer_tree_cache_d *cache = data_in->reader_cache;
	unsigned len = cache->nodes.length ();
	unsigned i;
	for (i = len; i-- > from;)
	  {
	    tree t = streamer_tree_cache_get_tree (cache, i);
	    if (t == NULL_TREE)
	      continue;

	    if (TYPE_P (t))
	      {
		/* Locally streamed types get a trivial canonical type
		   and are linked into their main variant's list.  */
		gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
		if (type_with_alias_set_p (t)
		    && canonical_type_used_p (t))
		  TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
		if (TYPE_MAIN_VARIANT (t) != t)
		  {
		    gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
		    TYPE_NEXT_VARIANT (t)
		      = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
		    TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
		  }
	      }
	  }
      }

      /* Restore decl state */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}


/* Read the body of NODE using DATA.  FILE_DATA holds the global
   decls and types.  */

void
lto_input_function_body (struct lto_file_decl_data *file_data,
			 struct cgraph_node *node, const char *data)
{
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}

/* Read the constructor of NODE using DATA.  FILE_DATA holds the global
   decls and types.  */

void
lto_input_variable_constructor (struct lto_file_decl_data *file_data,
				struct varpool_node *node, const char *data)
{
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}


/* Queue of accumulated decl -> DIE mappings.  Similar to locations those
   are only applied to prevailing tree nodes during tree merging.  */
vec<dref_entry> dref_queue;

/* Read the physical representation of a tree node EXPR from
   input block IB using the per-file context in DATA_IN.
*/

static void
lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
{
  /* Read all the bitfield values in EXPR.  Note that for LTO, we
     only write language-independent bitfields, so no more unpacking is
     needed.  */
  streamer_read_tree_bitfields (ib, data_in, expr);

  /* Read all the pointer fields in EXPR.  */
  streamer_read_tree_body (ib, data_in, expr);

  /* Read any LTO-specific data not read by the tree streamer.  Do not use
     stream_read_tree here since that flushes the dref_queue in mids of
     SCC reading.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    DECL_INITIAL (expr)
      = lto_input_tree_1 (ib, data_in, streamer_read_record_start (ib), 0);

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_register_external_die.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *str = streamer_read_string (data_in, ib);
      if (str)
	{
	  /* Queue the mapping; it is applied later (see dref_queue
	     drains in lto_input_tree) only for prevailing nodes.  */
	  unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
	  dref_entry e = { expr, str, off };
	  dref_queue.safe_push (e);
	}
    }
}

/* Read the physical representation of a tree node with tag TAG from
   input block IB using the per-file context in DATA_IN.  HASH is the
   node's streaming hash, recorded in the reader cache.  */

static tree
lto_read_tree (class lto_input_block *ib, class data_in *data_in,
	       enum LTO_tags tag, hashval_t hash)
{
  /* Instantiate a new tree node.  */
  tree result = streamer_alloc_tree (ib, data_in, tag);

  /* Enter RESULT in the reader cache.  This will make RESULT
     available so that circular references in the rest of the tree
     structure can be resolved in subsequent calls to stream_read_tree.  */
  streamer_tree_cache_append (data_in->reader_cache, result, hash);

  lto_read_tree_1 (ib, data_in, result);

  return result;
}


/* Populate the reader cache with trees materialized from the SCC
   following in the IB, DATA_IN stream.
   If SHARED_SCC is true we input LTO_tree_scc.  Returns the SCC hash
   (0 for non-shared SCCs) and stores the number of trees read in *LEN
   and the SCC entry length in *ENTRY_LEN.  */

hashval_t
lto_input_scc (class lto_input_block *ib, class data_in *data_in,
	       unsigned *len, unsigned *entry_len, bool shared_scc)
{
  unsigned size = streamer_read_uhwi (ib);
  hashval_t scc_hash = 0;
  unsigned scc_entry_len = 1;

  if (shared_scc)
    {
      /* For shared SCCs the writer doubles SIZE and sets the low bit
	 when an explicit entry length follows.  */
      if (size & 1)
	scc_entry_len = streamer_read_uhwi (ib);
      size /= 2;
      scc_hash = streamer_read_uhwi (ib);
    }

  if (size == 1)
    {
      enum LTO_tags tag = streamer_read_record_start (ib);
      lto_input_tree_1 (ib, data_in, tag, scc_hash);
    }
  else
    {
      unsigned int first = data_in->reader_cache->nodes.length ();
      tree result;

      /* Materialize size trees by reading their headers.  All nodes
	 are allocated and cached first so the second pass can resolve
	 intra-SCC back references.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  enum LTO_tags tag = streamer_read_record_start (ib);
	  /* References and singleton-only tags cannot appear inside a
	     multi-node SCC.  */
	  if (tag == LTO_null
	      || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
	      || tag == LTO_tree_pickle_reference
	      || tag == LTO_integer_cst
	      || tag == LTO_tree_scc
	      || tag == LTO_trees)
	    gcc_unreachable ();

	  result = streamer_alloc_tree (ib, data_in, tag);
	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
	}

      /* Read the tree bitpacks and references.  */
      for (unsigned i = 0; i < size; ++i)
	{
	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
						 first + i);
	  lto_read_tree_1 (ib, data_in, result);
	}
    }

  *len = size;
  *entry_len = scc_entry_len;
  return scc_hash;
}


/* Read a tree from input block IB using the per-file context in
   DATA_IN.
This context is used, for example, to resolve references
   to previously read nodes.  TAG dispatches between null, indexable
   references, pickled back references, shared integer constants and
   freshly materialized nodes.  */

tree
lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
		  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
	 in IB is the index into the table where we expect to find
	 that tree.  */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
	 the reader cache.  */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
	 existing tree integer constant merging code.  */
      tree type = stream_read_tree (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];

      for (i = 0; i < len; i++)
	a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
      result = wide_int_to_tree (type, wide_int::from_array
				 (a, len, TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc || tag == LTO_trees)
    /* SCC streams must go through lto_input_scc, never this path.  */
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB.  */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}

/* Read a tree from input block IB using the per-file context in
   DATA_IN, first draining any LTO_trees groups that precede it and
   registering the queued decl -> DIE mappings they produced.  */

tree
lto_input_tree (class lto_input_block *ib, class data_in *data_in)
{
  enum LTO_tags tag;

  /* Input pickled trees needed to stream in the reference.  */
  while ((tag = streamer_read_record_start (ib)) == LTO_trees)
    {
      unsigned len, entry_len;
      lto_input_scc (ib, data_in, &len, &entry_len, false);

      /* Register DECLs with the debuginfo machinery.  */
      while (!dref_queue.is_empty ())
	{
	  dref_entry e = dref_queue.pop ();
	  debug_hooks->register_external_die (e.decl, e.sym, e.off);
	}
    }
  tree t = lto_input_tree_1 (ib, data_in, tag, 0);

  /* A singleton read above may have queued at most one mapping.  */
  if (!dref_queue.is_empty ())
    {
      dref_entry e = dref_queue.pop ();
      debug_hooks->register_external_die (e.decl, e.sym, e.off);
      gcc_checking_assert (dref_queue.is_empty ());
    }
  return t;
}


/* Input toplevel asms from FILE_DATA's asm section, offsetting the
   streamed symtab order by ORDER_BASE.  */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_asm, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  tree str;

  /* No asm section in this file; nothing to do.  */
  if (! data)
    return;

  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
		      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);

  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      if (node->order >= symtab->order)
	symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}


/* Input mode table.
*/

void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_mode_table, &len);
  if (! data)
    {
      internal_error ("cannot read LTO mode table from %s",
		      file_data->file_name);
      return;
    }

  /* TABLE maps the writer's 8-bit mode numbers to this host's machine
     modes; entries left zero (VOIDmode) mean "no match found yet".  */
  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
				header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
  /* Each record describes one mode of the writer; the stream is
     terminated by a VOIDmode entry.  */
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      enum mode_class mclass
	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      switch (mclass)
	{
	case MODE_FRACT:
	case MODE_UFRACT:
	case MODE_ACCUM:
	case MODE_UACCUM:
	  ibit = bp_unpack_value (&bp, 8);
	  fbit = bp_unpack_value (&bp, 8);
	  break;
	case MODE_FLOAT:
	case MODE_DECIMAL_FLOAT:
	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
						    &real_fmt_len);
	  break;
	default:
	  break;
	}
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
	 if not found, fallback to all modes.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
	for (machine_mode mr = pass ? VOIDmode
				    : GET_CLASS_NARROWEST_MODE (mclass);
	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
	     pass ? mr = (machine_mode) (mr + 1)
		  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
	  if (GET_MODE_CLASS (mr) != mclass
	      || maybe_ne (GET_MODE_SIZE (mr), size)
	      || maybe_ne (GET_MODE_PRECISION (mr), prec)
	      || (inner == m
		  ? GET_MODE_INNER (mr) != mr
		  : GET_MODE_INNER (mr) != table[(int) inner])
	      || GET_MODE_IBIT (mr) != ibit
	      || GET_MODE_FBIT (mr) != fbit
	      || maybe_ne (GET_MODE_NUNITS (mr), nunits))
	    continue;
	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
	    continue;
	  else
	    {
	      /* Found a matching host mode; setting PASS to 2 before
		 the break terminates the outer loop with PASS == 3.  */
	      table[m] = mr;
	      pass = 2;
	      break;
	    }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      /* PASS == 2 here means both passes completed without a match.  */
      if (pass == 2)
	{
	  switch (mclass)
	    {
	    case MODE_VECTOR_BOOL:
	    case MODE_VECTOR_INT:
	    case MODE_VECTOR_FLOAT:
	    case MODE_VECTOR_FRACT:
	    case MODE_VECTOR_UFRACT:
	    case MODE_VECTOR_ACCUM:
	    case MODE_VECTOR_UACCUM:
	      /* For unsupported vector modes just use BLKmode,
		 if the scalar mode is supported.  */
	      if (table[(int) inner] != VOIDmode)
		{
		  table[m] = BLKmode;
		  break;
		}
	      /* FALLTHRU */
	    default:
	      /* This is only used for offloading-target compilations and
		 is a user-facing error.  Give a better error message for
		 the common modes; see also mode-classes.def.  */
	      if (mclass == MODE_FLOAT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit-precision floating-point numbers "
			     "unsupported (mode %qs)", TARGET_MACHINE,
			     prec.to_constant (), mname);
	      else if (mclass == MODE_DECIMAL_FLOAT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit-precision decimal floating-point "
			     "numbers unsupported (mode %qs)", TARGET_MACHINE,
			     prec.to_constant (), mname);
	      else if (mclass == MODE_COMPLEX_FLOAT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit-precision complex floating-point "
			     "numbers unsupported (mode %qs)", TARGET_MACHINE,
			     prec.to_constant (), mname);
	      else if (mclass == MODE_INT)
		fatal_error (UNKNOWN_LOCATION,
			     "%s - %u-bit integer numbers unsupported (mode "
			     "%qs)", TARGET_MACHINE, prec.to_constant (), mname);
	      else
		fatal_error (UNKNOWN_LOCATION, "%s - unsupported mode %qs",
			     TARGET_MACHINE, mname);
	      break;
	    }
	}
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}


/* Initialization for the LTO reader.  */

void
lto_reader_init (void)
{
  lto_streamer_init ();
  file_name_hash_table
    = new hash_table<string_slot_hasher> (37);
  string_slot_allocator = new object_allocator <struct string_slot>
			    ("line map file name hash");
  gcc_obstack_init (&file_name_obstack);
}

/* Free hash table used to stream in location file names.  */

void
lto_free_file_name_hash (void)
{
  delete file_name_hash_table;
  file_name_hash_table = NULL;
  delete string_slot_allocator;
  string_slot_allocator = NULL;
  delete path_name_pair_hash_table;
  path_name_pair_hash_table = NULL;
  delete string_pair_map_allocator;
  string_pair_map_allocator = NULL;
  /* file_name_obstack must stay allocated since it is referred to by
     line map table.  */
}


/* Create a new data_in object for FILE_DATA.  STRINGS is the string
   table to use with LEN strings.  RESOLUTIONS is the vector of linker
   resolutions (NULL if not using a linker plugin).  The caller owns
   the result and releases it with lto_data_in_delete.  */

class data_in *
lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
		    unsigned len,
		    vec<ld_plugin_symbol_resolution_t> resolutions)
{
  class data_in *data_in = new (class data_in);
  data_in->file_data = file_data;
  data_in->strings = strings;
  data_in->strings_len = len;
  data_in->globals_resolution = resolutions;
  data_in->reader_cache = streamer_tree_cache_create (false, false, true);
  return data_in;
}


/* Remove DATA_IN.  Releases the resolutions vector and the reader
   cache along with the object itself.  */

void
lto_data_in_delete (class data_in *data_in)
{
  data_in->globals_resolution.release ();
  streamer_tree_cache_delete (data_in->reader_cache);
  delete data_in;
}