/* Miscellaneous utilities for tree streaming.  Things that are used
   in both input and output are here.

   Copyright (C) 2011-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@google.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-streamer.h"
#include "cgraph.h"

/* Table indexed by machine_mode, used for two different purposes.
   During streaming out we record here a non-zero value for every mode
   that was streamed out.
   During streaming in, we translate the on-disk mode through this
   table.  For normal LTO it is set to the identity mapping; for
   ACCEL_COMPILER it is filled in depending on the mode_table content.  */
unsigned char streamer_mode_table[1 << 8];

/* Check that all the TS_* structures handled by the streamer_write_* and
   streamer_read_* routines are exactly ALL the structures defined in
   treestruct.def.  */

void
streamer_check_handled_ts_structures (void)
{
  bool handled_p[LAST_TS_ENUM];
  unsigned i;

  memset (&handled_p, 0, sizeof (handled_p));

  /* These are the TS_* structures that are either handled or
     explicitly ignored by the streamer routines.  */
  handled_p[TS_BASE] = true;
  handled_p[TS_TYPED] = true;
  handled_p[TS_COMMON] = true;
  handled_p[TS_INT_CST] = true;
  handled_p[TS_REAL_CST] = true;
  handled_p[TS_FIXED_CST] = true;
  handled_p[TS_VECTOR] = true;
  handled_p[TS_STRING] = true;
  handled_p[TS_COMPLEX] = true;
  handled_p[TS_IDENTIFIER] = true;
  handled_p[TS_DECL_MINIMAL] = true;
  handled_p[TS_DECL_COMMON] = true;
  handled_p[TS_DECL_WRTL] = true;
  handled_p[TS_DECL_NON_COMMON] = true;
  handled_p[TS_DECL_WITH_VIS] = true;
  handled_p[TS_FIELD_DECL] = true;
  handled_p[TS_VAR_DECL] = true;
  handled_p[TS_PARM_DECL] = true;
  handled_p[TS_LABEL_DECL] = true;
  handled_p[TS_RESULT_DECL] = true;
  handled_p[TS_CONST_DECL] = true;
  handled_p[TS_TYPE_DECL] = true;
  handled_p[TS_FUNCTION_DECL] = true;
  handled_p[TS_TYPE_COMMON] = true;
  handled_p[TS_TYPE_WITH_LANG_SPECIFIC] = true;
  handled_p[TS_TYPE_NON_COMMON] = true;
  handled_p[TS_LIST] = true;
  handled_p[TS_VEC] = true;
  handled_p[TS_EXP] = true;
  handled_p[TS_SSA_NAME] = true;
  handled_p[TS_BLOCK] = true;
  handled_p[TS_BINFO] = true;
  handled_p[TS_STATEMENT_LIST] = true;
  handled_p[TS_CONSTRUCTOR] = true;
  handled_p[TS_OMP_CLAUSE] = true;
  handled_p[TS_OPTIMIZATION] = true;
  handled_p[TS_TARGET_OPTION] = true;
  handled_p[TS_TRANSLATION_UNIT_DECL] = true;

  /* Anything not marked above will trigger the following assertion.
     If this assertion triggers, it means that there is a new TS_*
     structure that should be handled by the streamer.  */
  for (i = 0; i < LAST_TS_ENUM; i++)
    gcc_assert (handled_p[i]);
}


/* Helper for streamer_tree_cache_insert_1.  Add T to CACHE->NODES at
   slot IX.  HASH is the hash value recorded in CACHE->HASHES for that
   slot, when the hashes vector is in use.  */

static void
streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
				       unsigned ix, tree t, hashval_t hash)
{
  /* We're either replacing an old element or appending consecutively;
     any other IX would leave a hole in the vectors.  The nodes and
     hashes vectors are each optional and updated independently.  */
  if (cache->nodes.exists ())
    {
      if (cache->nodes.length () == ix)
	cache->nodes.safe_push (t);
      else
	cache->nodes[ix] = t;
    }
  if (cache->hashes.exists ())
    {
      if (cache->hashes.length () == ix)
	cache->hashes.safe_push (hash);
      else
	cache->hashes[ix] = hash;
    }
}


/* Helper for streamer_tree_cache_insert and streamer_tree_cache_insert_at.
   CACHE, T, and IX_P are as in streamer_tree_cache_insert.

   If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
   slot in the cache.  Otherwise, T is inserted at the position indicated
   in *IX_P.

   If T already existed in CACHE, return true.  Otherwise,
   return false.  */

static bool
streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
			      tree t, hashval_t hash, unsigned *ix_p,
			      bool insert_at_next_slot_p)
{
  bool existed_p;

  gcc_assert (t);

  /* IX is a reference into the map's payload: assigning to it below
     updates T's cache slot recorded in the map in place.  */
  unsigned int &ix = cache->node_map->get_or_insert (t, &existed_p);
  if (!existed_p)
    {
      /* Determine the next slot to use in the cache.  */
      if (insert_at_next_slot_p)
	ix = cache->next_idx++;
      else
	ix = *ix_p;

      streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
    }
  else
    {
      if (!insert_at_next_slot_p && ix != *ix_p)
	{
	  /* If the caller wants to insert T at a specific slot
	     location, and ENTRY->TO does not match *IX_P, add T to
	     the requested location slot.  */
	  ix = *ix_p;
	  streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
	}
    }

  /* Report back the slot used, whether newly assigned or preexisting.  */
  if (ix_p)
    *ix_p = ix;

  return existed_p;
}


/* Insert tree node T in CACHE.  If T already existed in the cache
   return true.  Otherwise, return false.

   If IX_P is non-null, update it with the index into the cache where
   T has been stored.
*/

bool
streamer_tree_cache_insert (struct streamer_tree_cache_d *cache, tree t,
			    hashval_t hash, unsigned *ix_p)
{
  return streamer_tree_cache_insert_1 (cache, t, hash, ix_p, true);
}


/* Replace the tree node with T in CACHE at slot IX.  */

void
streamer_tree_cache_replace_tree (struct streamer_tree_cache_d *cache,
				  tree t, unsigned ix)
{
  /* Preserve the hash already recorded for slot IX, if hashes are
     being tracked; only the node changes.  */
  hashval_t hash = 0;
  if (cache->hashes.exists ())
    hash = streamer_tree_cache_get_hash (cache, ix);
  if (!cache->node_map)
    streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
  else
    streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
}


/* Appends tree node T to CACHE, even if T already existed in it.  */

void
streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
			    tree t, hashval_t hash)
{
  /* Always consume a fresh slot, then record T there.  */
  unsigned ix = cache->next_idx++;
  if (!cache->node_map)
    streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
  else
    streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
}

/* Return true if tree node T exists in CACHE, otherwise false.  If IX_P is
   not NULL, write to *IX_P the index into the cache where T is stored
   ((unsigned)-1 if T is not found).  */

bool
streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
			    unsigned *ix_p)
{
  unsigned *slot;
  bool retval;
  unsigned ix;

  gcc_assert (t);

  slot = cache->node_map->get (t);
  if (slot == NULL)
    {
      retval = false;
      ix = -1;
    }
  else
    {
      retval = true;
      ix = *slot;
    }

  if (ix_p)
    *ix_p = ix;

  return retval;
}


/* Record NODE in CACHE, recursing into component types and fields so
   that the preload sequence is identical for every invocation.  */

static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload simply don't.
     ???  We'd want to verify that this doesn't happen, or alternatively
     do not recurse at all.  */
  if (node == char_type_node)
    return;

  /* Boolean nodes are frontend dependent and must never be preloaded;
     preload_common_nodes skips them explicitly.  */
  gcc_checking_assert (node != boolean_type_node
		       && node != boolean_true_node
		       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries we'll simply stream
     a random other tree.  A NULL tree never will be looked up so it
     doesn't matter which tree we replace it with, just to be sure
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* ???  FIXME, devise a better hash value.  But the hash needs to be equal
     for all frontend and lto1 invocations.  So just use the position
     in the cache as hash value.  */
  streamer_tree_cache_append (cache, node, cache->nodes.length ());

  if (POINTER_TYPE_P (node)
      || TREE_CODE (node) == COMPLEX_TYPE
      || TREE_CODE (node) == ARRAY_TYPE)
    record_common_node (cache, TREE_TYPE (node));
  else if (TREE_CODE (node) == RECORD_TYPE)
    {
      /* The FIELD_DECLs of structures should be shared, so that every
	 COMPONENT_REF uses the same tree node when referencing a field.
	 Pointer equality between FIELD_DECLs is used by the alias
	 machinery to compute overlapping component references (see
	 nonoverlapping_component_refs_p and
	 nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
	record_common_node (cache, f);
    }
}


/* Preload common nodes into CACHE and make sure they are merged
   properly according to the gimple type table.  The exact set and
   order of nodes recorded here must match between the frontends and
   lto1, so every exclusion below is frontend- or flag-dependent.  */

static void
preload_common_nodes (struct streamer_tree_cache_d *cache)
{
  unsigned i;

  for (i = 0; i < itk_none; i++)
    /* Skip itk_char.  char_type_node is dependent on -f[un]signed-char.  */
    if (i != itk_char)
      record_common_node (cache, integer_types[i]);

  for (i = 0; i < stk_type_kind_last; i++)
    record_common_node (cache, sizetype_tab[i]);

  for (i = 0; i < TI_MAX; i++)
    /* Skip boolean type and constants, they are frontend dependent.  */
    if (i != TI_BOOLEAN_TYPE
	&& i != TI_BOOLEAN_FALSE
	&& i != TI_BOOLEAN_TRUE
	/* MAIN_IDENTIFIER is not always initialized by Fortran FE.  */
	&& i != TI_MAIN_IDENTIFIER
	/* PID_TYPE is initialized only by C family front-ends.  */
	&& i != TI_PID_TYPE
	/* Skip optimization and target option nodes; they depend on flags.  */
	&& i != TI_OPTIMIZATION_DEFAULT
	&& i != TI_OPTIMIZATION_CURRENT
	&& i != TI_TARGET_OPTION_DEFAULT
	&& i != TI_TARGET_OPTION_CURRENT
	&& i != TI_CURRENT_TARGET_PRAGMA
	&& i != TI_CURRENT_OPTIMIZE_PRAGMA
	/* Skip va_list* related nodes if offloading.  For native LTO
	   we want them to be merged for the stdarg pass, for offloading
	   they might not be identical between host and offloading target.  */
	&& (!lto_stream_offload_p
	    || (i != TI_VA_LIST_TYPE
		&& i != TI_VA_LIST_GPR_COUNTER_FIELD
		&& i != TI_VA_LIST_FPR_COUNTER_FIELD)))
      record_common_node (cache, global_trees[i]);
}


/* Create a cache of pickled nodes.  WITH_HASHES, WITH_MAP and WITH_VEC
   select which of the optional members (hashes vector, tree-to-index
   map, nodes vector) are allocated.  */

struct streamer_tree_cache_d *
streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
{
  struct streamer_tree_cache_d *cache;

  cache = XCNEW (struct streamer_tree_cache_d);

  if (with_map)
    cache->node_map = new hash_map<tree, unsigned> (251);
  cache->next_idx = 0;
  if (with_vec)
    cache->nodes.create (165);
  if (with_hashes)
    cache->hashes.create (165);

  /* Load all the well-known tree nodes that are always created by
     the compiler on startup.  This prevents writing them out
     unnecessarily.  */
  preload_common_nodes (cache);

  return cache;
}


/* Delete the streamer cache C.
*/

void
streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
{
  /* A NULL cache means there is nothing to tear down.  */
  if (c)
    {
      /* Release the optional members before freeing the cache itself;
	 the map was allocated with new, the cache with XCNEW.  */
      delete c->node_map;
      c->node_map = NULL;
      c->nodes.release ();
      c->hashes.release ();
      free (c);
    }
}