/* Miscellaneous utilities for tree streaming.  Things that are used
   in both input and output are here.

   Copyright (C) 2011-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-streamer.h"
#include "cgraph.h"

/* Table indexed by machine_mode, used for two different purposes.
   During streaming out, we record a non-zero value for every mode
   that was streamed out.
   During streaming in, we translate the on-disk mode using this
   table.  For normal LTO it is set to the identity mapping; for an
   ACCEL_COMPILER it depends on the streamed mode_table content.  */
unsigned char streamer_mode_table[1 << 8];

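/* Illustrative sketch (an assumption, not code from this file): during
   stream-in a reader remaps an on-disk mode value IX roughly as

     machine_mode mode = (machine_mode) streamer_mode_table[ix];

   which is the identity mapping for native LTO.  */
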
/* Check that all the TS_* structures handled by the streamer_write_* and
   streamer_read_* routines are exactly ALL the structures defined in
   treestruct.def.  */

void
streamer_check_handled_ts_structures (void)
{
  bool handled_p[LAST_TS_ENUM];
  unsigned i;

  memset (&handled_p, 0, sizeof (handled_p));

  /* These are the TS_* structures that are either handled or
     explicitly ignored by the streamer routines.  */
  handled_p[TS_BASE] = true;
  handled_p[TS_TYPED] = true;
  handled_p[TS_COMMON] = true;
  handled_p[TS_INT_CST] = true;
  handled_p[TS_POLY_INT_CST] = true;
  handled_p[TS_REAL_CST] = true;
  handled_p[TS_FIXED_CST] = true;
  handled_p[TS_VECTOR] = true;
  handled_p[TS_STRING] = true;
  handled_p[TS_COMPLEX] = true;
  handled_p[TS_IDENTIFIER] = true;
  handled_p[TS_DECL_MINIMAL] = true;
  handled_p[TS_DECL_COMMON] = true;
  handled_p[TS_DECL_WRTL] = true;
  handled_p[TS_DECL_NON_COMMON] = true;
  handled_p[TS_DECL_WITH_VIS] = true;
  handled_p[TS_FIELD_DECL] = true;
  handled_p[TS_VAR_DECL] = true;
  handled_p[TS_PARM_DECL] = true;
  handled_p[TS_LABEL_DECL] = true;
  handled_p[TS_RESULT_DECL] = true;
  handled_p[TS_CONST_DECL] = true;
  handled_p[TS_TYPE_DECL] = true;
  handled_p[TS_FUNCTION_DECL] = true;
  handled_p[TS_TYPE_COMMON] = true;
  handled_p[TS_TYPE_WITH_LANG_SPECIFIC] = true;
  handled_p[TS_TYPE_NON_COMMON] = true;
  handled_p[TS_LIST] = true;
  handled_p[TS_VEC] = true;
  handled_p[TS_EXP] = true;
  handled_p[TS_SSA_NAME] = true;
  handled_p[TS_BLOCK] = true;
  handled_p[TS_BINFO] = true;
  handled_p[TS_STATEMENT_LIST] = true;
  handled_p[TS_CONSTRUCTOR] = true;
  handled_p[TS_OMP_CLAUSE] = true;
  handled_p[TS_OPTIMIZATION] = true;
  handled_p[TS_TARGET_OPTION] = true;
  handled_p[TS_TRANSLATION_UNIT_DECL] = true;

  /* Anything not marked above will trigger the following assertion.
     If this assertion triggers, it means that there is a new TS_*
     structure that should be handled by the streamer.  */
  for (i = 0; i < LAST_TS_ENUM; i++)
    gcc_assert (handled_p[i]);
}


/* Helper for streamer_tree_cache_insert_1.  Add T to CACHE->NODES at
   slot IX.  */

static void
streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
                                       unsigned ix, tree t, hashval_t hash)
{
  /* We're either replacing an old element or appending consecutively.  */
  if (cache->nodes.exists ())
    {
      if (cache->nodes.length () == ix)
        cache->nodes.safe_push (t);
      else
        cache->nodes[ix] = t;
    }
  if (cache->hashes.exists ())
    {
      if (cache->hashes.length () == ix)
        cache->hashes.safe_push (hash);
      else
        cache->hashes[ix] = hash;
    }
}


/* Helper for streamer_tree_cache_insert and streamer_tree_cache_insert_at.
   CACHE, T, and IX_P are as in streamer_tree_cache_insert.

   If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
   slot in the cache.  Otherwise, T is inserted at the position indicated
   in *IX_P.

   If T already existed in CACHE, return true.  Otherwise,
   return false.  */

static bool
streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
                              tree t, hashval_t hash, unsigned *ix_p,
                              bool insert_at_next_slot_p)
{
  bool existed_p;

  gcc_assert (t);

  unsigned int &ix = cache->node_map->get_or_insert (t, &existed_p);
  if (!existed_p)
    {
      /* Determine the next slot to use in the cache.  */
      if (insert_at_next_slot_p)
        ix = cache->next_idx++;
      else
        ix = *ix_p;

      streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
    }
  else
    {
      if (!insert_at_next_slot_p && ix != *ix_p)
        {
          /* If the caller wants to insert T at a specific slot
             location, and ENTRY->TO does not match *IX_P, add T to
             the requested location slot.  */
          ix = *ix_p;
          streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
        }
    }

  if (ix_p)
    *ix_p = ix;

  return existed_p;
}


/* Insert tree node T in CACHE.  If T already existed in the cache
   return true.  Otherwise, return false.

   If IX_P is non-null, update it with the index into the cache where
   T has been stored.  */

bool
streamer_tree_cache_insert (struct streamer_tree_cache_d *cache, tree t,
                            hashval_t hash, unsigned *ix_p)
{
  return streamer_tree_cache_insert_1 (cache, t, hash, ix_p, true);
}
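

/* Usage sketch (an illustrative assumption, not code used by this file):
   a writer-side caller typically streams the body of T only the first
   time it is seen and afterwards refers to it by its cache index:

     unsigned ix;
     if (!streamer_tree_cache_insert (cache, t, hash, &ix))
       ... first occurrence: emit the body of T ...
     ... either way, IX now identifies T for back references ...  */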


/* Replace the tree node with T in CACHE at slot IX.  */

void
streamer_tree_cache_replace_tree (struct streamer_tree_cache_d *cache,
                                  tree t, unsigned ix)
{
  hashval_t hash = 0;
  if (cache->hashes.exists ())
    hash = streamer_tree_cache_get_hash (cache, ix);
  if (!cache->node_map)
    streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
  else
    streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
}


/* Appends tree node T to CACHE, even if T already existed in it.  */

void
streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
                            tree t, hashval_t hash)
{
  unsigned ix = cache->next_idx++;
  if (!cache->node_map)
    streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
  else
    streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
}

/* Return true if tree node T exists in CACHE, otherwise false.  If IX_P is
   not NULL, write to *IX_P the index into the cache where T is stored
   ((unsigned)-1 if T is not found).  */

bool
streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
                            unsigned *ix_p)
{
  unsigned *slot;
  bool retval;
  unsigned ix;

  gcc_assert (t);

  slot = cache->node_map->get (t);
  if (slot == NULL)
    {
      retval = false;
      ix = -1;
    }
  else
    {
      retval = true;
      ix = *slot;
    }

  if (ix_p)
    *ix_p = ix;

  return retval;
}
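

/* Usage sketch (an illustrative assumption): a caller that only needs to
   know whether T is already cached, e.g. to emit a back reference instead
   of a full tree, can do

     unsigned ix;
     if (streamer_tree_cache_lookup (cache, t, &ix))
       ... emit a reference to slot IX ...  */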


/* Verify that NODE is in CACHE.  */

static void
verify_common_node_recorded (struct streamer_tree_cache_d *cache, tree node)
{
  /* Restrict this to flag_checking only, because violating the invariant
     is in general harmless, and we never know what happens on every
     target/front-end/flag(!) combination.  */
  if (!flag_checking)
    return;

  if (cache->node_map)
    gcc_assert (streamer_tree_cache_lookup (cache, node, NULL));
  else
    {
      bool found = false;
      gcc_assert (cache->nodes.exists ());
      /* Linear search...  */
      for (unsigned i = 0; !found && i < cache->nodes.length (); ++i)
        if (cache->nodes[i] == node)
          found = true;
      gcc_assert (found);
    }
}


/* Record NODE in CACHE.  */

static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload, simply
     don't preload them.
     ??? We'd want to verify that this doesn't happen, or alternatively
     do not recurse at all.  */
  if (node == char_type_node)
    return;

  gcc_checking_assert (node != boolean_type_node
                       && node != boolean_true_node
                       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries we'll simply stream
     a random other tree.  A NULL tree will never be looked up, so it
     doesn't matter which tree we replace it with; just to be sure,
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* This hash needs to be equal for all frontend and lto1 invocations,
     so just use the position in the cache as the hash value.
     Small integers are used by hash_tree to record positions within the
     SCC hash; the 0xc001 offset keeps these values out of that range.  */
  streamer_tree_cache_append (cache, node, cache->next_idx + 0xc001);

  switch (TREE_CODE (node))
    {
    case ERROR_MARK:
    case FIELD_DECL:
    case FIXED_POINT_TYPE:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case TREE_LIST:
    case VOID_CST:
    case VOID_TYPE:
      /* No recursive trees.  */
      break;
    case ARRAY_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      record_common_node (cache, TREE_TYPE (node));
      break;
    case COMPLEX_TYPE:
      /* Verify that a complex type's component type (node_type) has been
         handled already (and we thus don't need to recurse here).  */
      verify_common_node_recorded (cache, TREE_TYPE (node));
      break;
    case RECORD_TYPE:
      /* The FIELD_DECLs of structures should be shared, so that every
         COMPONENT_REF uses the same tree node when referencing a field.
         Pointer equality between FIELD_DECLs is used by the alias
         machinery to compute overlapping component references (see
         nonoverlapping_component_refs_p and
         nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
        record_common_node (cache, f);
      break;
    default:
      /* Unexpected tree code.  */
      gcc_unreachable ();
    }
}


/* Preload common nodes into CACHE and make sure they are merged
   properly according to the gimple type table.  */

static void
preload_common_nodes (struct streamer_tree_cache_d *cache)
{
  unsigned i;

  for (i = 0; i < itk_none; i++)
    /* Skip itk_char.  char_type_node is dependent on -f[un]signed-char.  */
    if (i != itk_char)
      record_common_node (cache, integer_types[i]);

  for (i = 0; i < stk_type_kind_last; i++)
    record_common_node (cache, sizetype_tab[i]);

  for (i = 0; i < TI_MAX; i++)
    /* Skip boolean type and constants, they are frontend dependent.  */
    if (i != TI_BOOLEAN_TYPE
        && i != TI_BOOLEAN_FALSE
        && i != TI_BOOLEAN_TRUE
        /* MAIN_IDENTIFIER is not always initialized by Fortran FE.  */
        && i != TI_MAIN_IDENTIFIER
        /* PID_TYPE is initialized only by C family front-ends.  */
        && i != TI_PID_TYPE
        /* Skip optimization and target option nodes; they depend on flags.  */
        && i != TI_OPTIMIZATION_DEFAULT
        && i != TI_OPTIMIZATION_CURRENT
        && i != TI_TARGET_OPTION_DEFAULT
        && i != TI_TARGET_OPTION_CURRENT
        && i != TI_CURRENT_TARGET_PRAGMA
        && i != TI_CURRENT_OPTIMIZE_PRAGMA
        /* SCEV state shouldn't reach the IL.  */
        && i != TI_CHREC_DONT_KNOW
        && i != TI_CHREC_KNOWN
        /* Skip va_list* related nodes if offloading.  For native LTO
           we want them to be merged for the stdarg pass, for offloading
           they might not be identical between host and offloading target.  */
        && (!lto_stream_offload_p
            || (i != TI_VA_LIST_TYPE
                && i != TI_VA_LIST_GPR_COUNTER_FIELD
                && i != TI_VA_LIST_FPR_COUNTER_FIELD)))
      record_common_node (cache, global_trees[i]);
}


/* Create a cache of pickled nodes.  */

struct streamer_tree_cache_d *
streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
{
  struct streamer_tree_cache_d *cache;

  cache = XCNEW (struct streamer_tree_cache_d);

  if (with_map)
    cache->node_map = new hash_map<tree, unsigned> (251);
  cache->next_idx = 0;
  if (with_vec)
    cache->nodes.create (165);
  if (with_hashes)
    cache->hashes.create (165);

  /* Load all the well-known tree nodes that are always created by
     the compiler on startup.  This prevents writing them out
     unnecessarily.  */
  preload_common_nodes (cache);

  return cache;
}


/* Delete the streamer cache C.  */

void
streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
{
  if (c == NULL)
    return;

  delete c->node_map;
  c->node_map = NULL;
  c->nodes.release ();
  c->hashes.release ();
  free (c);
}
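

/* Lifecycle sketch (an illustrative assumption, not code used by this
   file): a cache is created with the index structures the caller needs
   and must be released with streamer_tree_cache_delete:

     struct streamer_tree_cache_d *cache
       = streamer_tree_cache_create (true, true, false);
     ...
     streamer_tree_cache_delete (cache);

   The WITH_* flags choose which indexes are allocated: a node map for
   by-tree lookups, a node vector for by-index lookups, and optionally
   the parallel hash vector.  */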