Daily bump.
[official-gcc.git] / gcc / tree-streamer.c
blob7a7ea039e1acd8b5e4551a59d637a6c57f1f4875
1 /* Miscellaneous utilities for tree streaming. Things that are used
2 in both input and output are here.
4 Copyright (C) 2011-2015 Free Software Foundation, Inc.
5 Contributed by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "options.h"
29 #include "tree.h"
30 #include "fold-const.h"
31 #include "predict.h"
32 #include "tm.h"
33 #include "hard-reg-set.h"
34 #include "function.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "gimple.h"
40 #include "streamer-hooks.h"
41 #include "cgraph.h"
42 #include "tree-streamer.h"
/* Table indexed by machine_mode, used for 2 different purposes.
   During streaming out we record there non-zero value for all modes
   that were streamed out.
   During streaming in, we translate the on the disk mode using this
   table.  For normal LTO it is set to identity, for ACCEL_COMPILER
   depending on the mode_table content.  */
unsigned char streamer_mode_table[1 << 8];
52 /* Check that all the TS_* structures handled by the streamer_write_* and
53 streamer_read_* routines are exactly ALL the structures defined in
54 treestruct.def. */
56 void
57 streamer_check_handled_ts_structures (void)
59 bool handled_p[LAST_TS_ENUM];
60 unsigned i;
62 memset (&handled_p, 0, sizeof (handled_p));
64 /* These are the TS_* structures that are either handled or
65 explicitly ignored by the streamer routines. */
66 handled_p[TS_BASE] = true;
67 handled_p[TS_TYPED] = true;
68 handled_p[TS_COMMON] = true;
69 handled_p[TS_INT_CST] = true;
70 handled_p[TS_REAL_CST] = true;
71 handled_p[TS_FIXED_CST] = true;
72 handled_p[TS_VECTOR] = true;
73 handled_p[TS_STRING] = true;
74 handled_p[TS_COMPLEX] = true;
75 handled_p[TS_IDENTIFIER] = true;
76 handled_p[TS_DECL_MINIMAL] = true;
77 handled_p[TS_DECL_COMMON] = true;
78 handled_p[TS_DECL_WRTL] = true;
79 handled_p[TS_DECL_NON_COMMON] = true;
80 handled_p[TS_DECL_WITH_VIS] = true;
81 handled_p[TS_FIELD_DECL] = true;
82 handled_p[TS_VAR_DECL] = true;
83 handled_p[TS_PARM_DECL] = true;
84 handled_p[TS_LABEL_DECL] = true;
85 handled_p[TS_RESULT_DECL] = true;
86 handled_p[TS_CONST_DECL] = true;
87 handled_p[TS_TYPE_DECL] = true;
88 handled_p[TS_FUNCTION_DECL] = true;
89 handled_p[TS_TYPE_COMMON] = true;
90 handled_p[TS_TYPE_WITH_LANG_SPECIFIC] = true;
91 handled_p[TS_TYPE_NON_COMMON] = true;
92 handled_p[TS_LIST] = true;
93 handled_p[TS_VEC] = true;
94 handled_p[TS_EXP] = true;
95 handled_p[TS_SSA_NAME] = true;
96 handled_p[TS_BLOCK] = true;
97 handled_p[TS_BINFO] = true;
98 handled_p[TS_STATEMENT_LIST] = true;
99 handled_p[TS_CONSTRUCTOR] = true;
100 handled_p[TS_OMP_CLAUSE] = true;
101 handled_p[TS_OPTIMIZATION] = true;
102 handled_p[TS_TARGET_OPTION] = true;
103 handled_p[TS_TRANSLATION_UNIT_DECL] = true;
105 /* Anything not marked above will trigger the following assertion.
106 If this assertion triggers, it means that there is a new TS_*
107 structure that should be handled by the streamer. */
108 for (i = 0; i < LAST_TS_ENUM; i++)
109 gcc_assert (handled_p[i]);
113 /* Helper for streamer_tree_cache_insert_1. Add T to CACHE->NODES at
114 slot IX. */
116 static void
117 streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
118 unsigned ix, tree t, hashval_t hash)
120 /* We're either replacing an old element or appending consecutively. */
121 if (cache->nodes.exists ())
123 if (cache->nodes.length () == ix)
124 cache->nodes.safe_push (t);
125 else
126 cache->nodes[ix] = t;
128 if (cache->hashes.exists ())
130 if (cache->hashes.length () == ix)
131 cache->hashes.safe_push (hash);
132 else
133 cache->hashes[ix] = hash;
138 /* Helper for streamer_tree_cache_insert and streamer_tree_cache_insert_at.
139 CACHE, T, and IX_P are as in streamer_tree_cache_insert.
141 If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
142 slot in the cache. Otherwise, T is inserted at the position indicated
143 in *IX_P.
145 If T already existed in CACHE, return true. Otherwise,
146 return false. */
148 static bool
149 streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
150 tree t, hashval_t hash, unsigned *ix_p,
151 bool insert_at_next_slot_p)
153 bool existed_p;
155 gcc_assert (t);
157 unsigned int &ix = cache->node_map->get_or_insert (t, &existed_p);
158 if (!existed_p)
160 /* Determine the next slot to use in the cache. */
161 if (insert_at_next_slot_p)
162 ix = cache->next_idx++;
163 else
164 ix = *ix_p;
166 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
168 else
170 if (!insert_at_next_slot_p && ix != *ix_p)
172 /* If the caller wants to insert T at a specific slot
173 location, and ENTRY->TO does not match *IX_P, add T to
174 the requested location slot. */
175 ix = *ix_p;
176 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
180 if (ix_p)
181 *ix_p = ix;
183 return existed_p;
187 /* Insert tree node T in CACHE. If T already existed in the cache
188 return true. Otherwise, return false.
190 If IX_P is non-null, update it with the index into the cache where
191 T has been stored. */
193 bool
194 streamer_tree_cache_insert (struct streamer_tree_cache_d *cache, tree t,
195 hashval_t hash, unsigned *ix_p)
197 return streamer_tree_cache_insert_1 (cache, t, hash, ix_p, true);
201 /* Replace the tree node with T in CACHE at slot IX. */
203 void
204 streamer_tree_cache_replace_tree (struct streamer_tree_cache_d *cache,
205 tree t, unsigned ix)
207 hashval_t hash = 0;
208 if (cache->hashes.exists ())
209 hash = streamer_tree_cache_get_hash (cache, ix);
210 if (!cache->node_map)
211 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
212 else
213 streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
217 /* Appends tree node T to CACHE, even if T already existed in it. */
219 void
220 streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
221 tree t, hashval_t hash)
223 unsigned ix = cache->next_idx++;
224 if (!cache->node_map)
225 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
226 else
227 streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
230 /* Return true if tree node T exists in CACHE, otherwise false. If IX_P is
231 not NULL, write to *IX_P the index into the cache where T is stored
232 ((unsigned)-1 if T is not found). */
234 bool
235 streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
236 unsigned *ix_p)
238 unsigned *slot;
239 bool retval;
240 unsigned ix;
242 gcc_assert (t);
244 slot = cache->node_map->get (t);
245 if (slot == NULL)
247 retval = false;
248 ix = -1;
250 else
252 retval = true;
253 ix = *slot;
256 if (ix_p)
257 *ix_p = ix;
259 return retval;
263 /* Record NODE in CACHE. */
265 static void
266 record_common_node (struct streamer_tree_cache_d *cache, tree node)
268 /* If we recursively end up at nodes we do not want to preload simply don't.
269 ??? We'd want to verify that this doesn't happen, or alternatively
270 do not recurse at all. */
271 if (node == char_type_node)
272 return;
274 gcc_checking_assert (node != boolean_type_node
275 && node != boolean_true_node
276 && node != boolean_false_node);
278 /* We have to make sure to fill exactly the same number of
279 elements for all frontends. That can include NULL trees.
280 As our hash table can't deal with zero entries we'll simply stream
281 a random other tree. A NULL tree never will be looked up so it
282 doesn't matter which tree we replace it with, just to be sure
283 use error_mark_node. */
284 if (!node)
285 node = error_mark_node;
287 /* ??? FIXME, devise a better hash value. But the hash needs to be equal
288 for all frontend and lto1 invocations. So just use the position
289 in the cache as hash value. */
290 streamer_tree_cache_append (cache, node, cache->nodes.length ());
292 if (POINTER_TYPE_P (node)
293 || TREE_CODE (node) == COMPLEX_TYPE
294 || TREE_CODE (node) == ARRAY_TYPE)
295 record_common_node (cache, TREE_TYPE (node));
296 else if (TREE_CODE (node) == RECORD_TYPE)
298 /* The FIELD_DECLs of structures should be shared, so that every
299 COMPONENT_REF uses the same tree node when referencing a field.
300 Pointer equality between FIELD_DECLs is used by the alias
301 machinery to compute overlapping component references (see
302 nonoverlapping_component_refs_p and
303 nonoverlapping_component_refs_of_decl_p). */
304 for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
305 record_common_node (cache, f);
310 /* Preload common nodes into CACHE and make sure they are merged
311 properly according to the gimple type table. */
313 static void
314 preload_common_nodes (struct streamer_tree_cache_d *cache)
316 unsigned i;
318 for (i = 0; i < itk_none; i++)
319 /* Skip itk_char. char_type_node is dependent on -f[un]signed-char. */
320 if (i != itk_char)
321 record_common_node (cache, integer_types[i]);
323 for (i = 0; i < stk_type_kind_last; i++)
324 record_common_node (cache, sizetype_tab[i]);
326 for (i = 0; i < TI_MAX; i++)
327 /* Skip boolean type and constants, they are frontend dependent. */
328 if (i != TI_BOOLEAN_TYPE
329 && i != TI_BOOLEAN_FALSE
330 && i != TI_BOOLEAN_TRUE
331 /* MAIN_IDENTIFIER is not always initialized by Fortran FE. */
332 && i != TI_MAIN_IDENTIFIER
333 /* PID_TYPE is initialized only by C family front-ends. */
334 && i != TI_PID_TYPE
335 /* Skip optimization and target option nodes; they depend on flags. */
336 && i != TI_OPTIMIZATION_DEFAULT
337 && i != TI_OPTIMIZATION_CURRENT
338 && i != TI_TARGET_OPTION_DEFAULT
339 && i != TI_TARGET_OPTION_CURRENT
340 && i != TI_CURRENT_TARGET_PRAGMA
341 && i != TI_CURRENT_OPTIMIZE_PRAGMA
342 /* Skip va_list* related nodes if offloading. For native LTO
343 we want them to be merged for the stdarg pass, for offloading
344 they might not be identical between host and offloading target. */
345 && (!lto_stream_offload_p
346 || (i != TI_VA_LIST_TYPE
347 && i != TI_VA_LIST_GPR_COUNTER_FIELD
348 && i != TI_VA_LIST_FPR_COUNTER_FIELD)))
349 record_common_node (cache, global_trees[i]);
353 /* Create a cache of pickled nodes. */
355 struct streamer_tree_cache_d *
356 streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
358 struct streamer_tree_cache_d *cache;
360 cache = XCNEW (struct streamer_tree_cache_d);
362 if (with_map)
363 cache->node_map = new hash_map<tree, unsigned> (251);
364 cache->next_idx = 0;
365 if (with_vec)
366 cache->nodes.create (165);
367 if (with_hashes)
368 cache->hashes.create (165);
370 /* Load all the well-known tree nodes that are always created by
371 the compiler on startup. This prevents writing them out
372 unnecessarily. */
373 preload_common_nodes (cache);
375 return cache;
379 /* Delete the streamer cache C. */
381 void
382 streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
384 if (c == NULL)
385 return;
387 delete c->node_map;
388 c->node_map = NULL;
389 c->nodes.release ();
390 c->hashes.release ();
391 free (c);