gcc/
[official-gcc.git] / gcc / tree-streamer.c
blobad1510df0e82352478d484d4771171160a31a438
1 /* Miscellaneous utilities for tree streaming. Things that are used
2 in both input and output are here.
4 Copyright (C) 2011-2015 Free Software Foundation, Inc.
5 Contributed by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "options.h"
29 #include "tree.h"
30 #include "fold-const.h"
31 #include "predict.h"
32 #include "tm.h"
33 #include "hard-reg-set.h"
34 #include "function.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
39 #include "gimple.h"
40 #include "streamer-hooks.h"
41 #include "plugin-api.h"
42 #include "ipa-ref.h"
43 #include "cgraph.h"
44 #include "tree-streamer.h"
/* Table indexed by machine_mode, used for 2 different purposes.
   During streaming out we record there non-zero value for all modes
   that were streamed out.
   During streaming in, we translate the on the disk mode using this
   table.  For normal LTO it is set to identity, for ACCEL_COMPILER
   depending on the mode_table content.
   NOTE(review): sized 1 << 8 so any 8-bit mode value read from disk
   indexes safely — presumably an upper bound on MAX_MACHINE_MODE;
   confirm against machmode.h.  */
unsigned char streamer_mode_table[1 << 8];
54 /* Check that all the TS_* structures handled by the streamer_write_* and
55 streamer_read_* routines are exactly ALL the structures defined in
56 treestruct.def. */
58 void
59 streamer_check_handled_ts_structures (void)
61 bool handled_p[LAST_TS_ENUM];
62 unsigned i;
64 memset (&handled_p, 0, sizeof (handled_p));
66 /* These are the TS_* structures that are either handled or
67 explicitly ignored by the streamer routines. */
68 handled_p[TS_BASE] = true;
69 handled_p[TS_TYPED] = true;
70 handled_p[TS_COMMON] = true;
71 handled_p[TS_INT_CST] = true;
72 handled_p[TS_REAL_CST] = true;
73 handled_p[TS_FIXED_CST] = true;
74 handled_p[TS_VECTOR] = true;
75 handled_p[TS_STRING] = true;
76 handled_p[TS_COMPLEX] = true;
77 handled_p[TS_IDENTIFIER] = true;
78 handled_p[TS_DECL_MINIMAL] = true;
79 handled_p[TS_DECL_COMMON] = true;
80 handled_p[TS_DECL_WRTL] = true;
81 handled_p[TS_DECL_NON_COMMON] = true;
82 handled_p[TS_DECL_WITH_VIS] = true;
83 handled_p[TS_FIELD_DECL] = true;
84 handled_p[TS_VAR_DECL] = true;
85 handled_p[TS_PARM_DECL] = true;
86 handled_p[TS_LABEL_DECL] = true;
87 handled_p[TS_RESULT_DECL] = true;
88 handled_p[TS_CONST_DECL] = true;
89 handled_p[TS_TYPE_DECL] = true;
90 handled_p[TS_FUNCTION_DECL] = true;
91 handled_p[TS_TYPE_COMMON] = true;
92 handled_p[TS_TYPE_WITH_LANG_SPECIFIC] = true;
93 handled_p[TS_TYPE_NON_COMMON] = true;
94 handled_p[TS_LIST] = true;
95 handled_p[TS_VEC] = true;
96 handled_p[TS_EXP] = true;
97 handled_p[TS_SSA_NAME] = true;
98 handled_p[TS_BLOCK] = true;
99 handled_p[TS_BINFO] = true;
100 handled_p[TS_STATEMENT_LIST] = true;
101 handled_p[TS_CONSTRUCTOR] = true;
102 handled_p[TS_OMP_CLAUSE] = true;
103 handled_p[TS_OPTIMIZATION] = true;
104 handled_p[TS_TARGET_OPTION] = true;
105 handled_p[TS_TRANSLATION_UNIT_DECL] = true;
107 /* Anything not marked above will trigger the following assertion.
108 If this assertion triggers, it means that there is a new TS_*
109 structure that should be handled by the streamer. */
110 for (i = 0; i < LAST_TS_ENUM; i++)
111 gcc_assert (handled_p[i]);
115 /* Helper for streamer_tree_cache_insert_1. Add T to CACHE->NODES at
116 slot IX. */
118 static void
119 streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
120 unsigned ix, tree t, hashval_t hash)
122 /* We're either replacing an old element or appending consecutively. */
123 if (cache->nodes.exists ())
125 if (cache->nodes.length () == ix)
126 cache->nodes.safe_push (t);
127 else
128 cache->nodes[ix] = t;
130 if (cache->hashes.exists ())
132 if (cache->hashes.length () == ix)
133 cache->hashes.safe_push (hash);
134 else
135 cache->hashes[ix] = hash;
140 /* Helper for streamer_tree_cache_insert and streamer_tree_cache_insert_at.
141 CACHE, T, and IX_P are as in streamer_tree_cache_insert.
143 If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
144 slot in the cache. Otherwise, T is inserted at the position indicated
145 in *IX_P.
147 If T already existed in CACHE, return true. Otherwise,
148 return false. */
150 static bool
151 streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
152 tree t, hashval_t hash, unsigned *ix_p,
153 bool insert_at_next_slot_p)
155 bool existed_p;
157 gcc_assert (t);
159 unsigned int &ix = cache->node_map->get_or_insert (t, &existed_p);
160 if (!existed_p)
162 /* Determine the next slot to use in the cache. */
163 if (insert_at_next_slot_p)
164 ix = cache->next_idx++;
165 else
166 ix = *ix_p;
168 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
170 else
172 if (!insert_at_next_slot_p && ix != *ix_p)
174 /* If the caller wants to insert T at a specific slot
175 location, and ENTRY->TO does not match *IX_P, add T to
176 the requested location slot. */
177 ix = *ix_p;
178 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
182 if (ix_p)
183 *ix_p = ix;
185 return existed_p;
189 /* Insert tree node T in CACHE. If T already existed in the cache
190 return true. Otherwise, return false.
192 If IX_P is non-null, update it with the index into the cache where
193 T has been stored. */
195 bool
196 streamer_tree_cache_insert (struct streamer_tree_cache_d *cache, tree t,
197 hashval_t hash, unsigned *ix_p)
199 return streamer_tree_cache_insert_1 (cache, t, hash, ix_p, true);
203 /* Replace the tree node with T in CACHE at slot IX. */
205 void
206 streamer_tree_cache_replace_tree (struct streamer_tree_cache_d *cache,
207 tree t, unsigned ix)
209 hashval_t hash = 0;
210 if (cache->hashes.exists ())
211 hash = streamer_tree_cache_get_hash (cache, ix);
212 if (!cache->node_map)
213 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
214 else
215 streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
219 /* Appends tree node T to CACHE, even if T already existed in it. */
221 void
222 streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
223 tree t, hashval_t hash)
225 unsigned ix = cache->next_idx++;
226 if (!cache->node_map)
227 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
228 else
229 streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
232 /* Return true if tree node T exists in CACHE, otherwise false. If IX_P is
233 not NULL, write to *IX_P the index into the cache where T is stored
234 ((unsigned)-1 if T is not found). */
236 bool
237 streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
238 unsigned *ix_p)
240 unsigned *slot;
241 bool retval;
242 unsigned ix;
244 gcc_assert (t);
246 slot = cache->node_map->get (t);
247 if (slot == NULL)
249 retval = false;
250 ix = -1;
252 else
254 retval = true;
255 ix = *slot;
258 if (ix_p)
259 *ix_p = ix;
261 return retval;
265 /* Record NODE in CACHE. */
267 static void
268 record_common_node (struct streamer_tree_cache_d *cache, tree node)
270 /* If we recursively end up at nodes we do not want to preload simply don't.
271 ??? We'd want to verify that this doesn't happen, or alternatively
272 do not recurse at all. */
273 if (node == char_type_node)
274 return;
276 gcc_checking_assert (node != boolean_type_node
277 && node != boolean_true_node
278 && node != boolean_false_node);
280 /* We have to make sure to fill exactly the same number of
281 elements for all frontends. That can include NULL trees.
282 As our hash table can't deal with zero entries we'll simply stream
283 a random other tree. A NULL tree never will be looked up so it
284 doesn't matter which tree we replace it with, just to be sure
285 use error_mark_node. */
286 if (!node)
287 node = error_mark_node;
289 /* ??? FIXME, devise a better hash value. But the hash needs to be equal
290 for all frontend and lto1 invocations. So just use the position
291 in the cache as hash value. */
292 streamer_tree_cache_append (cache, node, cache->nodes.length ());
294 if (POINTER_TYPE_P (node)
295 || TREE_CODE (node) == COMPLEX_TYPE
296 || TREE_CODE (node) == ARRAY_TYPE)
297 record_common_node (cache, TREE_TYPE (node));
298 else if (TREE_CODE (node) == RECORD_TYPE)
300 /* The FIELD_DECLs of structures should be shared, so that every
301 COMPONENT_REF uses the same tree node when referencing a field.
302 Pointer equality between FIELD_DECLs is used by the alias
303 machinery to compute overlapping component references (see
304 nonoverlapping_component_refs_p and
305 nonoverlapping_component_refs_of_decl_p). */
306 for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
307 record_common_node (cache, f);
312 /* Preload common nodes into CACHE and make sure they are merged
313 properly according to the gimple type table. */
315 static void
316 preload_common_nodes (struct streamer_tree_cache_d *cache)
318 unsigned i;
320 for (i = 0; i < itk_none; i++)
321 /* Skip itk_char. char_type_node is dependent on -f[un]signed-char. */
322 if (i != itk_char)
323 record_common_node (cache, integer_types[i]);
325 for (i = 0; i < stk_type_kind_last; i++)
326 record_common_node (cache, sizetype_tab[i]);
328 for (i = 0; i < TI_MAX; i++)
329 /* Skip boolean type and constants, they are frontend dependent. */
330 if (i != TI_BOOLEAN_TYPE
331 && i != TI_BOOLEAN_FALSE
332 && i != TI_BOOLEAN_TRUE
333 /* MAIN_IDENTIFIER is not always initialized by Fortran FE. */
334 && i != TI_MAIN_IDENTIFIER
335 /* PID_TYPE is initialized only by C family front-ends. */
336 && i != TI_PID_TYPE
337 /* Skip optimization and target option nodes; they depend on flags. */
338 && i != TI_OPTIMIZATION_DEFAULT
339 && i != TI_OPTIMIZATION_CURRENT
340 && i != TI_TARGET_OPTION_DEFAULT
341 && i != TI_TARGET_OPTION_CURRENT
342 && i != TI_CURRENT_TARGET_PRAGMA
343 && i != TI_CURRENT_OPTIMIZE_PRAGMA
344 /* Skip va_list* related nodes if offloading. For native LTO
345 we want them to be merged for the stdarg pass, for offloading
346 they might not be identical between host and offloading target. */
347 && (!lto_stream_offload_p
348 || (i != TI_VA_LIST_TYPE
349 && i != TI_VA_LIST_GPR_COUNTER_FIELD
350 && i != TI_VA_LIST_FPR_COUNTER_FIELD)))
351 record_common_node (cache, global_trees[i]);
355 /* Create a cache of pickled nodes. */
357 struct streamer_tree_cache_d *
358 streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
360 struct streamer_tree_cache_d *cache;
362 cache = XCNEW (struct streamer_tree_cache_d);
364 if (with_map)
365 cache->node_map = new hash_map<tree, unsigned> (251);
366 cache->next_idx = 0;
367 if (with_vec)
368 cache->nodes.create (165);
369 if (with_hashes)
370 cache->hashes.create (165);
372 /* Load all the well-known tree nodes that are always created by
373 the compiler on startup. This prevents writing them out
374 unnecessarily. */
375 preload_common_nodes (cache);
377 return cache;
381 /* Delete the streamer cache C. */
383 void
384 streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
386 if (c == NULL)
387 return;
389 delete c->node_map;
390 c->node_map = NULL;
391 c->nodes.release ();
392 c->hashes.release ();
393 free (c);