[obvious] Fix typos above expand_cond_expr_using_cmove
[official-gcc.git] / gcc / tree-streamer.c
blobc66e1743106d72e918b0025c8fbbea00d8401ecc
1 /* Miscellaneous utilities for tree streaming. Things that are used
2 in both input and output are here.
4 Copyright (C) 2011-2015 Free Software Foundation, Inc.
5 Contributed by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "alias.h"
27 #include "backend.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "hard-reg-set.h"
31 #include "options.h"
32 #include "fold-const.h"
33 #include "internal-fn.h"
34 #include "streamer-hooks.h"
35 #include "cgraph.h"
36 #include "tree-streamer.h"
/* Table indexed by machine_mode, used for two different purposes.
   During streaming out we record a non-zero value there for every mode
   that was streamed out.
   During streaming in, we translate the on-disk mode through this
   table.  For normal LTO it is set to the identity mapping; for
   ACCEL_COMPILER it depends on the mode_table content.  */
unsigned char streamer_mode_table[1 << 8];
46 /* Check that all the TS_* structures handled by the streamer_write_* and
47 streamer_read_* routines are exactly ALL the structures defined in
48 treestruct.def. */
50 void
51 streamer_check_handled_ts_structures (void)
53 bool handled_p[LAST_TS_ENUM];
54 unsigned i;
56 memset (&handled_p, 0, sizeof (handled_p));
58 /* These are the TS_* structures that are either handled or
59 explicitly ignored by the streamer routines. */
60 handled_p[TS_BASE] = true;
61 handled_p[TS_TYPED] = true;
62 handled_p[TS_COMMON] = true;
63 handled_p[TS_INT_CST] = true;
64 handled_p[TS_REAL_CST] = true;
65 handled_p[TS_FIXED_CST] = true;
66 handled_p[TS_VECTOR] = true;
67 handled_p[TS_STRING] = true;
68 handled_p[TS_COMPLEX] = true;
69 handled_p[TS_IDENTIFIER] = true;
70 handled_p[TS_DECL_MINIMAL] = true;
71 handled_p[TS_DECL_COMMON] = true;
72 handled_p[TS_DECL_WRTL] = true;
73 handled_p[TS_DECL_NON_COMMON] = true;
74 handled_p[TS_DECL_WITH_VIS] = true;
75 handled_p[TS_FIELD_DECL] = true;
76 handled_p[TS_VAR_DECL] = true;
77 handled_p[TS_PARM_DECL] = true;
78 handled_p[TS_LABEL_DECL] = true;
79 handled_p[TS_RESULT_DECL] = true;
80 handled_p[TS_CONST_DECL] = true;
81 handled_p[TS_TYPE_DECL] = true;
82 handled_p[TS_FUNCTION_DECL] = true;
83 handled_p[TS_TYPE_COMMON] = true;
84 handled_p[TS_TYPE_WITH_LANG_SPECIFIC] = true;
85 handled_p[TS_TYPE_NON_COMMON] = true;
86 handled_p[TS_LIST] = true;
87 handled_p[TS_VEC] = true;
88 handled_p[TS_EXP] = true;
89 handled_p[TS_SSA_NAME] = true;
90 handled_p[TS_BLOCK] = true;
91 handled_p[TS_BINFO] = true;
92 handled_p[TS_STATEMENT_LIST] = true;
93 handled_p[TS_CONSTRUCTOR] = true;
94 handled_p[TS_OMP_CLAUSE] = true;
95 handled_p[TS_OPTIMIZATION] = true;
96 handled_p[TS_TARGET_OPTION] = true;
97 handled_p[TS_TRANSLATION_UNIT_DECL] = true;
99 /* Anything not marked above will trigger the following assertion.
100 If this assertion triggers, it means that there is a new TS_*
101 structure that should be handled by the streamer. */
102 for (i = 0; i < LAST_TS_ENUM; i++)
103 gcc_assert (handled_p[i]);
107 /* Helper for streamer_tree_cache_insert_1. Add T to CACHE->NODES at
108 slot IX. */
110 static void
111 streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
112 unsigned ix, tree t, hashval_t hash)
114 /* We're either replacing an old element or appending consecutively. */
115 if (cache->nodes.exists ())
117 if (cache->nodes.length () == ix)
118 cache->nodes.safe_push (t);
119 else
120 cache->nodes[ix] = t;
122 if (cache->hashes.exists ())
124 if (cache->hashes.length () == ix)
125 cache->hashes.safe_push (hash);
126 else
127 cache->hashes[ix] = hash;
/* Helper for streamer_tree_cache_insert, streamer_tree_cache_replace_tree
   and streamer_tree_cache_append.  CACHE, T and IX_P are as in
   streamer_tree_cache_insert.

   If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
   slot in the cache.  Otherwise, T is inserted at the position indicated
   in *IX_P.

   If T already existed in CACHE, return true.  Otherwise,
   return false.  */

static bool
streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
			      tree t, hashval_t hash, unsigned *ix_p,
			      bool insert_at_next_slot_p)
{
  bool existed_p;

  gcc_assert (t);

  /* IX aliases the slot for T inside the map, so the assignments below
     also update the index recorded for T.  */
  unsigned int &ix = cache->node_map->get_or_insert (t, &existed_p);
  if (!existed_p)
    {
      /* Determine the next slot to use in the cache.  */
      if (insert_at_next_slot_p)
	ix = cache->next_idx++;
      else
	ix = *ix_p;

      streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
    }
  else
    {
      if (!insert_at_next_slot_p && ix != *ix_p)
	{
	  /* The caller wants to insert T at a specific slot location,
	     and the index recorded for T does not match *IX_P; move T
	     to the requested location slot.  */
	  ix = *ix_p;
	  streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
	}
    }

  if (ix_p)
    *ix_p = ix;

  return existed_p;
}
181 /* Insert tree node T in CACHE. If T already existed in the cache
182 return true. Otherwise, return false.
184 If IX_P is non-null, update it with the index into the cache where
185 T has been stored. */
187 bool
188 streamer_tree_cache_insert (struct streamer_tree_cache_d *cache, tree t,
189 hashval_t hash, unsigned *ix_p)
191 return streamer_tree_cache_insert_1 (cache, t, hash, ix_p, true);
195 /* Replace the tree node with T in CACHE at slot IX. */
197 void
198 streamer_tree_cache_replace_tree (struct streamer_tree_cache_d *cache,
199 tree t, unsigned ix)
201 hashval_t hash = 0;
202 if (cache->hashes.exists ())
203 hash = streamer_tree_cache_get_hash (cache, ix);
204 if (!cache->node_map)
205 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
206 else
207 streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
211 /* Appends tree node T to CACHE, even if T already existed in it. */
213 void
214 streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
215 tree t, hashval_t hash)
217 unsigned ix = cache->next_idx++;
218 if (!cache->node_map)
219 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
220 else
221 streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
224 /* Return true if tree node T exists in CACHE, otherwise false. If IX_P is
225 not NULL, write to *IX_P the index into the cache where T is stored
226 ((unsigned)-1 if T is not found). */
228 bool
229 streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
230 unsigned *ix_p)
232 unsigned *slot;
233 bool retval;
234 unsigned ix;
236 gcc_assert (t);
238 slot = cache->node_map->get (t);
239 if (slot == NULL)
241 retval = false;
242 ix = -1;
244 else
246 retval = true;
247 ix = *slot;
250 if (ix_p)
251 *ix_p = ix;
253 return retval;
/* Record NODE in CACHE.  The slot at which NODE lands is also used as
   its hash value, so the order of record_common_node calls must be
   identical in every invocation that reads or writes the stream.  */

static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload simply don't.
     ??? We'd want to verify that this doesn't happen, or alternatively
     do not recurse at all.  */
  if (node == char_type_node)
    return;

  /* These are skipped by preload_common_nodes because they are
     frontend dependent; reaching one here would break merging.  */
  gcc_checking_assert (node != boolean_type_node
		       && node != boolean_true_node
		       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries we'll simply stream
     a random other tree.  A NULL tree never will be looked up so it
     doesn't matter which tree we replace it with, just to be sure
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* ??? FIXME, devise a better hash value.  But the hash needs to be equal
     for all frontend and lto1 invocations.  So just use the position
     in the cache as hash value.  */
  streamer_tree_cache_append (cache, node, cache->nodes.length ());

  if (POINTER_TYPE_P (node)
      || TREE_CODE (node) == COMPLEX_TYPE
      || TREE_CODE (node) == ARRAY_TYPE)
    record_common_node (cache, TREE_TYPE (node));
  else if (TREE_CODE (node) == RECORD_TYPE)
    {
      /* The FIELD_DECLs of structures should be shared, so that every
	 COMPONENT_REF uses the same tree node when referencing a field.
	 Pointer equality between FIELD_DECLs is used by the alias
	 machinery to compute overlapping component references (see
	 nonoverlapping_component_refs_p and
	 nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
	record_common_node (cache, f);
    }
}
/* Preload common nodes into CACHE and make sure they are merged
   properly according to the gimple type table.  The iteration order
   below fixes the cache index (and thus the hash) of each preloaded
   node, so it must be the same for all producers and consumers of a
   given LTO stream.  */

static void
preload_common_nodes (struct streamer_tree_cache_d *cache)
{
  unsigned i;

  for (i = 0; i < itk_none; i++)
    /* Skip itk_char.  char_type_node is dependent on -f[un]signed-char.  */
    if (i != itk_char)
      record_common_node (cache, integer_types[i]);

  for (i = 0; i < stk_type_kind_last; i++)
    record_common_node (cache, sizetype_tab[i]);

  for (i = 0; i < TI_MAX; i++)
    /* Skip boolean type and constants, they are frontend dependent.  */
    if (i != TI_BOOLEAN_TYPE
	&& i != TI_BOOLEAN_FALSE
	&& i != TI_BOOLEAN_TRUE
	/* MAIN_IDENTIFIER is not always initialized by Fortran FE.  */
	&& i != TI_MAIN_IDENTIFIER
	/* PID_TYPE is initialized only by C family front-ends.  */
	&& i != TI_PID_TYPE
	/* Skip optimization and target option nodes; they depend on flags.  */
	&& i != TI_OPTIMIZATION_DEFAULT
	&& i != TI_OPTIMIZATION_CURRENT
	&& i != TI_TARGET_OPTION_DEFAULT
	&& i != TI_TARGET_OPTION_CURRENT
	&& i != TI_CURRENT_TARGET_PRAGMA
	&& i != TI_CURRENT_OPTIMIZE_PRAGMA
	/* Skip va_list* related nodes if offloading.  For native LTO
	   we want them to be merged for the stdarg pass, for offloading
	   they might not be identical between host and offloading target.  */
	&& (!lto_stream_offload_p
	    || (i != TI_VA_LIST_TYPE
		&& i != TI_VA_LIST_GPR_COUNTER_FIELD
		&& i != TI_VA_LIST_FPR_COUNTER_FIELD)))
      record_common_node (cache, global_trees[i]);
}
347 /* Create a cache of pickled nodes. */
349 struct streamer_tree_cache_d *
350 streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
352 struct streamer_tree_cache_d *cache;
354 cache = XCNEW (struct streamer_tree_cache_d);
356 if (with_map)
357 cache->node_map = new hash_map<tree, unsigned> (251);
358 cache->next_idx = 0;
359 if (with_vec)
360 cache->nodes.create (165);
361 if (with_hashes)
362 cache->hashes.create (165);
364 /* Load all the well-known tree nodes that are always created by
365 the compiler on startup. This prevents writing them out
366 unnecessarily. */
367 preload_common_nodes (cache);
369 return cache;
373 /* Delete the streamer cache C. */
375 void
376 streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
378 if (c == NULL)
379 return;
381 delete c->node_map;
382 c->node_map = NULL;
383 c->nodes.release ();
384 c->hashes.release ();
385 free (c);