/* Miscellaneous utilities for tree streaming.  Things that are used
   in both input and output are here.

   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alias.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "options.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "tree-streamer.h"
#include "cgraph.h"

/* Table indexed by machine_mode, used for two different purposes.
   During streaming out we record a non-zero value there for every mode
   that was streamed out.
   During streaming in, we translate the on-disk mode through this
   table.  For normal LTO it is set to the identity; for ACCEL_COMPILER
   it is filled in according to the streamed mode_table content.  */
unsigned char streamer_mode_table[1 << 8];

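/* Illustrative sketch only, not part of the original file; kept under
   '#if 0' so it does not affect the build.  It shows the writer-side use
   described in the comment above: marking a mode as streamed out by
   storing a non-zero value at its index.  The function name is
   hypothetical.  */
#if 0
static void
example_mark_mode_streamed (machine_mode mode)
{
  /* Record that MODE was streamed out.  */
  streamer_mode_table[(int) mode] = 1;
}
#endif
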
/* Check that all the TS_* structures handled by the streamer_write_* and
   streamer_read_* routines are exactly ALL the structures defined in
   treestruct.def.  */

void
streamer_check_handled_ts_structures (void)
{
  bool handled_p[LAST_TS_ENUM];
  unsigned i;

  memset (&handled_p, 0, sizeof (handled_p));

  /* These are the TS_* structures that are either handled or
     explicitly ignored by the streamer routines.  */
  handled_p[TS_BASE] = true;
  handled_p[TS_TYPED] = true;
  handled_p[TS_COMMON] = true;
  handled_p[TS_INT_CST] = true;
  handled_p[TS_REAL_CST] = true;
  handled_p[TS_FIXED_CST] = true;
  handled_p[TS_VECTOR] = true;
  handled_p[TS_STRING] = true;
  handled_p[TS_COMPLEX] = true;
  handled_p[TS_IDENTIFIER] = true;
  handled_p[TS_DECL_MINIMAL] = true;
  handled_p[TS_DECL_COMMON] = true;
  handled_p[TS_DECL_WRTL] = true;
  handled_p[TS_DECL_NON_COMMON] = true;
  handled_p[TS_DECL_WITH_VIS] = true;
  handled_p[TS_FIELD_DECL] = true;
  handled_p[TS_VAR_DECL] = true;
  handled_p[TS_PARM_DECL] = true;
  handled_p[TS_LABEL_DECL] = true;
  handled_p[TS_RESULT_DECL] = true;
  handled_p[TS_CONST_DECL] = true;
  handled_p[TS_TYPE_DECL] = true;
  handled_p[TS_FUNCTION_DECL] = true;
  handled_p[TS_TYPE_COMMON] = true;
  handled_p[TS_TYPE_WITH_LANG_SPECIFIC] = true;
  handled_p[TS_TYPE_NON_COMMON] = true;
  handled_p[TS_LIST] = true;
  handled_p[TS_VEC] = true;
  handled_p[TS_EXP] = true;
  handled_p[TS_SSA_NAME] = true;
  handled_p[TS_BLOCK] = true;
  handled_p[TS_BINFO] = true;
  handled_p[TS_STATEMENT_LIST] = true;
  handled_p[TS_CONSTRUCTOR] = true;
  handled_p[TS_OMP_CLAUSE] = true;
  handled_p[TS_OPTIMIZATION] = true;
  handled_p[TS_TARGET_OPTION] = true;
  handled_p[TS_TRANSLATION_UNIT_DECL] = true;

  /* Anything not marked above will trigger the following assertion.
     If this assertion triggers, it means that there is a new TS_*
     structure that should be handled by the streamer.  */
  for (i = 0; i < LAST_TS_ENUM; i++)
    gcc_assert (handled_p[i]);
}

/* Helper for streamer_tree_cache_insert_1.  Add T to CACHE->NODES at
   slot IX.  */

static void
streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
                                       unsigned ix, tree t, hashval_t hash)
{
  /* We're either replacing an old element or appending consecutively.  */
  if (cache->nodes.exists ())
    {
      if (cache->nodes.length () == ix)
        cache->nodes.safe_push (t);
      else
        cache->nodes[ix] = t;
    }
  if (cache->hashes.exists ())
    {
      if (cache->hashes.length () == ix)
        cache->hashes.safe_push (hash);
      else
        cache->hashes[ix] = hash;
    }
}

/* Helper for streamer_tree_cache_insert, streamer_tree_cache_append and
   streamer_tree_cache_replace_tree.  CACHE, T, and IX_P are as in
   streamer_tree_cache_insert.

   If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
   slot in the cache.  Otherwise, T is inserted at the position indicated
   in *IX_P.

   If T already existed in CACHE, return true.  Otherwise,
   return false.  */

static bool
streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
                              tree t, hashval_t hash, unsigned *ix_p,
                              bool insert_at_next_slot_p)
{
  bool existed_p;

  gcc_assert (t);

  unsigned int &ix = cache->node_map->get_or_insert (t, &existed_p);
  if (!existed_p)
    {
      /* Determine the next slot to use in the cache.  */
      if (insert_at_next_slot_p)
        ix = cache->next_idx++;
      else
        ix = *ix_p;

      streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
    }
  else
    {
      if (!insert_at_next_slot_p && ix != *ix_p)
        {
          /* If the caller wants to insert T at a specific slot
             location, and ENTRY->TO does not match *IX_P, add T to
             the requested location slot.  */
          ix = *ix_p;
          streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
        }
    }

  if (ix_p)
    *ix_p = ix;

  return existed_p;
}

/* Insert tree node T in CACHE.  If T already existed in the cache
   return true.  Otherwise, return false.

   If IX_P is non-null, update it with the index into the cache where
   T has been stored.  */

bool
streamer_tree_cache_insert (struct streamer_tree_cache_d *cache, tree t,
                            hashval_t hash, unsigned *ix_p)
{
  return streamer_tree_cache_insert_1 (cache, t, hash, ix_p, true);
}

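/* Illustrative sketch only, not part of the original file; kept under
   '#if 0' so it does not affect the build.  It shows one way a caller can
   use the return value and index documented above: the node still has to
   be emitted exactly when it was not already in the cache.  The function
   name is hypothetical.  */
#if 0
static bool
example_must_stream_p (struct streamer_tree_cache_d *cache, tree t,
                       hashval_t hash, unsigned *ix_p)
{
  /* Return true when T was not yet in CACHE, i.e. when the caller still
     has to write it out; *IX_P receives T's slot either way.  */
  return !streamer_tree_cache_insert (cache, t, hash, ix_p);
}
#endif
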
/* Replace the tree node with T in CACHE at slot IX.  */

void
streamer_tree_cache_replace_tree (struct streamer_tree_cache_d *cache,
                                  tree t, unsigned ix)
{
  hashval_t hash = 0;
  if (cache->hashes.exists ())
    hash = streamer_tree_cache_get_hash (cache, ix);
  if (!cache->node_map)
    streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
  else
    streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
}

/* Append tree node T to CACHE, even if T already existed in it.  */

void
streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
                            tree t, hashval_t hash)
{
  unsigned ix = cache->next_idx++;
  if (!cache->node_map)
    streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
  else
    streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
}

/* Return true if tree node T exists in CACHE, otherwise false.  If IX_P is
   not NULL, write to *IX_P the index into the cache where T is stored
   ((unsigned)-1 if T is not found).  */

bool
streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
                            unsigned *ix_p)
{
  unsigned *slot;
  bool retval;
  unsigned ix;

  gcc_assert (t);

  slot = cache->node_map->get (t);
  if (slot == NULL)
    {
      retval = false;
      ix = -1;
    }
  else
    {
      retval = true;
      ix = *slot;
    }

  if (ix_p)
    *ix_p = ix;

  return retval;
}

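/* Illustrative sketch only, not part of the original file; kept under
   '#if 0' so it does not affect the build.  It shows a read-only lookup
   as documented above: unlike streamer_tree_cache_insert, the cache is
   left unchanged, and on a miss the index is (unsigned) -1.  The function
   name is hypothetical.  */
#if 0
static bool
example_find_in_cache (struct streamer_tree_cache_d *cache, tree t)
{
  unsigned ix;
  bool found = streamer_tree_cache_lookup (cache, t, &ix);
  /* On a miss, IX is (unsigned) -1 as documented above.  */
  gcc_assert (found || ix == (unsigned) -1);
  return found;
}
#endif
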
/* Record NODE in CACHE.  */

static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload, simply
     don't preload them.
     ??? We'd want to verify that this doesn't happen, or alternatively
     do not recurse at all.  */
  if (node == char_type_node)
    return;

  gcc_checking_assert (node != boolean_type_node
                       && node != boolean_true_node
                       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries we'll simply stream
     some other tree instead.  A NULL tree will never be looked up, so it
     doesn't matter which tree we replace it with; just to be sure,
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* ???  FIXME, devise a better hash value.  But the hash needs to be equal
     for all frontend and lto1 invocations.  So just use the position
     in the cache as hash value.  */
  streamer_tree_cache_append (cache, node, cache->nodes.length ());

  if (POINTER_TYPE_P (node)
      || TREE_CODE (node) == COMPLEX_TYPE
      || TREE_CODE (node) == ARRAY_TYPE)
    record_common_node (cache, TREE_TYPE (node));
  else if (TREE_CODE (node) == RECORD_TYPE)
    {
      /* The FIELD_DECLs of structures should be shared, so that every
         COMPONENT_REF uses the same tree node when referencing a field.
         Pointer equality between FIELD_DECLs is used by the alias
         machinery to compute overlapping component references (see
         nonoverlapping_component_refs_p and
         nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
        record_common_node (cache, f);
    }
}

/* Preload common nodes into CACHE and make sure they are merged
   properly according to the gimple type table.  */

static void
preload_common_nodes (struct streamer_tree_cache_d *cache)
{
  unsigned i;

  for (i = 0; i < itk_none; i++)
    /* Skip itk_char.  char_type_node is dependent on -f[un]signed-char.  */
    if (i != itk_char)
      record_common_node (cache, integer_types[i]);

  for (i = 0; i < stk_type_kind_last; i++)
    record_common_node (cache, sizetype_tab[i]);

  for (i = 0; i < TI_MAX; i++)
    /* Skip boolean type and constants, they are frontend dependent.  */
    if (i != TI_BOOLEAN_TYPE
        && i != TI_BOOLEAN_FALSE
        && i != TI_BOOLEAN_TRUE
        /* MAIN_IDENTIFIER is not always initialized by Fortran FE.  */
        && i != TI_MAIN_IDENTIFIER
        /* PID_TYPE is initialized only by C family front-ends.  */
        && i != TI_PID_TYPE
        /* Skip optimization and target option nodes; they depend on flags.  */
        && i != TI_OPTIMIZATION_DEFAULT
        && i != TI_OPTIMIZATION_CURRENT
        && i != TI_TARGET_OPTION_DEFAULT
        && i != TI_TARGET_OPTION_CURRENT
        && i != TI_CURRENT_TARGET_PRAGMA
        && i != TI_CURRENT_OPTIMIZE_PRAGMA
        /* Skip va_list* related nodes if offloading.  For native LTO
           we want them to be merged for the stdarg pass, for offloading
           they might not be identical between host and offloading target.  */
        && (!lto_stream_offload_p
            || (i != TI_VA_LIST_TYPE
                && i != TI_VA_LIST_GPR_COUNTER_FIELD
                && i != TI_VA_LIST_FPR_COUNTER_FIELD)))
      record_common_node (cache, global_trees[i]);
}

/* Create a cache of pickled nodes.  */

struct streamer_tree_cache_d *
streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
{
  struct streamer_tree_cache_d *cache;

  cache = XCNEW (struct streamer_tree_cache_d);

  if (with_map)
    cache->node_map = new hash_map<tree, unsigned> (251);
  cache->next_idx = 0;
  if (with_vec)
    cache->nodes.create (165);
  if (with_hashes)
    cache->hashes.create (165);

  /* Load all the well-known tree nodes that are always created by
     the compiler on startup.  This prevents writing them out
     unnecessarily.  */
  preload_common_nodes (cache);

  return cache;
}

/* Delete the streamer cache C.  */

void
streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
{
  if (c == NULL)
    return;

  delete c->node_map;
  c->node_map = NULL;
  c->nodes.release ();
  c->hashes.release ();
  free (c);
}
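
/* Illustrative sketch only, not part of the original file; kept under
   '#if 0' so it does not affect the build.  It shows the full life cycle
   of a cache using only functions defined in this file: create it (which
   also preloads the common nodes), insert a node, and delete it when
   done.  The function name is hypothetical.  */
#if 0
static void
example_cache_lifecycle (tree t)
{
  /* Request hashes, a node -> index map, and a node vector.  */
  struct streamer_tree_cache_d *cache
    = streamer_tree_cache_create (true, true, true);

  unsigned ix;
  streamer_tree_cache_insert (cache, t, 0, &ix);

  streamer_tree_cache_delete (cache);
}
#endif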