[official-gcc.git] / gcc / lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2016 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
45 static void lto_write_tree (struct output_block*, tree, bool);
47 /* Clear the line info stored in OB. */
49 static void
50 clear_line_info (struct output_block *ob)
52 ob->current_file = NULL;
53 ob->current_line = 0;
54 ob->current_col = 0;
55 ob->current_sysp = false;
59 /* Create the output block and return it. SECTION_TYPE is
60 LTO_section_function_body or LTO_section_static_initializer. */
62 struct output_block *
63 create_output_block (enum lto_section_type section_type)
65 struct output_block *ob = XCNEW (struct output_block);
67 ob->section_type = section_type;
68 ob->decl_state = lto_get_out_decl_state ();
69 ob->main_stream = XCNEW (struct lto_output_stream);
70 ob->string_stream = XCNEW (struct lto_output_stream);
71 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
73 if (section_type == LTO_section_function_body)
74 ob->cfg_stream = XCNEW (struct lto_output_stream);
76 clear_line_info (ob);
78 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
79 gcc_obstack_init (&ob->obstack);
81 return ob;
85 /* Destroy the output block OB. */
87 void
88 destroy_output_block (struct output_block *ob)
90 enum lto_section_type section_type = ob->section_type;
92 delete ob->string_hash_table;
93 ob->string_hash_table = NULL;
95 free (ob->main_stream);
96 free (ob->string_stream);
97 if (section_type == LTO_section_function_body)
98 free (ob->cfg_stream);
100 streamer_tree_cache_delete (ob->writer_cache);
101 obstack_free (&ob->obstack, NULL);
103 free (ob);
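/* Illustrative only (not code from this file): a section writer is expected
   to pair create_output_block with destroy_output_block around the actual
   streaming and the final produce_asm call, e.g.

       struct output_block *ob = create_output_block (LTO_section_function_body);
       ... stream the body into ob->main_stream / ob->cfg_stream ...
       produce_asm (ob, fndecl);
       destroy_output_block (ob);

   where fndecl stands for the FUNCTION_DECL being emitted. */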
107 /* Look up NODE in the type table and write the index for it to OB. */
109 static void
110 output_type_ref (struct output_block *ob, tree node)
112 streamer_write_record_start (ob, LTO_type_ref);
113 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
117 /* Return true if tree node T is written to various tables. For these
118 nodes, we sometimes want to write their physical representation
119 (via lto_output_tree), and sometimes we need to emit an index
120 reference into a table (via lto_output_tree_ref). */
122 static bool
123 tree_is_indexable (tree t)
125 /* Parameters and return values of functions of variably modified types
126 must go to global stream, because they may be used in the type
127 definition. */
128 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
129 && DECL_CONTEXT (t))
130 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
131 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
132 else if (TREE_CODE (t) == IMPORTED_DECL)
133 return false;
134 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
135 || TREE_CODE (t) == TYPE_DECL
136 || TREE_CODE (t) == CONST_DECL
137 || TREE_CODE (t) == NAMELIST_DECL)
138 && decl_function_context (t))
139 return false;
140 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
141 return false;
142 /* Variably modified types need to be streamed alongside function
143 bodies because they can refer to local entities. Together with
144 them we have to localize their members as well.
145 ??? In theory that includes non-FIELD_DECLs as well. */
146 else if (TYPE_P (t)
147 && variably_modified_type_p (t, NULL_TREE))
148 return false;
149 else if (TREE_CODE (t) == FIELD_DECL
150 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
151 return false;
152 else
153 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
157 /* Output info about new location into bitpack BP.
158 After outputting bitpack, lto_output_location_data has
159 to be done to output actual data. */
161 void
162 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
163 location_t loc)
165 expanded_location xloc;
167 loc = LOCATION_LOCUS (loc);
168 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
169 loc < RESERVED_LOCATION_COUNT
170 ? loc : RESERVED_LOCATION_COUNT);
171 if (loc < RESERVED_LOCATION_COUNT)
172 return;
174 xloc = expand_location (loc);
176 bp_pack_value (bp, ob->current_file != xloc.file, 1);
177 bp_pack_value (bp, ob->current_line != xloc.line, 1);
178 bp_pack_value (bp, ob->current_col != xloc.column, 1);
180 if (ob->current_file != xloc.file)
182 bp_pack_string (ob, bp, xloc.file, true);
183 bp_pack_value (bp, xloc.sysp, 1);
185 ob->current_file = xloc.file;
186 ob->current_sysp = xloc.sysp;
188 if (ob->current_line != xloc.line)
189 bp_pack_var_len_unsigned (bp, xloc.line);
190 ob->current_line = xloc.line;
192 if (ob->current_col != xloc.column)
193 bp_pack_var_len_unsigned (bp, xloc.column);
194 ob->current_col = xloc.column;
198 /* If EXPR is an indexable tree node, output a reference to it to
199 output block OB. Otherwise, output the physical representation of
200 EXPR to OB. */
202 static void
203 lto_output_tree_ref (struct output_block *ob, tree expr)
205 enum tree_code code;
207 if (TYPE_P (expr))
209 output_type_ref (ob, expr);
210 return;
213 code = TREE_CODE (expr);
214 switch (code)
216 case SSA_NAME:
217 streamer_write_record_start (ob, LTO_ssa_name_ref);
218 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
219 break;
221 case FIELD_DECL:
222 streamer_write_record_start (ob, LTO_field_decl_ref);
223 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
224 break;
226 case FUNCTION_DECL:
227 streamer_write_record_start (ob, LTO_function_decl_ref);
228 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
229 break;
231 case VAR_DECL:
232 case DEBUG_EXPR_DECL:
233 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
234 case PARM_DECL:
235 streamer_write_record_start (ob, LTO_global_decl_ref);
236 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
237 break;
239 case CONST_DECL:
240 streamer_write_record_start (ob, LTO_const_decl_ref);
241 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
244 case IMPORTED_DECL:
245 gcc_assert (decl_function_context (expr) == NULL);
246 streamer_write_record_start (ob, LTO_imported_decl_ref);
247 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
248 break;
250 case TYPE_DECL:
251 streamer_write_record_start (ob, LTO_type_decl_ref);
252 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
253 break;
255 case NAMELIST_DECL:
256 streamer_write_record_start (ob, LTO_namelist_decl_ref);
257 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
260 case NAMESPACE_DECL:
261 streamer_write_record_start (ob, LTO_namespace_decl_ref);
262 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
263 break;
265 case LABEL_DECL:
266 streamer_write_record_start (ob, LTO_label_decl_ref);
267 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
270 case RESULT_DECL:
271 streamer_write_record_start (ob, LTO_result_decl_ref);
272 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
275 case TRANSLATION_UNIT_DECL:
276 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
277 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278 break;
280 default:
281 /* No other node is indexable, so it should have been handled by
282 lto_output_tree. */
283 gcc_unreachable ();
288 /* Return true if EXPR is a tree node that can be written to disk. */
290 static inline bool
291 lto_is_streamable (tree expr)
293 enum tree_code code = TREE_CODE (expr);
295 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
296 name version in lto_output_tree_ref (see output_ssa_names). */
297 return !is_lang_specific (expr)
298 && code != SSA_NAME
299 && code != CALL_EXPR
300 && code != LANG_TYPE
301 && code != MODIFY_EXPR
302 && code != INIT_EXPR
303 && code != TARGET_EXPR
304 && code != BIND_EXPR
305 && code != WITH_CLEANUP_EXPR
306 && code != STATEMENT_LIST
307 && (code == CASE_LABEL_EXPR
308 || code == DECL_EXPR
309 || TREE_CODE_CLASS (code) != tcc_statement);
312 /* Very rough estimate of the streaming size of the initializer. If we ignored
313 the presence of strings, we could simply count the number of non-indexable
314 tree nodes and the number of references to indexable nodes. Strings however
315 may be very large and we do not want to dump them into the global stream.
317 Count the size of the initializer until the size budget in DATA drops below zero. */
319 static tree
320 subtract_estimated_size (tree *tp, int *ws, void *data)
322 long *sum = (long *)data;
323 if (tree_is_indexable (*tp))
325 /* An indexable tree is one reference into the global stream.
326 Guess it may be about 4 bytes. */
327 *sum -= 4;
328 *ws = 0;
330 /* String table entry + base of tree node needs to be streamed. */
331 if (TREE_CODE (*tp) == STRING_CST)
332 *sum -= TREE_STRING_LENGTH (*tp) + 8;
333 else
335 /* Identifiers are also variable length but should not appear
336 naked in a constructor. */
337 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
338 /* We do not really attempt to work out the size of a pickled tree, as
339 it is very variable. Make it bigger than the reference. */
340 *sum -= 16;
342 if (*sum < 0)
343 return *tp;
344 return NULL_TREE;
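/* For example (illustrative): with the 30-byte budget used by
   get_symbol_initial_value below, a STRING_CST of length 40 subtracts
   48 bytes, the running sum in DATA goes negative, the callback returns
   the node and walk_tree reports it, so the initializer is not inlined
   into the global stream. */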
348 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
350 static tree
351 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
353 gcc_checking_assert (DECL_P (expr)
354 && TREE_CODE (expr) != FUNCTION_DECL
355 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
357 /* Handle DECL_INITIAL for symbols. */
358 tree initial = DECL_INITIAL (expr);
359 if (TREE_CODE (expr) == VAR_DECL
360 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
361 && !DECL_IN_CONSTANT_POOL (expr)
362 && initial)
364 varpool_node *vnode;
365 /* Extra section needs about 30 bytes; do not produce it for simple
366 scalar values. */
367 if (!(vnode = varpool_node::get (expr))
368 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
369 initial = error_mark_node;
370 if (initial != error_mark_node)
372 long max_size = 30;
373 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
374 NULL))
375 initial = error_mark_node;
379 return initial;
383 /* Write a physical representation of tree node EXPR to output block
384 OB. If REF_P is true, the leaves of EXPR are emitted as references
385 via lto_output_tree_ref. IX is the index into the streamer cache
386 where EXPR is stored. */
388 static void
389 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
391 /* Pack all the non-pointer fields in EXPR into a bitpack and write
392 the resulting bitpack. */
393 streamer_write_tree_bitfields (ob, expr);
395 /* Write all the pointer fields in EXPR. */
396 streamer_write_tree_body (ob, expr, ref_p);
398 /* Write any LTO-specific data to OB. */
399 if (DECL_P (expr)
400 && TREE_CODE (expr) != FUNCTION_DECL
401 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
403 /* Handle DECL_INITIAL for symbols. */
404 tree initial = get_symbol_initial_value
405 (ob->decl_state->symtab_node_encoder, expr);
406 stream_write_tree (ob, initial, ref_p);
410 /* Write a physical representation of tree node EXPR to output block
411 OB. If REF_P is true, the leaves of EXPR are emitted as references
412 via lto_output_tree_ref. IX is the index into the streamer cache
413 where EXPR is stored. */
415 static void
416 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
418 if (!lto_is_streamable (expr))
419 internal_error ("tree code %qs is not supported in LTO streams",
420 get_tree_code_name (TREE_CODE (expr)));
422 /* Write the header, containing everything needed to materialize
423 EXPR on the reading side. */
424 streamer_write_tree_header (ob, expr);
426 lto_write_tree_1 (ob, expr, ref_p);
428 /* Mark the end of EXPR. */
429 streamer_write_zero (ob);
432 /* Emit the physical representation of tree node EXPR to output block OB,
433 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
434 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
436 static void
437 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
438 bool ref_p, bool this_ref_p)
440 unsigned ix;
442 gcc_checking_assert (expr != NULL_TREE
443 && !(this_ref_p && tree_is_indexable (expr)));
445 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
446 expr, hash, &ix);
447 gcc_assert (!exists_p);
448 if (TREE_CODE (expr) == INTEGER_CST
449 && !TREE_OVERFLOW (expr))
451 /* Shared INTEGER_CST nodes are special because they need their
452 original type to be materialized by the reader (to implement
453 TYPE_CACHED_VALUES). */
454 streamer_write_integer_cst (ob, expr, ref_p);
456 else
458 /* This is the first time we see EXPR, write its fields
459 to OB. */
460 lto_write_tree (ob, expr, ref_p);
464 class DFS
466 public:
467 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
468 bool single_p);
469 ~DFS ();
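/* One entry on the SCC stack: the tree together with the hash value
   later computed for it by hash_scc. */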
471 struct scc_entry
473 tree t;
474 hashval_t hash;
476 vec<scc_entry> sccstack;
478 private:
479 struct sccs
481 unsigned int dfsnum;
482 unsigned int low;
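/* A worklist entry for the iterative DFS: the tree to visit, the SCC
   state of the node it was reached from, this node's own state once
   allocated, and the ref_p/this_ref_p flags it should be streamed with. */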
484 struct worklist
486 tree expr;
487 sccs *from_state;
488 sccs *cstate;
489 bool ref_p;
490 bool this_ref_p;
493 static int scc_entry_compare (const void *, const void *);
495 void DFS_write_tree_body (struct output_block *ob,
496 tree expr, sccs *expr_state, bool ref_p);
498 void DFS_write_tree (struct output_block *ob, sccs *from_state,
499 tree expr, bool ref_p, bool this_ref_p);
501 hashval_t
502 hash_scc (struct output_block *ob, unsigned first, unsigned size,
503 bool ref_p, bool this_ref_p);
505 hash_map<tree, sccs *> sccstate;
506 vec<worklist> worklist_vec;
507 struct obstack sccstate_obstack;
510 /* Emit the physical representation of tree node EXPR to output block OB,
511 using depth-first search on the subgraph. If THIS_REF_P is true, the
512 leaves of EXPR are emitted as references via lto_output_tree_ref.
513 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
514 this is for a rewalk of a single leaf SCC. */
516 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
517 bool single_p)
519 unsigned int next_dfs_num = 1;
520 sccstack.create (0);
521 gcc_obstack_init (&sccstate_obstack);
522 worklist_vec = vNULL;
523 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
524 while (!worklist_vec.is_empty ())
526 worklist &w = worklist_vec.last ();
527 expr = w.expr;
528 sccs *from_state = w.from_state;
529 sccs *cstate = w.cstate;
530 ref_p = w.ref_p;
531 this_ref_p = w.this_ref_p;
532 if (cstate == NULL)
534 sccs **slot = &sccstate.get_or_insert (expr);
535 cstate = *slot;
536 if (cstate)
538 gcc_checking_assert (from_state);
539 if (cstate->dfsnum < from_state->dfsnum)
540 from_state->low = MIN (cstate->dfsnum, from_state->low);
541 worklist_vec.pop ();
542 continue;
545 scc_entry e = { expr, 0 };
546 /* Not yet visited. DFS recurse and push it onto the stack. */
547 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
548 sccstack.safe_push (e);
549 cstate->dfsnum = next_dfs_num++;
550 cstate->low = cstate->dfsnum;
551 w.cstate = cstate;
553 if (TREE_CODE (expr) == INTEGER_CST
554 && !TREE_OVERFLOW (expr))
555 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
556 else
558 DFS_write_tree_body (ob, expr, cstate, ref_p);
560 /* Walk any LTO-specific edges. */
561 if (DECL_P (expr)
562 && TREE_CODE (expr) != FUNCTION_DECL
563 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
565 /* Handle DECL_INITIAL for symbols. */
566 tree initial
567 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
568 expr);
569 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
572 continue;
575 /* See if we found an SCC. */
576 if (cstate->low == cstate->dfsnum)
578 unsigned first, size;
579 tree x;
581 /* If we are re-walking a single leaf SCC just pop it,
582 let the earlier worklist item access the sccstack. */
583 if (single_p)
585 worklist_vec.pop ();
586 continue;
589 /* Pop the SCC and compute its size. */
590 first = sccstack.length ();
593 x = sccstack[--first].t;
595 while (x != expr);
596 size = sccstack.length () - first;
598 /* No need to compute hashes for LTRANS units, we don't perform
599 any merging there. */
600 hashval_t scc_hash = 0;
601 unsigned scc_entry_len = 0;
602 if (!flag_wpa)
604 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
606 /* Put the entries with the least number of collisions first. */
607 unsigned entry_start = 0;
608 scc_entry_len = size + 1;
609 for (unsigned i = 0; i < size;)
611 unsigned from = i;
612 for (i = i + 1; i < size
613 && (sccstack[first + i].hash
614 == sccstack[first + from].hash); ++i)
616 if (i - from < scc_entry_len)
618 scc_entry_len = i - from;
619 entry_start = from;
622 for (unsigned i = 0; i < scc_entry_len; ++i)
623 std::swap (sccstack[first + i],
624 sccstack[first + entry_start + i]);
626 /* We already sorted SCC deterministically in hash_scc. */
628 /* Check that we have only one SCC.
629 Naturally we may have conflicts if the hash function is not
630 strong enough. Let's see how far this gets. */
631 gcc_checking_assert (scc_entry_len == 1);
634 /* Write LTO_tree_scc. */
635 streamer_write_record_start (ob, LTO_tree_scc);
636 streamer_write_uhwi (ob, size);
637 streamer_write_uhwi (ob, scc_hash);
639 /* Write size-1 SCCs without wrapping them inside SCC bundles.
640 All INTEGER_CSTs need to be handled this way as we need
641 their type to materialize them. Also builtins are handled
642 this way.
643 ??? We still wrap these in LTO_tree_scc so at the
644 input side we can properly identify the tree we want
645 to ultimately return. */
646 if (size == 1)
647 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
648 else
650 /* Write the size of the SCC entry candidates. */
651 streamer_write_uhwi (ob, scc_entry_len);
653 /* Write all headers and populate the streamer cache. */
654 for (unsigned i = 0; i < size; ++i)
656 hashval_t hash = sccstack[first+i].hash;
657 tree t = sccstack[first+i].t;
658 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
659 t, hash, NULL);
660 gcc_assert (!exists_p);
662 if (!lto_is_streamable (t))
663 internal_error ("tree code %qs is not supported "
664 "in LTO streams",
665 get_tree_code_name (TREE_CODE (t)));
667 /* Write the header, containing everything needed to
668 materialize EXPR on the reading side. */
669 streamer_write_tree_header (ob, t);
672 /* Write the bitpacks and tree references. */
673 for (unsigned i = 0; i < size; ++i)
675 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
677 /* Mark the end of the tree. */
678 streamer_write_zero (ob);
682 /* Finally truncate the vector. */
683 sccstack.truncate (first);
685 if (from_state)
686 from_state->low = MIN (from_state->low, cstate->low);
687 worklist_vec.pop ();
688 continue;
691 gcc_checking_assert (from_state);
692 from_state->low = MIN (from_state->low, cstate->low);
693 if (cstate->dfsnum < from_state->dfsnum)
694 from_state->low = MIN (cstate->dfsnum, from_state->low);
695 worklist_vec.pop ();
697 worklist_vec.release ();
700 DFS::~DFS ()
702 sccstack.release ();
703 obstack_free (&sccstate_obstack, NULL);
706 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
707 DFS recurse for all tree edges originating from it. */
709 void
710 DFS::DFS_write_tree_body (struct output_block *ob,
711 tree expr, sccs *expr_state, bool ref_p)
713 #define DFS_follow_tree_edge(DEST) \
714 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
716 enum tree_code code;
718 code = TREE_CODE (expr);
720 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
722 if (TREE_CODE (expr) != IDENTIFIER_NODE)
723 DFS_follow_tree_edge (TREE_TYPE (expr));
726 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
728 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
729 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
732 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
734 DFS_follow_tree_edge (TREE_REALPART (expr));
735 DFS_follow_tree_edge (TREE_IMAGPART (expr));
738 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
740 /* Drop names that were created for anonymous entities. */
741 if (DECL_NAME (expr)
742 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
743 && anon_aggrname_p (DECL_NAME (expr)))
745 else
746 DFS_follow_tree_edge (DECL_NAME (expr));
747 DFS_follow_tree_edge (DECL_CONTEXT (expr));
750 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
752 DFS_follow_tree_edge (DECL_SIZE (expr));
753 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
755 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
756 special handling in LTO, it must be handled by streamer hooks. */
758 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
760 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
761 for early inlining so drop it on the floor instead of ICEing in
762 dwarf2out.c.
763 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
764 declarations which should be eliminated by decl merging. Be sure none
765 leaks to this point. */
766 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
768 if ((TREE_CODE (expr) == VAR_DECL
769 || TREE_CODE (expr) == PARM_DECL)
770 && DECL_HAS_VALUE_EXPR_P (expr))
771 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
772 if (TREE_CODE (expr) == VAR_DECL)
773 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
776 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
778 if (TREE_CODE (expr) == TYPE_DECL)
779 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
782 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
784 /* Make sure we don't inadvertently set the assembler name. */
785 if (DECL_ASSEMBLER_NAME_SET_P (expr))
786 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
789 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
791 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
792 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
793 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
794 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
795 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
798 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
800 DFS_follow_tree_edge (DECL_VINDEX (expr));
801 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
802 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
803 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
806 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
808 DFS_follow_tree_edge (TYPE_SIZE (expr));
809 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
810 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
811 DFS_follow_tree_edge (TYPE_NAME (expr));
812 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
813 reconstructed during fixup. */
814 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
815 during fixup. */
816 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
817 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
818 /* TYPE_CANONICAL is re-computed during type merging, so no need
819 to follow it here. */
820 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
823 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
825 if (TREE_CODE (expr) == ENUMERAL_TYPE)
826 DFS_follow_tree_edge (TYPE_VALUES (expr));
827 else if (TREE_CODE (expr) == ARRAY_TYPE)
828 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
829 else if (RECORD_OR_UNION_TYPE_P (expr))
830 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
831 DFS_follow_tree_edge (t);
832 else if (TREE_CODE (expr) == FUNCTION_TYPE
833 || TREE_CODE (expr) == METHOD_TYPE)
834 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
836 if (!POINTER_TYPE_P (expr))
837 DFS_follow_tree_edge (TYPE_MINVAL (expr));
838 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
839 if (RECORD_OR_UNION_TYPE_P (expr))
840 DFS_follow_tree_edge (TYPE_BINFO (expr));
843 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
845 DFS_follow_tree_edge (TREE_PURPOSE (expr));
846 DFS_follow_tree_edge (TREE_VALUE (expr));
847 DFS_follow_tree_edge (TREE_CHAIN (expr));
850 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
852 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
853 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
856 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
858 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
859 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
860 DFS_follow_tree_edge (TREE_BLOCK (expr));
863 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
865 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
866 if (VAR_OR_FUNCTION_DECL_P (t)
867 && DECL_EXTERNAL (t))
868 /* We have to stream externals in the block chain as
869 non-references. See also
870 tree-streamer-out.c:streamer_write_chain. */
871 DFS_write_tree (ob, expr_state, t, ref_p, false);
872 else
873 DFS_follow_tree_edge (t);
875 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
877 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
878 handle - those that represent inlined function scopes.
879 For the rest, drop them on the floor instead of ICEing
880 in dwarf2out.c, but keep the notion of whether the block
881 is an inlined block by referring to itself for the sake of
882 tree_nonartificial_location. */
883 if (inlined_function_outer_scope_p (expr))
885 tree ultimate_origin = block_ultimate_origin (expr);
886 DFS_follow_tree_edge (ultimate_origin);
888 else if (BLOCK_ABSTRACT_ORIGIN (expr))
889 DFS_follow_tree_edge (expr);
890 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
891 information for early inlined BLOCKs so drop it on the floor instead
892 of ICEing in dwarf2out.c. */
894 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
895 streaming time. */
897 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
898 list is re-constructed from BLOCK_SUPERCONTEXT. */
901 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
903 unsigned i;
904 tree t;
906 /* Note that the number of BINFO slots has already been emitted in
907 EXPR's header (see streamer_write_tree_header) because this length
908 is needed to build the empty BINFO node on the reader side. */
909 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
910 DFS_follow_tree_edge (t);
911 DFS_follow_tree_edge (BINFO_OFFSET (expr));
912 DFS_follow_tree_edge (BINFO_VTABLE (expr));
913 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
915 /* The number of BINFO_BASE_ACCESSES has already been emitted in
916 EXPR's bitfield section. */
917 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
918 DFS_follow_tree_edge (t);
920 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
921 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
924 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
926 unsigned i;
927 tree index, value;
929 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
931 DFS_follow_tree_edge (index);
932 DFS_follow_tree_edge (value);
936 if (code == OMP_CLAUSE)
938 int i;
939 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
940 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
941 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
944 #undef DFS_follow_tree_edge
947 /* Return a hash value for the tree T.
948 CACHE holds hash values of trees outside the current SCC. MAP, if non-NULL,
949 may hold hash values of trees inside the current SCC. */
951 static hashval_t
952 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
954 inchash::hash hstate;
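/* Mix the hash of SIBLING into HSTATE: zero for a null sibling, the
   cached hash for trees outside the current SCC, the value recorded in
   MAP for trees inside the SCC when propagating, and a constant
   otherwise. */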
956 #define visit(SIBLING) \
957 do { \
958 unsigned ix; \
959 if (!SIBLING) \
960 hstate.add_int (0); \
961 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
962 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
963 else if (map) \
964 hstate.add_int (*map->get (SIBLING)); \
965 else \
966 hstate.add_int (1); \
967 } while (0)
969 /* Hash TS_BASE. */
970 enum tree_code code = TREE_CODE (t);
971 hstate.add_int (code);
972 if (!TYPE_P (t))
974 hstate.add_flag (TREE_SIDE_EFFECTS (t));
975 hstate.add_flag (TREE_CONSTANT (t));
976 hstate.add_flag (TREE_READONLY (t));
977 hstate.add_flag (TREE_PUBLIC (t));
979 hstate.add_flag (TREE_ADDRESSABLE (t));
980 hstate.add_flag (TREE_THIS_VOLATILE (t));
981 if (DECL_P (t))
982 hstate.add_flag (DECL_UNSIGNED (t));
983 else if (TYPE_P (t))
984 hstate.add_flag (TYPE_UNSIGNED (t));
985 if (TYPE_P (t))
986 hstate.add_flag (TYPE_ARTIFICIAL (t));
987 else
988 hstate.add_flag (TREE_NO_WARNING (t));
989 hstate.add_flag (TREE_NOTHROW (t));
990 hstate.add_flag (TREE_STATIC (t));
991 hstate.add_flag (TREE_PROTECTED (t));
992 hstate.add_flag (TREE_DEPRECATED (t));
993 if (code != TREE_BINFO)
994 hstate.add_flag (TREE_PRIVATE (t));
995 if (TYPE_P (t))
997 hstate.add_flag (AGGREGATE_TYPE_P (t)
998 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
999 hstate.add_flag (TYPE_ADDR_SPACE (t));
1001 else if (code == SSA_NAME)
1002 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1003 hstate.commit_flag ();
1005 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1007 int i;
1008 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
1009 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
1010 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1011 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
1014 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1016 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1017 hstate.add_flag (r.cl);
1018 hstate.add_flag (r.sign);
1019 hstate.add_flag (r.signalling);
1020 hstate.add_flag (r.canonical);
1021 hstate.commit_flag ();
1022 hstate.add_int (r.uexp);
1023 hstate.add (r.sig, sizeof (r.sig));
1026 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1028 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1029 hstate.add_int (f.mode);
1030 hstate.add_int (f.data.low);
1031 hstate.add_int (f.data.high);
1034 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1036 hstate.add_wide_int (DECL_MODE (t));
1037 hstate.add_flag (DECL_NONLOCAL (t));
1038 hstate.add_flag (DECL_VIRTUAL_P (t));
1039 hstate.add_flag (DECL_IGNORED_P (t));
1040 hstate.add_flag (DECL_ABSTRACT_P (t));
1041 hstate.add_flag (DECL_ARTIFICIAL (t));
1042 hstate.add_flag (DECL_USER_ALIGN (t));
1043 hstate.add_flag (DECL_PRESERVE_P (t));
1044 hstate.add_flag (DECL_EXTERNAL (t));
1045 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1046 hstate.commit_flag ();
1047 hstate.add_int (DECL_ALIGN (t));
1048 if (code == LABEL_DECL)
1050 hstate.add_int (EH_LANDING_PAD_NR (t));
1051 hstate.add_int (LABEL_DECL_UID (t));
1053 else if (code == FIELD_DECL)
1055 hstate.add_flag (DECL_PACKED (t));
1056 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1057 hstate.add_int (DECL_OFFSET_ALIGN (t));
1059 else if (code == VAR_DECL)
1061 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1062 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1064 if (code == RESULT_DECL
1065 || code == PARM_DECL
1066 || code == VAR_DECL)
1068 hstate.add_flag (DECL_BY_REFERENCE (t));
1069 if (code == VAR_DECL
1070 || code == PARM_DECL)
1071 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1073 hstate.commit_flag ();
1076 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1077 hstate.add_int (DECL_REGISTER (t));
1079 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1081 hstate.add_flag (DECL_COMMON (t));
1082 hstate.add_flag (DECL_DLLIMPORT_P (t));
1083 hstate.add_flag (DECL_WEAK (t));
1084 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1085 hstate.add_flag (DECL_COMDAT (t));
1086 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1087 hstate.add_int (DECL_VISIBILITY (t));
1088 if (code == VAR_DECL)
1090 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1091 hstate.add_flag (DECL_HARD_REGISTER (t));
1092 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1094 if (TREE_CODE (t) == FUNCTION_DECL)
1096 hstate.add_flag (DECL_FINAL_P (t));
1097 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1098 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1100 hstate.commit_flag ();
1103 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1105 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1106 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1107 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1108 hstate.add_flag (DECL_UNINLINABLE (t));
1109 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1110 hstate.add_flag (DECL_IS_NOVOPS (t));
1111 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1112 hstate.add_flag (DECL_IS_MALLOC (t));
1113 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1114 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1115 hstate.add_flag (DECL_STATIC_CHAIN (t));
1116 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1117 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1118 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1119 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1120 hstate.add_flag (DECL_PURE_P (t));
1121 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1122 hstate.commit_flag ();
1123 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1124 hstate.add_int (DECL_FUNCTION_CODE (t));
1127 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1129 hstate.add_wide_int (TYPE_MODE (t));
1130 hstate.add_flag (TYPE_STRING_FLAG (t));
1131 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1132 no streaming. */
1133 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1134 hstate.add_flag (TYPE_PACKED (t));
1135 hstate.add_flag (TYPE_RESTRICT (t));
1136 hstate.add_flag (TYPE_USER_ALIGN (t));
1137 hstate.add_flag (TYPE_READONLY (t));
1138 if (RECORD_OR_UNION_TYPE_P (t))
1140 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1141 hstate.add_flag (TYPE_FINAL_P (t));
1143 else if (code == ARRAY_TYPE)
1144 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1145 hstate.commit_flag ();
1146 hstate.add_int (TYPE_PRECISION (t));
1147 hstate.add_int (TYPE_ALIGN (t));
1150 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1151 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1152 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1154 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1155 /* We don't stream these when passing things to a different target. */
1156 && !lto_stream_offload_p)
1157 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1159 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1160 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1162 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1163 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1165 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1166 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1168 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1170 if (code != IDENTIFIER_NODE)
1171 visit (TREE_TYPE (t));
1174 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1175 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1176 visit (VECTOR_CST_ELT (t, i));
1178 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1180 visit (TREE_REALPART (t));
1181 visit (TREE_IMAGPART (t));
1184 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1186 /* Drop names that were created for anonymous entities. */
1187 if (DECL_NAME (t)
1188 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1189 && anon_aggrname_p (DECL_NAME (t)))
1191 else
1192 visit (DECL_NAME (t));
1193 if (DECL_FILE_SCOPE_P (t))
1195 else
1196 visit (DECL_CONTEXT (t));
1199 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1201 visit (DECL_SIZE (t));
1202 visit (DECL_SIZE_UNIT (t));
1203 visit (DECL_ATTRIBUTES (t));
1204 if ((code == VAR_DECL
1205 || code == PARM_DECL)
1206 && DECL_HAS_VALUE_EXPR_P (t))
1207 visit (DECL_VALUE_EXPR (t));
1208 if (code == VAR_DECL
1209 && DECL_HAS_DEBUG_EXPR_P (t))
1210 visit (DECL_DEBUG_EXPR (t));
1211 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1212 be able to call get_symbol_initial_value. */
1215 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1217 if (code == TYPE_DECL)
1218 visit (DECL_ORIGINAL_TYPE (t));
1221 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1223 if (DECL_ASSEMBLER_NAME_SET_P (t))
1224 visit (DECL_ASSEMBLER_NAME (t));
1227 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1229 visit (DECL_FIELD_OFFSET (t));
1230 visit (DECL_BIT_FIELD_TYPE (t));
1231 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1232 visit (DECL_FIELD_BIT_OFFSET (t));
1233 visit (DECL_FCONTEXT (t));
1236 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1238 visit (DECL_VINDEX (t));
1239 visit (DECL_FUNCTION_PERSONALITY (t));
1240 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1241 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1244 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1246 visit (TYPE_SIZE (t));
1247 visit (TYPE_SIZE_UNIT (t));
1248 visit (TYPE_ATTRIBUTES (t));
1249 visit (TYPE_NAME (t));
1250 visit (TYPE_MAIN_VARIANT (t));
1251 if (TYPE_FILE_SCOPE_P (t))
1253 else
1254 visit (TYPE_CONTEXT (t));
1255 visit (TYPE_STUB_DECL (t));
1258 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1260 if (code == ENUMERAL_TYPE)
1261 visit (TYPE_VALUES (t));
1262 else if (code == ARRAY_TYPE)
1263 visit (TYPE_DOMAIN (t));
1264 else if (RECORD_OR_UNION_TYPE_P (t))
1265 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1266 visit (f);
1267 else if (code == FUNCTION_TYPE
1268 || code == METHOD_TYPE)
1269 visit (TYPE_ARG_TYPES (t));
1270 if (!POINTER_TYPE_P (t))
1271 visit (TYPE_MINVAL (t));
1272 visit (TYPE_MAXVAL (t));
1273 if (RECORD_OR_UNION_TYPE_P (t))
1274 visit (TYPE_BINFO (t));
1277 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1279 visit (TREE_PURPOSE (t));
1280 visit (TREE_VALUE (t));
1281 visit (TREE_CHAIN (t));
1284 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1285 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1286 visit (TREE_VEC_ELT (t, i));
1288 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1290 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1291 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1292 visit (TREE_OPERAND (t, i));
1295 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1297 unsigned i;
1298 tree b;
1299 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1300 visit (b);
1301 visit (BINFO_OFFSET (t));
1302 visit (BINFO_VTABLE (t));
1303 visit (BINFO_VPTR_FIELD (t));
1304 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1305 visit (b);
1306 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1307 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1310 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1312 unsigned i;
1313 tree index, value;
1314 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1315 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1317 visit (index);
1318 visit (value);
1322 if (code == OMP_CLAUSE)
1324 int i;
1325 HOST_WIDE_INT val;
1327 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1328 switch (OMP_CLAUSE_CODE (t))
1330 case OMP_CLAUSE_DEFAULT:
1331 val = OMP_CLAUSE_DEFAULT_KIND (t);
1332 break;
1333 case OMP_CLAUSE_SCHEDULE:
1334 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1335 break;
1336 case OMP_CLAUSE_DEPEND:
1337 val = OMP_CLAUSE_DEPEND_KIND (t);
1338 break;
1339 case OMP_CLAUSE_MAP:
1340 val = OMP_CLAUSE_MAP_KIND (t);
1341 break;
1342 case OMP_CLAUSE_PROC_BIND:
1343 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1344 break;
1345 case OMP_CLAUSE_REDUCTION:
1346 val = OMP_CLAUSE_REDUCTION_CODE (t);
1347 break;
1348 default:
1349 val = 0;
1350 break;
1352 hstate.add_wide_int (val);
1353 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1354 visit (OMP_CLAUSE_OPERAND (t, i));
1355 visit (OMP_CLAUSE_CHAIN (t));
1358 return hstate.end ();
1360 #undef visit
1363 /* Compare two SCC entries by their hash value for qsorting them. */
1366 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1368 const scc_entry *p1 = (const scc_entry *) p1_;
1369 const scc_entry *p2 = (const scc_entry *) p2_;
1370 if (p1->hash < p2->hash)
1371 return -1;
1372 else if (p1->hash > p2->hash)
1373 return 1;
1374 return 0;
1377 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1378 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1380 hashval_t
1381 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1382 bool ref_p, bool this_ref_p)
1384 unsigned int last_classes = 0, iterations = 0;
1386 /* Compute hash values for the SCC members. */
1387 for (unsigned i = 0; i < size; ++i)
1388 sccstack[first+i].hash
1389 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1391 if (size == 1)
1392 return sccstack[first].hash;
1394 /* We aim to get a unique hash for every tree within the SCC and compute the
1395 hash value of the whole SCC by combining all values together in a stable
1396 (entry-point independent) order. This guarantees that the same SCC regions
1397 within different translation units will get the same hash values and
1398 therefore will be merged at WPA time.
1400 Often the hashes are already unique. In that case we compute the SCC hash
1401 by combining individual hash values in an increasing order.
1403 If there are duplicates, we seek at least one tree with a unique hash (and
1404 pick the one with minimal hash and this property). Then we obtain a stable
1405 order by a DFS walk starting from this unique tree and use the index
1406 within this order to make individual hash values unique.
1408 If there is no tree with a unique hash, we iteratively propagate the hash
1409 values across the internal edges of the SCC. This usually quickly leads
1410 to unique hashes. Consider, for example, an SCC containing two pointers
1411 that are identical except for the types they point to, and assume that
1412 these types are also part of the SCC. The propagation will add the
1413 points-to type information into their hash values. */
1416 /* Sort the SCC so we can easily check for uniqueness. */
1417 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1419 unsigned int classes = 1;
1420 int firstunique = -1;
1422 /* Find the tree with lowest unique hash (if it exists) and compute
1423 the number of equivalence classes. */
1424 if (sccstack[first].hash != sccstack[first+1].hash)
1425 firstunique = 0;
1426 for (unsigned i = 1; i < size; ++i)
1427 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1429 classes++;
1430 if (firstunique == -1
1431 && (i == size - 1
1432 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1433 firstunique = i;
1436 /* If we found a tree with unique hash, stop the iteration. */
1437 if (firstunique != -1
1438 /* Also terminate if we run out of iterations or if the number of
1439 equivalence classes is no longer increasing.
1440 For example a cyclic list of trees that are all equivalent will
1441 never have unique entry point; we however do not build such SCCs
1442 in our IL. */
1443 || classes <= last_classes || iterations > 16)
1445 hashval_t scc_hash;
1447 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1448 starting from FIRSTUNIQUE to obtain a stable order. */
1449 if (classes != size && firstunique != -1)
1451 hash_map <tree, hashval_t> map(size*2);
1453 /* Store hash values into a map, so we can associate them with
1454 the reordered SCC. */
1455 for (unsigned i = 0; i < size; ++i)
1456 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1458 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1459 true);
1460 gcc_assert (again.sccstack.length () == size);
1462 memcpy (sccstack.address () + first,
1463 again.sccstack.address (),
1464 sizeof (scc_entry) * size);
1466 /* Update hash values of individual members by hashing in the
1467 index within the stable order. This ensures uniqueness.
1468 Also compute the SCC hash by mixing in all hash values in
1469 the stable order we obtained. */
1470 sccstack[first].hash = *map.get (sccstack[first].t);
1471 scc_hash = sccstack[first].hash;
1472 for (unsigned i = 1; i < size; ++i)
1474 sccstack[first+i].hash
1475 = iterative_hash_hashval_t (i,
1476 *map.get (sccstack[first+i].t));
1477 scc_hash
1478 = iterative_hash_hashval_t (scc_hash,
1479 sccstack[first+i].hash);
1482 /* If we got a unique hash value for each tree, then the sort already
1483 ensured an entry-point independent order. Only compute the final
1484 SCC hash.
1486 If we failed to find a unique entry point, we go by the same
1487 route. We will eventually introduce unwanted hash conflicts. */
1488 else
1490 scc_hash = sccstack[first].hash;
1491 for (unsigned i = 1; i < size; ++i)
1492 scc_hash
1493 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1495 /* We cannot 100% guarantee that the hash won't conflict so as
1496 to make it impossible to find a unique hash. This however
1497 should be an extremely rare case. ICE for now so possible
1498 issues are found and evaluated. */
1499 gcc_checking_assert (classes == size);
1502 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1503 hash into the hash of each element. */
1504 for (unsigned i = 0; i < size; ++i)
1505 sccstack[first+i].hash
1506 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1507 return scc_hash;
1510 last_classes = classes;
1511 iterations++;
1513 /* We failed to identify the entry point; propagate hash values across
1514 the edges. */
1515 hash_map <tree, hashval_t> map(size*2);
1517 for (unsigned i = 0; i < size; ++i)
1518 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1520 for (unsigned i = 0; i < size; i++)
1521 sccstack[first+i].hash
1522 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1524 while (true);
1527 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1528 already in the streamer cache. Main routine called for
1529 each visit of EXPR. */
1531 void
1532 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1533 tree expr, bool ref_p, bool this_ref_p)
1535 /* Handle special cases. */
1536 if (expr == NULL_TREE)
1537 return;
1539 /* Do not DFS walk into indexable trees. */
1540 if (this_ref_p && tree_is_indexable (expr))
1541 return;
1543 /* Check if we already streamed EXPR. */
1544 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1545 return;
1547 worklist w;
1548 w.expr = expr;
1549 w.from_state = from_state;
1550 w.cstate = NULL;
1551 w.ref_p = ref_p;
1552 w.this_ref_p = this_ref_p;
1553 worklist_vec.safe_push (w);
1557 /* Emit the physical representation of tree node EXPR to output block OB.
1558 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1559 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1561 void
1562 lto_output_tree (struct output_block *ob, tree expr,
1563 bool ref_p, bool this_ref_p)
1565 unsigned ix;
1566 bool existed_p;
1568 if (expr == NULL_TREE)
1570 streamer_write_record_start (ob, LTO_null);
1571 return;
1574 if (this_ref_p && tree_is_indexable (expr))
1576 lto_output_tree_ref (ob, expr);
1577 return;
1580 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1581 if (existed_p)
1583 /* If a node has already been streamed out, make sure that
1584 we don't write it more than once. Otherwise, the reader
1585 will instantiate two different nodes for the same object. */
1586 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1587 streamer_write_uhwi (ob, ix);
1588 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1589 lto_tree_code_to_tag (TREE_CODE (expr)));
1590 lto_stats.num_pickle_refs_output++;
1592 else
1594 /* This is the first time we see EXPR, write all reachable
1595 trees to OB. */
1596 static bool in_dfs_walk;
1598 /* Protect against recursion, which would mean a disconnect between
1599 the tree edges we walk in the DFS walk and the edges
1600 we stream out. */
1601 gcc_assert (!in_dfs_walk);
1603 /* Start the DFS walk. */
1604 /* Save ob state ... */
1605 /* let's see ... */
1606 in_dfs_walk = true;
1607 DFS (ob, expr, ref_p, this_ref_p, false);
1608 in_dfs_walk = false;
1610 /* Finally append a reference to the tree we were writing.
1611 ??? If expr ended up as a singleton we could have
1612 inlined it here and avoid outputting a reference. */
1613 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1614 gcc_assert (existed_p);
1615 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1616 streamer_write_uhwi (ob, ix);
1617 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1618 lto_tree_code_to_tag (TREE_CODE (expr)));
1619 lto_stats.num_pickle_refs_output++;
1624 /* Output to OB a list of try/catch handlers starting with FIRST. */
1626 static void
1627 output_eh_try_list (struct output_block *ob, eh_catch first)
1629 eh_catch n;
1631 for (n = first; n; n = n->next_catch)
1633 streamer_write_record_start (ob, LTO_eh_catch);
1634 stream_write_tree (ob, n->type_list, true);
1635 stream_write_tree (ob, n->filter_list, true);
1636 stream_write_tree (ob, n->label, true);
1639 streamer_write_record_start (ob, LTO_null);
1643 /* Output EH region R to OB. R's slot index in the region array is
1644 emitted together with the indices of its outer, inner and next-peer
1645 regions so the reader can rebuild the region tree and detect sharing. */
1647 static void
1648 output_eh_region (struct output_block *ob, eh_region r)
1650 enum LTO_tags tag;
1652 if (r == NULL)
1654 streamer_write_record_start (ob, LTO_null);
1655 return;
1658 if (r->type == ERT_CLEANUP)
1659 tag = LTO_ert_cleanup;
1660 else if (r->type == ERT_TRY)
1661 tag = LTO_ert_try;
1662 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1663 tag = LTO_ert_allowed_exceptions;
1664 else if (r->type == ERT_MUST_NOT_THROW)
1665 tag = LTO_ert_must_not_throw;
1666 else
1667 gcc_unreachable ();
1669 streamer_write_record_start (ob, tag);
1670 streamer_write_hwi (ob, r->index);
1672 if (r->outer)
1673 streamer_write_hwi (ob, r->outer->index);
1674 else
1675 streamer_write_zero (ob);
1677 if (r->inner)
1678 streamer_write_hwi (ob, r->inner->index);
1679 else
1680 streamer_write_zero (ob);
1682 if (r->next_peer)
1683 streamer_write_hwi (ob, r->next_peer->index);
1684 else
1685 streamer_write_zero (ob);
1687 if (r->type == ERT_TRY)
1689 output_eh_try_list (ob, r->u.eh_try.first_catch);
1691 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1693 stream_write_tree (ob, r->u.allowed.type_list, true);
1694 stream_write_tree (ob, r->u.allowed.label, true);
1695 streamer_write_uhwi (ob, r->u.allowed.filter);
1697 else if (r->type == ERT_MUST_NOT_THROW)
1699 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1700 bitpack_d bp = bitpack_create (ob->main_stream);
1701 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1702 streamer_write_bitpack (&bp);
1705 if (r->landing_pads)
1706 streamer_write_hwi (ob, r->landing_pads->index);
1707 else
1708 streamer_write_zero (ob);
1712 /* Output landing pad LP to OB. */
1714 static void
1715 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1717 if (lp == NULL)
1719 streamer_write_record_start (ob, LTO_null);
1720 return;
1723 streamer_write_record_start (ob, LTO_eh_landing_pad);
1724 streamer_write_hwi (ob, lp->index);
1725 if (lp->next_lp)
1726 streamer_write_hwi (ob, lp->next_lp->index);
1727 else
1728 streamer_write_zero (ob);
1730 if (lp->region)
1731 streamer_write_hwi (ob, lp->region->index);
1732 else
1733 streamer_write_zero (ob);
1735 stream_write_tree (ob, lp->post_landing_pad, true);
1739 /* Output the existing eh_table to OB. */
1741 static void
1742 output_eh_regions (struct output_block *ob, struct function *fn)
1744 if (fn->eh && fn->eh->region_tree)
1746 unsigned i;
1747 eh_region eh;
1748 eh_landing_pad lp;
1749 tree ttype;
1751 streamer_write_record_start (ob, LTO_eh_table);
1753 /* Emit the index of the root of the EH region tree. */
1754 streamer_write_hwi (ob, fn->eh->region_tree->index);
1756 /* Emit all the EH regions in the region array. */
1757 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1758 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1759 output_eh_region (ob, eh);
1761 /* Emit all landing pads. */
1762 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1763 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1764 output_eh_lp (ob, lp);
1766 /* Emit all the runtime type data. */
1767 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1768 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1769 stream_write_tree (ob, ttype, true);
1771 /* Emit the table of action chains. */
1772 if (targetm.arm_eabi_unwinder)
1774 tree t;
1775 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1776 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1777 stream_write_tree (ob, t, true);
1779 else
1781 uchar c;
1782 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1783 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1784 streamer_write_char_stream (ob->main_stream, c);
1788 /* The LTO_null either terminates the record or indicates that there
1789 are no eh_records at all. */
1790 streamer_write_record_start (ob, LTO_null);
1794 /* Output all of the active ssa names to the ssa_names stream. */
1796 static void
1797 output_ssa_names (struct output_block *ob, struct function *fn)
1799 unsigned int i, len;
1801 len = vec_safe_length (SSANAMES (fn));
1802 streamer_write_uhwi (ob, len);
1804 for (i = 1; i < len; i++)
1806 tree ptr = (*SSANAMES (fn))[i];
1808 if (ptr == NULL_TREE
1809 || SSA_NAME_IN_FREE_LIST (ptr)
1810 || virtual_operand_p (ptr)
1811 /* Simply skip unreleased SSA names. */
1812 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1813 && (! SSA_NAME_DEF_STMT (ptr)
1814 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1815 continue;
1817 streamer_write_uhwi (ob, i);
1818 streamer_write_char_stream (ob->main_stream,
1819 SSA_NAME_IS_DEFAULT_DEF (ptr));
1820 if (SSA_NAME_VAR (ptr))
1821 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1822 else
1823 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1824 stream_write_tree (ob, TREE_TYPE (ptr), true);
1827 streamer_write_zero (ob);
1831 /* Output a wide-int. */
1833 static void
1834 streamer_write_wi (struct output_block *ob,
1835 const widest_int &w)
1837 int len = w.get_len ();
1839 streamer_write_uhwi (ob, w.get_precision ());
1840 streamer_write_uhwi (ob, len);
1841 for (int i = 0; i < len; i++)
1842 streamer_write_hwi (ob, w.elt (i));
1846 /* Output the cfg. */
1848 static void
1849 output_cfg (struct output_block *ob, struct function *fn)
1851 struct lto_output_stream *tmp_stream = ob->main_stream;
1852 basic_block bb;
1854 ob->main_stream = ob->cfg_stream;
1856 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1857 profile_status_for_fn (fn));
1859 /* Output the number of the highest basic block. */
1860 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1862 FOR_ALL_BB_FN (bb, fn)
1864 edge_iterator ei;
1865 edge e;
1867 streamer_write_hwi (ob, bb->index);
1869 /* Output the successors and the edge flags. */
1870 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1871 FOR_EACH_EDGE (e, ei, bb->succs)
1873 streamer_write_uhwi (ob, e->dest->index);
1874 streamer_write_hwi (ob, e->probability);
1875 streamer_write_gcov_count (ob, e->count);
1876 streamer_write_uhwi (ob, e->flags);
1880 streamer_write_hwi (ob, -1);
1882 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1883 while (bb->next_bb)
1885 streamer_write_hwi (ob, bb->next_bb->index);
1886 bb = bb->next_bb;
1889 streamer_write_hwi (ob, -1);
1891 /* ??? The cfgloop interface is tied to cfun. */
1892 gcc_assert (cfun == fn);
1894 /* Output the number of loops. */
1895 streamer_write_uhwi (ob, number_of_loops (fn));
1897 /* Output each loop, skipping the tree root which has number zero. */
1898 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1900 struct loop *loop = get_loop (fn, i);
1902 /* Write the index of the loop header. That's enough to rebuild
1903 the loop tree on the reader side. Stream -1 for an unused
1904 loop entry. */
1905 if (!loop)
1907 streamer_write_hwi (ob, -1);
1908 continue;
1910 else
1911 streamer_write_hwi (ob, loop->header->index);
1913 /* Write everything copy_loop_info copies. */
1914 streamer_write_enum (ob->main_stream,
1915 loop_estimation, EST_LAST, loop->estimate_state);
1916 streamer_write_hwi (ob, loop->any_upper_bound);
1917 if (loop->any_upper_bound)
1918 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1919 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1920 if (loop->any_likely_upper_bound)
1921 streamer_write_wi (ob, loop->nb_iterations_likely_upper_bound);
1922 streamer_write_hwi (ob, loop->any_estimate);
1923 if (loop->any_estimate)
1924 streamer_write_wi (ob, loop->nb_iterations_estimate);
1926 /* Write OMP SIMD related info. */
1927 streamer_write_hwi (ob, loop->safelen);
1928 streamer_write_hwi (ob, loop->dont_vectorize);
1929 streamer_write_hwi (ob, loop->force_vectorize);
1930 stream_write_tree (ob, loop->simduid, true);
1933 ob->main_stream = tmp_stream;
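/* Informal sketch, mirroring the writes above, of the layout of the CFG
   stream (everything goes to ob->cfg_stream):

     profile status                                  (enum)
     last_basic_block_for_fn                         (uhwi)
     for every basic block:
       bb->index (hwi), number of successor edges (uhwi), and for each
       edge its destination index, probability, count and flags
     -1                                              (end of blocks)
     the next_bb chain as a list of indices, terminated by -1
     number_of_loops                                 (uhwi)
     for every loop except the root: the header index or -1, then the
       copy_loop_info fields and the OMP SIMD fields streamed above.  */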
1937 /* Create the header in the file using OB. If the section type is for
1938 a function, set FN to the decl for that function. */
1940 void
1941 produce_asm (struct output_block *ob, tree fn)
1943 enum lto_section_type section_type = ob->section_type;
1944 struct lto_function_header header;
1945 char *section_name;
1947 if (section_type == LTO_section_function_body)
1949 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1950 section_name = lto_get_section_name (section_type, name, NULL);
1952 else
1953 section_name = lto_get_section_name (section_type, NULL, NULL);
1955 lto_begin_section (section_name, !flag_wpa);
1956 free (section_name);
1958 /* The entire header stream is computed here. */
1959 memset (&header, 0, sizeof (struct lto_function_header));
1961 /* Write the header. */
1962 header.major_version = LTO_major_version;
1963 header.minor_version = LTO_minor_version;
1965 if (section_type == LTO_section_function_body)
1966 header.cfg_size = ob->cfg_stream->total_size;
1967 header.main_size = ob->main_stream->total_size;
1968 header.string_size = ob->string_stream->total_size;
1969 lto_write_data (&header, sizeof header);
1971 /* Put all of the gimple and the string table out to the asm file as a
1972 block of text. */
1973 if (section_type == LTO_section_function_body)
1974 lto_write_stream (ob->cfg_stream);
1975 lto_write_stream (ob->main_stream);
1976 lto_write_stream (ob->string_stream);
1978 lto_end_section ();
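/* Rough picture, a sketch rather than format documentation, of what
   produce_asm lays out in the section:

     struct lto_function_header  (versions; cfg, main and string sizes)
     cfg stream                  (LTO_section_function_body only)
     main stream
     string stream                                                       */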
1982 /* Output the base body of struct function FN using output block OB. */
1984 static void
1985 output_struct_function_base (struct output_block *ob, struct function *fn)
1987 struct bitpack_d bp;
1988 unsigned i;
1989 tree t;
1991 /* Output the static chain and non-local goto save area. */
1992 stream_write_tree (ob, fn->static_chain_decl, true);
1993 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1995 /* Output all the local variables in the function. */
1996 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1997 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1998 stream_write_tree (ob, t, true);
2000 /* Output current IL state of the function. */
2001 streamer_write_uhwi (ob, fn->curr_properties);
2003 /* Write all the attributes for FN. */
2004 bp = bitpack_create (ob->main_stream);
2005 bp_pack_value (&bp, fn->is_thunk, 1);
2006 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2007 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2008 bp_pack_value (&bp, fn->returns_struct, 1);
2009 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2010 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2011 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2012 bp_pack_value (&bp, fn->after_inlining, 1);
2013 bp_pack_value (&bp, fn->stdarg, 1);
2014 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2015 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2016 bp_pack_value (&bp, fn->calls_alloca, 1);
2017 bp_pack_value (&bp, fn->calls_setjmp, 1);
2018 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2019 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2020 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2021 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2022 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2024 /* Output the function start and end loci. */
2025 stream_output_location (ob, &bp, fn->function_start_locus);
2026 stream_output_location (ob, &bp, fn->function_end_locus);
2028 streamer_write_bitpack (&bp);
2032 /* Output the body of function NODE->DECL. */
2034 static void
2035 output_function (struct cgraph_node *node)
2037 tree function;
2038 struct function *fn;
2039 basic_block bb;
2040 struct output_block *ob;
2042 function = node->decl;
2043 fn = DECL_STRUCT_FUNCTION (function);
2044 ob = create_output_block (LTO_section_function_body);
2046 clear_line_info (ob);
2047 ob->symbol = node;
2049 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2051 /* Set current_function_decl and cfun. */
2052 push_cfun (fn);
2054 /* Make string 0 be a NULL string. */
2055 streamer_write_char_stream (ob->string_stream, 0);
2057 streamer_write_record_start (ob, LTO_function);
2059 /* Output the result decl and the parameter decls. */
2060 stream_write_tree (ob, DECL_RESULT (function), true);
2061 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2063 /* Output DECL_INITIAL for the function, which contains the tree of
2064 lexical scopes.
2065 ??? This only streams the outermost block because we do not
2066 recurse into BLOCK_SUBBLOCKS but re-build those on stream-in. */
2067 stream_write_tree (ob, DECL_INITIAL (function), true);
2069 /* We also stream abstract functions where we stream only stuff needed for
2070 debug info. */
2071 if (gimple_has_body_p (function))
2073 streamer_write_uhwi (ob, 1);
2074 output_struct_function_base (ob, fn);
2076 /* Output all the SSA names used in the function. */
2077 output_ssa_names (ob, fn);
2079 /* Output any exception handling regions. */
2080 output_eh_regions (ob, fn);
2083 /* We will renumber the statements. The code that does this uses
2084 the same ordering that we use for serializing them so we can use
2085 the same code on the other end and not have to write out the
2086 statement numbers. We do not assign UIDs to PHIs here because
2087 virtual PHIs get re-computed on-the-fly which would make numbers
2088 inconsistent. */
2089 set_gimple_stmt_max_uid (cfun, 0);
2090 FOR_ALL_BB_FN (bb, cfun)
2092 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2093 gsi_next (&gsi))
2095 gphi *stmt = gsi.phi ();
2097 /* Virtual PHIs are not going to be streamed. */
2098 if (!virtual_operand_p (gimple_phi_result (stmt)))
2099 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2101 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2102 gsi_next (&gsi))
2104 gimple *stmt = gsi_stmt (gsi);
2105 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2108 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2109 virtual phis now. */
2110 FOR_ALL_BB_FN (bb, cfun)
2112 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2113 gsi_next (&gsi))
2115 gphi *stmt = gsi.phi ();
2116 if (virtual_operand_p (gimple_phi_result (stmt)))
2117 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2121 /* Output the code for the function. */
2122 FOR_ALL_BB_FN (bb, fn)
2123 output_bb (ob, bb, fn);
2125 /* The terminator for this function. */
2126 streamer_write_record_start (ob, LTO_null);
2128 output_cfg (ob, fn);
2130 pop_cfun ();
2132 else
2133 streamer_write_uhwi (ob, 0);
2135 /* Create a section to hold the pickled output of this function. */
2136 produce_asm (ob, function);
2138 destroy_output_block (ob);
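/* High-level sketch of the function body record emitted above (informal,
   derived from the writer order): the LTO_function tag, DECL_RESULT, the
   DECL_ARGUMENTS chain, DECL_INITIAL, then a flag saying whether a body
   follows; if it does: the struct function bits, the SSA names, the EH
   regions, the statements of every basic block, an LTO_null terminator
   and finally the CFG stream.  */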
2141 /* Output the initializer (DECL_INITIAL) of variable NODE->DECL. */
2143 static void
2144 output_constructor (struct varpool_node *node)
2146 tree var = node->decl;
2147 struct output_block *ob;
2149 ob = create_output_block (LTO_section_function_body);
2151 clear_line_info (ob);
2152 ob->symbol = node;
2154 /* Make string 0 be a NULL string. */
2155 streamer_write_char_stream (ob->string_stream, 0);
2157 /* Output DECL_INITIAL for the variable, which holds the constructor
2158 used to initialize it. */
2159 stream_write_tree (ob, DECL_INITIAL (var), true);
2161 /* Create a section to hold the pickled output of this initializer. */
2162 produce_asm (ob, var);
2164 destroy_output_block (ob);
2168 /* Emit toplevel asms. */
2170 void
2171 lto_output_toplevel_asms (void)
2173 struct output_block *ob;
2174 struct asm_node *can;
2175 char *section_name;
2176 struct lto_simple_header_with_strings header;
2178 if (!symtab->first_asm_symbol ())
2179 return;
2181 ob = create_output_block (LTO_section_asm);
2183 /* Make string 0 be a NULL string. */
2184 streamer_write_char_stream (ob->string_stream, 0);
2186 for (can = symtab->first_asm_symbol (); can; can = can->next)
2188 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2189 streamer_write_hwi (ob, can->order);
2192 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2194 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2195 lto_begin_section (section_name, !flag_wpa);
2196 free (section_name);
2198 /* The entire header stream is computed here. */
2199 memset (&header, 0, sizeof (header));
2201 /* Write the header. */
2202 header.major_version = LTO_major_version;
2203 header.minor_version = LTO_minor_version;
2205 header.main_size = ob->main_stream->total_size;
2206 header.string_size = ob->string_stream->total_size;
2207 lto_write_data (&header, sizeof header);
2209 /* Put all of the gimple and the string table out to the asm file as a
2210 block of text. */
2211 lto_write_stream (ob->main_stream);
2212 lto_write_stream (ob->string_stream);
2214 lto_end_section ();
2216 destroy_output_block (ob);
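/* Sketch of the resulting LTO_section_asm contents (informal): the simple
   header with string support, then the main stream holding a string and
   an order number for every toplevel asm followed by a NULL_TREE string
   as terminator, then the string stream whose slot 0 is the NUL written
   above.  */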
2220 /* Copy the function body or variable constructor of NODE without deserializing. */
2222 static void
2223 copy_function_or_variable (struct symtab_node *node)
2225 tree function = node->decl;
2226 struct lto_file_decl_data *file_data = node->lto_file_data;
2227 const char *data;
2228 size_t len;
2229 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2230 char *section_name =
2231 lto_get_section_name (LTO_section_function_body, name, NULL);
2232 size_t i, j;
2233 struct lto_in_decl_state *in_state;
2234 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2236 lto_begin_section (section_name, false);
2237 free (section_name);
2239 /* We may have renamed the declaration, e.g., a static function. */
2240 name = lto_get_decl_name_mapping (file_data, name);
2242 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2243 name, &len);
2244 gcc_assert (data);
2246 /* Do a bit copy of the function body. */
2247 lto_write_raw_data (data, len);
2249 /* Copy decls. */
2250 in_state =
2251 lto_get_function_in_decl_state (node->lto_file_data, function);
2252 gcc_assert (in_state);
2253 out_state->compressed = in_state->compressed;
2255 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2257 size_t n = vec_safe_length (in_state->streams[i]);
2258 vec<tree, va_gc> *trees = in_state->streams[i];
2259 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2261 /* The out state must have the same indices as the in state, so just
2262 copy the vector. All the encoders in the out state must still be
2263 empty when we reach here. */
2264 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2265 encoder->trees.reserve_exact (n);
2266 for (j = 0; j < n; j++)
2267 encoder->trees.safe_push ((*trees)[j]);
2270 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2271 data, len);
2272 lto_end_section ();
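/* Note (an interpretation, not taken from the sources): this raw-copy
   path lets WPA forward a body it never materialized - the section bytes
   from the input file are written out verbatim, and only the decl state
   index vectors are duplicated above so that tree references inside the
   copied bytes keep resolving to the same slots on the ltrans side.  */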
2275 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2277 static tree
2278 wrap_refs (tree *tp, int *ws, void *)
2280 tree t = *tp;
2281 if (handled_component_p (t)
2282 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2283 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2285 tree decl = TREE_OPERAND (t, 0);
2286 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2287 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2288 build1 (ADDR_EXPR, ptrtype, decl),
2289 build_int_cst (ptrtype, 0));
2290 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2291 *ws = 0;
2293 else if (TREE_CODE (t) == CONSTRUCTOR)
2294 ;
2295 else if (!EXPR_P (t))
2296 *ws = 0;
2297 return NULL_TREE;
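/* Illustrative example (hypothetical types and names): for a public
   variable  struct S v;  a reference  v.f,  i.e. COMPONENT_REF <v, f>,
   is rewritten above into  COMPONENT_REF <MEM_REF <&v, (struct S *) 0>, f>
   with TREE_THIS_VOLATILE copied from the decl, so the access is streamed
   through a type-preserving MEM_REF rather than a bare VAR_DECL.  */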
2300 /* Main entry point from the pass manager. */
2302 void
2303 lto_output (void)
2305 struct lto_out_decl_state *decl_state;
2306 bitmap output = NULL;
2307 int i, n_nodes;
2308 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2310 if (flag_checking)
2311 output = lto_bitmap_alloc ();
2313 /* Initialize the streamer. */
2314 lto_streamer_init ();
2316 n_nodes = lto_symtab_encoder_size (encoder);
2317 /* Process only the functions with bodies. */
2318 for (i = 0; i < n_nodes; i++)
2320 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2321 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2323 if (lto_symtab_encoder_encode_body_p (encoder, node)
2324 && !node->alias
2325 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2327 if (flag_checking)
2329 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2330 bitmap_set_bit (output, DECL_UID (node->decl));
2332 decl_state = lto_new_out_decl_state ();
2333 lto_push_out_decl_state (decl_state);
2334 if (gimple_has_body_p (node->decl) || !flag_wpa
2335 /* Thunks have no body but they may be synthesized
2336 at WPA time. */
2337 || DECL_ARGUMENTS (node->decl))
2338 output_function (node);
2339 else
2340 copy_function_or_variable (node);
2341 gcc_assert (lto_get_out_decl_state () == decl_state);
2342 lto_pop_out_decl_state ();
2343 lto_record_function_out_decl_state (node->decl, decl_state);
2346 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2348 /* Wrap symbol references inside the ctor in a type
2349 preserving MEM_REF. */
2350 tree ctor = DECL_INITIAL (node->decl);
2351 if (ctor && !in_lto_p)
2352 walk_tree (&ctor, wrap_refs, NULL, NULL);
2353 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2354 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2355 && !node->alias)
2357 timevar_push (TV_IPA_LTO_CTORS_OUT);
2358 if (flag_checking)
2360 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2361 bitmap_set_bit (output, DECL_UID (node->decl));
2363 decl_state = lto_new_out_decl_state ();
2364 lto_push_out_decl_state (decl_state);
2365 if (DECL_INITIAL (node->decl) != error_mark_node
2366 || !flag_wpa)
2367 output_constructor (node);
2368 else
2369 copy_function_or_variable (node);
2370 gcc_assert (lto_get_out_decl_state () == decl_state);
2371 lto_pop_out_decl_state ();
2372 lto_record_function_out_decl_state (node->decl, decl_state);
2373 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2378 /* Emit the callgraph after emitting function bodies. This needs to
2379 be done now to make sure that all the statements in every function
2380 have been renumbered so that edges can be associated with call
2381 statements using the statement UIDs. */
2382 output_symtab ();
2384 output_offload_tables ();
2386 if (flag_checking)
2387 lto_bitmap_free (output);
2391 /* Write each node encoded by ENCODER to OB, as well as those reachable
2392 from it and required for correct representation of its semantics.
2393 Each node in ENCODER must be a global declaration or a type. A node
2394 is written only once, even if it appears multiple times in the
2395 vector. Certain transitively-reachable nodes, such as those
2396 representing expressions, may be duplicated, but such nodes
2397 must not appear in ENCODER itself. */
2399 static void
2400 write_global_stream (struct output_block *ob,
2401 struct lto_tree_ref_encoder *encoder)
2403 tree t;
2404 size_t index;
2405 const size_t size = lto_tree_ref_encoder_size (encoder);
2407 for (index = 0; index < size; index++)
2409 t = lto_tree_ref_encoder_get_tree (encoder, index);
2410 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2411 stream_write_tree (ob, t, false);
2416 /* Write a sequence of indices into the globals vector corresponding
2417 to the trees in ENCODER. These are used by the reader to map the
2418 indices used to refer to global entities within function bodies to
2419 their referents. */
2421 static void
2422 write_global_references (struct output_block *ob,
2423 struct lto_tree_ref_encoder *encoder)
2425 tree t;
2426 uint32_t index;
2427 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2429 /* Write size and slot indexes as 32-bit unsigned numbers. */
2430 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2431 data[0] = size;
2433 for (index = 0; index < size; index++)
2435 unsigned slot_num;
2437 t = lto_tree_ref_encoder_get_tree (encoder, index);
2438 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2439 gcc_assert (slot_num != (unsigned)-1);
2440 data[index + 1] = slot_num;
2443 lto_write_data (data, sizeof (int32_t) * (size + 1));
2444 free (data);
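/* Informal layout note: what lands in the section here is a flat array of
   32-bit values - the number of references first, then for every tree in
   the encoder the slot it occupies in the writer cache.  The reader is
   assumed to use this array to map per-function indices back to the
   global trees.  */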
2448 /* Write all the streams in an lto_out_decl_state STATE using
2449 output block OB. */
2451 void
2452 lto_output_decl_state_streams (struct output_block *ob,
2453 struct lto_out_decl_state *state)
2455 int i;
2457 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2458 write_global_stream (ob, &state->streams[i]);
2462 /* Write all the references in an lto_out_decl_state STATE using
2463 output block OB. */
2465 void
2466 lto_output_decl_state_refs (struct output_block *ob,
2467 struct lto_out_decl_state *state)
2469 unsigned i;
2470 unsigned ref;
2471 tree decl;
2473 /* Write a reference to FUNCTION_DECL. If there is no function,
2474 write a reference to void_type_node. */
2475 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2476 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2477 gcc_assert (ref != (unsigned)-1);
2478 ref = ref * 2 + (state->compressed ? 1 : 0);
2479 lto_write_data (&ref, sizeof (uint32_t));
2481 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2482 write_global_references (ob, &state->streams[i]);
2486 /* Return the written size of STATE. */
2488 static size_t
2489 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2491 int i;
2492 size_t size;
2494 size = sizeof (int32_t); /* fn_ref. */
2495 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2497 size += sizeof (int32_t); /* vector size. */
2498 size += (lto_tree_ref_encoder_size (&state->streams[i])
2499 * sizeof (int32_t));
2501 return size;
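/* Worked example (illustrative): with stream sizes s_0 ... s_{N-1} for
   the N = LTO_N_DECL_STREAMS streams, the size computed above is
   4 + sum over i of (4 + 4 * s_i) bytes - one 32-bit function reference
   plus, per stream, a 32-bit count and one 32-bit slot per tree - which
   matches what lto_output_decl_state_refs writes.  */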
2505 /* Write symbol T to the currently open section, looking up its slot in
2506 CACHE. SEEN specifies symbols already written; ALIAS is true for aliases. */
2508 static void
2509 write_symbol (struct streamer_tree_cache_d *cache,
2510 tree t, hash_set<const char *> *seen, bool alias)
2512 const char *name;
2513 enum gcc_plugin_symbol_kind kind;
2514 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2515 unsigned slot_num;
2516 uint64_t size;
2517 const char *comdat;
2518 unsigned char c;
2520 /* None of the following kinds of symbols are needed in the
2521 symbol table. */
2522 if (!TREE_PUBLIC (t)
2523 || is_builtin_fn (t)
2524 || DECL_ABSTRACT_P (t)
2525 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2526 return;
2527 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2529 gcc_assert (TREE_CODE (t) == VAR_DECL
2530 || TREE_CODE (t) == FUNCTION_DECL);
2532 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2534 /* This behaves like assemble_name_raw in varasm.c, performing the
2535 same name manipulations that ASM_OUTPUT_LABELREF does. */
2536 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2538 if (seen->add (name))
2539 return;
2541 streamer_tree_cache_lookup (cache, t, &slot_num);
2542 gcc_assert (slot_num != (unsigned)-1);
2544 if (DECL_EXTERNAL (t))
2546 if (DECL_WEAK (t))
2547 kind = GCCPK_WEAKUNDEF;
2548 else
2549 kind = GCCPK_UNDEF;
2551 else
2553 if (DECL_WEAK (t))
2554 kind = GCCPK_WEAKDEF;
2555 else if (DECL_COMMON (t))
2556 kind = GCCPK_COMMON;
2557 else
2558 kind = GCCPK_DEF;
2560 /* When something is defined, it should have a symtab node attached. */
2561 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2562 || varpool_node::get (t)->definition);
2563 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2564 || (cgraph_node::get (t)
2565 && cgraph_node::get (t)->definition));
2568 /* Imitate what default_elf_asm_output_external does.
2569 When a symbol is external, we need to output it with DEFAULT visibility
2570 when compiling with -fvisibility=default, but with HIDDEN visibility when
2571 the symbol has the visibility("hidden") attribute specified.
2572 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2573 right. */
2575 if (DECL_EXTERNAL (t)
2576 && !targetm.binds_local_p (t))
2577 visibility = GCCPV_DEFAULT;
2578 else
2579 switch (DECL_VISIBILITY (t))
2581 case VISIBILITY_DEFAULT:
2582 visibility = GCCPV_DEFAULT;
2583 break;
2584 case VISIBILITY_PROTECTED:
2585 visibility = GCCPV_PROTECTED;
2586 break;
2587 case VISIBILITY_HIDDEN:
2588 visibility = GCCPV_HIDDEN;
2589 break;
2590 case VISIBILITY_INTERNAL:
2591 visibility = GCCPV_INTERNAL;
2592 break;
2595 if (kind == GCCPK_COMMON
2596 && DECL_SIZE_UNIT (t)
2597 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2598 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2599 else
2600 size = 0;
2602 if (DECL_ONE_ONLY (t))
2603 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2604 else
2605 comdat = "";
2607 lto_write_data (name, strlen (name) + 1);
2608 lto_write_data (comdat, strlen (comdat) + 1);
2609 c = (unsigned char) kind;
2610 lto_write_data (&c, 1);
2611 c = (unsigned char) visibility;
2612 lto_write_data (&c, 1);
2613 lto_write_data (&size, 8);
2614 lto_write_data (&slot_num, 4);
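/* Sketch of the per-symbol record written above (informal; field sizes
   follow the lto_write_data calls): the NUL-terminated assembler name,
   the NUL-terminated comdat group (possibly empty), one byte of symbol
   kind, one byte of visibility, an 8-byte size and the 4-byte writer
   cache slot.  */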
2617 /* Return true if NODE should appear in the plugin symbol table. */
2619 bool
2620 output_symbol_p (symtab_node *node)
2622 struct cgraph_node *cnode;
2623 if (!node->real_symbol_p ())
2624 return false;
2625 /* We keep external functions in the symtab for the sake of inlining
2626 and devirtualization. We do not want to see them in the symbol table
2627 as references unless they are really used. */
2628 cnode = dyn_cast <cgraph_node *> (node);
2629 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2630 && cnode->callers)
2631 return true;
2633 /* Ignore all references from initializers of external variables - they are
2634 not really part of the compilation unit until they are used by folding.
2635 Some symbols, like references to external construction vtables, cannot be
2636 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2637 if (!node->definition || DECL_EXTERNAL (node->decl))
2639 int i;
2640 struct ipa_ref *ref;
2641 for (i = 0; node->iterate_referring (i, ref); i++)
2643 if (ref->use == IPA_REF_ALIAS)
2644 continue;
2645 if (is_a <cgraph_node *> (ref->referring))
2646 return true;
2647 if (!DECL_EXTERNAL (ref->referring->decl))
2648 return true;
2650 return false;
2652 return true;
2656 /* Write an IL symbol table to OB for the symbols encoded in OB's
2657 decl state symtab node encoder. */
2659 static void
2660 produce_symtab (struct output_block *ob)
2662 struct streamer_tree_cache_d *cache = ob->writer_cache;
2663 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2664 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2665 lto_symtab_encoder_iterator lsei;
2667 lto_begin_section (section_name, false);
2668 free (section_name);
2670 hash_set<const char *> seen;
2672 /* Write the symbol table.
2673 First write everything defined and then all declarations.
2674 This is necessary to handle cases where we have duplicated symbols. */
2675 for (lsei = lsei_start (encoder);
2676 !lsei_end_p (lsei); lsei_next (&lsei))
2678 symtab_node *node = lsei_node (lsei);
2680 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2681 continue;
2682 write_symbol (cache, node->decl, &seen, false);
2684 for (lsei = lsei_start (encoder);
2685 !lsei_end_p (lsei); lsei_next (&lsei))
2687 symtab_node *node = lsei_node (lsei);
2689 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2690 continue;
2691 write_symbol (cache, node->decl, &seen, false);
2694 lto_end_section ();
2698 /* Init the streamer_mode_table for output, where we collect info on what
2699 machine_mode values have been streamed. */
2700 void
2701 lto_output_init_mode_table (void)
2703 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2707 /* Write the mode table. */
2708 static void
2709 lto_write_mode_table (void)
2711 struct output_block *ob;
2712 ob = create_output_block (LTO_section_mode_table);
2713 bitpack_d bp = bitpack_create (ob->main_stream);
2715 /* Ensure that for every mode with GET_MODE_INNER (m) != m the
2716 inner mode is marked as well. */
2717 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2718 if (streamer_mode_table[i])
2720 machine_mode m = (machine_mode) i;
2721 if (GET_MODE_INNER (m) != m)
2722 streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
2724 /* First stream modes that have GET_MODE_INNER (m) == m,
2725 so that we can refer to them afterwards. */
2726 for (int pass = 0; pass < 2; pass++)
2727 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2728 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2730 machine_mode m = (machine_mode) i;
2731 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2732 continue;
2733 bp_pack_value (&bp, m, 8);
2734 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2735 bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2736 bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2737 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2738 bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2739 switch (GET_MODE_CLASS (m))
2741 case MODE_FRACT:
2742 case MODE_UFRACT:
2743 case MODE_ACCUM:
2744 case MODE_UACCUM:
2745 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2746 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2747 break;
2748 case MODE_FLOAT:
2749 case MODE_DECIMAL_FLOAT:
2750 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2751 break;
2752 default:
2753 break;
2755 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2757 bp_pack_value (&bp, VOIDmode, 8);
2759 streamer_write_bitpack (&bp);
2761 char *section_name
2762 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2763 lto_begin_section (section_name, !flag_wpa);
2764 free (section_name);
2766 /* The entire header stream is computed here. */
2767 struct lto_simple_header_with_strings header;
2768 memset (&header, 0, sizeof (header));
2770 /* Write the header. */
2771 header.major_version = LTO_major_version;
2772 header.minor_version = LTO_minor_version;
2774 header.main_size = ob->main_stream->total_size;
2775 header.string_size = ob->string_stream->total_size;
2776 lto_write_data (&header, sizeof header);
2778 /* Put all of the gimple and the string table out to the asm file as a
2779 block of text. */
2780 lto_write_stream (ob->main_stream);
2781 lto_write_stream (ob->string_stream);
2783 lto_end_section ();
2784 destroy_output_block (ob);
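/* Informal sketch of one mode table entry as packed above: the mode
   value (8 bits), its class, size (8 bits), precision (16 bits), inner
   mode (8 bits), nunits (8 bits), then IBIT/FBIT for the fixed-point
   classes or the real format name for the float classes, and finally the
   mode name; the table ends with a VOIDmode entry.  */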
2788 /* This pass is run after all of the functions are serialized and all
2789 of the IPA passes have written their serialized forms. This pass
2790 causes the vector of all of the global decls and types used from
2791 this file to be written into a section that can then be read in to
2792 recover these on the other side. */
2794 void
2795 produce_asm_for_decls (void)
2797 struct lto_out_decl_state *out_state;
2798 struct lto_out_decl_state *fn_out_state;
2799 struct lto_decl_header header;
2800 char *section_name;
2801 struct output_block *ob;
2802 unsigned idx, num_fns;
2803 size_t decl_state_size;
2804 int32_t num_decl_states;
2806 ob = create_output_block (LTO_section_decls);
2808 memset (&header, 0, sizeof (struct lto_decl_header));
2810 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2811 lto_begin_section (section_name, !flag_wpa);
2812 free (section_name);
2814 /* Make string 0 be a NULL string. */
2815 streamer_write_char_stream (ob->string_stream, 0);
2817 gcc_assert (!alias_pairs);
2819 /* Get rid of the global decl state hash tables to save some memory. */
2820 out_state = lto_get_out_decl_state ();
2821 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2822 if (out_state->streams[i].tree_hash_table)
2824 delete out_state->streams[i].tree_hash_table;
2825 out_state->streams[i].tree_hash_table = NULL;
2828 /* Write the global symbols. */
2829 lto_output_decl_state_streams (ob, out_state);
2830 num_fns = lto_function_decl_states.length ();
2831 for (idx = 0; idx < num_fns; idx++)
2833 fn_out_state =
2834 lto_function_decl_states[idx];
2835 lto_output_decl_state_streams (ob, fn_out_state);
2838 header.major_version = LTO_major_version;
2839 header.minor_version = LTO_minor_version;
2841 /* Currently not used. This field would allow us to preallocate
2842 the globals vector, so that it need not be resized as it is extended. */
2843 header.num_nodes = -1;
2845 /* Compute the total size of all decl out states. */
2846 decl_state_size = sizeof (int32_t);
2847 decl_state_size += lto_out_decl_state_written_size (out_state);
2848 for (idx = 0; idx < num_fns; idx++)
2850 fn_out_state =
2851 lto_function_decl_states[idx];
2852 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2854 header.decl_state_size = decl_state_size;
2856 header.main_size = ob->main_stream->total_size;
2857 header.string_size = ob->string_stream->total_size;
2859 lto_write_data (&header, sizeof header);
2861 /* Write the main out-decl state, followed by out-decl states of
2862 functions. */
2863 num_decl_states = num_fns + 1;
2864 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2865 lto_output_decl_state_refs (ob, out_state);
2866 for (idx = 0; idx < num_fns; idx++)
2868 fn_out_state = lto_function_decl_states[idx];
2869 lto_output_decl_state_refs (ob, fn_out_state);
2872 lto_write_stream (ob->main_stream);
2873 lto_write_stream (ob->string_stream);
2875 lto_end_section ();
2877 /* Write the symbol table. It is used by the linker to determine
2878 dependencies and thus we can skip it for WPA. */
2879 if (!flag_wpa)
2880 produce_symtab (ob);
2882 /* Write command line opts. */
2883 lto_write_options ();
2885 /* Deallocate memory and clean up. */
2886 for (idx = 0; idx < num_fns; idx++)
2888 fn_out_state =
2889 lto_function_decl_states[idx];
2890 lto_delete_out_decl_state (fn_out_state);
2892 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2893 lto_function_decl_states.release ();
2894 destroy_output_block (ob);
2895 if (lto_stream_offload_p)
2896 lto_write_mode_table ();