gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2016 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
45 static void lto_write_tree (struct output_block*, tree, bool);
47 /* Clear the line info stored in DATA_IN. */
49 static void
50 clear_line_info (struct output_block *ob)
52 ob->current_file = NULL;
53 ob->current_line = 0;
54 ob->current_col = 0;
55 ob->current_sysp = false;
59 /* Create the output block and return it. SECTION_TYPE is
60 LTO_section_function_body or LTO_section_static_initializer. */
62 struct output_block *
63 create_output_block (enum lto_section_type section_type)
65 struct output_block *ob = XCNEW (struct output_block);
67 ob->section_type = section_type;
68 ob->decl_state = lto_get_out_decl_state ();
69 ob->main_stream = XCNEW (struct lto_output_stream);
70 ob->string_stream = XCNEW (struct lto_output_stream);
71 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
73 if (section_type == LTO_section_function_body)
74 ob->cfg_stream = XCNEW (struct lto_output_stream);
76 clear_line_info (ob);
78 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
79 gcc_obstack_init (&ob->obstack);
81 return ob;
85 /* Destroy the output block OB. */
87 void
88 destroy_output_block (struct output_block *ob)
90 enum lto_section_type section_type = ob->section_type;
92 delete ob->string_hash_table;
93 ob->string_hash_table = NULL;
95 free (ob->main_stream);
96 free (ob->string_stream);
97 if (section_type == LTO_section_function_body)
98 free (ob->cfg_stream);
100 streamer_tree_cache_delete (ob->writer_cache);
101 obstack_free (&ob->obstack, NULL);
103 free (ob);
107 /* Look up NODE in the type table and write the index for it to OB. */
109 static void
110 output_type_ref (struct output_block *ob, tree node)
112 streamer_write_record_start (ob, LTO_type_ref);
113 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
117 /* Return true if tree node T is written to various tables. For these
118 nodes, we sometimes want to write their physical representation
119 (via lto_output_tree), and sometimes we need to emit an index
120 reference into a table (via lto_output_tree_ref). */
122 static bool
123 tree_is_indexable (tree t)
125 /* Parameters and return values of functions of variably modified types
126 must go to global stream, because they may be used in the type
127 definition. */
128 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
129 && DECL_CONTEXT (t))
130 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
131 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
132 else if (TREE_CODE (t) == IMPORTED_DECL)
133 return false;
134 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
135 || TREE_CODE (t) == TYPE_DECL
136 || TREE_CODE (t) == CONST_DECL
137 || TREE_CODE (t) == NAMELIST_DECL)
138 && decl_function_context (t))
139 return false;
140 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
141 return false;
142 /* Variably modified types need to be streamed alongside function
143 bodies because they can refer to local entities. Together with
144 them we have to localize their members as well.
145 ??? In theory that includes non-FIELD_DECLs as well. */
146 else if (TYPE_P (t)
147 && variably_modified_type_p (t, NULL_TREE))
148 return false;
149 else if (TREE_CODE (t) == FIELD_DECL
150 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
151 return false;
152 else
153 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
157 /* Output info about new location into bitpack BP.
158 After outputting bitpack, lto_output_location_data has
159 to be done to output actual data. */
161 void
162 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
163 location_t loc)
165 expanded_location xloc;
167 loc = LOCATION_LOCUS (loc);
168 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
169 loc < RESERVED_LOCATION_COUNT
170 ? loc : RESERVED_LOCATION_COUNT);
171 if (loc < RESERVED_LOCATION_COUNT)
172 return;
174 xloc = expand_location (loc);
176 bp_pack_value (bp, ob->current_file != xloc.file, 1);
177 bp_pack_value (bp, ob->current_line != xloc.line, 1);
178 bp_pack_value (bp, ob->current_col != xloc.column, 1);
180 if (ob->current_file != xloc.file)
182 bp_pack_string (ob, bp, xloc.file, true);
183 bp_pack_value (bp, xloc.sysp, 1);
185 ob->current_file = xloc.file;
186 ob->current_sysp = xloc.sysp;
188 if (ob->current_line != xloc.line)
189 bp_pack_var_len_unsigned (bp, xloc.line);
190 ob->current_line = xloc.line;
192 if (ob->current_col != xloc.column)
193 bp_pack_var_len_unsigned (bp, xloc.column);
194 ob->current_col = xloc.column;
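/* Locations are thus delta-encoded against the file, line and column
   cached in OB: one change bit per field, with the new value streamed
   only when it differs from the previously emitted location. */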
198 /* If EXPR is an indexable tree node, output a reference to it to
199 output block OB. Otherwise, output the physical representation of
200 EXPR to OB. */
202 static void
203 lto_output_tree_ref (struct output_block *ob, tree expr)
205 enum tree_code code;
207 if (TYPE_P (expr))
209 output_type_ref (ob, expr);
210 return;
213 code = TREE_CODE (expr);
214 switch (code)
216 case SSA_NAME:
217 streamer_write_record_start (ob, LTO_ssa_name_ref);
218 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
219 break;
221 case FIELD_DECL:
222 streamer_write_record_start (ob, LTO_field_decl_ref);
223 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
224 break;
226 case FUNCTION_DECL:
227 streamer_write_record_start (ob, LTO_function_decl_ref);
228 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
229 break;
231 case VAR_DECL:
232 case DEBUG_EXPR_DECL:
233 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
234 case PARM_DECL:
235 streamer_write_record_start (ob, LTO_global_decl_ref);
236 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
237 break;
239 case CONST_DECL:
240 streamer_write_record_start (ob, LTO_const_decl_ref);
241 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
244 case IMPORTED_DECL:
245 gcc_assert (decl_function_context (expr) == NULL);
246 streamer_write_record_start (ob, LTO_imported_decl_ref);
247 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
248 break;
250 case TYPE_DECL:
251 streamer_write_record_start (ob, LTO_type_decl_ref);
252 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
253 break;
255 case NAMELIST_DECL:
256 streamer_write_record_start (ob, LTO_namelist_decl_ref);
257 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
260 case NAMESPACE_DECL:
261 streamer_write_record_start (ob, LTO_namespace_decl_ref);
262 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
263 break;
265 case LABEL_DECL:
266 streamer_write_record_start (ob, LTO_label_decl_ref);
267 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
270 case RESULT_DECL:
271 streamer_write_record_start (ob, LTO_result_decl_ref);
272 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
275 case TRANSLATION_UNIT_DECL:
276 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
277 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278 break;
280 default:
281 /* No other node is indexable, so it should have been handled by
282 lto_output_tree. */
283 gcc_unreachable ();
288 /* Return true if EXPR is a tree node that can be written to disk. */
290 static inline bool
291 lto_is_streamable (tree expr)
293 enum tree_code code = TREE_CODE (expr);
295 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
296 name version in lto_output_tree_ref (see output_ssa_names). */
297 return !is_lang_specific (expr)
298 && code != SSA_NAME
299 && code != CALL_EXPR
300 && code != LANG_TYPE
301 && code != MODIFY_EXPR
302 && code != INIT_EXPR
303 && code != TARGET_EXPR
304 && code != BIND_EXPR
305 && code != WITH_CLEANUP_EXPR
306 && code != STATEMENT_LIST
307 && (code == CASE_LABEL_EXPR
308 || code == DECL_EXPR
309 || TREE_CODE_CLASS (code) != tcc_statement);
312 /* Very rough estimate of streaming size of the initializer. If we ignored
313 presence of strings, we could simply just count number of non-indexable
314 tree nodes and number of references to indexable nodes. Strings however
315 may be very large and we do not want to dump them into the global stream.
317 Count the size of initializer until the size in DATA is positive. */
319 static tree
320 subtract_estimated_size (tree *tp, int *ws, void *data)
322 long *sum = (long *)data;
323 if (tree_is_indexable (*tp))
325 /* Indexable tree is one reference to global stream.
326 Guess it may be about 4 bytes. */
327 *sum -= 4;
328 *ws = 0;
330 /* String table entry + base of tree node needs to be streamed. */
331 if (TREE_CODE (*tp) == STRING_CST)
332 *sum -= TREE_STRING_LENGTH (*tp) + 8;
333 else
335 /* Identifiers are also variable length but should not appear
336 naked in constructor. */
337 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
338 /* We do not really make an attempt to work out the size of the pickled tree, as
339 it is very variable. Make it bigger than the reference. */
340 *sum -= 16;
342 if (*sum < 0)
343 return *tp;
344 return NULL_TREE;
348 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
350 static tree
351 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
353 gcc_checking_assert (DECL_P (expr)
354 && TREE_CODE (expr) != FUNCTION_DECL
355 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
357 /* Handle DECL_INITIAL for symbols. */
358 tree initial = DECL_INITIAL (expr);
359 if (TREE_CODE (expr) == VAR_DECL
360 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
361 && !DECL_IN_CONSTANT_POOL (expr)
362 && initial)
364 varpool_node *vnode;
365 /* Extra section needs about 30 bytes; do not produce it for simple
366 scalar values. */
367 if (!(vnode = varpool_node::get (expr))
368 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
369 initial = error_mark_node;
370 if (initial != error_mark_node)
372 long max_size = 30;
373 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
374 NULL))
375 initial = error_mark_node;
379 return initial;
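/* With the 30 byte budget used above, subtract_estimated_size charges
   roughly 4 bytes per reference to an indexable tree, 16 bytes per other
   pickled node and TREE_STRING_LENGTH + 8 bytes per STRING_CST; e.g.
   eight indexable references (8 * 4 = 32) or two pickled nodes
   (2 * 16 = 32) already exceed the budget, in which case error_mark_node
   is streamed instead of the initializer. */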
383 /* Write a physical representation of tree node EXPR to output block
384 OB. If REF_P is true, the leaves of EXPR are emitted as references
385 via lto_output_tree_ref. IX is the index into the streamer cache
386 where EXPR is stored. */
388 static void
389 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
391 /* Pack all the non-pointer fields in EXPR into a bitpack and write
392 the resulting bitpack. */
393 streamer_write_tree_bitfields (ob, expr);
395 /* Write all the pointer fields in EXPR. */
396 streamer_write_tree_body (ob, expr, ref_p);
398 /* Write any LTO-specific data to OB. */
399 if (DECL_P (expr)
400 && TREE_CODE (expr) != FUNCTION_DECL
401 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
403 /* Handle DECL_INITIAL for symbols. */
404 tree initial = get_symbol_initial_value
405 (ob->decl_state->symtab_node_encoder, expr);
406 stream_write_tree (ob, initial, ref_p);
410 /* Write a physical representation of tree node EXPR to output block
411 OB. If REF_P is true, the leaves of EXPR are emitted as references
412 via lto_output_tree_ref. IX is the index into the streamer cache
413 where EXPR is stored. */
415 static void
416 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
418 if (!lto_is_streamable (expr))
419 internal_error ("tree code %qs is not supported in LTO streams",
420 get_tree_code_name (TREE_CODE (expr)));
422 /* Write the header, containing everything needed to materialize
423 EXPR on the reading side. */
424 streamer_write_tree_header (ob, expr);
426 lto_write_tree_1 (ob, expr, ref_p);
428 /* Mark the end of EXPR. */
429 streamer_write_zero (ob);
432 /* Emit the physical representation of tree node EXPR to output block OB,
433 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
434 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
436 static void
437 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
438 bool ref_p, bool this_ref_p)
440 unsigned ix;
442 gcc_checking_assert (expr != NULL_TREE
443 && !(this_ref_p && tree_is_indexable (expr)));
445 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
446 expr, hash, &ix);
447 gcc_assert (!exists_p);
448 if (streamer_handle_as_builtin_p (expr))
450 /* MD and NORMAL builtins do not need to be written out
451 completely as they are always instantiated by the
452 compiler on startup. The only builtins that need to
453 be written out are BUILT_IN_FRONTEND. For all other
454 builtins, we simply write the class and code. */
455 streamer_write_builtin (ob, expr);
457 else if (TREE_CODE (expr) == INTEGER_CST
458 && !TREE_OVERFLOW (expr))
460 /* Shared INTEGER_CST nodes are special because they need their
461 original type to be materialized by the reader (to implement
462 TYPE_CACHED_VALUES). */
463 streamer_write_integer_cst (ob, expr, ref_p);
465 else
467 /* This is the first time we see EXPR, write its fields
468 to OB. */
469 lto_write_tree (ob, expr, ref_p);
473 class DFS
475 public:
476 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
477 bool single_p);
478 ~DFS ();
480 struct scc_entry
482 tree t;
483 hashval_t hash;
485 vec<scc_entry> sccstack;
487 private:
488 struct sccs
490 unsigned int dfsnum;
491 unsigned int low;
493 struct worklist
495 tree expr;
496 sccs *from_state;
497 sccs *cstate;
498 bool ref_p;
499 bool this_ref_p;
502 static int scc_entry_compare (const void *, const void *);
504 void DFS_write_tree_body (struct output_block *ob,
505 tree expr, sccs *expr_state, bool ref_p);
507 void DFS_write_tree (struct output_block *ob, sccs *from_state,
508 tree expr, bool ref_p, bool this_ref_p);
510 hashval_t
511 hash_scc (struct output_block *ob, unsigned first, unsigned size,
512 bool ref_p, bool this_ref_p);
514 hash_map<tree, sccs *> sccstate;
515 vec<worklist> worklist_vec;
516 struct obstack sccstate_obstack;
519 /* Emit the physical representation of tree node EXPR to output block OB,
520 using depth-first search on the subgraph. If THIS_REF_P is true, the
521 leaves of EXPR are emitted as references via lto_output_tree_ref.
522 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
523 this is for a rewalk of a single leaf SCC. */
525 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
526 bool single_p)
528 unsigned int next_dfs_num = 1;
529 sccstack.create (0);
530 gcc_obstack_init (&sccstate_obstack);
531 worklist_vec = vNULL;
532 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
533 while (!worklist_vec.is_empty ())
535 worklist &w = worklist_vec.last ();
536 expr = w.expr;
537 sccs *from_state = w.from_state;
538 sccs *cstate = w.cstate;
539 ref_p = w.ref_p;
540 this_ref_p = w.this_ref_p;
541 if (cstate == NULL)
543 sccs **slot = &sccstate.get_or_insert (expr);
544 cstate = *slot;
545 if (cstate)
547 gcc_checking_assert (from_state);
548 if (cstate->dfsnum < from_state->dfsnum)
549 from_state->low = MIN (cstate->dfsnum, from_state->low);
550 worklist_vec.pop ();
551 continue;
554 scc_entry e = { expr, 0 };
555 /* Not yet visited. DFS recurse and push it onto the stack. */
556 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
557 sccstack.safe_push (e);
558 cstate->dfsnum = next_dfs_num++;
559 cstate->low = cstate->dfsnum;
560 w.cstate = cstate;
562 if (streamer_handle_as_builtin_p (expr))
563 ;
564 else if (TREE_CODE (expr) == INTEGER_CST
565 && !TREE_OVERFLOW (expr))
566 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
567 else
569 DFS_write_tree_body (ob, expr, cstate, ref_p);
571 /* Walk any LTO-specific edges. */
572 if (DECL_P (expr)
573 && TREE_CODE (expr) != FUNCTION_DECL
574 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
576 /* Handle DECL_INITIAL for symbols. */
577 tree initial
578 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
579 expr);
580 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
583 continue;
586 /* See if we found an SCC. */
587 if (cstate->low == cstate->dfsnum)
589 unsigned first, size;
590 tree x;
592 /* If we are re-walking a single leaf SCC just pop it,
593 let earlier worklist item access the sccstack. */
594 if (single_p)
596 worklist_vec.pop ();
597 continue;
600 /* Pop the SCC and compute its size. */
601 first = sccstack.length ();
602 do
604 x = sccstack[--first].t;
606 while (x != expr);
607 size = sccstack.length () - first;
609 /* No need to compute hashes for LTRANS units, we don't perform
610 any merging there. */
611 hashval_t scc_hash = 0;
612 unsigned scc_entry_len = 0;
613 if (!flag_wpa)
615 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
617 /* Put the entries with the least number of collisions first. */
618 unsigned entry_start = 0;
619 scc_entry_len = size + 1;
620 for (unsigned i = 0; i < size;)
622 unsigned from = i;
623 for (i = i + 1; i < size
624 && (sccstack[first + i].hash
625 == sccstack[first + from].hash); ++i)
627 if (i - from < scc_entry_len)
629 scc_entry_len = i - from;
630 entry_start = from;
633 for (unsigned i = 0; i < scc_entry_len; ++i)
634 std::swap (sccstack[first + i],
635 sccstack[first + entry_start + i]);
637 /* We already sorted SCC deterministically in hash_scc. */
639 /* Check that we have only one SCC.
640 Naturally we may have conflicts if the hash function is not
641 strong enough. Let's see how far this gets. */
642 gcc_checking_assert (scc_entry_len == 1);
645 /* Write LTO_tree_scc. */
646 streamer_write_record_start (ob, LTO_tree_scc);
647 streamer_write_uhwi (ob, size);
648 streamer_write_uhwi (ob, scc_hash);
650 /* Write size-1 SCCs without wrapping them inside SCC bundles.
651 All INTEGER_CSTs need to be handled this way as we need
652 their type to materialize them. Also builtins are handled
653 this way.
654 ??? We still wrap these in LTO_tree_scc so at the
655 input side we can properly identify the tree we want
656 to ultimately return. */
657 if (size == 1)
658 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
659 else
661 /* Write the size of the SCC entry candidates. */
662 streamer_write_uhwi (ob, scc_entry_len);
664 /* Write all headers and populate the streamer cache. */
665 for (unsigned i = 0; i < size; ++i)
667 hashval_t hash = sccstack[first+i].hash;
668 tree t = sccstack[first+i].t;
669 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
670 t, hash, NULL);
671 gcc_assert (!exists_p);
673 if (!lto_is_streamable (t))
674 internal_error ("tree code %qs is not supported "
675 "in LTO streams",
676 get_tree_code_name (TREE_CODE (t)));
678 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
680 /* Write the header, containing everything needed to
681 materialize EXPR on the reading side. */
682 streamer_write_tree_header (ob, t);
685 /* Write the bitpacks and tree references. */
686 for (unsigned i = 0; i < size; ++i)
688 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
690 /* Mark the end of the tree. */
691 streamer_write_zero (ob);
695 /* Finally truncate the vector. */
696 sccstack.truncate (first);
698 if (from_state)
699 from_state->low = MIN (from_state->low, cstate->low);
700 worklist_vec.pop ();
701 continue;
704 gcc_checking_assert (from_state);
705 from_state->low = MIN (from_state->low, cstate->low);
706 if (cstate->dfsnum < from_state->dfsnum)
707 from_state->low = MIN (cstate->dfsnum, from_state->low);
708 worklist_vec.pop ();
710 worklist_vec.release ();
713 DFS::~DFS ()
715 sccstack.release ();
716 obstack_free (&sccstate_obstack, NULL);
719 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
720 DFS recurse for all tree edges originating from it. */
722 void
723 DFS::DFS_write_tree_body (struct output_block *ob,
724 tree expr, sccs *expr_state, bool ref_p)
726 #define DFS_follow_tree_edge(DEST) \
727 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
729 enum tree_code code;
731 code = TREE_CODE (expr);
733 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
735 if (TREE_CODE (expr) != IDENTIFIER_NODE)
736 DFS_follow_tree_edge (TREE_TYPE (expr));
739 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
741 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
742 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
745 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
747 DFS_follow_tree_edge (TREE_REALPART (expr));
748 DFS_follow_tree_edge (TREE_IMAGPART (expr));
751 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
753 /* Drop names that were created for anonymous entities. */
754 if (DECL_NAME (expr)
755 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
756 && anon_aggrname_p (DECL_NAME (expr)))
757 ;
758 else
759 DFS_follow_tree_edge (DECL_NAME (expr));
760 DFS_follow_tree_edge (DECL_CONTEXT (expr));
763 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
765 DFS_follow_tree_edge (DECL_SIZE (expr));
766 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
768 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
769 special handling in LTO, it must be handled by streamer hooks. */
771 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
773 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
774 for early inlining so drop it on the floor instead of ICEing in
775 dwarf2out.c.
776 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
777 declarations which should be eliminated by decl merging. Be sure none
778 leaks to this point. */
779 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
781 if ((TREE_CODE (expr) == VAR_DECL
782 || TREE_CODE (expr) == PARM_DECL)
783 && DECL_HAS_VALUE_EXPR_P (expr))
784 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
785 if (TREE_CODE (expr) == VAR_DECL)
786 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
789 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
791 if (TREE_CODE (expr) == TYPE_DECL)
792 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
795 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
797 /* Make sure we don't inadvertently set the assembler name. */
798 if (DECL_ASSEMBLER_NAME_SET_P (expr))
799 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
802 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
804 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
805 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
806 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
807 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
808 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
811 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
813 DFS_follow_tree_edge (DECL_VINDEX (expr));
814 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
815 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
816 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
819 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
821 DFS_follow_tree_edge (TYPE_SIZE (expr));
822 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
823 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
824 DFS_follow_tree_edge (TYPE_NAME (expr));
825 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
826 reconstructed during fixup. */
827 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
828 during fixup. */
829 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
830 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
831 /* TYPE_CANONICAL is re-computed during type merging, so no need
832 to follow it here. */
833 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
836 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
838 if (TREE_CODE (expr) == ENUMERAL_TYPE)
839 DFS_follow_tree_edge (TYPE_VALUES (expr));
840 else if (TREE_CODE (expr) == ARRAY_TYPE)
841 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
842 else if (RECORD_OR_UNION_TYPE_P (expr))
843 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
844 DFS_follow_tree_edge (t);
845 else if (TREE_CODE (expr) == FUNCTION_TYPE
846 || TREE_CODE (expr) == METHOD_TYPE)
847 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
849 if (!POINTER_TYPE_P (expr))
850 DFS_follow_tree_edge (TYPE_MINVAL (expr));
851 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
852 if (RECORD_OR_UNION_TYPE_P (expr))
853 DFS_follow_tree_edge (TYPE_BINFO (expr));
856 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
858 DFS_follow_tree_edge (TREE_PURPOSE (expr));
859 DFS_follow_tree_edge (TREE_VALUE (expr));
860 DFS_follow_tree_edge (TREE_CHAIN (expr));
863 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
865 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
866 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
869 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
871 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
872 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
873 DFS_follow_tree_edge (TREE_BLOCK (expr));
876 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
878 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
879 if (VAR_OR_FUNCTION_DECL_P (t)
880 && DECL_EXTERNAL (t))
881 /* We have to stream externals in the block chain as
882 non-references. See also
883 tree-streamer-out.c:streamer_write_chain. */
884 DFS_write_tree (ob, expr_state, t, ref_p, false);
885 else
886 DFS_follow_tree_edge (t);
888 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
890 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
891 handle - those that represent inlined function scopes.
892 For the rest, drop them on the floor instead of ICEing
893 in dwarf2out.c. */
894 if (inlined_function_outer_scope_p (expr))
896 tree ultimate_origin = block_ultimate_origin (expr);
897 DFS_follow_tree_edge (ultimate_origin);
899 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
900 information for early inlined BLOCKs so drop it on the floor instead
901 of ICEing in dwarf2out.c. */
903 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
904 streaming time. */
906 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
907 list is re-constructed from BLOCK_SUPERCONTEXT. */
910 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
912 unsigned i;
913 tree t;
915 /* Note that the number of BINFO slots has already been emitted in
916 EXPR's header (see streamer_write_tree_header) because this length
917 is needed to build the empty BINFO node on the reader side. */
918 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
919 DFS_follow_tree_edge (t);
920 DFS_follow_tree_edge (BINFO_OFFSET (expr));
921 DFS_follow_tree_edge (BINFO_VTABLE (expr));
922 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
924 /* The number of BINFO_BASE_ACCESSES has already been emitted in
925 EXPR's bitfield section. */
926 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
927 DFS_follow_tree_edge (t);
929 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
930 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
933 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
935 unsigned i;
936 tree index, value;
938 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
940 DFS_follow_tree_edge (index);
941 DFS_follow_tree_edge (value);
945 if (code == OMP_CLAUSE)
947 int i;
948 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
949 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
950 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
953 #undef DFS_follow_tree_edge
956 /* Return a hash value for the tree T.
957 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
958 may hold hash values of trees inside the current SCC. */
960 static hashval_t
961 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
963 inchash::hash hstate;
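/* The visit macro below mixes the hash of each visited sibling into
   HSTATE: 0 for a NULL tree, the cached hash for a tree already outside
   the current SCC, the value recorded in MAP for a tree inside the SCC
   (during hash propagation), and the constant 1 otherwise. */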
965 #define visit(SIBLING) \
966 do { \
967 unsigned ix; \
968 if (!SIBLING) \
969 hstate.add_int (0); \
970 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
971 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
972 else if (map) \
973 hstate.add_int (*map->get (SIBLING)); \
974 else \
975 hstate.add_int (1); \
976 } while (0)
978 /* Hash TS_BASE. */
979 enum tree_code code = TREE_CODE (t);
980 hstate.add_int (code);
981 if (!TYPE_P (t))
983 hstate.add_flag (TREE_SIDE_EFFECTS (t));
984 hstate.add_flag (TREE_CONSTANT (t));
985 hstate.add_flag (TREE_READONLY (t));
986 hstate.add_flag (TREE_PUBLIC (t));
988 hstate.add_flag (TREE_ADDRESSABLE (t));
989 hstate.add_flag (TREE_THIS_VOLATILE (t));
990 if (DECL_P (t))
991 hstate.add_flag (DECL_UNSIGNED (t));
992 else if (TYPE_P (t))
993 hstate.add_flag (TYPE_UNSIGNED (t));
994 if (TYPE_P (t))
995 hstate.add_flag (TYPE_ARTIFICIAL (t));
996 else
997 hstate.add_flag (TREE_NO_WARNING (t));
998 hstate.add_flag (TREE_NOTHROW (t));
999 hstate.add_flag (TREE_STATIC (t));
1000 hstate.add_flag (TREE_PROTECTED (t));
1001 hstate.add_flag (TREE_DEPRECATED (t));
1002 if (code != TREE_BINFO)
1003 hstate.add_flag (TREE_PRIVATE (t));
1004 if (TYPE_P (t))
1006 hstate.add_flag (AGGREGATE_TYPE_P (t)
1007 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1008 hstate.add_flag (TYPE_ADDR_SPACE (t));
1010 else if (code == SSA_NAME)
1011 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1012 hstate.commit_flag ();
1014 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1016 int i;
1017 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
1018 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
1019 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1020 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
1023 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1025 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1026 hstate.add_flag (r.cl);
1027 hstate.add_flag (r.sign);
1028 hstate.add_flag (r.signalling);
1029 hstate.add_flag (r.canonical);
1030 hstate.commit_flag ();
1031 hstate.add_int (r.uexp);
1032 hstate.add (r.sig, sizeof (r.sig));
1035 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1037 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1038 hstate.add_int (f.mode);
1039 hstate.add_int (f.data.low);
1040 hstate.add_int (f.data.high);
1043 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1045 hstate.add_wide_int (DECL_MODE (t));
1046 hstate.add_flag (DECL_NONLOCAL (t));
1047 hstate.add_flag (DECL_VIRTUAL_P (t));
1048 hstate.add_flag (DECL_IGNORED_P (t));
1049 hstate.add_flag (DECL_ABSTRACT_P (t));
1050 hstate.add_flag (DECL_ARTIFICIAL (t));
1051 hstate.add_flag (DECL_USER_ALIGN (t));
1052 hstate.add_flag (DECL_PRESERVE_P (t));
1053 hstate.add_flag (DECL_EXTERNAL (t));
1054 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1055 hstate.commit_flag ();
1056 hstate.add_int (DECL_ALIGN (t));
1057 if (code == LABEL_DECL)
1059 hstate.add_int (EH_LANDING_PAD_NR (t));
1060 hstate.add_int (LABEL_DECL_UID (t));
1062 else if (code == FIELD_DECL)
1064 hstate.add_flag (DECL_PACKED (t));
1065 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1066 hstate.add_int (DECL_OFFSET_ALIGN (t));
1068 else if (code == VAR_DECL)
1070 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1071 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1073 if (code == RESULT_DECL
1074 || code == PARM_DECL
1075 || code == VAR_DECL)
1077 hstate.add_flag (DECL_BY_REFERENCE (t));
1078 if (code == VAR_DECL
1079 || code == PARM_DECL)
1080 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1082 hstate.commit_flag ();
1085 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1086 hstate.add_int (DECL_REGISTER (t));
1088 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1090 hstate.add_flag (DECL_COMMON (t));
1091 hstate.add_flag (DECL_DLLIMPORT_P (t));
1092 hstate.add_flag (DECL_WEAK (t));
1093 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1094 hstate.add_flag (DECL_COMDAT (t));
1095 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1096 hstate.add_int (DECL_VISIBILITY (t));
1097 if (code == VAR_DECL)
1099 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1100 hstate.add_flag (DECL_HARD_REGISTER (t));
1101 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1103 if (TREE_CODE (t) == FUNCTION_DECL)
1105 hstate.add_flag (DECL_FINAL_P (t));
1106 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1107 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1109 hstate.commit_flag ();
1112 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1114 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1115 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1116 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1117 hstate.add_flag (DECL_UNINLINABLE (t));
1118 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1119 hstate.add_flag (DECL_IS_NOVOPS (t));
1120 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1121 hstate.add_flag (DECL_IS_MALLOC (t));
1122 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1123 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1124 hstate.add_flag (DECL_STATIC_CHAIN (t));
1125 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1126 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1127 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1128 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1129 hstate.add_flag (DECL_PURE_P (t));
1130 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1131 hstate.commit_flag ();
1132 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1133 hstate.add_int (DECL_FUNCTION_CODE (t));
1136 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1138 hstate.add_wide_int (TYPE_MODE (t));
1139 hstate.add_flag (TYPE_STRING_FLAG (t));
1140 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1141 no streaming. */
1142 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1143 hstate.add_flag (TYPE_PACKED (t));
1144 hstate.add_flag (TYPE_RESTRICT (t));
1145 hstate.add_flag (TYPE_USER_ALIGN (t));
1146 hstate.add_flag (TYPE_READONLY (t));
1147 if (RECORD_OR_UNION_TYPE_P (t))
1149 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1150 hstate.add_flag (TYPE_FINAL_P (t));
1152 else if (code == ARRAY_TYPE)
1153 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1154 hstate.commit_flag ();
1155 hstate.add_int (TYPE_PRECISION (t));
1156 hstate.add_int (TYPE_ALIGN (t));
1159 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1160 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1161 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1163 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1164 /* We don't stream these when passing things to a different target. */
1165 && !lto_stream_offload_p)
1166 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1168 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1169 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1171 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1172 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1174 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1175 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1177 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1179 if (code != IDENTIFIER_NODE)
1180 visit (TREE_TYPE (t));
1183 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1184 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1185 visit (VECTOR_CST_ELT (t, i));
1187 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1189 visit (TREE_REALPART (t));
1190 visit (TREE_IMAGPART (t));
1193 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1195 /* Drop names that were created for anonymous entities. */
1196 if (DECL_NAME (t)
1197 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1198 && anon_aggrname_p (DECL_NAME (t)))
1199 ;
1200 else
1201 visit (DECL_NAME (t));
1202 if (DECL_FILE_SCOPE_P (t))
1203 ;
1204 else
1205 visit (DECL_CONTEXT (t));
1208 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1210 visit (DECL_SIZE (t));
1211 visit (DECL_SIZE_UNIT (t));
1212 visit (DECL_ATTRIBUTES (t));
1213 if ((code == VAR_DECL
1214 || code == PARM_DECL)
1215 && DECL_HAS_VALUE_EXPR_P (t))
1216 visit (DECL_VALUE_EXPR (t));
1217 if (code == VAR_DECL
1218 && DECL_HAS_DEBUG_EXPR_P (t))
1219 visit (DECL_DEBUG_EXPR (t));
1220 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1221 be able to call get_symbol_initial_value. */
1224 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1226 if (code == TYPE_DECL)
1227 visit (DECL_ORIGINAL_TYPE (t));
1230 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1232 if (DECL_ASSEMBLER_NAME_SET_P (t))
1233 visit (DECL_ASSEMBLER_NAME (t));
1236 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1238 visit (DECL_FIELD_OFFSET (t));
1239 visit (DECL_BIT_FIELD_TYPE (t));
1240 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1241 visit (DECL_FIELD_BIT_OFFSET (t));
1242 visit (DECL_FCONTEXT (t));
1245 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1247 visit (DECL_VINDEX (t));
1248 visit (DECL_FUNCTION_PERSONALITY (t));
1249 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1250 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1253 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1255 visit (TYPE_SIZE (t));
1256 visit (TYPE_SIZE_UNIT (t));
1257 visit (TYPE_ATTRIBUTES (t));
1258 visit (TYPE_NAME (t));
1259 visit (TYPE_MAIN_VARIANT (t));
1260 if (TYPE_FILE_SCOPE_P (t))
1261 ;
1262 else
1263 visit (TYPE_CONTEXT (t));
1264 visit (TYPE_STUB_DECL (t));
1267 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1269 if (code == ENUMERAL_TYPE)
1270 visit (TYPE_VALUES (t));
1271 else if (code == ARRAY_TYPE)
1272 visit (TYPE_DOMAIN (t));
1273 else if (RECORD_OR_UNION_TYPE_P (t))
1274 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1275 visit (f);
1276 else if (code == FUNCTION_TYPE
1277 || code == METHOD_TYPE)
1278 visit (TYPE_ARG_TYPES (t));
1279 if (!POINTER_TYPE_P (t))
1280 visit (TYPE_MINVAL (t));
1281 visit (TYPE_MAXVAL (t));
1282 if (RECORD_OR_UNION_TYPE_P (t))
1283 visit (TYPE_BINFO (t));
1286 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1288 visit (TREE_PURPOSE (t));
1289 visit (TREE_VALUE (t));
1290 visit (TREE_CHAIN (t));
1293 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1294 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1295 visit (TREE_VEC_ELT (t, i));
1297 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1299 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1300 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1301 visit (TREE_OPERAND (t, i));
1304 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1306 unsigned i;
1307 tree b;
1308 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1309 visit (b);
1310 visit (BINFO_OFFSET (t));
1311 visit (BINFO_VTABLE (t));
1312 visit (BINFO_VPTR_FIELD (t));
1313 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1314 visit (b);
1315 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1316 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1319 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1321 unsigned i;
1322 tree index, value;
1323 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1324 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1326 visit (index);
1327 visit (value);
1331 if (code == OMP_CLAUSE)
1333 int i;
1334 HOST_WIDE_INT val;
1336 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1337 switch (OMP_CLAUSE_CODE (t))
1339 case OMP_CLAUSE_DEFAULT:
1340 val = OMP_CLAUSE_DEFAULT_KIND (t);
1341 break;
1342 case OMP_CLAUSE_SCHEDULE:
1343 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1344 break;
1345 case OMP_CLAUSE_DEPEND:
1346 val = OMP_CLAUSE_DEPEND_KIND (t);
1347 break;
1348 case OMP_CLAUSE_MAP:
1349 val = OMP_CLAUSE_MAP_KIND (t);
1350 break;
1351 case OMP_CLAUSE_PROC_BIND:
1352 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1353 break;
1354 case OMP_CLAUSE_REDUCTION:
1355 val = OMP_CLAUSE_REDUCTION_CODE (t);
1356 break;
1357 default:
1358 val = 0;
1359 break;
1361 hstate.add_wide_int (val);
1362 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1363 visit (OMP_CLAUSE_OPERAND (t, i));
1364 visit (OMP_CLAUSE_CHAIN (t));
1367 return hstate.end ();
1369 #undef visit
1372 /* Compare two SCC entries by their hash value for qsorting them. */
1374 int
1375 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1377 const scc_entry *p1 = (const scc_entry *) p1_;
1378 const scc_entry *p2 = (const scc_entry *) p2_;
1379 if (p1->hash < p2->hash)
1380 return -1;
1381 else if (p1->hash > p2->hash)
1382 return 1;
1383 return 0;
1386 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1387 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1389 hashval_t
1390 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1391 bool ref_p, bool this_ref_p)
1393 unsigned int last_classes = 0, iterations = 0;
1395 /* Compute hash values for the SCC members. */
1396 for (unsigned i = 0; i < size; ++i)
1397 sccstack[first+i].hash
1398 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1400 if (size == 1)
1401 return sccstack[first].hash;
1403 /* We aim to get unique hash for every tree within SCC and compute hash value
1404 of the whole SCC by combining all values together in a stable (entry-point
1405 independent) order. This guarantees that the same SCC regions within
1406 different translation units will get the same hash values and therefore
1407 will be merged at WPA time.
1409 Often the hashes are already unique. In that case we compute the SCC hash
1410 by combining individual hash values in an increasing order.
1412 If there are duplicates, we seek at least one tree with unique hash (and
1413 pick one with minimal hash and this property). Then we obtain a stable
1414 order by DFS walk starting from this unique tree and then use the index
1415 within this order to make individual hash values unique.
1417 If there is no tree with unique hash, we iteratively propagate the hash
1418 values across the internal edges of SCC. This usually quickly leads
1419 to unique hashes. Consider, for example, an SCC containing two pointers
1420 that are identical except for the types they point to and assume that
1421 these types are also part of the SCC. The propagation will add the
1422 points-to type information into their hash values. */
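/* Concretely, the loop below (1) hashes every member locally, (2) sorts
   the members by hash, (3) finalizes the SCC hash once a member with a
   unique minimal hash exists, the number of equivalence classes stops
   growing, or the iteration limit is hit, re-walking the SCC from the
   unique member when some hashes still collide; otherwise (4) it
   re-hashes every member with its neighbors' current hashes mixed in
   via MAP and repeats. */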
1423 do
1425 /* Sort the SCC so we can easily check for uniqueness. */
1426 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1428 unsigned int classes = 1;
1429 int firstunique = -1;
1431 /* Find the tree with lowest unique hash (if it exists) and compute
1432 the number of equivalence classes. */
1433 if (sccstack[first].hash != sccstack[first+1].hash)
1434 firstunique = 0;
1435 for (unsigned i = 1; i < size; ++i)
1436 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1438 classes++;
1439 if (firstunique == -1
1440 && (i == size - 1
1441 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1442 firstunique = i;
1445 /* If we found a tree with unique hash, stop the iteration. */
1446 if (firstunique != -1
1447 /* Also terminate if we run out of iterations or if the number of
1448 equivalence classes is no longer increasing.
1449 For example a cyclic list of trees that are all equivalent will
1450 never have unique entry point; we however do not build such SCCs
1451 in our IL. */
1452 || classes <= last_classes || iterations > 16)
1454 hashval_t scc_hash;
1456 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1457 starting from FIRSTUNIQUE to obtain a stable order. */
1458 if (classes != size && firstunique != -1)
1460 hash_map <tree, hashval_t> map(size*2);
1462 /* Store hash values into a map, so we can associate them with
1463 the reordered SCC. */
1464 for (unsigned i = 0; i < size; ++i)
1465 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1467 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1468 true);
1469 gcc_assert (again.sccstack.length () == size);
1471 memcpy (sccstack.address () + first,
1472 again.sccstack.address (),
1473 sizeof (scc_entry) * size);
1475 /* Update hash values of individual members by hashing in the
1476 index within the stable order. This ensures uniqueness.
1477 Also compute the SCC hash by mixing in all hash values in
1478 the stable order we obtained. */
1479 sccstack[first].hash = *map.get (sccstack[first].t);
1480 scc_hash = sccstack[first].hash;
1481 for (unsigned i = 1; i < size; ++i)
1483 sccstack[first+i].hash
1484 = iterative_hash_hashval_t (i,
1485 *map.get (sccstack[first+i].t));
1486 scc_hash
1487 = iterative_hash_hashval_t (scc_hash,
1488 sccstack[first+i].hash);
1491 /* If we got a unique hash value for each tree, then sort already
1492 ensured entry-point independent order. Only compute the final
1493 SCC hash.
1495 If we failed to find the unique entry point, we go by the same
1496 route. We will eventually introduce unwanted hash conflicts. */
1497 else
1499 scc_hash = sccstack[first].hash;
1500 for (unsigned i = 1; i < size; ++i)
1501 scc_hash
1502 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1504 /* We cannot 100% guarantee that the hash won't conflict so as
1505 to make it impossible to find a unique hash. This however
1506 should be an extremely rare case. ICE for now so possible
1507 issues are found and evaluated. */
1508 gcc_checking_assert (classes == size);
1511 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1512 hash into the hash of each element. */
1513 for (unsigned i = 0; i < size; ++i)
1514 sccstack[first+i].hash
1515 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1516 return scc_hash;
1519 last_classes = classes;
1520 iterations++;
1522 /* We failed to identify the entry point; propagate hash values across
1523 the edges. */
1524 hash_map <tree, hashval_t> map(size*2);
1526 for (unsigned i = 0; i < size; ++i)
1527 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1529 for (unsigned i = 0; i < size; i++)
1530 sccstack[first+i].hash
1531 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1533 while (true);
1536 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1537 already in the streamer cache. Main routine called for
1538 each visit of EXPR. */
1540 void
1541 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1542 tree expr, bool ref_p, bool this_ref_p)
1544 /* Handle special cases. */
1545 if (expr == NULL_TREE)
1546 return;
1548 /* Do not DFS walk into indexable trees. */
1549 if (this_ref_p && tree_is_indexable (expr))
1550 return;
1552 /* Check if we already streamed EXPR. */
1553 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1554 return;
1556 worklist w;
1557 w.expr = expr;
1558 w.from_state = from_state;
1559 w.cstate = NULL;
1560 w.ref_p = ref_p;
1561 w.this_ref_p = this_ref_p;
1562 worklist_vec.safe_push (w);
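/* DFS_write_tree only queues work; the loop in the DFS constructor pops
   the entries, assigns DFS numbers and low links, and pops a maximal SCC
   off sccstack whenever low == dfsnum; this is essentially an iterative
   form of Tarjan's SCC algorithm driven by worklist_vec instead of
   recursion. */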
1566 /* Emit the physical representation of tree node EXPR to output block OB.
1567 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1568 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1570 void
1571 lto_output_tree (struct output_block *ob, tree expr,
1572 bool ref_p, bool this_ref_p)
1574 unsigned ix;
1575 bool existed_p;
1577 if (expr == NULL_TREE)
1579 streamer_write_record_start (ob, LTO_null);
1580 return;
1583 if (this_ref_p && tree_is_indexable (expr))
1585 lto_output_tree_ref (ob, expr);
1586 return;
1589 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1590 if (existed_p)
1592 /* If a node has already been streamed out, make sure that
1593 we don't write it more than once. Otherwise, the reader
1594 will instantiate two different nodes for the same object. */
1595 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1596 streamer_write_uhwi (ob, ix);
1597 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1598 lto_tree_code_to_tag (TREE_CODE (expr)));
1599 lto_stats.num_pickle_refs_output++;
1601 else
1603 /* This is the first time we see EXPR, write all reachable
1604 trees to OB. */
1605 static bool in_dfs_walk;
1607 /* Protect against recursion, which would mean a disconnect between
1608 the tree edges we walk in the DFS walk and the edges
1609 we stream out. */
1610 gcc_assert (!in_dfs_walk);
1612 /* Start the DFS walk. */
1613 /* Save ob state ... */
1614 /* let's see ... */
1615 in_dfs_walk = true;
1616 DFS (ob, expr, ref_p, this_ref_p, false);
1617 in_dfs_walk = false;
1619 /* Finally append a reference to the tree we were writing.
1620 ??? If expr ended up as a singleton we could have
1621 inlined it here and avoid outputting a reference. */
1622 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1623 gcc_assert (existed_p);
1624 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1625 streamer_write_uhwi (ob, ix);
1626 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1627 lto_tree_code_to_tag (TREE_CODE (expr)));
1628 lto_stats.num_pickle_refs_output++;
1633 /* Output to OB a list of try/catch handlers starting with FIRST. */
1635 static void
1636 output_eh_try_list (struct output_block *ob, eh_catch first)
1638 eh_catch n;
1640 for (n = first; n; n = n->next_catch)
1642 streamer_write_record_start (ob, LTO_eh_catch);
1643 stream_write_tree (ob, n->type_list, true);
1644 stream_write_tree (ob, n->filter_list, true);
1645 stream_write_tree (ob, n->label, true);
1648 streamer_write_record_start (ob, LTO_null);
1652 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1653 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1654 detect EH region sharing. */
1656 static void
1657 output_eh_region (struct output_block *ob, eh_region r)
1659 enum LTO_tags tag;
1661 if (r == NULL)
1663 streamer_write_record_start (ob, LTO_null);
1664 return;
1667 if (r->type == ERT_CLEANUP)
1668 tag = LTO_ert_cleanup;
1669 else if (r->type == ERT_TRY)
1670 tag = LTO_ert_try;
1671 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1672 tag = LTO_ert_allowed_exceptions;
1673 else if (r->type == ERT_MUST_NOT_THROW)
1674 tag = LTO_ert_must_not_throw;
1675 else
1676 gcc_unreachable ();
1678 streamer_write_record_start (ob, tag);
1679 streamer_write_hwi (ob, r->index);
1681 if (r->outer)
1682 streamer_write_hwi (ob, r->outer->index);
1683 else
1684 streamer_write_zero (ob);
1686 if (r->inner)
1687 streamer_write_hwi (ob, r->inner->index);
1688 else
1689 streamer_write_zero (ob);
1691 if (r->next_peer)
1692 streamer_write_hwi (ob, r->next_peer->index);
1693 else
1694 streamer_write_zero (ob);
1696 if (r->type == ERT_TRY)
1698 output_eh_try_list (ob, r->u.eh_try.first_catch);
1700 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1702 stream_write_tree (ob, r->u.allowed.type_list, true);
1703 stream_write_tree (ob, r->u.allowed.label, true);
1704 streamer_write_uhwi (ob, r->u.allowed.filter);
1706 else if (r->type == ERT_MUST_NOT_THROW)
1708 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1709 bitpack_d bp = bitpack_create (ob->main_stream);
1710 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1711 streamer_write_bitpack (&bp);
1714 if (r->landing_pads)
1715 streamer_write_hwi (ob, r->landing_pads->index);
1716 else
1717 streamer_write_zero (ob);
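/* Each region is thus encoded as its tag, its own index, the indices of
   its outer, inner and next_peer regions (zero when absent), a payload
   specific to the region kind, and the index of its first landing pad. */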
1721 /* Output landing pad LP to OB. */
1723 static void
1724 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1726 if (lp == NULL)
1728 streamer_write_record_start (ob, LTO_null);
1729 return;
1732 streamer_write_record_start (ob, LTO_eh_landing_pad);
1733 streamer_write_hwi (ob, lp->index);
1734 if (lp->next_lp)
1735 streamer_write_hwi (ob, lp->next_lp->index);
1736 else
1737 streamer_write_zero (ob);
1739 if (lp->region)
1740 streamer_write_hwi (ob, lp->region->index);
1741 else
1742 streamer_write_zero (ob);
1744 stream_write_tree (ob, lp->post_landing_pad, true);
1748 /* Output the existing eh_table to OB. */
1750 static void
1751 output_eh_regions (struct output_block *ob, struct function *fn)
1753 if (fn->eh && fn->eh->region_tree)
1755 unsigned i;
1756 eh_region eh;
1757 eh_landing_pad lp;
1758 tree ttype;
1760 streamer_write_record_start (ob, LTO_eh_table);
1762 /* Emit the index of the root of the EH region tree. */
1763 streamer_write_hwi (ob, fn->eh->region_tree->index);
1765 /* Emit all the EH regions in the region array. */
1766 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1767 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1768 output_eh_region (ob, eh);
1770 /* Emit all landing pads. */
1771 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1772 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1773 output_eh_lp (ob, lp);
1775 /* Emit all the runtime type data. */
1776 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1777 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1778 stream_write_tree (ob, ttype, true);
1780 /* Emit the table of action chains. */
1781 if (targetm.arm_eabi_unwinder)
1783 tree t;
1784 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1785 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1786 stream_write_tree (ob, t, true);
1788 else
1790 uchar c;
1791 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1792 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1793 streamer_write_char_stream (ob->main_stream, c);
1797 /* The LTO_null either terminates the record or indicates that there
1798 are no eh_records at all. */
1799 streamer_write_record_start (ob, LTO_null);
1803 /* Output all of the active ssa names to the ssa_names stream. */
1805 static void
1806 output_ssa_names (struct output_block *ob, struct function *fn)
1808 unsigned int i, len;
1810 len = vec_safe_length (SSANAMES (fn));
1811 streamer_write_uhwi (ob, len);
1813 for (i = 1; i < len; i++)
1815 tree ptr = (*SSANAMES (fn))[i];
1817 if (ptr == NULL_TREE
1818 || SSA_NAME_IN_FREE_LIST (ptr)
1819 || virtual_operand_p (ptr)
1820 /* Simply skip unreleased SSA names. */
1821 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1822 && (! SSA_NAME_DEF_STMT (ptr)
1823 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1824 continue;
1826 streamer_write_uhwi (ob, i);
1827 streamer_write_char_stream (ob->main_stream,
1828 SSA_NAME_IS_DEFAULT_DEF (ptr));
1829 if (SSA_NAME_VAR (ptr))
1830 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1831 else
1832 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1833 stream_write_tree (ob, TREE_TYPE (ptr), true);
1836 streamer_write_zero (ob);
1840 /* Output a wide-int. */
1842 static void
1843 streamer_write_wi (struct output_block *ob,
1844 const widest_int &w)
1846 int len = w.get_len ();
1848 streamer_write_uhwi (ob, w.get_precision ());
1849 streamer_write_uhwi (ob, len);
1850 for (int i = 0; i < len; i++)
1851 streamer_write_hwi (ob, w.elt (i));
1855 /* Output the cfg. */
1857 static void
1858 output_cfg (struct output_block *ob, struct function *fn)
1860 struct lto_output_stream *tmp_stream = ob->main_stream;
1861 basic_block bb;
1863 ob->main_stream = ob->cfg_stream;
1865 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1866 profile_status_for_fn (fn));
1868 /* Output the number of the highest basic block. */
1869 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1871 FOR_ALL_BB_FN (bb, fn)
1873 edge_iterator ei;
1874 edge e;
1876 streamer_write_hwi (ob, bb->index);
1878 /* Output the successors and the edge flags. */
1879 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1880 FOR_EACH_EDGE (e, ei, bb->succs)
1882 streamer_write_uhwi (ob, e->dest->index);
1883 streamer_write_hwi (ob, e->probability);
1884 streamer_write_gcov_count (ob, e->count);
1885 streamer_write_uhwi (ob, e->flags);
1889 streamer_write_hwi (ob, -1);
1891 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1892 while (bb->next_bb)
1894 streamer_write_hwi (ob, bb->next_bb->index);
1895 bb = bb->next_bb;
1898 streamer_write_hwi (ob, -1);
1900 /* ??? The cfgloop interface is tied to cfun. */
1901 gcc_assert (cfun == fn);
1903 /* Output the number of loops. */
1904 streamer_write_uhwi (ob, number_of_loops (fn));
1906 /* Output each loop, skipping the tree root which has number zero. */
1907 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1909 struct loop *loop = get_loop (fn, i);
1911 /* Write the index of the loop header. That's enough to rebuild
1912 the loop tree on the reader side. Stream -1 for an unused
1913 loop entry. */
1914 if (!loop)
1916 streamer_write_hwi (ob, -1);
1917 continue;
1919 else
1920 streamer_write_hwi (ob, loop->header->index);
1922 /* Write everything copy_loop_info copies. */
1923 streamer_write_enum (ob->main_stream,
1924 loop_estimation, EST_LAST, loop->estimate_state);
1925 streamer_write_hwi (ob, loop->any_upper_bound);
1926 if (loop->any_upper_bound)
1927 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1928 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1929 if (loop->any_likely_upper_bound)
1930 streamer_write_wi (ob, loop->nb_iterations_likely_upper_bound);
1931 streamer_write_hwi (ob, loop->any_estimate);
1932 if (loop->any_estimate)
1933 streamer_write_wi (ob, loop->nb_iterations_estimate);
1935 /* Write OMP SIMD related info. */
1936 streamer_write_hwi (ob, loop->safelen);
1937 streamer_write_hwi (ob, loop->dont_vectorize);
1938 streamer_write_hwi (ob, loop->force_vectorize);
1939 stream_write_tree (ob, loop->simduid, true);
1942 ob->main_stream = tmp_stream;
1946 /* Create the header in the file using OB. If the section type is for
1947 a function body, FN is the decl for that function. */
1949 void
1950 produce_asm (struct output_block *ob, tree fn)
1952 enum lto_section_type section_type = ob->section_type;
1953 struct lto_function_header header;
1954 char *section_name;
1956 if (section_type == LTO_section_function_body)
1958 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1959 section_name = lto_get_section_name (section_type, name, NULL);
1961 else
1962 section_name = lto_get_section_name (section_type, NULL, NULL);
1964 lto_begin_section (section_name, !flag_wpa);
1965 free (section_name);
1967 /* The entire header stream is computed here. */
1968 memset (&header, 0, sizeof (struct lto_function_header));
1970 /* Write the header. */
1971 header.major_version = LTO_major_version;
1972 header.minor_version = LTO_minor_version;
1974 if (section_type == LTO_section_function_body)
1975 header.cfg_size = ob->cfg_stream->total_size;
1976 header.main_size = ob->main_stream->total_size;
1977 header.string_size = ob->string_stream->total_size;
1978 lto_write_data (&header, sizeof header);
1980 /* Put all of the gimple and the string table out to the asm file as a
1981 block of text. */
1982 if (section_type == LTO_section_function_body)
1983 lto_write_stream (ob->cfg_stream);
1984 lto_write_stream (ob->main_stream);
1985 lto_write_stream (ob->string_stream);
1987 lto_end_section ();
1991 /* Output the base body of struct function FN using output block OB. */
1993 static void
1994 output_struct_function_base (struct output_block *ob, struct function *fn)
1996 struct bitpack_d bp;
1997 unsigned i;
1998 tree t;
2000 /* Output the static chain and non-local goto save area. */
2001 stream_write_tree (ob, fn->static_chain_decl, true);
2002 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2004 /* Output all the local variables in the function. */
2005 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2006 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2007 stream_write_tree (ob, t, true);
2009 /* Output current IL state of the function. */
2010 streamer_write_uhwi (ob, fn->curr_properties);
2012 /* Write all the attributes for FN. */
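/* The reader must unpack these bits in exactly the same order (see
   input_struct_function_base in lto-streamer-in.c).  */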
2013 bp = bitpack_create (ob->main_stream);
2014 bp_pack_value (&bp, fn->is_thunk, 1);
2015 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2016 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2017 bp_pack_value (&bp, fn->returns_struct, 1);
2018 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2019 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2020 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2021 bp_pack_value (&bp, fn->after_inlining, 1);
2022 bp_pack_value (&bp, fn->stdarg, 1);
2023 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2024 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2025 bp_pack_value (&bp, fn->calls_alloca, 1);
2026 bp_pack_value (&bp, fn->calls_setjmp, 1);
2027 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2028 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2029 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2030 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2031 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2033 /* Output the function start and end loci. */
2034 stream_output_location (ob, &bp, fn->function_start_locus);
2035 stream_output_location (ob, &bp, fn->function_end_locus);
2037 streamer_write_bitpack (&bp);
2041 /* Output the body of function NODE->DECL. */
2043 static void
2044 output_function (struct cgraph_node *node)
2046 tree function;
2047 struct function *fn;
2048 basic_block bb;
2049 struct output_block *ob;
2051 function = node->decl;
2052 fn = DECL_STRUCT_FUNCTION (function);
2053 ob = create_output_block (LTO_section_function_body);
2055 clear_line_info (ob);
2056 ob->symbol = node;
2058 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2060 /* Set current_function_decl and cfun. */
2061 push_cfun (fn);
2063 /* Make string 0 be a NULL string. */
2064 streamer_write_char_stream (ob->string_stream, 0);
2066 streamer_write_record_start (ob, LTO_function);
2068 /* Output decls for the return value and arguments. */
2069 stream_write_tree (ob, DECL_RESULT (function), true);
2070 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2072 /* Output DECL_INITIAL for the function, which contains the tree of
2073 lexical scopes. */
2074 stream_write_tree (ob, DECL_INITIAL (function), true);
2076 /* We also stream abstract functions, for which we stream only the
2077 bits needed for debug info. */
2078 if (gimple_has_body_p (function))
2080 streamer_write_uhwi (ob, 1);
2081 output_struct_function_base (ob, fn);
2083 /* Output all the SSA names used in the function. */
2084 output_ssa_names (ob, fn);
2086 /* Output any exception handling regions. */
2087 output_eh_regions (ob, fn);
2090 /* We will renumber the statements. The code that does this uses
2091 the same ordering that we use for serializing them so we can use
2092 the same code on the other end and not have to write out the
2093 statement numbers. We do not assign UIDs to PHIs here because
2094 virtual PHIs get re-computed on-the-fly which would make numbers
2095 inconsistent. */
2096 set_gimple_stmt_max_uid (cfun, 0);
2097 FOR_ALL_BB_FN (bb, cfun)
2099 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2100 gsi_next (&gsi))
2102 gphi *stmt = gsi.phi ();
2104 /* Virtual PHIs are not going to be streamed. */
2105 if (!virtual_operand_p (gimple_phi_result (stmt)))
2106 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2108 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2109 gsi_next (&gsi))
2111 gimple *stmt = gsi_stmt (gsi);
2112 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2115 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2116 virtual phis now. */
2117 FOR_ALL_BB_FN (bb, cfun)
2119 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2120 gsi_next (&gsi))
2122 gphi *stmt = gsi.phi ();
2123 if (virtual_operand_p (gimple_phi_result (stmt)))
2124 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2128 /* Output the code for the function. */
2129 FOR_ALL_BB_FN (bb, fn)
2130 output_bb (ob, bb, fn);
2132 /* The terminator for this function. */
2133 streamer_write_record_start (ob, LTO_null);
2135 output_cfg (ob, fn);
2137 pop_cfun ();
2139 else
2140 streamer_write_uhwi (ob, 0);
2142 /* Create a section to hold the pickled output of this function. */
2143 produce_asm (ob, function);
2145 destroy_output_block (ob);
2148 /* Output the constructor (DECL_INITIAL) of variable NODE->DECL. */
2150 static void
2151 output_constructor (struct varpool_node *node)
2153 tree var = node->decl;
2154 struct output_block *ob;
2156 ob = create_output_block (LTO_section_function_body);
2158 clear_line_info (ob);
2159 ob->symbol = node;
2161 /* Make string 0 be a NULL string. */
2162 streamer_write_char_stream (ob->string_stream, 0);
2164 /* Output DECL_INITIAL for the variable, which holds its
2165 constructor. */
2166 stream_write_tree (ob, DECL_INITIAL (var), true);
2168 /* Create a section to hold the pickled output of this initializer. */
2169 produce_asm (ob, var);
2171 destroy_output_block (ob);
2175 /* Emit toplevel asms. */
2177 void
2178 lto_output_toplevel_asms (void)
2180 struct output_block *ob;
2181 struct asm_node *can;
2182 char *section_name;
2183 struct lto_simple_header_with_strings header;
2185 if (!symtab->first_asm_symbol ())
2186 return;
2188 ob = create_output_block (LTO_section_asm);
2190 /* Make string 0 be a NULL string. */
2191 streamer_write_char_stream (ob->string_stream, 0);
2193 for (can = symtab->first_asm_symbol (); can; can = can->next)
2195 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2196 streamer_write_hwi (ob, can->order);
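/* A NULL string written below terminates the list of toplevel asms.  */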
2199 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2201 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2202 lto_begin_section (section_name, !flag_wpa);
2203 free (section_name);
2205 /* The entire header stream is computed here. */
2206 memset (&header, 0, sizeof (header));
2208 /* Write the header. */
2209 header.major_version = LTO_major_version;
2210 header.minor_version = LTO_minor_version;
2212 header.main_size = ob->main_stream->total_size;
2213 header.string_size = ob->string_stream->total_size;
2214 lto_write_data (&header, sizeof header);
2216 /* Put the toplevel asm statements and the string table out to the asm
2217 file as a block of text. */
2218 lto_write_stream (ob->main_stream);
2219 lto_write_stream (ob->string_stream);
2221 lto_end_section ();
2223 destroy_output_block (ob);
2227 /* Copy the function body or variable constructor of NODE without deserializing. */
2229 static void
2230 copy_function_or_variable (struct symtab_node *node)
2232 tree function = node->decl;
2233 struct lto_file_decl_data *file_data = node->lto_file_data;
2234 const char *data;
2235 size_t len;
2236 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2237 char *section_name =
2238 lto_get_section_name (LTO_section_function_body, name, NULL);
2239 size_t i, j;
2240 struct lto_in_decl_state *in_state;
2241 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2243 lto_begin_section (section_name, false);
2244 free (section_name);
2246 /* We may have renamed the declaration, e.g., a static function. */
2247 name = lto_get_decl_name_mapping (file_data, name);
2249 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2250 name, &len);
2251 gcc_assert (data);
2253 /* Do a bit copy of the function body. */
2254 lto_write_raw_data (data, len);
2256 /* Copy decls. */
2257 in_state =
2258 lto_get_function_in_decl_state (node->lto_file_data, function);
2259 gcc_assert (in_state);
2260 out_state->compressed = in_state->compressed;
2262 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2264 size_t n = vec_safe_length (in_state->streams[i]);
2265 vec<tree, va_gc> *trees = in_state->streams[i];
2266 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2268 /* The out state must have the same indices as the in state.
2269 So just copy the vector. All the encoders in the out state
2270 must be empty when we reach here. */
2271 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2272 encoder->trees.reserve_exact (n);
2273 for (j = 0; j < n; j++)
2274 encoder->trees.safe_push ((*trees)[j]);
2277 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2278 data, len);
2279 lto_end_section ();
2282 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
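/* Presumably this keeps the access type explicit in the IL, so the
   reference stays valid even if the variable declaration is later merged
   with a declaration of a different type during LTO symbol merging.  */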
2284 static tree
2285 wrap_refs (tree *tp, int *ws, void *)
2287 tree t = *tp;
2288 if (handled_component_p (t)
2289 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2290 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2292 tree decl = TREE_OPERAND (t, 0);
2293 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2294 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2295 build1 (ADDR_EXPR, ptrtype, decl),
2296 build_int_cst (ptrtype, 0));
2297 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2298 *ws = 0;
2300 else if (TREE_CODE (t) == CONSTRUCTOR)
2302 else if (!EXPR_P (t))
2303 *ws = 0;
2304 return NULL_TREE;
2307 /* Main entry point from the pass manager. */
2309 void
2310 lto_output (void)
2312 struct lto_out_decl_state *decl_state;
2313 bitmap output = NULL;
2314 int i, n_nodes;
2315 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
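/* With checking enabled, OUTPUT records the DECL_UIDs whose bodies or
   initializers have already been streamed, so the asserts below catch any
   attempt to emit the same symbol twice.  */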
2317 if (flag_checking)
2318 output = lto_bitmap_alloc ();
2320 /* Initialize the streamer. */
2321 lto_streamer_init ();
2323 n_nodes = lto_symtab_encoder_size (encoder);
2324 /* Process only the functions with bodies. */
2325 for (i = 0; i < n_nodes; i++)
2327 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2328 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2330 if (lto_symtab_encoder_encode_body_p (encoder, node)
2331 && !node->alias
2332 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2334 if (flag_checking)
2336 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2337 bitmap_set_bit (output, DECL_UID (node->decl));
2339 decl_state = lto_new_out_decl_state ();
2340 lto_push_out_decl_state (decl_state);
2341 if (gimple_has_body_p (node->decl) || !flag_wpa
2342 /* Thunks have no body but they may be synthesized
2343 at WPA time. */
2344 || DECL_ARGUMENTS (node->decl))
2345 output_function (node);
2346 else
2347 copy_function_or_variable (node);
2348 gcc_assert (lto_get_out_decl_state () == decl_state);
2349 lto_pop_out_decl_state ();
2350 lto_record_function_out_decl_state (node->decl, decl_state);
2353 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2355 /* Wrap symbol references inside the ctor in a type
2356 preserving MEM_REF. */
2357 tree ctor = DECL_INITIAL (node->decl);
2358 if (ctor && !in_lto_p)
2359 walk_tree (&ctor, wrap_refs, NULL, NULL);
2360 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2361 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2362 && !node->alias)
2364 timevar_push (TV_IPA_LTO_CTORS_OUT);
2365 if (flag_checking)
2367 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2368 bitmap_set_bit (output, DECL_UID (node->decl));
2370 decl_state = lto_new_out_decl_state ();
2371 lto_push_out_decl_state (decl_state);
2372 if (DECL_INITIAL (node->decl) != error_mark_node
2373 || !flag_wpa)
2374 output_constructor (node);
2375 else
2376 copy_function_or_variable (node);
2377 gcc_assert (lto_get_out_decl_state () == decl_state);
2378 lto_pop_out_decl_state ();
2379 lto_record_function_out_decl_state (node->decl, decl_state);
2380 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2385 /* Emit the callgraph after emitting function bodies. This needs to
2386 be done now to make sure that all the statements in every function
2387 have been renumbered so that edges can be associated with call
2388 statements using the statement UIDs. */
2389 output_symtab ();
2391 output_offload_tables ();
2393 if (flag_checking)
2394 lto_bitmap_free (output);
2398 /* Write each node encoded by ENCODER to OB, as well as those reachable
2399 from it and required for correct representation of its semantics.
2400 Each node in ENCODER must be a global declaration or a type. A node
2401 is written only once, even if it appears multiple times in the
2402 vector. Certain transitively-reachable nodes, such as those
2403 representing expressions, may be duplicated, but such nodes
2404 must not appear in ENCODER itself. */
2406 static void
2407 write_global_stream (struct output_block *ob,
2408 struct lto_tree_ref_encoder *encoder)
2410 tree t;
2411 size_t index;
2412 const size_t size = lto_tree_ref_encoder_size (encoder);
2414 for (index = 0; index < size; index++)
2416 t = lto_tree_ref_encoder_get_tree (encoder, index);
2417 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2418 stream_write_tree (ob, t, false);
2423 /* Write a sequence of indices into the globals vector corresponding
2424 to the trees in ENCODER. These are used by the reader to map the
2425 indices used to refer to global entities within function bodies to
2426 their referents. */
2428 static void
2429 write_global_references (struct output_block *ob,
2430 struct lto_tree_ref_encoder *encoder)
2432 tree t;
2433 uint32_t index;
2434 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2436 /* Write size and slot indexes as 32-bit unsigned numbers. */
2437 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2438 data[0] = size;
2440 for (index = 0; index < size; index++)
2442 unsigned slot_num;
2444 t = lto_tree_ref_encoder_get_tree (encoder, index);
2445 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2446 gcc_assert (slot_num != (unsigned)-1);
2447 data[index + 1] = slot_num;
2450 lto_write_data (data, sizeof (int32_t) * (size + 1));
2451 free (data);
2455 /* Write all the streams in an lto_out_decl_state STATE using
2456 output block OB. */
2458 void
2459 lto_output_decl_state_streams (struct output_block *ob,
2460 struct lto_out_decl_state *state)
2462 int i;
2464 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2465 write_global_stream (ob, &state->streams[i]);
2469 /* Write all the references in an lto_out_decl_state STATE using
2470 output block OB. */
2472 void
2473 lto_output_decl_state_refs (struct output_block *ob,
2474 struct lto_out_decl_state *state)
2476 unsigned i;
2477 unsigned ref;
2478 tree decl;
2480 /* Write a reference to FUNCTION_DECL. If there is no function,
2481 write a reference to void_type_node. */
2482 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2483 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2484 gcc_assert (ref != (unsigned)-1);
2485 ref = ref * 2 + (state->compressed ? 1 : 0);
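/* The slot number is shifted left by one so that the low bit can carry
   the "section is compressed" flag; the reader recovers both parts.  */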
2486 lto_write_data (&ref, sizeof (uint32_t));
2488 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2489 write_global_references (ob, &state->streams[i]);
2493 /* Return the written size of STATE. */
2495 static size_t
2496 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2498 int i;
2499 size_t size;
2501 size = sizeof (int32_t); /* fn_ref. */
2502 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2504 size += sizeof (int32_t); /* vector size. */
2505 size += (lto_tree_ref_encoder_size (&state->streams[i])
2506 * sizeof (int32_t));
2508 return size;
2512 /* Write symbol T into the symbol table section, using CACHE to look up
2513 its slot number. SEEN records the symbol names written so far. */
2515 static void
2516 write_symbol (struct streamer_tree_cache_d *cache,
2517 tree t, hash_set<const char *> *seen, bool alias)
2519 const char *name;
2520 enum gcc_plugin_symbol_kind kind;
2521 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2522 unsigned slot_num;
2523 uint64_t size;
2524 const char *comdat;
2525 unsigned char c;
2527 /* None of the following kinds of symbols are needed in the
2528 symbol table. */
2529 if (!TREE_PUBLIC (t)
2530 || is_builtin_fn (t)
2531 || DECL_ABSTRACT_P (t)
2532 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2533 return;
2534 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2536 gcc_assert (TREE_CODE (t) == VAR_DECL
2537 || TREE_CODE (t) == FUNCTION_DECL);
2539 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2541 /* This behaves like assemble_name_raw in varasm.c, performing the
2542 same name manipulations that ASM_OUTPUT_LABELREF does. */
2543 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2545 if (seen->add (name))
2546 return;
2548 streamer_tree_cache_lookup (cache, t, &slot_num);
2549 gcc_assert (slot_num != (unsigned)-1);
2551 if (DECL_EXTERNAL (t))
2553 if (DECL_WEAK (t))
2554 kind = GCCPK_WEAKUNDEF;
2555 else
2556 kind = GCCPK_UNDEF;
2558 else
2560 if (DECL_WEAK (t))
2561 kind = GCCPK_WEAKDEF;
2562 else if (DECL_COMMON (t))
2563 kind = GCCPK_COMMON;
2564 else
2565 kind = GCCPK_DEF;
2567 /* When something is defined, it should have a node attached. */
2568 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2569 || varpool_node::get (t)->definition);
2570 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2571 || (cgraph_node::get (t)
2572 && cgraph_node::get (t)->definition));
2575 /* Imitate what default_elf_asm_output_external does.
2576 When a symbol is external, we need to output it with DEFAULT visibility
2577 when compiling with -fvisibility=default, but with HIDDEN visibility
2578 when the symbol has the visibility("hidden") attribute specified.
2579 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2580 right. */
2582 if (DECL_EXTERNAL (t)
2583 && !targetm.binds_local_p (t))
2584 visibility = GCCPV_DEFAULT;
2585 else
2586 switch (DECL_VISIBILITY (t))
2588 case VISIBILITY_DEFAULT:
2589 visibility = GCCPV_DEFAULT;
2590 break;
2591 case VISIBILITY_PROTECTED:
2592 visibility = GCCPV_PROTECTED;
2593 break;
2594 case VISIBILITY_HIDDEN:
2595 visibility = GCCPV_HIDDEN;
2596 break;
2597 case VISIBILITY_INTERNAL:
2598 visibility = GCCPV_INTERNAL;
2599 break;
2602 if (kind == GCCPK_COMMON
2603 && DECL_SIZE_UNIT (t)
2604 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2605 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2606 else
2607 size = 0;
2609 if (DECL_ONE_ONLY (t))
2610 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2611 else
2612 comdat = "";
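/* The symbol-table entry written below is, in order:
     name       - NUL-terminated assembler name
     comdat     - NUL-terminated comdat group (empty string if none)
     kind       - 1 byte (gcc_plugin_symbol_kind)
     visibility - 1 byte (gcc_plugin_symbol_visibility)
     size       - 8 bytes
     slot       - 4 bytes (index into the streamer cache)
   This is the format the LTO linker plugin expects.  */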
2614 lto_write_data (name, strlen (name) + 1);
2615 lto_write_data (comdat, strlen (comdat) + 1);
2616 c = (unsigned char) kind;
2617 lto_write_data (&c, 1);
2618 c = (unsigned char) visibility;
2619 lto_write_data (&c, 1);
2620 lto_write_data (&size, 8);
2621 lto_write_data (&slot_num, 4);
2624 /* Return true if NODE should appear in the plugin symbol table. */
2626 bool
2627 output_symbol_p (symtab_node *node)
2629 struct cgraph_node *cnode;
2630 if (!node->real_symbol_p ())
2631 return false;
2632 /* We keep external functions in the symtab for the sake of inlining
2633 and devirtualization. We do not want to see them in the symbol table as
2634 references unless they are really used. */
2635 cnode = dyn_cast <cgraph_node *> (node);
2636 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2637 && cnode->callers)
2638 return true;
2640 /* Ignore all references from initializers of external variables - they are
2641 not really part of the compilation unit until they are used by folding.
2642 Some symbols, like references to external construction vtables, cannot be
2643 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2644 if (!node->definition || DECL_EXTERNAL (node->decl))
2646 int i;
2647 struct ipa_ref *ref;
2648 for (i = 0; node->iterate_referring (i, ref); i++)
2650 if (ref->use == IPA_REF_ALIAS)
2651 continue;
2652 if (is_a <cgraph_node *> (ref->referring))
2653 return true;
2654 if (!DECL_EXTERNAL (ref->referring->decl))
2655 return true;
2657 return false;
2659 return true;
2663 /* Write an IL symbol table to OB. The symbols written are those in
2664 OB's symtab node encoder. */
2666 static void
2667 produce_symtab (struct output_block *ob)
2669 struct streamer_tree_cache_d *cache = ob->writer_cache;
2670 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2671 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2672 lto_symtab_encoder_iterator lsei;
2674 lto_begin_section (section_name, false);
2675 free (section_name);
2677 hash_set<const char *> seen;
2679 /* Write the symbol table.
2680 First write everything defined and then all declarations.
2681 This is necessary to handle cases where we have duplicated symbols. */
2682 for (lsei = lsei_start (encoder);
2683 !lsei_end_p (lsei); lsei_next (&lsei))
2685 symtab_node *node = lsei_node (lsei);
2687 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2688 continue;
2689 write_symbol (cache, node->decl, &seen, false);
2691 for (lsei = lsei_start (encoder);
2692 !lsei_end_p (lsei); lsei_next (&lsei))
2694 symtab_node *node = lsei_node (lsei);
2696 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2697 continue;
2698 write_symbol (cache, node->decl, &seen, false);
2701 lto_end_section ();
2705 /* Init the streamer_mode_table for output, where we collect info on what
2706 machine_mode values have been streamed. */
2707 void
2708 lto_output_init_mode_table (void)
2710 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2714 /* Write the mode table. */
2715 static void
2716 lto_write_mode_table (void)
2718 struct output_block *ob;
2719 ob = create_output_block (LTO_section_mode_table);
2720 bitpack_d bp = bitpack_create (ob->main_stream);
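/* Each streamed mode record, packed into the bitpack below, consists of:
   the mode value (8 bits), its class, size (8 bits), precision (16 bits),
   inner mode (8 bits), nunits (8 bits), class-specific data (ibit/fbit for
   fixed-point classes, the real format name for float classes) and finally
   the mode name; a VOIDmode value terminates the table.  */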
2722 /* Ensure that for GET_MODE_INNER (m) != m the inner mode is
2723 also marked. */
2724 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2725 if (streamer_mode_table[i])
2727 machine_mode m = (machine_mode) i;
2728 if (GET_MODE_INNER (m) != m)
2729 streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
2731 /* First stream modes that have GET_MODE_INNER (m) == m,
2732 so that we can refer to them afterwards. */
2733 for (int pass = 0; pass < 2; pass++)
2734 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2735 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2737 machine_mode m = (machine_mode) i;
2738 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2739 continue;
2740 bp_pack_value (&bp, m, 8);
2741 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2742 bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2743 bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2744 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2745 bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2746 switch (GET_MODE_CLASS (m))
2748 case MODE_FRACT:
2749 case MODE_UFRACT:
2750 case MODE_ACCUM:
2751 case MODE_UACCUM:
2752 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2753 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2754 break;
2755 case MODE_FLOAT:
2756 case MODE_DECIMAL_FLOAT:
2757 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2758 break;
2759 default:
2760 break;
2762 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2764 bp_pack_value (&bp, VOIDmode, 8);
2766 streamer_write_bitpack (&bp);
2768 char *section_name
2769 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2770 lto_begin_section (section_name, !flag_wpa);
2771 free (section_name);
2773 /* The entire header stream is computed here. */
2774 struct lto_simple_header_with_strings header;
2775 memset (&header, 0, sizeof (header));
2777 /* Write the header. */
2778 header.major_version = LTO_major_version;
2779 header.minor_version = LTO_minor_version;
2781 header.main_size = ob->main_stream->total_size;
2782 header.string_size = ob->string_stream->total_size;
2783 lto_write_data (&header, sizeof header);
2785 /* Put the mode table and the string table out to the asm file as a
2786 block of text. */
2787 lto_write_stream (ob->main_stream);
2788 lto_write_stream (ob->string_stream);
2790 lto_end_section ();
2791 destroy_output_block (ob);
2795 /* This pass is run after all of the functions are serialized and all
2796 of the IPA passes have written their serialized forms. This pass
2797 causes the vector of all of the global decls and types used from
2798 this file to be written into a section that can then be read back
2799 in to recover them on the other side. */
2801 void
2802 produce_asm_for_decls (void)
2804 struct lto_out_decl_state *out_state;
2805 struct lto_out_decl_state *fn_out_state;
2806 struct lto_decl_header header;
2807 char *section_name;
2808 struct output_block *ob;
2809 unsigned idx, num_fns;
2810 size_t decl_state_size;
2811 int32_t num_decl_states;
2813 ob = create_output_block (LTO_section_decls);
2815 memset (&header, 0, sizeof (struct lto_decl_header));
2817 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2818 lto_begin_section (section_name, !flag_wpa);
2819 free (section_name);
2821 /* Make string 0 be a NULL string. */
2822 streamer_write_char_stream (ob->string_stream, 0);
2824 gcc_assert (!alias_pairs);
2826 /* Get rid of the global decl state hash tables to save some memory. */
2827 out_state = lto_get_out_decl_state ();
2828 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2829 if (out_state->streams[i].tree_hash_table)
2831 delete out_state->streams[i].tree_hash_table;
2832 out_state->streams[i].tree_hash_table = NULL;
2835 /* Write the global symbols. */
2836 lto_output_decl_state_streams (ob, out_state);
2837 num_fns = lto_function_decl_states.length ();
2838 for (idx = 0; idx < num_fns; idx++)
2840 fn_out_state =
2841 lto_function_decl_states[idx];
2842 lto_output_decl_state_streams (ob, fn_out_state);
2845 header.major_version = LTO_major_version;
2846 header.minor_version = LTO_minor_version;
2848 /* Currently not used. This field would allow us to preallocate
2849 the globals vector, so that it need not be resized as it is extended. */
2850 header.num_nodes = -1;
2852 /* Compute the total size of all decl out states. */
2853 decl_state_size = sizeof (int32_t);
2854 decl_state_size += lto_out_decl_state_written_size (out_state);
2855 for (idx = 0; idx < num_fns; idx++)
2857 fn_out_state =
2858 lto_function_decl_states[idx];
2859 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2861 header.decl_state_size = decl_state_size;
2863 header.main_size = ob->main_stream->total_size;
2864 header.string_size = ob->string_stream->total_size;
2866 lto_write_data (&header, sizeof header);
2868 /* Write the main out-decl state, followed by out-decl states of
2869 functions. */
2870 num_decl_states = num_fns + 1;
2871 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2872 lto_output_decl_state_refs (ob, out_state);
2873 for (idx = 0; idx < num_fns; idx++)
2875 fn_out_state = lto_function_decl_states[idx];
2876 lto_output_decl_state_refs (ob, fn_out_state);
2879 lto_write_stream (ob->main_stream);
2880 lto_write_stream (ob->string_stream);
2882 lto_end_section ();
2884 /* Write the symbol table. It is used by the linker to determine
2885 dependencies, so we can skip it for WPA. */
2886 if (!flag_wpa)
2887 produce_symtab (ob);
2889 /* Write command line opts. */
2890 lto_write_options ();
2892 /* Deallocate memory and clean up. */
2893 for (idx = 0; idx < num_fns; idx++)
2895 fn_out_state =
2896 lto_function_decl_states[idx];
2897 lto_delete_out_decl_state (fn_out_state);
2899 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2900 lto_function_decl_states.release ();
2901 destroy_output_block (ob);
2902 if (lto_stream_offload_p)
2903 lto_write_mode_table ();