1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
45 static void lto_write_tree (struct output_block*, tree, bool);
47 /* Clear the line info stored in OB. */
49 static void
50 clear_line_info (struct output_block *ob)
52 ob->current_file = NULL;
53 ob->current_line = 0;
54 ob->current_col = 0;
55 ob->current_sysp = false;
59 /* Create the output block and return it. SECTION_TYPE is
60 LTO_section_function_body or LTO_section_static_initializer. */
62 struct output_block *
63 create_output_block (enum lto_section_type section_type)
65 struct output_block *ob = XCNEW (struct output_block);
67 ob->section_type = section_type;
68 ob->decl_state = lto_get_out_decl_state ();
69 ob->main_stream = XCNEW (struct lto_output_stream);
70 ob->string_stream = XCNEW (struct lto_output_stream);
71 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
73 if (section_type == LTO_section_function_body)
74 ob->cfg_stream = XCNEW (struct lto_output_stream);
76 clear_line_info (ob);
78 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
79 gcc_obstack_init (&ob->obstack);
81 return ob;
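/* A rough usage sketch (see output_function further below for the real
   sequence): an output block is created for one section, records are
   streamed into it, and the block is emitted and destroyed:

     struct output_block *ob = create_output_block (LTO_section_function_body);
     ob->symbol = node;
     ... stream records via the streamer_* / stream_write_* routines ...
     produce_asm (ob, node->decl);
     destroy_output_block (ob);

   where NODE stands for the cgraph node whose body is being written. */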
85 /* Destroy the output block OB. */
87 void
88 destroy_output_block (struct output_block *ob)
90 enum lto_section_type section_type = ob->section_type;
92 delete ob->string_hash_table;
93 ob->string_hash_table = NULL;
95 free (ob->main_stream);
96 free (ob->string_stream);
97 if (section_type == LTO_section_function_body)
98 free (ob->cfg_stream);
100 streamer_tree_cache_delete (ob->writer_cache);
101 obstack_free (&ob->obstack, NULL);
103 free (ob);
107 /* Look up NODE in the type table and write the index for it to OB. */
109 static void
110 output_type_ref (struct output_block *ob, tree node)
112 streamer_write_record_start (ob, LTO_type_ref);
113 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
117 /* Return true if tree node T is written to various tables. For these
118 nodes, we sometimes want to write their physical representation
119 (via lto_output_tree), and sometimes we need to emit an index
120 reference into a table (via lto_output_tree_ref). */
122 static bool
123 tree_is_indexable (tree t)
125 /* Parameters and return values of functions of variably modified types
126 must go to global stream, because they may be used in the type
127 definition. */
128 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
129 && DECL_CONTEXT (t))
130 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
131 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
132 else if (TREE_CODE (t) == IMPORTED_DECL)
133 return false;
134 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
135 || TREE_CODE (t) == TYPE_DECL
136 || TREE_CODE (t) == CONST_DECL
137 || TREE_CODE (t) == NAMELIST_DECL)
138 && decl_function_context (t))
139 return false;
140 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
141 return false;
142 /* Variably modified types need to be streamed alongside function
143 bodies because they can refer to local entities. Together with
144 them we have to localize their members as well.
145 ??? In theory that includes non-FIELD_DECLs as well. */
146 else if (TYPE_P (t)
147 && variably_modified_type_p (t, NULL_TREE))
148 return false;
149 else if (TREE_CODE (t) == FIELD_DECL
150 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
151 return false;
152 else
153 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
157 /* Output info about new location into bitpack BP.
158 After outputting bitpack, lto_output_location_data has
159 to be done to output actual data. */
161 void
162 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
163 location_t loc)
165 expanded_location xloc;
167 loc = LOCATION_LOCUS (loc);
168 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
169 loc < RESERVED_LOCATION_COUNT
170 ? loc : RESERVED_LOCATION_COUNT);
171 if (loc < RESERVED_LOCATION_COUNT)
172 return;
174 xloc = expand_location (loc);
176 bp_pack_value (bp, ob->current_file != xloc.file, 1);
177 bp_pack_value (bp, ob->current_line != xloc.line, 1);
178 bp_pack_value (bp, ob->current_col != xloc.column, 1);
180 if (ob->current_file != xloc.file)
182 bp_pack_string (ob, bp, xloc.file, true);
183 bp_pack_value (bp, xloc.sysp, 1);
185 ob->current_file = xloc.file;
186 ob->current_sysp = xloc.sysp;
188 if (ob->current_line != xloc.line)
189 bp_pack_var_len_unsigned (bp, xloc.line);
190 ob->current_line = xloc.line;
192 if (ob->current_col != xloc.column)
193 bp_pack_var_len_unsigned (bp, xloc.column);
194 ob->current_col = xloc.column;
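/* Roughly, the bits packed above for one location are:

     [locus in 0 .. RESERVED_LOCATION_COUNT]   (reserved locations stop here)
     [file changed? 1 bit] [line changed? 1 bit] [column changed? 1 bit]
     [file string + sysp bit, only if the file changed]
     [line as var-len unsigned, only if the line changed]
     [column as var-len unsigned, only if the column changed]

   so runs of locations in the same file and line compress to a few bits. */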
198 /* If EXPR is an indexable tree node, output a reference to it to
199 output block OB. Otherwise, output the physical representation of
200 EXPR to OB. */
202 static void
203 lto_output_tree_ref (struct output_block *ob, tree expr)
205 enum tree_code code;
207 if (TYPE_P (expr))
209 output_type_ref (ob, expr);
210 return;
213 code = TREE_CODE (expr);
214 switch (code)
216 case SSA_NAME:
217 streamer_write_record_start (ob, LTO_ssa_name_ref);
218 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
219 break;
221 case FIELD_DECL:
222 streamer_write_record_start (ob, LTO_field_decl_ref);
223 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
224 break;
226 case FUNCTION_DECL:
227 streamer_write_record_start (ob, LTO_function_decl_ref);
228 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
229 break;
231 case VAR_DECL:
232 case DEBUG_EXPR_DECL:
233 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
234 case PARM_DECL:
235 streamer_write_record_start (ob, LTO_global_decl_ref);
236 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
237 break;
239 case CONST_DECL:
240 streamer_write_record_start (ob, LTO_const_decl_ref);
241 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
244 case IMPORTED_DECL:
245 gcc_assert (decl_function_context (expr) == NULL);
246 streamer_write_record_start (ob, LTO_imported_decl_ref);
247 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
248 break;
250 case TYPE_DECL:
251 streamer_write_record_start (ob, LTO_type_decl_ref);
252 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
253 break;
255 case NAMELIST_DECL:
256 streamer_write_record_start (ob, LTO_namelist_decl_ref);
257 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
260 case NAMESPACE_DECL:
261 streamer_write_record_start (ob, LTO_namespace_decl_ref);
262 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
263 break;
265 case LABEL_DECL:
266 streamer_write_record_start (ob, LTO_label_decl_ref);
267 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
270 case RESULT_DECL:
271 streamer_write_record_start (ob, LTO_result_decl_ref);
272 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
275 case TRANSLATION_UNIT_DECL:
276 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
277 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278 break;
280 default:
281 /* No other node is indexable, so it should have been handled by
282 lto_output_tree. */
283 gcc_unreachable ();
288 /* Return true if EXPR is a tree node that can be written to disk. */
290 static inline bool
291 lto_is_streamable (tree expr)
293 enum tree_code code = TREE_CODE (expr);
295 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
296 name version in lto_output_tree_ref (see output_ssa_names). */
297 return !is_lang_specific (expr)
298 && code != SSA_NAME
299 && code != CALL_EXPR
300 && code != LANG_TYPE
301 && code != MODIFY_EXPR
302 && code != INIT_EXPR
303 && code != TARGET_EXPR
304 && code != BIND_EXPR
305 && code != WITH_CLEANUP_EXPR
306 && code != STATEMENT_LIST
307 && (code == CASE_LABEL_EXPR
308 || code == DECL_EXPR
309 || TREE_CODE_CLASS (code) != tcc_statement);
313 /* For EXPR, look up and return what we want to stream to OB as DECL_INITIAL. */
315 static tree
316 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
318 gcc_checking_assert (DECL_P (expr)
319 && TREE_CODE (expr) != FUNCTION_DECL
320 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
322 /* Handle DECL_INITIAL for symbols. */
323 tree initial = DECL_INITIAL (expr);
324 if (TREE_CODE (expr) == VAR_DECL
325 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
326 && !DECL_IN_CONSTANT_POOL (expr)
327 && initial)
329 varpool_node *vnode;
330 /* Extra section needs about 30 bytes; do not produce it for simple
331 scalar values. */
332 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
333 || !(vnode = varpool_node::get (expr))
334 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
335 initial = error_mark_node;
338 return initial;
342 /* Write a physical representation of tree node EXPR to output block
343 OB. If REF_P is true, the leaves of EXPR are emitted as references
344 via lto_output_tree_ref. */
347 static void
348 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
350 /* Pack all the non-pointer fields in EXPR into a bitpack and write
351 the resulting bitpack. */
352 streamer_write_tree_bitfields (ob, expr);
354 /* Write all the pointer fields in EXPR. */
355 streamer_write_tree_body (ob, expr, ref_p);
357 /* Write any LTO-specific data to OB. */
358 if (DECL_P (expr)
359 && TREE_CODE (expr) != FUNCTION_DECL
360 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
362 /* Handle DECL_INITIAL for symbols. */
363 tree initial = get_symbol_initial_value
364 (ob->decl_state->symtab_node_encoder, expr);
365 stream_write_tree (ob, initial, ref_p);
369 /* Write a physical representation of tree node EXPR to output block
370 OB. If REF_P is true, the leaves of EXPR are emitted as references
371 via lto_output_tree_ref. */
374 static void
375 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
377 if (!lto_is_streamable (expr))
378 internal_error ("tree code %qs is not supported in LTO streams",
379 get_tree_code_name (TREE_CODE (expr)));
381 /* Write the header, containing everything needed to materialize
382 EXPR on the reading side. */
383 streamer_write_tree_header (ob, expr);
385 lto_write_tree_1 (ob, expr, ref_p);
387 /* Mark the end of EXPR. */
388 streamer_write_zero (ob);
391 /* Emit the physical representation of tree node EXPR to output block OB.
392 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
393 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
395 static void
396 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
397 bool ref_p, bool this_ref_p)
399 unsigned ix;
401 gcc_checking_assert (expr != NULL_TREE
402 && !(this_ref_p && tree_is_indexable (expr)));
404 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
405 expr, hash, &ix);
406 gcc_assert (!exists_p);
407 if (streamer_handle_as_builtin_p (expr))
409 /* MD and NORMAL builtins do not need to be written out
410 completely as they are always instantiated by the
411 compiler on startup. The only builtins that need to
412 be written out are BUILT_IN_FRONTEND. For all other
413 builtins, we simply write the class and code. */
414 streamer_write_builtin (ob, expr);
416 else if (TREE_CODE (expr) == INTEGER_CST
417 && !TREE_OVERFLOW (expr))
419 /* Shared INTEGER_CST nodes are special because they need their
420 original type to be materialized by the reader (to implement
421 TYPE_CACHED_VALUES). */
422 streamer_write_integer_cst (ob, expr, ref_p);
424 else
426 /* This is the first time we see EXPR, write its fields
427 to OB. */
428 lto_write_tree (ob, expr, ref_p);
432 class DFS
434 public:
435 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
436 bool single_p);
437 ~DFS ();
439 struct scc_entry
441 tree t;
442 hashval_t hash;
444 vec<scc_entry> sccstack;
446 private:
447 struct sccs
449 unsigned int dfsnum;
450 unsigned int low;
452 struct worklist
454 tree expr;
455 sccs *from_state;
456 sccs *cstate;
457 bool ref_p;
458 bool this_ref_p;
461 static int scc_entry_compare (const void *, const void *);
463 void DFS_write_tree_body (struct output_block *ob,
464 tree expr, sccs *expr_state, bool ref_p);
466 void DFS_write_tree (struct output_block *ob, sccs *from_state,
467 tree expr, bool ref_p, bool this_ref_p);
469 hashval_t
470 hash_scc (struct output_block *ob, unsigned first, unsigned size,
471 bool ref_p, bool this_ref_p);
473 hash_map<tree, sccs *> sccstate;
474 vec<worklist> worklist_vec;
475 struct obstack sccstate_obstack;
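/* The walk below is, roughly, an iterative variant of Tarjan's SCC
   algorithm driven by the explicit worklist above:
     - dfsnum is the order in which a tree is first visited,
     - low is the smallest dfsnum reachable from it,
     - when low == dfsnum, the entries on sccstack down to this tree form
       one SCC and are streamed as a single LTO_tree_scc record. */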
478 /* Emit the physical representation of tree node EXPR to output block OB,
479 using depth-first search on the subgraph. If THIS_REF_P is true, the
480 leaves of EXPR are emitted as references via lto_output_tree_ref.
481 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
482 this is for a rewalk of a single leaf SCC. */
484 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
485 bool single_p)
487 unsigned int next_dfs_num = 1;
488 sccstack.create (0);
489 gcc_obstack_init (&sccstate_obstack);
490 worklist_vec = vNULL;
491 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
492 while (!worklist_vec.is_empty ())
494 worklist &w = worklist_vec.last ();
495 expr = w.expr;
496 sccs *from_state = w.from_state;
497 sccs *cstate = w.cstate;
498 ref_p = w.ref_p;
499 this_ref_p = w.this_ref_p;
500 if (cstate == NULL)
502 sccs **slot = &sccstate.get_or_insert (expr);
503 cstate = *slot;
504 if (cstate)
506 gcc_checking_assert (from_state);
507 if (cstate->dfsnum < from_state->dfsnum)
508 from_state->low = MIN (cstate->dfsnum, from_state->low);
509 worklist_vec.pop ();
510 continue;
513 scc_entry e = { expr, 0 };
514 /* Not yet visited. DFS recurse and push it onto the stack. */
515 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
516 sccstack.safe_push (e);
517 cstate->dfsnum = next_dfs_num++;
518 cstate->low = cstate->dfsnum;
519 w.cstate = cstate;
521 if (streamer_handle_as_builtin_p (expr))
523 else if (TREE_CODE (expr) == INTEGER_CST
524 && !TREE_OVERFLOW (expr))
525 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
526 else
528 DFS_write_tree_body (ob, expr, cstate, ref_p);
530 /* Walk any LTO-specific edges. */
531 if (DECL_P (expr)
532 && TREE_CODE (expr) != FUNCTION_DECL
533 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
535 /* Handle DECL_INITIAL for symbols. */
536 tree initial
537 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
538 expr);
539 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
542 continue;
545 /* See if we found an SCC. */
546 if (cstate->low == cstate->dfsnum)
548 unsigned first, size;
549 tree x;
551 /* If we are re-walking a single leaf SCC just pop it,
552 let earlier worklist item access the sccstack. */
553 if (single_p)
555 worklist_vec.pop ();
556 continue;
559 /* Pop the SCC and compute its size. */
560 first = sccstack.length ();
563 x = sccstack[--first].t;
565 while (x != expr);
566 size = sccstack.length () - first;
568 /* No need to compute hashes for LTRANS units, we don't perform
569 any merging there. */
570 hashval_t scc_hash = 0;
571 unsigned scc_entry_len = 0;
572 if (!flag_wpa)
574 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
576 /* Put the entries with the least number of collisions first. */
577 unsigned entry_start = 0;
578 scc_entry_len = size + 1;
579 for (unsigned i = 0; i < size;)
581 unsigned from = i;
582 for (i = i + 1; i < size
583 && (sccstack[first + i].hash
584 == sccstack[first + from].hash); ++i)
586 if (i - from < scc_entry_len)
588 scc_entry_len = i - from;
589 entry_start = from;
592 for (unsigned i = 0; i < scc_entry_len; ++i)
593 std::swap (sccstack[first + i],
594 sccstack[first + entry_start + i]);
596 /* We already sorted SCC deterministically in hash_scc. */
598 /* Check that we have only one SCC.
599 Naturally we may have conflicts if the hash function is not
600 strong enough. Let's see how far this gets. */
601 gcc_checking_assert (scc_entry_len == 1);
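/* As a rough sketch, the record emitted below is

     LTO_tree_scc  size  scc_hash
       size == 1:  one tree written via lto_output_tree_1
       size  > 1:  scc_entry_len
                   size x tree header
                   size x (tree bitfields, tree body refs, trailing zero)  */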
604 /* Write LTO_tree_scc. */
605 streamer_write_record_start (ob, LTO_tree_scc);
606 streamer_write_uhwi (ob, size);
607 streamer_write_uhwi (ob, scc_hash);
609 /* Write size-1 SCCs without wrapping them inside SCC bundles.
610 All INTEGER_CSTs need to be handled this way as we need
611 their type to materialize them. Also builtins are handled
612 this way.
613 ??? We still wrap these in LTO_tree_scc so at the
614 input side we can properly identify the tree we want
615 to ultimately return. */
616 if (size == 1)
617 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
618 else
620 /* Write the size of the SCC entry candidates. */
621 streamer_write_uhwi (ob, scc_entry_len);
623 /* Write all headers and populate the streamer cache. */
624 for (unsigned i = 0; i < size; ++i)
626 hashval_t hash = sccstack[first+i].hash;
627 tree t = sccstack[first+i].t;
628 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
629 t, hash, NULL);
630 gcc_assert (!exists_p);
632 if (!lto_is_streamable (t))
633 internal_error ("tree code %qs is not supported "
634 "in LTO streams",
635 get_tree_code_name (TREE_CODE (t)));
637 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
639 /* Write the header, containing everything needed to
640 materialize EXPR on the reading side. */
641 streamer_write_tree_header (ob, t);
644 /* Write the bitpacks and tree references. */
645 for (unsigned i = 0; i < size; ++i)
647 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
649 /* Mark the end of the tree. */
650 streamer_write_zero (ob);
654 /* Finally truncate the vector. */
655 sccstack.truncate (first);
657 if (from_state)
658 from_state->low = MIN (from_state->low, cstate->low);
659 worklist_vec.pop ();
660 continue;
663 gcc_checking_assert (from_state);
664 from_state->low = MIN (from_state->low, cstate->low);
665 if (cstate->dfsnum < from_state->dfsnum)
666 from_state->low = MIN (cstate->dfsnum, from_state->low);
667 worklist_vec.pop ();
669 worklist_vec.release ();
672 DFS::~DFS ()
674 sccstack.release ();
675 obstack_free (&sccstate_obstack, NULL);
678 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
679 DFS recurse for all tree edges originating from it. */
681 void
682 DFS::DFS_write_tree_body (struct output_block *ob,
683 tree expr, sccs *expr_state, bool ref_p)
685 #define DFS_follow_tree_edge(DEST) \
686 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
688 enum tree_code code;
690 code = TREE_CODE (expr);
692 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
694 if (TREE_CODE (expr) != IDENTIFIER_NODE)
695 DFS_follow_tree_edge (TREE_TYPE (expr));
698 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
700 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
701 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
704 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
706 DFS_follow_tree_edge (TREE_REALPART (expr));
707 DFS_follow_tree_edge (TREE_IMAGPART (expr));
710 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
712 /* Drop names that were created for anonymous entities. */
713 if (DECL_NAME (expr)
714 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
715 && anon_aggrname_p (DECL_NAME (expr)))
717 else
718 DFS_follow_tree_edge (DECL_NAME (expr));
719 DFS_follow_tree_edge (DECL_CONTEXT (expr));
722 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
724 DFS_follow_tree_edge (DECL_SIZE (expr));
725 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
727 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
728 special handling in LTO, it must be handled by streamer hooks. */
730 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
732 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
733 for early inlining so drop it on the floor instead of ICEing in
734 dwarf2out.c.
735 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
736 declarations which should be eliminated by decl merging. Be sure none
737 leaks to this point. */
738 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
740 if ((TREE_CODE (expr) == VAR_DECL
741 || TREE_CODE (expr) == PARM_DECL)
742 && DECL_HAS_VALUE_EXPR_P (expr))
743 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
744 if (TREE_CODE (expr) == VAR_DECL)
745 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
748 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
750 if (TREE_CODE (expr) == TYPE_DECL)
751 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
754 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
756 /* Make sure we don't inadvertently set the assembler name. */
757 if (DECL_ASSEMBLER_NAME_SET_P (expr))
758 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
761 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
763 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
764 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
765 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
766 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
767 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
770 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
772 DFS_follow_tree_edge (DECL_VINDEX (expr));
773 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
774 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
775 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
778 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
780 DFS_follow_tree_edge (TYPE_SIZE (expr));
781 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
782 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
783 DFS_follow_tree_edge (TYPE_NAME (expr));
784 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
785 reconstructed during fixup. */
786 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
787 during fixup. */
788 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
789 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
790 /* TYPE_CANONICAL is re-computed during type merging, so no need
791 to follow it here. */
792 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
795 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
797 if (TREE_CODE (expr) == ENUMERAL_TYPE)
798 DFS_follow_tree_edge (TYPE_VALUES (expr));
799 else if (TREE_CODE (expr) == ARRAY_TYPE)
800 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
801 else if (RECORD_OR_UNION_TYPE_P (expr))
802 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
803 DFS_follow_tree_edge (t);
804 else if (TREE_CODE (expr) == FUNCTION_TYPE
805 || TREE_CODE (expr) == METHOD_TYPE)
806 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
808 if (!POINTER_TYPE_P (expr))
809 DFS_follow_tree_edge (TYPE_MINVAL (expr));
810 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
811 if (RECORD_OR_UNION_TYPE_P (expr))
812 DFS_follow_tree_edge (TYPE_BINFO (expr));
815 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
817 DFS_follow_tree_edge (TREE_PURPOSE (expr));
818 DFS_follow_tree_edge (TREE_VALUE (expr));
819 DFS_follow_tree_edge (TREE_CHAIN (expr));
822 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
824 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
825 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
828 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
830 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
831 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
832 DFS_follow_tree_edge (TREE_BLOCK (expr));
835 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
837 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
838 if (VAR_OR_FUNCTION_DECL_P (t)
839 && DECL_EXTERNAL (t))
840 /* We have to stream externals in the block chain as
841 non-references. See also
842 tree-streamer-out.c:streamer_write_chain. */
843 DFS_write_tree (ob, expr_state, t, ref_p, false);
844 else
845 DFS_follow_tree_edge (t);
847 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
849 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
850 handle - those that represent inlined function scopes.
851 For the rest, drop them on the floor instead of ICEing
852 in dwarf2out.c. */
853 if (inlined_function_outer_scope_p (expr))
855 tree ultimate_origin = block_ultimate_origin (expr);
856 DFS_follow_tree_edge (ultimate_origin);
858 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
859 information for early inlined BLOCKs so drop it on the floor instead
860 of ICEing in dwarf2out.c. */
862 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
863 streaming time. */
865 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
866 list is re-constructed from BLOCK_SUPERCONTEXT. */
869 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
871 unsigned i;
872 tree t;
874 /* Note that the number of BINFO slots has already been emitted in
875 EXPR's header (see streamer_write_tree_header) because this length
876 is needed to build the empty BINFO node on the reader side. */
877 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
878 DFS_follow_tree_edge (t);
879 DFS_follow_tree_edge (BINFO_OFFSET (expr));
880 DFS_follow_tree_edge (BINFO_VTABLE (expr));
881 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
883 /* The number of BINFO_BASE_ACCESSES has already been emitted in
884 EXPR's bitfield section. */
885 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
886 DFS_follow_tree_edge (t);
888 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
889 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
892 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
894 unsigned i;
895 tree index, value;
897 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
899 DFS_follow_tree_edge (index);
900 DFS_follow_tree_edge (value);
904 if (code == OMP_CLAUSE)
906 int i;
907 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
908 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
909 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
912 #undef DFS_follow_tree_edge
915 /* Return a hash value for the tree T.
916 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
917 may hold hash values of trees inside the current SCC. */
919 static hashval_t
920 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
922 inchash::hash hstate;
924 #define visit(SIBLING) \
925 do { \
926 unsigned ix; \
927 if (!SIBLING) \
928 hstate.add_int (0); \
929 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
930 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
931 else if (map) \
932 hstate.add_int (*map->get (SIBLING)); \
933 else \
934 hstate.add_int (1); \
935 } while (0)
937 /* Hash TS_BASE. */
938 enum tree_code code = TREE_CODE (t);
939 hstate.add_int (code);
940 if (!TYPE_P (t))
942 hstate.add_flag (TREE_SIDE_EFFECTS (t));
943 hstate.add_flag (TREE_CONSTANT (t));
944 hstate.add_flag (TREE_READONLY (t));
945 hstate.add_flag (TREE_PUBLIC (t));
947 hstate.add_flag (TREE_ADDRESSABLE (t));
948 hstate.add_flag (TREE_THIS_VOLATILE (t));
949 if (DECL_P (t))
950 hstate.add_flag (DECL_UNSIGNED (t));
951 else if (TYPE_P (t))
952 hstate.add_flag (TYPE_UNSIGNED (t));
953 if (TYPE_P (t))
954 hstate.add_flag (TYPE_ARTIFICIAL (t));
955 else
956 hstate.add_flag (TREE_NO_WARNING (t));
957 hstate.add_flag (TREE_NOTHROW (t));
958 hstate.add_flag (TREE_STATIC (t));
959 hstate.add_flag (TREE_PROTECTED (t));
960 hstate.add_flag (TREE_DEPRECATED (t));
961 if (code != TREE_BINFO)
962 hstate.add_flag (TREE_PRIVATE (t));
963 if (TYPE_P (t))
965 hstate.add_flag (AGGREGATE_TYPE_P (t)
966 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
967 hstate.add_flag (TYPE_ADDR_SPACE (t));
969 else if (code == SSA_NAME)
970 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
971 hstate.commit_flag ();
973 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
975 int i;
976 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
977 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
978 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
979 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
982 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
984 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
985 hstate.add_flag (r.cl);
986 hstate.add_flag (r.sign);
987 hstate.add_flag (r.signalling);
988 hstate.add_flag (r.canonical);
989 hstate.commit_flag ();
990 hstate.add_int (r.uexp);
991 hstate.add (r.sig, sizeof (r.sig));
994 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
996 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
997 hstate.add_int (f.mode);
998 hstate.add_int (f.data.low);
999 hstate.add_int (f.data.high);
1002 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1004 hstate.add_wide_int (DECL_MODE (t));
1005 hstate.add_flag (DECL_NONLOCAL (t));
1006 hstate.add_flag (DECL_VIRTUAL_P (t));
1007 hstate.add_flag (DECL_IGNORED_P (t));
1008 hstate.add_flag (DECL_ABSTRACT_P (t));
1009 hstate.add_flag (DECL_ARTIFICIAL (t));
1010 hstate.add_flag (DECL_USER_ALIGN (t));
1011 hstate.add_flag (DECL_PRESERVE_P (t));
1012 hstate.add_flag (DECL_EXTERNAL (t));
1013 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1014 hstate.commit_flag ();
1015 hstate.add_int (DECL_ALIGN (t));
1016 if (code == LABEL_DECL)
1018 hstate.add_int (EH_LANDING_PAD_NR (t));
1019 hstate.add_int (LABEL_DECL_UID (t));
1021 else if (code == FIELD_DECL)
1023 hstate.add_flag (DECL_PACKED (t));
1024 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1025 hstate.add_int (DECL_OFFSET_ALIGN (t));
1027 else if (code == VAR_DECL)
1029 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1030 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1032 if (code == RESULT_DECL
1033 || code == PARM_DECL
1034 || code == VAR_DECL)
1036 hstate.add_flag (DECL_BY_REFERENCE (t));
1037 if (code == VAR_DECL
1038 || code == PARM_DECL)
1039 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1041 hstate.commit_flag ();
1044 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1045 hstate.add_int (DECL_REGISTER (t));
1047 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1049 hstate.add_flag (DECL_COMMON (t));
1050 hstate.add_flag (DECL_DLLIMPORT_P (t));
1051 hstate.add_flag (DECL_WEAK (t));
1052 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1053 hstate.add_flag (DECL_COMDAT (t));
1054 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1055 hstate.add_int (DECL_VISIBILITY (t));
1056 if (code == VAR_DECL)
1058 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1059 hstate.add_flag (DECL_HARD_REGISTER (t));
1060 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1062 if (TREE_CODE (t) == FUNCTION_DECL)
1064 hstate.add_flag (DECL_FINAL_P (t));
1065 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1066 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1068 hstate.commit_flag ();
1071 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1073 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1074 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1075 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1076 hstate.add_flag (DECL_UNINLINABLE (t));
1077 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1078 hstate.add_flag (DECL_IS_NOVOPS (t));
1079 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1080 hstate.add_flag (DECL_IS_MALLOC (t));
1081 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1082 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1083 hstate.add_flag (DECL_STATIC_CHAIN (t));
1084 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1085 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1086 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1087 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1088 hstate.add_flag (DECL_PURE_P (t));
1089 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1090 hstate.commit_flag ();
1091 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1092 hstate.add_int (DECL_FUNCTION_CODE (t));
1095 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1097 hstate.add_wide_int (TYPE_MODE (t));
1098 hstate.add_flag (TYPE_STRING_FLAG (t));
1099 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1100 no streaming. */
1101 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1102 hstate.add_flag (TYPE_PACKED (t));
1103 hstate.add_flag (TYPE_RESTRICT (t));
1104 hstate.add_flag (TYPE_USER_ALIGN (t));
1105 hstate.add_flag (TYPE_READONLY (t));
1106 if (RECORD_OR_UNION_TYPE_P (t))
1108 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1109 hstate.add_flag (TYPE_FINAL_P (t));
1111 else if (code == ARRAY_TYPE)
1112 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1113 hstate.commit_flag ();
1114 hstate.add_int (TYPE_PRECISION (t));
1115 hstate.add_int (TYPE_ALIGN (t));
1118 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1119 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1120 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1122 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1123 /* We don't stream these when passing things to a different target. */
1124 && !lto_stream_offload_p)
1125 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1127 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1128 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1130 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1131 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1133 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1134 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1136 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1138 if (code != IDENTIFIER_NODE)
1139 visit (TREE_TYPE (t));
1142 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1143 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1144 visit (VECTOR_CST_ELT (t, i));
1146 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1148 visit (TREE_REALPART (t));
1149 visit (TREE_IMAGPART (t));
1152 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1154 /* Drop names that were created for anonymous entities. */
1155 if (DECL_NAME (t)
1156 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1157 && anon_aggrname_p (DECL_NAME (t)))
1159 else
1160 visit (DECL_NAME (t));
1161 if (DECL_FILE_SCOPE_P (t))
1163 else
1164 visit (DECL_CONTEXT (t));
1167 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1169 visit (DECL_SIZE (t));
1170 visit (DECL_SIZE_UNIT (t));
1171 visit (DECL_ATTRIBUTES (t));
1172 if ((code == VAR_DECL
1173 || code == PARM_DECL)
1174 && DECL_HAS_VALUE_EXPR_P (t))
1175 visit (DECL_VALUE_EXPR (t));
1176 if (code == VAR_DECL
1177 && DECL_HAS_DEBUG_EXPR_P (t))
1178 visit (DECL_DEBUG_EXPR (t));
1179 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1180 be able to call get_symbol_initial_value. */
1183 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1185 if (code == TYPE_DECL)
1186 visit (DECL_ORIGINAL_TYPE (t));
1189 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1191 if (DECL_ASSEMBLER_NAME_SET_P (t))
1192 visit (DECL_ASSEMBLER_NAME (t));
1195 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1197 visit (DECL_FIELD_OFFSET (t));
1198 visit (DECL_BIT_FIELD_TYPE (t));
1199 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1200 visit (DECL_FIELD_BIT_OFFSET (t));
1201 visit (DECL_FCONTEXT (t));
1204 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1206 visit (DECL_VINDEX (t));
1207 visit (DECL_FUNCTION_PERSONALITY (t));
1208 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1209 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1212 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1214 visit (TYPE_SIZE (t));
1215 visit (TYPE_SIZE_UNIT (t));
1216 visit (TYPE_ATTRIBUTES (t));
1217 visit (TYPE_NAME (t));
1218 visit (TYPE_MAIN_VARIANT (t));
1219 if (TYPE_FILE_SCOPE_P (t))
1221 else
1222 visit (TYPE_CONTEXT (t));
1223 visit (TYPE_STUB_DECL (t));
1226 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1228 if (code == ENUMERAL_TYPE)
1229 visit (TYPE_VALUES (t));
1230 else if (code == ARRAY_TYPE)
1231 visit (TYPE_DOMAIN (t));
1232 else if (RECORD_OR_UNION_TYPE_P (t))
1233 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1234 visit (f);
1235 else if (code == FUNCTION_TYPE
1236 || code == METHOD_TYPE)
1237 visit (TYPE_ARG_TYPES (t));
1238 if (!POINTER_TYPE_P (t))
1239 visit (TYPE_MINVAL (t));
1240 visit (TYPE_MAXVAL (t));
1241 if (RECORD_OR_UNION_TYPE_P (t))
1242 visit (TYPE_BINFO (t));
1245 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1247 visit (TREE_PURPOSE (t));
1248 visit (TREE_VALUE (t));
1249 visit (TREE_CHAIN (t));
1252 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1253 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1254 visit (TREE_VEC_ELT (t, i));
1256 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1258 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1259 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1260 visit (TREE_OPERAND (t, i));
1263 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1265 unsigned i;
1266 tree b;
1267 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1268 visit (b);
1269 visit (BINFO_OFFSET (t));
1270 visit (BINFO_VTABLE (t));
1271 visit (BINFO_VPTR_FIELD (t));
1272 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1273 visit (b);
1274 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1275 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1278 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1280 unsigned i;
1281 tree index, value;
1282 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1283 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1285 visit (index);
1286 visit (value);
1290 if (code == OMP_CLAUSE)
1292 int i;
1293 HOST_WIDE_INT val;
1295 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1296 switch (OMP_CLAUSE_CODE (t))
1298 case OMP_CLAUSE_DEFAULT:
1299 val = OMP_CLAUSE_DEFAULT_KIND (t);
1300 break;
1301 case OMP_CLAUSE_SCHEDULE:
1302 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1303 break;
1304 case OMP_CLAUSE_DEPEND:
1305 val = OMP_CLAUSE_DEPEND_KIND (t);
1306 break;
1307 case OMP_CLAUSE_MAP:
1308 val = OMP_CLAUSE_MAP_KIND (t);
1309 break;
1310 case OMP_CLAUSE_PROC_BIND:
1311 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1312 break;
1313 case OMP_CLAUSE_REDUCTION:
1314 val = OMP_CLAUSE_REDUCTION_CODE (t);
1315 break;
1316 default:
1317 val = 0;
1318 break;
1320 hstate.add_wide_int (val);
1321 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1322 visit (OMP_CLAUSE_OPERAND (t, i));
1323 visit (OMP_CLAUSE_CHAIN (t));
1326 return hstate.end ();
1328 #undef visit
1331 /* Compare two SCC entries by their hash value for qsorting them. */
1334 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1336 const scc_entry *p1 = (const scc_entry *) p1_;
1337 const scc_entry *p2 = (const scc_entry *) p2_;
1338 if (p1->hash < p2->hash)
1339 return -1;
1340 else if (p1->hash > p2->hash)
1341 return 1;
1342 return 0;
1345 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1346 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1348 hashval_t
1349 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1350 bool ref_p, bool this_ref_p)
1352 unsigned int last_classes = 0, iterations = 0;
1354 /* Compute hash values for the SCC members. */
1355 for (unsigned i = 0; i < size; ++i)
1356 sccstack[first+i].hash
1357 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1359 if (size == 1)
1360 return sccstack[first].hash;
1362 /* We aim to get a unique hash for every tree within the SCC and compute the hash value
1363 of the whole SCC by combining all values together in a stable (entry-point
1364 independent) order. This guarantees that the same SCC regions within
1365 different translation units will get the same hash values and therefore
1366 will be merged at WPA time.
1368 Often the hashes are already unique. In that case we compute the SCC hash
1369 by combining individual hash values in an increasing order.
1371 If there are duplicates, we seek at least one tree with unique hash (and
1372 pick one with minimal hash and this property). Then we obtain a stable
1373 order by DFS walk starting from this unique tree and then use the index
1374 within this order to make individual hash values unique.
1376 If there is no tree with unique hash, we iteratively propagate the hash
1377 values across the internal edges of SCC. This usually quickly leads
1378 to unique hashes. Consider, for example, an SCC containing two pointers
1379 that are identical except for the types they point to and assume that
1380 these types are also part of the SCC. The propagation will add the
1381 points-to type information into their hash values. */
1384 /* Sort the SCC so we can easily check for uniqueness. */
1385 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1387 unsigned int classes = 1;
1388 int firstunique = -1;
1390 /* Find the tree with lowest unique hash (if it exists) and compute
1391 the number of equivalence classes. */
1392 if (sccstack[first].hash != sccstack[first+1].hash)
1393 firstunique = 0;
1394 for (unsigned i = 1; i < size; ++i)
1395 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1397 classes++;
1398 if (firstunique == -1
1399 && (i == size - 1
1400 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1401 firstunique = i;
1404 /* If we found a tree with unique hash, stop the iteration. */
1405 if (firstunique != -1
1406 /* Also terminate if we run out of iterations or if the number of
1407 equivalence classes is no longer increasing.
1408 For example a cyclic list of trees that are all equivalent will
1409 never have a unique entry point; we however do not build such SCCs
1410 in our IL. */
1411 || classes <= last_classes || iterations > 16)
1413 hashval_t scc_hash;
1415 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1416 starting from FIRSTUNIQUE to obtain a stable order. */
1417 if (classes != size && firstunique != -1)
1419 hash_map <tree, hashval_t> map(size*2);
1421 /* Store hash values into a map, so we can associate them with
1422 the reordered SCC. */
1423 for (unsigned i = 0; i < size; ++i)
1424 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1426 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1427 true);
1428 gcc_assert (again.sccstack.length () == size);
1430 memcpy (sccstack.address () + first,
1431 again.sccstack.address (),
1432 sizeof (scc_entry) * size);
1434 /* Update hash values of individual members by hashing in the
1435 index within the stable order. This ensures uniqueness.
1436 Also compute the SCC hash by mixing in all hash values in
1437 the stable order we obtained. */
1438 sccstack[first].hash = *map.get (sccstack[first].t);
1439 scc_hash = sccstack[first].hash;
1440 for (unsigned i = 1; i < size; ++i)
1442 sccstack[first+i].hash
1443 = iterative_hash_hashval_t (i,
1444 *map.get (sccstack[first+i].t));
1445 scc_hash
1446 = iterative_hash_hashval_t (scc_hash,
1447 sccstack[first+i].hash);
1450 /* If we got a unique hash value for each tree, then sort already
1451 ensured entry-point independent order. Only compute the final
1452 SCC hash.
1454 If we failed to find the unique entry point, we go by the same
1455 route. We will eventually introduce unwanted hash conflicts. */
1456 else
1458 scc_hash = sccstack[first].hash;
1459 for (unsigned i = 1; i < size; ++i)
1460 scc_hash
1461 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1463 /* We cannot 100% guarantee that the hashes will not conflict in a way
1464 that makes it impossible to find a unique hash. This however
1465 should be an extremely rare case. ICE for now so possible
1466 issues are found and evaluated. */
1467 gcc_checking_assert (classes == size);
1470 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1471 hash into the hash of each element. */
1472 for (unsigned i = 0; i < size; ++i)
1473 sccstack[first+i].hash
1474 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1475 return scc_hash;
1478 last_classes = classes;
1479 iterations++;
1481 /* We failed to identify the entry point; propagate hash values across
1482 the edges. */
1483 hash_map <tree, hashval_t> map(size*2);
1485 for (unsigned i = 0; i < size; ++i)
1486 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1488 for (unsigned i = 0; i < size; i++)
1489 sccstack[first+i].hash
1490 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1492 while (true);
1495 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1496 already in the streamer cache. Main routine called for
1497 each visit of EXPR. */
1499 void
1500 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1501 tree expr, bool ref_p, bool this_ref_p)
1503 /* Handle special cases. */
1504 if (expr == NULL_TREE)
1505 return;
1507 /* Do not DFS walk into indexable trees. */
1508 if (this_ref_p && tree_is_indexable (expr))
1509 return;
1511 /* Check if we already streamed EXPR. */
1512 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1513 return;
1515 worklist w;
1516 w.expr = expr;
1517 w.from_state = from_state;
1518 w.cstate = NULL;
1519 w.ref_p = ref_p;
1520 w.this_ref_p = this_ref_p;
1521 worklist_vec.safe_push (w);
1525 /* Emit the physical representation of tree node EXPR to output block OB.
1526 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1527 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1529 void
1530 lto_output_tree (struct output_block *ob, tree expr,
1531 bool ref_p, bool this_ref_p)
1533 unsigned ix;
1534 bool existed_p;
1536 if (expr == NULL_TREE)
1538 streamer_write_record_start (ob, LTO_null);
1539 return;
1542 if (this_ref_p && tree_is_indexable (expr))
1544 lto_output_tree_ref (ob, expr);
1545 return;
1548 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1549 if (existed_p)
1551 /* If a node has already been streamed out, make sure that
1552 we don't write it more than once. Otherwise, the reader
1553 will instantiate two different nodes for the same object. */
1554 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1555 streamer_write_uhwi (ob, ix);
1556 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1557 lto_tree_code_to_tag (TREE_CODE (expr)));
1558 lto_stats.num_pickle_refs_output++;
1560 else
1562 /* This is the first time we see EXPR, write all reachable
1563 trees to OB. */
1564 static bool in_dfs_walk;
1566 /* Protect against recursion, which would mean a disconnect between
1567 what tree edges we walk in the DFS walk and what edges
1568 we stream out. */
1569 gcc_assert (!in_dfs_walk);
1571 /* Start the DFS walk. */
1572 /* Save ob state ... */
1573 /* let's see ... */
1574 in_dfs_walk = true;
1575 DFS (ob, expr, ref_p, this_ref_p, false);
1576 in_dfs_walk = false;
1578 /* Finally append a reference to the tree we were writing.
1579 ??? If expr ended up as a singleton we could have
1580 inlined it here and avoid outputting a reference. */
1581 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1582 gcc_assert (existed_p);
1583 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1584 streamer_write_uhwi (ob, ix);
1585 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1586 lto_tree_code_to_tag (TREE_CODE (expr)));
1587 lto_stats.num_pickle_refs_output++;
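/* In short, lto_output_tree emits one of (rough summary):
     LTO_null                          for a NULL tree,
     LTO_type_ref or a *_decl_ref      for indexable trees,
     LTO_tree_pickle_reference         for trees already in the writer cache,
     or one or more DFS-produced LTO_tree_scc records followed by a
     pickle reference to EXPR itself. */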
1592 /* Output to OB a list of try/catch handlers starting with FIRST. */
1594 static void
1595 output_eh_try_list (struct output_block *ob, eh_catch first)
1597 eh_catch n;
1599 for (n = first; n; n = n->next_catch)
1601 streamer_write_record_start (ob, LTO_eh_catch);
1602 stream_write_tree (ob, n->type_list, true);
1603 stream_write_tree (ob, n->filter_list, true);
1604 stream_write_tree (ob, n->label, true);
1607 streamer_write_record_start (ob, LTO_null);
1611 /* Output EH region R to OB. */
1615 static void
1616 output_eh_region (struct output_block *ob, eh_region r)
1618 enum LTO_tags tag;
1620 if (r == NULL)
1622 streamer_write_record_start (ob, LTO_null);
1623 return;
1626 if (r->type == ERT_CLEANUP)
1627 tag = LTO_ert_cleanup;
1628 else if (r->type == ERT_TRY)
1629 tag = LTO_ert_try;
1630 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1631 tag = LTO_ert_allowed_exceptions;
1632 else if (r->type == ERT_MUST_NOT_THROW)
1633 tag = LTO_ert_must_not_throw;
1634 else
1635 gcc_unreachable ();
1637 streamer_write_record_start (ob, tag);
1638 streamer_write_hwi (ob, r->index);
1640 if (r->outer)
1641 streamer_write_hwi (ob, r->outer->index);
1642 else
1643 streamer_write_zero (ob);
1645 if (r->inner)
1646 streamer_write_hwi (ob, r->inner->index);
1647 else
1648 streamer_write_zero (ob);
1650 if (r->next_peer)
1651 streamer_write_hwi (ob, r->next_peer->index);
1652 else
1653 streamer_write_zero (ob);
1655 if (r->type == ERT_TRY)
1657 output_eh_try_list (ob, r->u.eh_try.first_catch);
1659 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1661 stream_write_tree (ob, r->u.allowed.type_list, true);
1662 stream_write_tree (ob, r->u.allowed.label, true);
1663 streamer_write_uhwi (ob, r->u.allowed.filter);
1665 else if (r->type == ERT_MUST_NOT_THROW)
1667 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1668 bitpack_d bp = bitpack_create (ob->main_stream);
1669 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1670 streamer_write_bitpack (&bp);
1673 if (r->landing_pads)
1674 streamer_write_hwi (ob, r->landing_pads->index);
1675 else
1676 streamer_write_zero (ob);
1680 /* Output landing pad LP to OB. */
1682 static void
1683 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1685 if (lp == NULL)
1687 streamer_write_record_start (ob, LTO_null);
1688 return;
1691 streamer_write_record_start (ob, LTO_eh_landing_pad);
1692 streamer_write_hwi (ob, lp->index);
1693 if (lp->next_lp)
1694 streamer_write_hwi (ob, lp->next_lp->index);
1695 else
1696 streamer_write_zero (ob);
1698 if (lp->region)
1699 streamer_write_hwi (ob, lp->region->index);
1700 else
1701 streamer_write_zero (ob);
1703 stream_write_tree (ob, lp->post_landing_pad, true);
1707 /* Output the existing eh_table to OB. */
1709 static void
1710 output_eh_regions (struct output_block *ob, struct function *fn)
1712 if (fn->eh && fn->eh->region_tree)
1714 unsigned i;
1715 eh_region eh;
1716 eh_landing_pad lp;
1717 tree ttype;
1719 streamer_write_record_start (ob, LTO_eh_table);
1721 /* Emit the index of the root of the EH region tree. */
1722 streamer_write_hwi (ob, fn->eh->region_tree->index);
1724 /* Emit all the EH regions in the region array. */
1725 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1726 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1727 output_eh_region (ob, eh);
1729 /* Emit all landing pads. */
1730 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1731 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1732 output_eh_lp (ob, lp);
1734 /* Emit all the runtime type data. */
1735 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1736 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1737 stream_write_tree (ob, ttype, true);
1739 /* Emit the table of action chains. */
1740 if (targetm.arm_eabi_unwinder)
1742 tree t;
1743 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1744 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1745 stream_write_tree (ob, t, true);
1747 else
1749 uchar c;
1750 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1751 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1752 streamer_write_char_stream (ob->main_stream, c);
1756 /* The LTO_null either terminates the record or indicates that there
1757 are no eh_records at all. */
1758 streamer_write_record_start (ob, LTO_null);
1762 /* Output all of the active ssa names to the ssa_names stream. */
1764 static void
1765 output_ssa_names (struct output_block *ob, struct function *fn)
1767 unsigned int i, len;
1769 len = vec_safe_length (SSANAMES (fn));
1770 streamer_write_uhwi (ob, len);
1772 for (i = 1; i < len; i++)
1774 tree ptr = (*SSANAMES (fn))[i];
1776 if (ptr == NULL_TREE
1777 || SSA_NAME_IN_FREE_LIST (ptr)
1778 || virtual_operand_p (ptr))
1779 continue;
1781 streamer_write_uhwi (ob, i);
1782 streamer_write_char_stream (ob->main_stream,
1783 SSA_NAME_IS_DEFAULT_DEF (ptr));
1784 if (SSA_NAME_VAR (ptr))
1785 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1786 else
1787 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1788 stream_write_tree (ob, TREE_TYPE (ptr), true);
1791 streamer_write_zero (ob);
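/* So the ssa_names record is, roughly:
     len, then for each live non-virtual name:
       version, default-def byte, SSA_NAME_VAR (or its type),
   terminated by a zero. */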
1795 /* Output a wide-int. */
1797 static void
1798 streamer_write_wi (struct output_block *ob,
1799 const widest_int &w)
1801 int len = w.get_len ();
1803 streamer_write_uhwi (ob, w.get_precision ());
1804 streamer_write_uhwi (ob, len);
1805 for (int i = 0; i < len; i++)
1806 streamer_write_hwi (ob, w.elt (i));
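/* So a widest_int is streamed as, roughly:
     precision  len  elt[0] ... elt[len-1]
   with each element written as a signed HWI. */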
1810 /* Output the cfg. */
1812 static void
1813 output_cfg (struct output_block *ob, struct function *fn)
1815 struct lto_output_stream *tmp_stream = ob->main_stream;
1816 basic_block bb;
1818 ob->main_stream = ob->cfg_stream;
1820 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1821 profile_status_for_fn (fn));
1823 /* Output the number of the highest basic block. */
1824 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1826 FOR_ALL_BB_FN (bb, fn)
1828 edge_iterator ei;
1829 edge e;
1831 streamer_write_hwi (ob, bb->index);
1833 /* Output the successors and the edge flags. */
1834 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1835 FOR_EACH_EDGE (e, ei, bb->succs)
1837 streamer_write_uhwi (ob, e->dest->index);
1838 streamer_write_hwi (ob, e->probability);
1839 streamer_write_gcov_count (ob, e->count);
1840 streamer_write_uhwi (ob, e->flags);
1844 streamer_write_hwi (ob, -1);
1846 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1847 while (bb->next_bb)
1849 streamer_write_hwi (ob, bb->next_bb->index);
1850 bb = bb->next_bb;
1853 streamer_write_hwi (ob, -1);
1855 /* ??? The cfgloop interface is tied to cfun. */
1856 gcc_assert (cfun == fn);
1858 /* Output the number of loops. */
1859 streamer_write_uhwi (ob, number_of_loops (fn));
1861 /* Output each loop, skipping the tree root which has number zero. */
1862 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1864 struct loop *loop = get_loop (fn, i);
1866 /* Write the index of the loop header. That's enough to rebuild
1867 the loop tree on the reader side. Stream -1 for an unused
1868 loop entry. */
1869 if (!loop)
1871 streamer_write_hwi (ob, -1);
1872 continue;
1874 else
1875 streamer_write_hwi (ob, loop->header->index);
1877 /* Write everything copy_loop_info copies. */
1878 streamer_write_enum (ob->main_stream,
1879 loop_estimation, EST_LAST, loop->estimate_state);
1880 streamer_write_hwi (ob, loop->any_upper_bound);
1881 if (loop->any_upper_bound)
1882 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1883 streamer_write_hwi (ob, loop->any_estimate);
1884 if (loop->any_estimate)
1885 streamer_write_wi (ob, loop->nb_iterations_estimate);
1887 /* Write OMP SIMD related info. */
1888 streamer_write_hwi (ob, loop->safelen);
1889 streamer_write_hwi (ob, loop->dont_vectorize);
1890 streamer_write_hwi (ob, loop->force_vectorize);
1891 stream_write_tree (ob, loop->simduid, true);
1894 ob->main_stream = tmp_stream;
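/* A rough sketch of the cfg stream layout produced above:

     profile status, highest basic block number
     for each basic block: index, successor count,
       then per edge: dest index, probability, count, flags
     -1
     the next_bb chain as a list of block indices, then -1
     number of loops, then for each loop: header block index (or -1 when
       the slot is unused) followed by the copy_loop_info fields. */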
1898 /* Create the header in the file using OB. If the section type is for
1899 a function, set FN to the decl for that function. */
1901 void
1902 produce_asm (struct output_block *ob, tree fn)
1904 enum lto_section_type section_type = ob->section_type;
1905 struct lto_function_header header;
1906 char *section_name;
1908 if (section_type == LTO_section_function_body)
1910 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1911 section_name = lto_get_section_name (section_type, name, NULL);
1913 else
1914 section_name = lto_get_section_name (section_type, NULL, NULL);
1916 lto_begin_section (section_name, !flag_wpa);
1917 free (section_name);
1919 /* The entire header is computed here. */
1920 memset (&header, 0, sizeof (struct lto_function_header));
1922 /* Write the header. */
1923 header.major_version = LTO_major_version;
1924 header.minor_version = LTO_minor_version;
1926 if (section_type == LTO_section_function_body)
1927 header.cfg_size = ob->cfg_stream->total_size;
1928 header.main_size = ob->main_stream->total_size;
1929 header.string_size = ob->string_stream->total_size;
1930 lto_write_data (&header, sizeof header);
1932 /* Put all of the gimple and the string table out to the asm file as a
1933 block of text. */
1934 if (section_type == LTO_section_function_body)
1935 lto_write_stream (ob->cfg_stream);
1936 lto_write_stream (ob->main_stream);
1937 lto_write_stream (ob->string_stream);
1939 lto_end_section ();
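/* Illustrative note, not part of this file: the section payload emitted
   above is laid out as

     [lto_function_header][cfg_size bytes][main_size bytes][string_size bytes]

   where the cfg part is present (and cfg_size nonzero) only for
   function-body sections.  A reader can therefore locate each sub-stream
   from the sizes recorded in the header, e.g.

     const char *cfg = data + sizeof (struct lto_function_header);
     const char *main_data = cfg + header->cfg_size;
     const char *strings = main_data + header->main_size;

   with DATA pointing at the start of the section contents.  */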
1943 /* Output the base body of struct function FN using output block OB. */
1945 static void
1946 output_struct_function_base (struct output_block *ob, struct function *fn)
1948 struct bitpack_d bp;
1949 unsigned i;
1950 tree t;
1952 /* Output the static chain and non-local goto save area. */
1953 stream_write_tree (ob, fn->static_chain_decl, true);
1954 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1956 /* Output all the local variables in the function. */
1957 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1958 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1959 stream_write_tree (ob, t, true);
1961 /* Output current IL state of the function. */
1962 streamer_write_uhwi (ob, fn->curr_properties);
1964 /* Write all the attributes for FN. */
1965 bp = bitpack_create (ob->main_stream);
1966 bp_pack_value (&bp, fn->is_thunk, 1);
1967 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1968 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1969 bp_pack_value (&bp, fn->returns_struct, 1);
1970 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1971 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1972 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1973 bp_pack_value (&bp, fn->after_inlining, 1);
1974 bp_pack_value (&bp, fn->stdarg, 1);
1975 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1976 bp_pack_value (&bp, fn->calls_alloca, 1);
1977 bp_pack_value (&bp, fn->calls_setjmp, 1);
1978 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
1979 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1980 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1981 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1982 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
1984 /* Output the function start and end loci. */
1985 stream_output_location (ob, &bp, fn->function_start_locus);
1986 stream_output_location (ob, &bp, fn->function_end_locus);
1988 streamer_write_bitpack (&bp);
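/* Note: the reader must unpack exactly the same fields, in the same order
   and with the same bit widths; the two locations above are streamed as
   part of this same bit-pack.  */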
1992 /* Output the body of function NODE->DECL. */
1994 static void
1995 output_function (struct cgraph_node *node)
1997 tree function;
1998 struct function *fn;
1999 basic_block bb;
2000 struct output_block *ob;
2002 function = node->decl;
2003 fn = DECL_STRUCT_FUNCTION (function);
2004 ob = create_output_block (LTO_section_function_body);
2006 clear_line_info (ob);
2007 ob->symbol = node;
2009 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2011 /* Set current_function_decl and cfun. */
2012 push_cfun (fn);
2014 /* Make string 0 be a NULL string. */
2015 streamer_write_char_stream (ob->string_stream, 0);
2017 streamer_write_record_start (ob, LTO_function);
2019 /* Output the result decl and the chain of parameter decls. */
2020 stream_write_tree (ob, DECL_RESULT (function), true);
2021 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2023 /* Output DECL_INITIAL for the function, which contains the tree of
2024 lexical scopes. */
2025 stream_write_tree (ob, DECL_INITIAL (function), true);
2027 /* We also stream abstract functions; for those we stream only what is
2028 needed for debug info. */
2029 if (gimple_has_body_p (function))
2031 streamer_write_uhwi (ob, 1);
2032 output_struct_function_base (ob, fn);
2034 /* Output all the SSA names used in the function. */
2035 output_ssa_names (ob, fn);
2037 /* Output any exception handling regions. */
2038 output_eh_regions (ob, fn);
2041 /* We will renumber the statements. The code that does this uses
2042 the same ordering that we use for serializing them so we can use
2043 the same code on the other end and not have to write out the
2044 statement numbers. We do not assign UIDs to PHIs here because
2045 virtual PHIs get re-computed on-the-fly which would make numbers
2046 inconsistent. */
2047 set_gimple_stmt_max_uid (cfun, 0);
2048 FOR_ALL_BB_FN (bb, cfun)
2050 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2051 gsi_next (&gsi))
2053 gphi *stmt = gsi.phi ();
2055 /* Virtual PHIs are not going to be streamed. */
2056 if (!virtual_operand_p (gimple_phi_result (stmt)))
2057 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2059 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2060 gsi_next (&gsi))
2062 gimple *stmt = gsi_stmt (gsi);
2063 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2066 /* The virtual PHIs were skipped above; give them UIDs now, after all other
2067 statements, so that no statement UID is duplicated. */
2068 FOR_ALL_BB_FN (bb, cfun)
2070 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2071 gsi_next (&gsi))
2073 gphi *stmt = gsi.phi ();
2074 if (virtual_operand_p (gimple_phi_result (stmt)))
2075 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2079 /* Output the code for the function. */
2080 FOR_ALL_BB_FN (bb, fn)
2081 output_bb (ob, bb, fn);
2083 /* The terminator for this function. */
2084 streamer_write_record_start (ob, LTO_null);
2086 output_cfg (ob, fn);
2088 pop_cfun ();
2090 else
2091 streamer_write_uhwi (ob, 0);
2093 /* Create a section to hold the pickled output of this function. */
2094 produce_asm (ob, function);
2096 destroy_output_block (ob);
2099 /* Output the initializer (constructor) of variable NODE->DECL. */
2101 static void
2102 output_constructor (struct varpool_node *node)
2104 tree var = node->decl;
2105 struct output_block *ob;
2107 ob = create_output_block (LTO_section_function_body);
2109 clear_line_info (ob);
2110 ob->symbol = node;
2112 /* Make string 0 be a NULL string. */
2113 streamer_write_char_stream (ob->string_stream, 0);
2115 /* Output DECL_INITIAL for the variable, which holds the initializer,
2116 i.e. the constructor. */
2117 stream_write_tree (ob, DECL_INITIAL (var), true);
2119 /* Create a section to hold the pickled output for this variable. */
2120 produce_asm (ob, var);
2122 destroy_output_block (ob);
2126 /* Emit toplevel asms. */
2128 void
2129 lto_output_toplevel_asms (void)
2131 struct output_block *ob;
2132 struct asm_node *can;
2133 char *section_name;
2134 struct lto_simple_header_with_strings header;
2136 if (!symtab->first_asm_symbol ())
2137 return;
2139 ob = create_output_block (LTO_section_asm);
2141 /* Make string 0 be a NULL string. */
2142 streamer_write_char_stream (ob->string_stream, 0);
2144 for (can = symtab->first_asm_symbol (); can; can = can->next)
2146 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2147 streamer_write_hwi (ob, can->order);
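/* Terminate the list of asm statements with a NULL string.  */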
2150 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2152 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2153 lto_begin_section (section_name, !flag_wpa);
2154 free (section_name);
2156 /* The entire header stream is computed here. */
2157 memset (&header, 0, sizeof (header));
2159 /* Write the header. */
2160 header.major_version = LTO_major_version;
2161 header.minor_version = LTO_minor_version;
2163 header.main_size = ob->main_stream->total_size;
2164 header.string_size = ob->string_stream->total_size;
2165 lto_write_data (&header, sizeof header);
2167 /* Put the asm statements and the string table out to the asm file as a
2168 block of text. */
2169 lto_write_stream (ob->main_stream);
2170 lto_write_stream (ob->string_stream);
2172 lto_end_section ();
2174 destroy_output_block (ob);
2178 /* Copy the function body or variable constructor of NODE without deserializing. */
2180 static void
2181 copy_function_or_variable (struct symtab_node *node)
2183 tree function = node->decl;
2184 struct lto_file_decl_data *file_data = node->lto_file_data;
2185 const char *data;
2186 size_t len;
2187 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2188 char *section_name =
2189 lto_get_section_name (LTO_section_function_body, name, NULL);
2190 size_t i, j;
2191 struct lto_in_decl_state *in_state;
2192 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2194 lto_begin_section (section_name, !flag_wpa);
2195 free (section_name);
2197 /* We may have renamed the declaration, e.g., a static function. */
2198 name = lto_get_decl_name_mapping (file_data, name);
2200 data = lto_get_section_data (file_data, LTO_section_function_body,
2201 name, &len);
2202 gcc_assert (data);
2204 /* Do a bitwise copy of the section data. */
2205 lto_write_data (data, len);
2207 /* Copy decls. */
2208 in_state =
2209 lto_get_function_in_decl_state (node->lto_file_data, function);
2210 gcc_assert (in_state);
2212 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2214 size_t n = vec_safe_length (in_state->streams[i]);
2215 vec<tree, va_gc> *trees = in_state->streams[i];
2216 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2218 /* The out state must have the same indices as the in state,
2219 so just copy the vector.  All the encoders in the out state
2220 must still be empty when we reach here. */
2221 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2222 encoder->trees.reserve_exact (n);
2223 for (j = 0; j < n; j++)
2224 encoder->trees.safe_push ((*trees)[j]);
2227 lto_free_section_data (file_data, LTO_section_function_body, name,
2228 data, len);
2229 lto_end_section ();
2232 /* Wrap references to public decls in *TP inside a type-preserving MEM_REF. */
2234 static tree
2235 wrap_refs (tree *tp, int *ws, void *)
2237 tree t = *tp;
2238 if (handled_component_p (t)
2239 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2240 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2242 tree decl = TREE_OPERAND (t, 0);
2243 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2244 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2245 build1 (ADDR_EXPR, ptrtype, decl),
2246 build_int_cst (ptrtype, 0));
2247 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2248 *ws = 0;
2250 else if (TREE_CODE (t) == CONSTRUCTOR)
2252 else if (!EXPR_P (t))
2253 *ws = 0;
2254 return NULL_TREE;
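/* Illustrative example, not part of this file: for a public variable V of
   type T, a handled component such as COMPONENT_REF <V, f> has its base
   operand rewritten above into

     COMPONENT_REF <MEM_REF <(T *) &V, 0>, f>

   i.e. the direct use of the decl is replaced by a zero-offset dereference
   of its address through an explicit pointer-to-T type.  */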
2257 /* Main entry point from the pass manager. */
2259 void
2260 lto_output (void)
2262 struct lto_out_decl_state *decl_state;
2263 bitmap output = NULL;
2264 int i, n_nodes;
2265 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2267 if (flag_checking)
2268 output = lto_bitmap_alloc ();
2270 /* Initialize the streamer. */
2271 lto_streamer_init ();
2273 n_nodes = lto_symtab_encoder_size (encoder);
2274 /* Process the encoded nodes, emitting function bodies and variable initializers. */
2275 for (i = 0; i < n_nodes; i++)
2277 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2278 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2280 if (lto_symtab_encoder_encode_body_p (encoder, node)
2281 && !node->alias)
2283 if (flag_checking)
2285 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2286 bitmap_set_bit (output, DECL_UID (node->decl));
2288 decl_state = lto_new_out_decl_state ();
2289 lto_push_out_decl_state (decl_state);
2290 if (gimple_has_body_p (node->decl) || !flag_wpa
2291 /* Thunks have no body but they may be synthesized
2292 at WPA time. */
2293 || DECL_ARGUMENTS (node->decl))
2294 output_function (node);
2295 else
2296 copy_function_or_variable (node);
2297 gcc_assert (lto_get_out_decl_state () == decl_state);
2298 lto_pop_out_decl_state ();
2299 lto_record_function_out_decl_state (node->decl, decl_state);
2302 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2304 /* Wrap symbol references inside the ctor in a
2305 type-preserving MEM_REF. */
2306 tree ctor = DECL_INITIAL (node->decl);
2307 if (ctor && !in_lto_p)
2308 walk_tree (&ctor, wrap_refs, NULL, NULL);
2309 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2310 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2311 && !node->alias)
2313 timevar_push (TV_IPA_LTO_CTORS_OUT);
2314 if (flag_checking)
2316 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2317 bitmap_set_bit (output, DECL_UID (node->decl));
2319 decl_state = lto_new_out_decl_state ();
2320 lto_push_out_decl_state (decl_state);
2321 if (DECL_INITIAL (node->decl) != error_mark_node
2322 || !flag_wpa)
2323 output_constructor (node);
2324 else
2325 copy_function_or_variable (node);
2326 gcc_assert (lto_get_out_decl_state () == decl_state);
2327 lto_pop_out_decl_state ();
2328 lto_record_function_out_decl_state (node->decl, decl_state);
2329 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2334 /* Emit the callgraph after emitting function bodies. This needs to
2335 be done now to make sure that all the statements in every function
2336 have been renumbered so that edges can be associated with call
2337 statements using the statement UIDs. */
2338 output_symtab ();
2340 output_offload_tables ();
2342 /* Match the flag_checking guard used when OUTPUT was allocated above. */
2343 if (flag_checking)
2344 lto_bitmap_free (output);
2347 /* Write each node encoded by ENCODER to OB, as well as those reachable
2348 from it and required for correct representation of its semantics.
2349 Each node in ENCODER must be a global declaration or a type. A node
2350 is written only once, even if it appears multiple times in the
2351 vector. Certain transitively-reachable nodes, such as those
2352 representing expressions, may be duplicated, but such nodes
2353 must not appear in ENCODER itself. */
2355 static void
2356 write_global_stream (struct output_block *ob,
2357 struct lto_tree_ref_encoder *encoder)
2359 tree t;
2360 size_t index;
2361 const size_t size = lto_tree_ref_encoder_size (encoder);
2363 for (index = 0; index < size; index++)
2365 t = lto_tree_ref_encoder_get_tree (encoder, index);
2366 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2367 stream_write_tree (ob, t, false);
2372 /* Write a sequence of indices into the globals vector corresponding
2373 to the trees in ENCODER. These are used by the reader to map the
2374 indices used to refer to global entities within function bodies to
2375 their referents. */
2377 static void
2378 write_global_references (struct output_block *ob,
2379 struct lto_tree_ref_encoder *encoder)
2381 tree t;
2382 uint32_t index;
2383 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2385 /* Write size and slot indexes as 32-bit unsigned numbers. */
2386 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2387 data[0] = size;
2389 for (index = 0; index < size; index++)
2391 unsigned slot_num;
2393 t = lto_tree_ref_encoder_get_tree (encoder, index);
2394 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2395 gcc_assert (slot_num != (unsigned)-1);
2396 data[index + 1] = slot_num;
2399 lto_write_data (data, sizeof (uint32_t) * (size + 1));
2400 free (data);
2404 /* Write all the streams in an lto_out_decl_state STATE using
2405 output block OB. */
2407 void
2408 lto_output_decl_state_streams (struct output_block *ob,
2409 struct lto_out_decl_state *state)
2411 int i;
2413 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2414 write_global_stream (ob, &state->streams[i]);
2418 /* Write all the references in an lto_out_decl_state STATE using
2419 output block OB. */
2421 void
2422 lto_output_decl_state_refs (struct output_block *ob,
2423 struct lto_out_decl_state *state)
2425 unsigned i;
2426 unsigned ref;
2427 tree decl;
2429 /* Write a reference to the FUNCTION_DECL.  If there is no function,
2430 write a reference to void_type_node. */
2431 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2432 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2433 gcc_assert (ref != (unsigned)-1);
2434 lto_write_data (&ref, sizeof (uint32_t));
2436 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2437 write_global_references (ob, &state->streams[i]);
2441 /* Return the written size of STATE. */
2443 static size_t
2444 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2446 int i;
2447 size_t size;
2449 size = sizeof (int32_t); /* fn_ref. */
2450 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2452 size += sizeof (int32_t); /* vector size. */
2453 size += (lto_tree_ref_encoder_size (&state->streams[i])
2454 * sizeof (int32_t));
2456 return size;
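/* Illustrative note: with 32-bit slots this amounts to

     size = sizeof (int32_t) * (1 + LTO_N_DECL_STREAMS
				+ total number of trees in all streams)

   i.e. one word for the function reference, one word per stream for its
   vector size, and one word per recorded tree.  */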
2460 /* Write symbol T to the symbol table section; CACHE gives its slot number,
2461 SEEN the names already written, and ALIAS whether T is written for an alias. */
2463 static void
2464 write_symbol (struct streamer_tree_cache_d *cache,
2465 tree t, hash_set<const char *> *seen, bool alias)
2467 const char *name;
2468 enum gcc_plugin_symbol_kind kind;
2469 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2470 unsigned slot_num;
2471 uint64_t size;
2472 const char *comdat;
2473 unsigned char c;
2475 /* None of the following kinds of symbols are needed in the
2476 symbol table. */
2477 if (!TREE_PUBLIC (t)
2478 || is_builtin_fn (t)
2479 || DECL_ABSTRACT_P (t)
2480 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2481 return;
2482 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2484 gcc_assert (TREE_CODE (t) == VAR_DECL
2485 || TREE_CODE (t) == FUNCTION_DECL);
2487 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2489 /* This behaves like assemble_name_raw in varasm.c, performing the
2490 same name manipulations that ASM_OUTPUT_LABELREF does. */
2491 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2493 if (seen->add (name))
2494 return;
2496 streamer_tree_cache_lookup (cache, t, &slot_num);
2497 gcc_assert (slot_num != (unsigned)-1);
2499 if (DECL_EXTERNAL (t))
2501 if (DECL_WEAK (t))
2502 kind = GCCPK_WEAKUNDEF;
2503 else
2504 kind = GCCPK_UNDEF;
2506 else
2508 if (DECL_WEAK (t))
2509 kind = GCCPK_WEAKDEF;
2510 else if (DECL_COMMON (t))
2511 kind = GCCPK_COMMON;
2512 else
2513 kind = GCCPK_DEF;
2515 /* When something is defined, it should have a node attached. */
2516 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2517 || varpool_node::get (t)->definition);
2518 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2519 || (cgraph_node::get (t)
2520 && cgraph_node::get (t)->definition));
2523 /* Imitate what default_elf_asm_output_external does.
2524 When a symbol is external, we need to output it with DEFAULT visibility
2525 when compiling with -fvisibility=default, but with HIDDEN visibility
2526 when the symbol has attribute ((visibility ("hidden"))) specified.
2527 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2528 right. */
2530 if (DECL_EXTERNAL (t)
2531 && !targetm.binds_local_p (t))
2532 visibility = GCCPV_DEFAULT;
2533 else
2534 switch (DECL_VISIBILITY (t))
2536 case VISIBILITY_DEFAULT:
2537 visibility = GCCPV_DEFAULT;
2538 break;
2539 case VISIBILITY_PROTECTED:
2540 visibility = GCCPV_PROTECTED;
2541 break;
2542 case VISIBILITY_HIDDEN:
2543 visibility = GCCPV_HIDDEN;
2544 break;
2545 case VISIBILITY_INTERNAL:
2546 visibility = GCCPV_INTERNAL;
2547 break;
2550 if (kind == GCCPK_COMMON
2551 && DECL_SIZE_UNIT (t)
2552 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2553 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2554 else
2555 size = 0;
2557 if (DECL_ONE_ONLY (t))
2558 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2559 else
2560 comdat = "";
2562 lto_write_data (name, strlen (name) + 1);
2563 lto_write_data (comdat, strlen (comdat) + 1);
2564 c = (unsigned char) kind;
2565 lto_write_data (&c, 1);
2566 c = (unsigned char) visibility;
2567 lto_write_data (&c, 1);
2568 lto_write_data (&size, 8);
2569 lto_write_data (&slot_num, 4);
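/* Note: each entry written above consists of the NUL-terminated assembler
   name, the NUL-terminated comdat group (empty if none), one byte each for
   the gcc_plugin_symbol_kind and gcc_plugin_symbol_visibility, an 8-byte
   size and a 4-byte slot number.  */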
2572 /* Return true if NODE should appear in the plugin symbol table. */
2574 bool
2575 output_symbol_p (symtab_node *node)
2577 struct cgraph_node *cnode;
2578 if (!node->real_symbol_p ())
2579 return false;
2580 /* We keep external functions in the symtab for the sake of inlining
2581 and devirtualization.  We do not want to see them in the symbol table
2582 as references unless they are really used. */
2583 cnode = dyn_cast <cgraph_node *> (node);
2584 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2585 && cnode->callers)
2586 return true;
2588 /* Ignore all references from initializers of external variables - they are
2589 not really part of the compilation unit until they are used by folding.
2590 Some symbols, like references to external construction vtables, cannot be
2591 referred to at all.  We decide this in can_refer_decl_in_current_unit_p. */
2592 if (!node->definition || DECL_EXTERNAL (node->decl))
2594 int i;
2595 struct ipa_ref *ref;
2596 for (i = 0; node->iterate_referring (i, ref); i++)
2598 if (ref->use == IPA_REF_ALIAS)
2599 continue;
2600 if (is_a <cgraph_node *> (ref->referring))
2601 return true;
2602 if (!DECL_EXTERNAL (ref->referring->decl))
2603 return true;
2605 return false;
2607 return true;
2611 /* Write an IL symbol table to OB.  The symbols written are those in
2612 OB's symtab node encoder. */
2614 static void
2615 produce_symtab (struct output_block *ob)
2617 struct streamer_tree_cache_d *cache = ob->writer_cache;
2618 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2619 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2620 lto_symtab_encoder_iterator lsei;
2622 lto_begin_section (section_name, false);
2623 free (section_name);
2625 hash_set<const char *> seen;
2627 /* Write the symbol table.
2628 First write everything defined and then all declarations.
2629 This is necessary to handle cases where we have duplicated symbols. */
2630 for (lsei = lsei_start (encoder);
2631 !lsei_end_p (lsei); lsei_next (&lsei))
2633 symtab_node *node = lsei_node (lsei);
2635 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2636 continue;
2637 write_symbol (cache, node->decl, &seen, false);
2639 for (lsei = lsei_start (encoder);
2640 !lsei_end_p (lsei); lsei_next (&lsei))
2642 symtab_node *node = lsei_node (lsei);
2644 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2645 continue;
2646 write_symbol (cache, node->decl, &seen, false);
2649 lto_end_section ();
2653 /* Init the streamer_mode_table for output, where we collect info on what
2654 machine_mode values have been streamed. */
2655 void
2656 lto_output_init_mode_table (void)
2658 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2662 /* Write the mode table. */
2663 static void
2664 lto_write_mode_table (void)
2666 struct output_block *ob;
2667 ob = create_output_block (LTO_section_mode_table);
2668 bitpack_d bp = bitpack_create (ob->main_stream);
2670 /* Ensure that for modes with GET_MODE_INNER (m) != m the inner mode
2671 is also marked. */
2672 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2673 if (streamer_mode_table[i])
2675 machine_mode m = (machine_mode) i;
2676 if (GET_MODE_INNER (m) != m)
2677 streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
2679 /* First stream modes that have GET_MODE_INNER (m) == m,
2680 so that we can refer to them afterwards. */
2681 for (int pass = 0; pass < 2; pass++)
2682 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2683 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2685 machine_mode m = (machine_mode) i;
2686 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2687 continue;
2688 bp_pack_value (&bp, m, 8);
2689 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2690 bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2691 bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2692 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2693 bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2694 switch (GET_MODE_CLASS (m))
2696 case MODE_FRACT:
2697 case MODE_UFRACT:
2698 case MODE_ACCUM:
2699 case MODE_UACCUM:
2700 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2701 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2702 break;
2703 case MODE_FLOAT:
2704 case MODE_DECIMAL_FLOAT:
2705 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2706 break;
2707 default:
2708 break;
2710 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2712 bp_pack_value (&bp, VOIDmode, 8);
2714 streamer_write_bitpack (&bp);
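/* Note: each mode record above packs the 8-bit mode number, its class,
   size, precision, inner mode and number of units, then IBIT/FBIT for the
   fixed-point classes or the real format name for the float classes, and
   finally the mode name; an 8-bit VOIDmode terminates the table.  */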
2716 char *section_name
2717 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2718 lto_begin_section (section_name, !flag_wpa);
2719 free (section_name);
2721 /* The entire header stream is computed here. */
2722 struct lto_simple_header_with_strings header;
2723 memset (&header, 0, sizeof (header));
2725 /* Write the header. */
2726 header.major_version = LTO_major_version;
2727 header.minor_version = LTO_minor_version;
2729 header.main_size = ob->main_stream->total_size;
2730 header.string_size = ob->string_stream->total_size;
2731 lto_write_data (&header, sizeof header);
2733 /* Put the mode table and the string table out to the asm file as a
2734 block of text. */
2735 lto_write_stream (ob->main_stream);
2736 lto_write_stream (ob->string_stream);
2738 lto_end_section ();
2739 destroy_output_block (ob);
2743 /* This pass is run after all of the functions are serialized and all
2744 of the IPA passes have written their serialized forms.  This pass
2745 causes the vector of all of the global decls and types used from
2746 this file to be written into a section that can then be read back
2747 in to recover them on the other side. */
2749 void
2750 produce_asm_for_decls (void)
2752 struct lto_out_decl_state *out_state;
2753 struct lto_out_decl_state *fn_out_state;
2754 struct lto_decl_header header;
2755 char *section_name;
2756 struct output_block *ob;
2757 unsigned idx, num_fns;
2758 size_t decl_state_size;
2759 int32_t num_decl_states;
2761 ob = create_output_block (LTO_section_decls);
2763 memset (&header, 0, sizeof (struct lto_decl_header));
2765 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2766 lto_begin_section (section_name, !flag_wpa);
2767 free (section_name);
2769 /* Make string 0 be a NULL string. */
2770 streamer_write_char_stream (ob->string_stream, 0);
2772 gcc_assert (!alias_pairs);
2774 /* Get rid of the global decl state hash tables to save some memory. */
2775 out_state = lto_get_out_decl_state ();
2776 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2777 if (out_state->streams[i].tree_hash_table)
2779 delete out_state->streams[i].tree_hash_table;
2780 out_state->streams[i].tree_hash_table = NULL;
2783 /* Write the global symbols. */
2784 lto_output_decl_state_streams (ob, out_state);
2785 num_fns = lto_function_decl_states.length ();
2786 for (idx = 0; idx < num_fns; idx++)
2788 fn_out_state =
2789 lto_function_decl_states[idx];
2790 lto_output_decl_state_streams (ob, fn_out_state);
2793 header.major_version = LTO_major_version;
2794 header.minor_version = LTO_minor_version;
2796 /* Currently not used. This field would allow us to preallocate
2797 the globals vector, so that it need not be resized as it is extended. */
2798 header.num_nodes = -1;
2800 /* Compute the total size of all decl out states. */
2801 decl_state_size = sizeof (int32_t);
2802 decl_state_size += lto_out_decl_state_written_size (out_state);
2803 for (idx = 0; idx < num_fns; idx++)
2805 fn_out_state =
2806 lto_function_decl_states[idx];
2807 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2809 header.decl_state_size = decl_state_size;
2811 header.main_size = ob->main_stream->total_size;
2812 header.string_size = ob->string_stream->total_size;
2814 lto_write_data (&header, sizeof header);
2816 /* Write the main out-decl state, followed by out-decl states of
2817 functions. */
2818 num_decl_states = num_fns + 1;
2819 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2820 lto_output_decl_state_refs (ob, out_state);
2821 for (idx = 0; idx < num_fns; idx++)
2823 fn_out_state = lto_function_decl_states[idx];
2824 lto_output_decl_state_refs (ob, fn_out_state);
2827 lto_write_stream (ob->main_stream);
2828 lto_write_stream (ob->string_stream);
2830 lto_end_section ();
2832 /* Write the symbol table.  It is used by the linker to determine
2833 dependencies, and thus we can skip it for WPA. */
2834 if (!flag_wpa)
2835 produce_symtab (ob);
2837 /* Write command line opts. */
2838 lto_write_options ();
2840 /* Deallocate memory and clean up. */
2841 for (idx = 0; idx < num_fns; idx++)
2843 fn_out_state =
2844 lto_function_decl_states[idx];
2845 lto_delete_out_decl_state (fn_out_state);
2847 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2848 lto_function_decl_states.release ();
2849 destroy_output_block (ob);
2850 if (lto_stream_offload_p)
2851 lto_write_mode_table ();
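/* Illustrative note, not part of this file: the LTO_section_decls payload
   written by produce_asm_for_decls is the lto_decl_header, a 32-bit count
   of decl states (the global state plus one per function), the reference
   records of each state, and then the main and string streams;
   decl_state_size in the header covers the count and the reference
   records.  */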