gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "rtl.h"
30 #include "ssa.h"
31 #include "alias.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "flags.h"
35 #include "insn-config.h"
36 #include "expmed.h"
37 #include "dojump.h"
38 #include "explow.h"
39 #include "calls.h"
40 #include "emit-rtl.h"
41 #include "varasm.h"
42 #include "stmt.h"
43 #include "expr.h"
44 #include "params.h"
45 #include "internal-fn.h"
46 #include "gimple-iterator.h"
47 #include "tree-pass.h"
48 #include "diagnostic-core.h"
49 #include "except.h"
50 #include "lto-symtab.h"
51 #include "cgraph.h"
52 #include "target.h"
53 #include "gimple-streamer.h"
54 #include "cfgloop.h"
55 #include "builtins.h"
56 #include "gomp-constants.h"
59 static void lto_write_tree (struct output_block*, tree, bool);
61 /* Clear the line info stored in DATA_IN. */
63 static void
64 clear_line_info (struct output_block *ob)
66 ob->current_file = NULL;
67 ob->current_line = 0;
68 ob->current_col = 0;
72 /* Create the output block and return it. SECTION_TYPE is
73    LTO_section_function_body or LTO_section_static_initializer.  */
75 struct output_block *
76 create_output_block (enum lto_section_type section_type)
78 struct output_block *ob = XCNEW (struct output_block);
80 ob->section_type = section_type;
81 ob->decl_state = lto_get_out_decl_state ();
82 ob->main_stream = XCNEW (struct lto_output_stream);
83 ob->string_stream = XCNEW (struct lto_output_stream);
84 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
86 if (section_type == LTO_section_function_body)
87 ob->cfg_stream = XCNEW (struct lto_output_stream);
89 clear_line_info (ob);
91 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
92 gcc_obstack_init (&ob->obstack);
94 return ob;
98 /* Destroy the output block OB. */
100 void
101 destroy_output_block (struct output_block *ob)
103 enum lto_section_type section_type = ob->section_type;
105 delete ob->string_hash_table;
106 ob->string_hash_table = NULL;
108 free (ob->main_stream);
109 free (ob->string_stream);
110 if (section_type == LTO_section_function_body)
111 free (ob->cfg_stream);
113 streamer_tree_cache_delete (ob->writer_cache);
114 obstack_free (&ob->obstack, NULL);
116 free (ob);
120 /* Look up NODE in the type table and write the index for it to OB. */
122 static void
123 output_type_ref (struct output_block *ob, tree node)
125 streamer_write_record_start (ob, LTO_type_ref);
126 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
130 /* Return true if tree node T is written to various tables. For these
131    nodes, we sometimes want to write their physical representation
132 (via lto_output_tree), and sometimes we need to emit an index
133 reference into a table (via lto_output_tree_ref). */
135 static bool
136 tree_is_indexable (tree t)
138 /* Parameters and return values of functions of variably modified types
139 must go to global stream, because they may be used in the type
140 definition. */
141 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
142 && DECL_CONTEXT (t))
143 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
144 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
145 else if (TREE_CODE (t) == IMPORTED_DECL)
146 return false;
147 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
148 || TREE_CODE (t) == TYPE_DECL
149 || TREE_CODE (t) == CONST_DECL
150 || TREE_CODE (t) == NAMELIST_DECL)
151 && decl_function_context (t))
152 return false;
153 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
154 return false;
155 /* Variably modified types need to be streamed alongside function
156 bodies because they can refer to local entities. Together with
157 them we have to localize their members as well.
158 ??? In theory that includes non-FIELD_DECLs as well. */
159 else if (TYPE_P (t)
160 && variably_modified_type_p (t, NULL_TREE))
161 return false;
162 else if (TREE_CODE (t) == FIELD_DECL
163 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
164 return false;
165 else
166 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
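/* For example, a file-scope VAR_DECL is indexable and is streamed as a
   reference into the per-file decl tables, while an automatic (non-static)
   local VAR_DECL is streamed as part of the function body that uses it.  */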
170 /* Output info about new location into bitpack BP.
171 After outputting bitpack, lto_output_location_data has
172 to be done to output actual data. */
174 void
175 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
176 location_t loc)
178 expanded_location xloc;
180 loc = LOCATION_LOCUS (loc);
181 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
182 loc < RESERVED_LOCATION_COUNT
183 ? loc : RESERVED_LOCATION_COUNT);
184 if (loc < RESERVED_LOCATION_COUNT)
185 return;
187 xloc = expand_location (loc);
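/* The location is delta-encoded against the state cached in OB: one bit per
   field (file, line, column) records whether it changed, and only the fields
   that changed are packed below.  */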
189 bp_pack_value (bp, ob->current_file != xloc.file, 1);
190 bp_pack_value (bp, ob->current_line != xloc.line, 1);
191 bp_pack_value (bp, ob->current_col != xloc.column, 1);
193 if (ob->current_file != xloc.file)
194 bp_pack_string (ob, bp, xloc.file, true);
195 ob->current_file = xloc.file;
197 if (ob->current_line != xloc.line)
198 bp_pack_var_len_unsigned (bp, xloc.line);
199 ob->current_line = xloc.line;
201 if (ob->current_col != xloc.column)
202 bp_pack_var_len_unsigned (bp, xloc.column);
203 ob->current_col = xloc.column;
207 /* If EXPR is an indexable tree node, output a reference to it to
208 output block OB. Otherwise, output the physical representation of
209 EXPR to OB. */
211 static void
212 lto_output_tree_ref (struct output_block *ob, tree expr)
214 enum tree_code code;
216 if (TYPE_P (expr))
218 output_type_ref (ob, expr);
219 return;
222 code = TREE_CODE (expr);
223 switch (code)
225 case SSA_NAME:
226 streamer_write_record_start (ob, LTO_ssa_name_ref);
227 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
228 break;
230 case FIELD_DECL:
231 streamer_write_record_start (ob, LTO_field_decl_ref);
232 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
233 break;
235 case FUNCTION_DECL:
236 streamer_write_record_start (ob, LTO_function_decl_ref);
237 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
238 break;
240 case VAR_DECL:
241 case DEBUG_EXPR_DECL:
242 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
243 case PARM_DECL:
244 streamer_write_record_start (ob, LTO_global_decl_ref);
245 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
246 break;
248 case CONST_DECL:
249 streamer_write_record_start (ob, LTO_const_decl_ref);
250 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
253 case IMPORTED_DECL:
254 gcc_assert (decl_function_context (expr) == NULL);
255 streamer_write_record_start (ob, LTO_imported_decl_ref);
256 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
257 break;
259 case TYPE_DECL:
260 streamer_write_record_start (ob, LTO_type_decl_ref);
261 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
262 break;
264 case NAMELIST_DECL:
265 streamer_write_record_start (ob, LTO_namelist_decl_ref);
266 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
267 break;
269 case NAMESPACE_DECL:
270 streamer_write_record_start (ob, LTO_namespace_decl_ref);
271 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
272 break;
274 case LABEL_DECL:
275 streamer_write_record_start (ob, LTO_label_decl_ref);
276 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
277 break;
279 case RESULT_DECL:
280 streamer_write_record_start (ob, LTO_result_decl_ref);
281 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
282 break;
284 case TRANSLATION_UNIT_DECL:
285 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
286 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
287 break;
289 default:
290 /* No other node is indexable, so it should have been handled by
291 lto_output_tree. */
292 gcc_unreachable ();
297 /* Return true if EXPR is a tree node that can be written to disk. */
299 static inline bool
300 lto_is_streamable (tree expr)
302 enum tree_code code = TREE_CODE (expr);
304 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
305 name version in lto_output_tree_ref (see output_ssa_names). */
306 return !is_lang_specific (expr)
307 && code != SSA_NAME
308 && code != CALL_EXPR
309 && code != LANG_TYPE
310 && code != MODIFY_EXPR
311 && code != INIT_EXPR
312 && code != TARGET_EXPR
313 && code != BIND_EXPR
314 && code != WITH_CLEANUP_EXPR
315 && code != STATEMENT_LIST
316 && (code == CASE_LABEL_EXPR
317 || code == DECL_EXPR
318 || TREE_CODE_CLASS (code) != tcc_statement);
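/* The rejected codes are GENERIC or front-end constructs; by the time we
   stream function bodies they should have been lowered to GIMPLE statements,
   which are written by the gimple streamer rather than as trees.  */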
322 /* For EXPR, look up and return what we want to stream as its DECL_INITIAL.  */
324 static tree
325 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
327 gcc_checking_assert (DECL_P (expr)
328 && TREE_CODE (expr) != FUNCTION_DECL
329 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
331 /* Handle DECL_INITIAL for symbols. */
332 tree initial = DECL_INITIAL (expr);
333 if (TREE_CODE (expr) == VAR_DECL
334 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
335 && !DECL_IN_CONSTANT_POOL (expr)
336 && initial)
338 varpool_node *vnode;
339 /* Extra section needs about 30 bytes; do not produce it for simple
340 scalar values. */
341 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
342 || !(vnode = varpool_node::get (expr))
343 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
344 initial = error_mark_node;
347 return initial;
351 /* Write a physical representation of tree node EXPR to output block
352 OB. If REF_P is true, the leaves of EXPR are emitted as references
353    via lto_output_tree_ref.  */
356 static void
357 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
359 /* Pack all the non-pointer fields in EXPR into a bitpack and write
360 the resulting bitpack. */
361 streamer_write_tree_bitfields (ob, expr);
363 /* Write all the pointer fields in EXPR. */
364 streamer_write_tree_body (ob, expr, ref_p);
366 /* Write any LTO-specific data to OB. */
367 if (DECL_P (expr)
368 && TREE_CODE (expr) != FUNCTION_DECL
369 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
371 /* Handle DECL_INITIAL for symbols. */
372 tree initial = get_symbol_initial_value
373 (ob->decl_state->symtab_node_encoder, expr);
374 stream_write_tree (ob, initial, ref_p);
378 /* Write a physical representation of tree node EXPR to output block
379 OB. If REF_P is true, the leaves of EXPR are emitted as references
380    via lto_output_tree_ref.  */
383 static void
384 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
386 if (!lto_is_streamable (expr))
387 internal_error ("tree code %qs is not supported in LTO streams",
388 get_tree_code_name (TREE_CODE (expr)));
390 /* Write the header, containing everything needed to materialize
391 EXPR on the reading side. */
392 streamer_write_tree_header (ob, expr);
394 lto_write_tree_1 (ob, expr, ref_p);
396 /* Mark the end of EXPR. */
397 streamer_write_zero (ob);
400 /* Emit the physical representation of tree node EXPR to output block OB,
401 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
402 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
404 static void
405 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
406 bool ref_p, bool this_ref_p)
408 unsigned ix;
410 gcc_checking_assert (expr != NULL_TREE
411 && !(this_ref_p && tree_is_indexable (expr)));
413 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
414 expr, hash, &ix);
415 gcc_assert (!exists_p);
416 if (streamer_handle_as_builtin_p (expr))
418 /* MD and NORMAL builtins do not need to be written out
419 completely as they are always instantiated by the
420 compiler on startup. The only builtins that need to
421 be written out are BUILT_IN_FRONTEND. For all other
422 builtins, we simply write the class and code. */
423 streamer_write_builtin (ob, expr);
425 else if (TREE_CODE (expr) == INTEGER_CST
426 && !TREE_OVERFLOW (expr))
428 /* Shared INTEGER_CST nodes are special because they need their
429 original type to be materialized by the reader (to implement
430 TYPE_CACHED_VALUES). */
431 streamer_write_integer_cst (ob, expr, ref_p);
433 else
435 /* This is the first time we see EXPR, write its fields
436 to OB. */
437 lto_write_tree (ob, expr, ref_p);
441 class DFS
443 public:
444 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
445 bool single_p);
446 ~DFS ();
448 struct scc_entry
450 tree t;
451 hashval_t hash;
453 vec<scc_entry> sccstack;
455 private:
456 struct sccs
458 unsigned int dfsnum;
459 unsigned int low;
461 struct worklist
463 tree expr;
464 sccs *from_state;
465 sccs *cstate;
466 bool ref_p;
467 bool this_ref_p;
470 static int scc_entry_compare (const void *, const void *);
472 void DFS_write_tree_body (struct output_block *ob,
473 tree expr, sccs *expr_state, bool ref_p);
475 void DFS_write_tree (struct output_block *ob, sccs *from_state,
476 tree expr, bool ref_p, bool this_ref_p);
478 hashval_t
479 hash_scc (struct output_block *ob, unsigned first, unsigned size,
480 bool ref_p, bool this_ref_p);
482 hash_map<tree, sccs *> sccstate;
483 vec<worklist> worklist_vec;
484 struct obstack sccstate_obstack;
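/* The walk is iterative rather than recursive: worklist_vec acts as the
   explicit recursion stack, sccstate maps each visited tree to its Tarjan
   state (dfsnum/low), and sccstack collects the members of the SCC that is
   currently being formed.  */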
487 /* Emit the physical representation of tree node EXPR to output block OB,
488 using depth-first search on the subgraph. If THIS_REF_P is true, the
489 leaves of EXPR are emitted as references via lto_output_tree_ref.
490 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
491 this is for a rewalk of a single leaf SCC. */
493 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
494 bool single_p)
496 unsigned int next_dfs_num = 1;
497 sccstack.create (0);
498 gcc_obstack_init (&sccstate_obstack);
499 worklist_vec = vNULL;
500 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
501 while (!worklist_vec.is_empty ())
503 worklist &w = worklist_vec.last ();
504 expr = w.expr;
505 sccs *from_state = w.from_state;
506 sccs *cstate = w.cstate;
507 ref_p = w.ref_p;
508 this_ref_p = w.this_ref_p;
509 if (cstate == NULL)
511 sccs **slot = &sccstate.get_or_insert (expr);
512 cstate = *slot;
513 if (cstate)
515 gcc_checking_assert (from_state);
516 if (cstate->dfsnum < from_state->dfsnum)
517 from_state->low = MIN (cstate->dfsnum, from_state->low);
518 worklist_vec.pop ();
519 continue;
522 scc_entry e = { expr, 0 };
523 /* Not yet visited. DFS recurse and push it onto the stack. */
524 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
525 sccstack.safe_push (e);
526 cstate->dfsnum = next_dfs_num++;
527 cstate->low = cstate->dfsnum;
528 w.cstate = cstate;
530 if (streamer_handle_as_builtin_p (expr))
532 else if (TREE_CODE (expr) == INTEGER_CST
533 && !TREE_OVERFLOW (expr))
534 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
535 else
537 DFS_write_tree_body (ob, expr, cstate, ref_p);
539 /* Walk any LTO-specific edges. */
540 if (DECL_P (expr)
541 && TREE_CODE (expr) != FUNCTION_DECL
542 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
544 /* Handle DECL_INITIAL for symbols. */
545 tree initial
546 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
547 expr);
548 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
551 continue;
554 /* See if we found an SCC. */
555 if (cstate->low == cstate->dfsnum)
557 unsigned first, size;
558 tree x;
560 /* If we are re-walking a single leaf SCC just pop it,
561 let earlier worklist item access the sccstack. */
562 if (single_p)
564 worklist_vec.pop ();
565 continue;
568 /* Pop the SCC and compute its size. */
569 first = sccstack.length ();
572 x = sccstack[--first].t;
574 while (x != expr);
575 size = sccstack.length () - first;
577 /* No need to compute hashes for LTRANS units, we don't perform
578 any merging there. */
579 hashval_t scc_hash = 0;
580 unsigned scc_entry_len = 0;
581 if (!flag_wpa)
583 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
585 /* Put the entries with the least number of collisions first. */
586 unsigned entry_start = 0;
587 scc_entry_len = size + 1;
588 for (unsigned i = 0; i < size;)
590 unsigned from = i;
591 for (i = i + 1; i < size
592 && (sccstack[first + i].hash
593 == sccstack[first + from].hash); ++i)
595 if (i - from < scc_entry_len)
597 scc_entry_len = i - from;
598 entry_start = from;
601 for (unsigned i = 0; i < scc_entry_len; ++i)
602 std::swap (sccstack[first + i],
603 sccstack[first + entry_start + i]);
605 if (scc_entry_len == 1)
606 ; /* We already sorted SCC deterministically in hash_scc. */
607 else
608 /* Check that we have only one SCC.
609 	       Naturally we may have conflicts if the hash function is not
610 	       strong enough.  Let's see how far this gets.  */
612 #ifdef ENABLE_CHECKING
613 gcc_unreachable ();
614 #endif
618 /* Write LTO_tree_scc. */
619 streamer_write_record_start (ob, LTO_tree_scc);
620 streamer_write_uhwi (ob, size);
621 streamer_write_uhwi (ob, scc_hash);
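/* The resulting record layout is: LTO_tree_scc, SIZE, SCC_HASH, then either
   the single member streamed inline (when SIZE == 1) or SCC_ENTRY_LEN
   followed by the headers and then the bodies of all members.  */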
623 /* Write size-1 SCCs without wrapping them inside SCC bundles.
624 All INTEGER_CSTs need to be handled this way as we need
625 their type to materialize them. Also builtins are handled
626 this way.
627 ??? We still wrap these in LTO_tree_scc so at the
628 input side we can properly identify the tree we want
629 	     to ultimately return.  */
630 if (size == 1)
631 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
632 else
634 /* Write the size of the SCC entry candidates. */
635 streamer_write_uhwi (ob, scc_entry_len);
637 /* Write all headers and populate the streamer cache. */
638 for (unsigned i = 0; i < size; ++i)
640 hashval_t hash = sccstack[first+i].hash;
641 tree t = sccstack[first+i].t;
642 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
643 t, hash, NULL);
644 gcc_assert (!exists_p);
646 if (!lto_is_streamable (t))
647 internal_error ("tree code %qs is not supported "
648 "in LTO streams",
649 get_tree_code_name (TREE_CODE (t)));
651 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
653 /* Write the header, containing everything needed to
654 materialize EXPR on the reading side. */
655 streamer_write_tree_header (ob, t);
658 /* Write the bitpacks and tree references. */
659 for (unsigned i = 0; i < size; ++i)
661 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
663 /* Mark the end of the tree. */
664 streamer_write_zero (ob);
668 /* Finally truncate the vector. */
669 sccstack.truncate (first);
671 if (from_state)
672 from_state->low = MIN (from_state->low, cstate->low);
673 worklist_vec.pop ();
674 continue;
677 gcc_checking_assert (from_state);
678 from_state->low = MIN (from_state->low, cstate->low);
679 if (cstate->dfsnum < from_state->dfsnum)
680 from_state->low = MIN (cstate->dfsnum, from_state->low);
681 worklist_vec.pop ();
683 worklist_vec.release ();
686 DFS::~DFS ()
688 sccstack.release ();
689 obstack_free (&sccstate_obstack, NULL);
692 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
693 DFS recurse for all tree edges originating from it. */
695 void
696 DFS::DFS_write_tree_body (struct output_block *ob,
697 tree expr, sccs *expr_state, bool ref_p)
699 #define DFS_follow_tree_edge(DEST) \
700 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
702 enum tree_code code;
704 code = TREE_CODE (expr);
706 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
708 if (TREE_CODE (expr) != IDENTIFIER_NODE)
709 DFS_follow_tree_edge (TREE_TYPE (expr));
712 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
714 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
715 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
718 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
720 DFS_follow_tree_edge (TREE_REALPART (expr));
721 DFS_follow_tree_edge (TREE_IMAGPART (expr));
724 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
726 /* Drop names that were created for anonymous entities. */
727 if (DECL_NAME (expr)
728 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
729 && anon_aggrname_p (DECL_NAME (expr)))
731 else
732 DFS_follow_tree_edge (DECL_NAME (expr));
733 DFS_follow_tree_edge (DECL_CONTEXT (expr));
736 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
738 DFS_follow_tree_edge (DECL_SIZE (expr));
739 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
741 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
742 special handling in LTO, it must be handled by streamer hooks. */
744 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
746 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
747 for early inlining so drop it on the floor instead of ICEing in
748 dwarf2out.c. */
750 if ((TREE_CODE (expr) == VAR_DECL
751 || TREE_CODE (expr) == PARM_DECL)
752 && DECL_HAS_VALUE_EXPR_P (expr))
753 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
754 if (TREE_CODE (expr) == VAR_DECL)
755 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
758 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
760 if (TREE_CODE (expr) == TYPE_DECL)
761 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
764 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
766 /* Make sure we don't inadvertently set the assembler name. */
767 if (DECL_ASSEMBLER_NAME_SET_P (expr))
768 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
771 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
773 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
774 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
775 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
776 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
777 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
780 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
782 DFS_follow_tree_edge (DECL_VINDEX (expr));
783 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
784 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
785 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
788 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
790 DFS_follow_tree_edge (TYPE_SIZE (expr));
791 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
792 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
793 DFS_follow_tree_edge (TYPE_NAME (expr));
794 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
795 reconstructed during fixup. */
796 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
797 during fixup. */
798 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
799 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
800 /* TYPE_CANONICAL is re-computed during type merging, so no need
801 to follow it here. */
802 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
805 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
807 if (TREE_CODE (expr) == ENUMERAL_TYPE)
808 DFS_follow_tree_edge (TYPE_VALUES (expr));
809 else if (TREE_CODE (expr) == ARRAY_TYPE)
810 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
811 else if (RECORD_OR_UNION_TYPE_P (expr))
812 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
813 DFS_follow_tree_edge (t);
814 else if (TREE_CODE (expr) == FUNCTION_TYPE
815 || TREE_CODE (expr) == METHOD_TYPE)
816 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
818 if (!POINTER_TYPE_P (expr))
819 DFS_follow_tree_edge (TYPE_MINVAL (expr));
820 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
821 if (RECORD_OR_UNION_TYPE_P (expr))
822 DFS_follow_tree_edge (TYPE_BINFO (expr));
825 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
827 DFS_follow_tree_edge (TREE_PURPOSE (expr));
828 DFS_follow_tree_edge (TREE_VALUE (expr));
829 DFS_follow_tree_edge (TREE_CHAIN (expr));
832 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
834 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
835 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
838 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
840 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
841 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
842 DFS_follow_tree_edge (TREE_BLOCK (expr));
845 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
847 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
848 if (VAR_OR_FUNCTION_DECL_P (t)
849 && DECL_EXTERNAL (t))
850 /* We have to stream externals in the block chain as
851 non-references. See also
852 tree-streamer-out.c:streamer_write_chain. */
853 DFS_write_tree (ob, expr_state, t, ref_p, false);
854 else
855 DFS_follow_tree_edge (t);
857 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
859 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
860 handle - those that represent inlined function scopes.
861 	 For the rest, drop them on the floor instead of ICEing
862 in dwarf2out.c. */
863 if (inlined_function_outer_scope_p (expr))
865 tree ultimate_origin = block_ultimate_origin (expr);
866 DFS_follow_tree_edge (ultimate_origin);
868 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
869 information for early inlined BLOCKs so drop it on the floor instead
870 of ICEing in dwarf2out.c. */
872       /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
873 streaming time. */
875 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
876 list is re-constructed from BLOCK_SUPERCONTEXT. */
879 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
881 unsigned i;
882 tree t;
884 /* Note that the number of BINFO slots has already been emitted in
885 EXPR's header (see streamer_write_tree_header) because this length
886 is needed to build the empty BINFO node on the reader side. */
887 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
888 DFS_follow_tree_edge (t);
889 DFS_follow_tree_edge (BINFO_OFFSET (expr));
890 DFS_follow_tree_edge (BINFO_VTABLE (expr));
891 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
893 /* The number of BINFO_BASE_ACCESSES has already been emitted in
894 EXPR's bitfield section. */
895 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
896 DFS_follow_tree_edge (t);
898 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
899 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
902 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
904 unsigned i;
905 tree index, value;
907 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
909 DFS_follow_tree_edge (index);
910 DFS_follow_tree_edge (value);
914 if (code == OMP_CLAUSE)
916 int i;
917 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
918 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
919 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
922 #undef DFS_follow_tree_edge
925 /* Return a hash value for the tree T.
926 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
927    may hold hash values of trees inside the current SCC.  */
929 static hashval_t
930 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
932 inchash::hash hstate;
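/* The visit() macro below mixes the hash of a referenced tree into HSTATE:
   the cached hash for trees streamed in earlier SCCs, the hash recorded in
   MAP for trees inside the current SCC, or a constant otherwise.  */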
934 #define visit(SIBLING) \
935 do { \
936 unsigned ix; \
937 if (!SIBLING) \
938 hstate.add_int (0); \
939 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
940 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
941 else if (map) \
942 hstate.add_int (*map->get (SIBLING)); \
943 else \
944 hstate.add_int (1); \
945 } while (0)
947 /* Hash TS_BASE. */
948 enum tree_code code = TREE_CODE (t);
949 hstate.add_int (code);
950 if (!TYPE_P (t))
952 hstate.add_flag (TREE_SIDE_EFFECTS (t));
953 hstate.add_flag (TREE_CONSTANT (t));
954 hstate.add_flag (TREE_READONLY (t));
955 hstate.add_flag (TREE_PUBLIC (t));
957 hstate.add_flag (TREE_ADDRESSABLE (t));
958 hstate.add_flag (TREE_THIS_VOLATILE (t));
959 if (DECL_P (t))
960 hstate.add_flag (DECL_UNSIGNED (t));
961 else if (TYPE_P (t))
962 hstate.add_flag (TYPE_UNSIGNED (t));
963 if (TYPE_P (t))
964 hstate.add_flag (TYPE_ARTIFICIAL (t));
965 else
966 hstate.add_flag (TREE_NO_WARNING (t));
967 hstate.add_flag (TREE_NOTHROW (t));
968 hstate.add_flag (TREE_STATIC (t));
969 hstate.add_flag (TREE_PROTECTED (t));
970 hstate.add_flag (TREE_DEPRECATED (t));
971 if (code != TREE_BINFO)
972 hstate.add_flag (TREE_PRIVATE (t));
973 if (TYPE_P (t))
975 hstate.add_flag (TYPE_SATURATING (t));
976 hstate.add_flag (TYPE_ADDR_SPACE (t));
978 else if (code == SSA_NAME)
979 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
980 hstate.commit_flag ();
982 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
984 int i;
985 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
986 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
987 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
988 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
991 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
993 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
994 hstate.add_flag (r.cl);
995 hstate.add_flag (r.sign);
996 hstate.add_flag (r.signalling);
997 hstate.add_flag (r.canonical);
998 hstate.commit_flag ();
999 hstate.add_int (r.uexp);
1000 hstate.add (r.sig, sizeof (r.sig));
1003 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1005 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1006 hstate.add_int (f.mode);
1007 hstate.add_int (f.data.low);
1008 hstate.add_int (f.data.high);
1011 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1013 hstate.add_wide_int (DECL_MODE (t));
1014 hstate.add_flag (DECL_NONLOCAL (t));
1015 hstate.add_flag (DECL_VIRTUAL_P (t));
1016 hstate.add_flag (DECL_IGNORED_P (t));
1017 hstate.add_flag (DECL_ABSTRACT_P (t));
1018 hstate.add_flag (DECL_ARTIFICIAL (t));
1019 hstate.add_flag (DECL_USER_ALIGN (t));
1020 hstate.add_flag (DECL_PRESERVE_P (t));
1021 hstate.add_flag (DECL_EXTERNAL (t));
1022 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1023 hstate.commit_flag ();
1024 hstate.add_int (DECL_ALIGN (t));
1025 if (code == LABEL_DECL)
1027 hstate.add_int (EH_LANDING_PAD_NR (t));
1028 hstate.add_int (LABEL_DECL_UID (t));
1030 else if (code == FIELD_DECL)
1032 hstate.add_flag (DECL_PACKED (t));
1033 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1034 hstate.add_int (DECL_OFFSET_ALIGN (t));
1036 else if (code == VAR_DECL)
1038 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1039 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1041 if (code == RESULT_DECL
1042 || code == PARM_DECL
1043 || code == VAR_DECL)
1045 hstate.add_flag (DECL_BY_REFERENCE (t));
1046 if (code == VAR_DECL
1047 || code == PARM_DECL)
1048 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1050 hstate.commit_flag ();
1053 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1054 hstate.add_int (DECL_REGISTER (t));
1056 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1058 hstate.add_flag (DECL_COMMON (t));
1059 hstate.add_flag (DECL_DLLIMPORT_P (t));
1060 hstate.add_flag (DECL_WEAK (t));
1061 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1062 hstate.add_flag (DECL_COMDAT (t));
1063 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1064 hstate.add_int (DECL_VISIBILITY (t));
1065 if (code == VAR_DECL)
1067 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1068 hstate.add_flag (DECL_HARD_REGISTER (t));
1069 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1071 if (TREE_CODE (t) == FUNCTION_DECL)
1073 hstate.add_flag (DECL_FINAL_P (t));
1074 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1075 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1077 hstate.commit_flag ();
1080 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1082 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1083 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1084 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1085 hstate.add_flag (DECL_UNINLINABLE (t));
1086 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1087 hstate.add_flag (DECL_IS_NOVOPS (t));
1088 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1089 hstate.add_flag (DECL_IS_MALLOC (t));
1090 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1091 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1092 hstate.add_flag (DECL_STATIC_CHAIN (t));
1093 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1094 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1095 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1096 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1097 hstate.add_flag (DECL_PURE_P (t));
1098 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1099 hstate.commit_flag ();
1100 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1101 hstate.add_int (DECL_FUNCTION_CODE (t));
1104 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1106 hstate.add_wide_int (TYPE_MODE (t));
1107 hstate.add_flag (TYPE_STRING_FLAG (t));
1108       /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1109 no streaming. */
1110 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1111 hstate.add_flag (TYPE_PACKED (t));
1112 hstate.add_flag (TYPE_RESTRICT (t));
1113 hstate.add_flag (TYPE_USER_ALIGN (t));
1114 hstate.add_flag (TYPE_READONLY (t));
1115 if (RECORD_OR_UNION_TYPE_P (t))
1117 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1118 hstate.add_flag (TYPE_FINAL_P (t));
1120 else if (code == ARRAY_TYPE)
1121 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1122 hstate.commit_flag ();
1123 hstate.add_int (TYPE_PRECISION (t));
1124 hstate.add_int (TYPE_ALIGN (t));
1125 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
1126 || (!in_lto_p
1127 && get_alias_set (t) == 0))
1128 ? 0 : -1);
1131 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1132 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1133 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1135 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1136 /* We don't stream these when passing things to a different target. */
1137 && !lto_stream_offload_p)
1138 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1140 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1141 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1143 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1144 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1146 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1147 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1149 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1151 if (code != IDENTIFIER_NODE)
1152 visit (TREE_TYPE (t));
1155 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1156 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1157 visit (VECTOR_CST_ELT (t, i));
1159 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1161 visit (TREE_REALPART (t));
1162 visit (TREE_IMAGPART (t));
1165 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1167 /* Drop names that were created for anonymous entities. */
1168 if (DECL_NAME (t)
1169 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1170 && anon_aggrname_p (DECL_NAME (t)))
1172 else
1173 visit (DECL_NAME (t));
1174 if (DECL_FILE_SCOPE_P (t))
1176 else
1177 visit (DECL_CONTEXT (t));
1180 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1182 visit (DECL_SIZE (t));
1183 visit (DECL_SIZE_UNIT (t));
1184 visit (DECL_ATTRIBUTES (t));
1185 if ((code == VAR_DECL
1186 || code == PARM_DECL)
1187 && DECL_HAS_VALUE_EXPR_P (t))
1188 visit (DECL_VALUE_EXPR (t));
1189 if (code == VAR_DECL
1190 && DECL_HAS_DEBUG_EXPR_P (t))
1191 visit (DECL_DEBUG_EXPR (t));
1192 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1193 be able to call get_symbol_initial_value. */
1196 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1198 if (code == TYPE_DECL)
1199 visit (DECL_ORIGINAL_TYPE (t));
1202 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1204 if (DECL_ASSEMBLER_NAME_SET_P (t))
1205 visit (DECL_ASSEMBLER_NAME (t));
1208 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1210 visit (DECL_FIELD_OFFSET (t));
1211 visit (DECL_BIT_FIELD_TYPE (t));
1212 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1213 visit (DECL_FIELD_BIT_OFFSET (t));
1214 visit (DECL_FCONTEXT (t));
1217 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1219 visit (DECL_VINDEX (t));
1220 visit (DECL_FUNCTION_PERSONALITY (t));
1221 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1222 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1225 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1227 visit (TYPE_SIZE (t));
1228 visit (TYPE_SIZE_UNIT (t));
1229 visit (TYPE_ATTRIBUTES (t));
1230 visit (TYPE_NAME (t));
1231 visit (TYPE_MAIN_VARIANT (t));
1232 if (TYPE_FILE_SCOPE_P (t))
1234 else
1235 visit (TYPE_CONTEXT (t));
1236 visit (TYPE_STUB_DECL (t));
1239 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1241 if (code == ENUMERAL_TYPE)
1242 visit (TYPE_VALUES (t));
1243 else if (code == ARRAY_TYPE)
1244 visit (TYPE_DOMAIN (t));
1245 else if (RECORD_OR_UNION_TYPE_P (t))
1246 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1247 visit (f);
1248 else if (code == FUNCTION_TYPE
1249 || code == METHOD_TYPE)
1250 visit (TYPE_ARG_TYPES (t));
1251 if (!POINTER_TYPE_P (t))
1252 visit (TYPE_MINVAL (t));
1253 visit (TYPE_MAXVAL (t));
1254 if (RECORD_OR_UNION_TYPE_P (t))
1255 visit (TYPE_BINFO (t));
1258 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1260 visit (TREE_PURPOSE (t));
1261 visit (TREE_VALUE (t));
1262 visit (TREE_CHAIN (t));
1265 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1266 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1267 visit (TREE_VEC_ELT (t, i));
1269 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1271 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1272 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1273 visit (TREE_OPERAND (t, i));
1276 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1278 unsigned i;
1279 tree b;
1280 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1281 visit (b);
1282 visit (BINFO_OFFSET (t));
1283 visit (BINFO_VTABLE (t));
1284 visit (BINFO_VPTR_FIELD (t));
1285 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1286 visit (b);
1287 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1288 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1291 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1293 unsigned i;
1294 tree index, value;
1295 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1296 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1298 visit (index);
1299 visit (value);
1303 if (code == OMP_CLAUSE)
1305 int i;
1306 HOST_WIDE_INT val;
1308 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1309 switch (OMP_CLAUSE_CODE (t))
1311 case OMP_CLAUSE_DEFAULT:
1312 val = OMP_CLAUSE_DEFAULT_KIND (t);
1313 break;
1314 case OMP_CLAUSE_SCHEDULE:
1315 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1316 break;
1317 case OMP_CLAUSE_DEPEND:
1318 val = OMP_CLAUSE_DEPEND_KIND (t);
1319 break;
1320 case OMP_CLAUSE_MAP:
1321 val = OMP_CLAUSE_MAP_KIND (t);
1322 break;
1323 case OMP_CLAUSE_PROC_BIND:
1324 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1325 break;
1326 case OMP_CLAUSE_REDUCTION:
1327 val = OMP_CLAUSE_REDUCTION_CODE (t);
1328 break;
1329 default:
1330 val = 0;
1331 break;
1333 hstate.add_wide_int (val);
1334 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1335 visit (OMP_CLAUSE_OPERAND (t, i));
1336 visit (OMP_CLAUSE_CHAIN (t));
1339 return hstate.end ();
1341 #undef visit
1344 /* Compare two SCC entries by their hash value for qsorting them. */
1347 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1349 const scc_entry *p1 = (const scc_entry *) p1_;
1350 const scc_entry *p2 = (const scc_entry *) p2_;
1351 if (p1->hash < p2->hash)
1352 return -1;
1353 else if (p1->hash > p2->hash)
1354 return 1;
1355 return 0;
1358 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1359 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1361 hashval_t
1362 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1363 bool ref_p, bool this_ref_p)
1365 unsigned int last_classes = 0, iterations = 0;
1367 /* Compute hash values for the SCC members. */
1368 for (unsigned i = 0; i < size; ++i)
1369 sccstack[first+i].hash
1370 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1372 if (size == 1)
1373 return sccstack[first].hash;
1375 /* We aim to get unique hash for every tree within SCC and compute hash value
1376 of the whole SCC by combining all values together in a stable (entry-point
1377 independent) order. This guarantees that the same SCC regions within
1378 different translation units will get the same hash values and therefore
1379 will be merged at WPA time.
1381 Often the hashes are already unique. In that case we compute the SCC hash
1382 by combining individual hash values in an increasing order.
1384 If there are duplicates, we seek at least one tree with unique hash (and
1385 pick one with minimal hash and this property). Then we obtain a stable
1386 order by DFS walk starting from this unique tree and then use the index
1387 within this order to make individual hash values unique.
1389 If there is no tree with unique hash, we iteratively propagate the hash
1390 values across the internal edges of SCC. This usually quickly leads
1391 to unique hashes. Consider, for example, an SCC containing two pointers
1392 that are identical except for the types they point to and assume that
1393 these types are also part of the SCC. The propagation will add the
1394 points-to type information into their hash values. */
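/* For instance, when two pointer types in the SCC hash identically but
   point to different members of the SCC, one propagation round mixes the
   pointed-to hashes into the pointers' hashes, which usually makes them
   distinct and yields a unique entry point.  */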
1397 /* Sort the SCC so we can easily check for uniqueness. */
1398 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1400 unsigned int classes = 1;
1401 int firstunique = -1;
1403 /* Find the tree with lowest unique hash (if it exists) and compute
1404 the number of equivalence classes. */
1405 if (sccstack[first].hash != sccstack[first+1].hash)
1406 firstunique = 0;
1407 for (unsigned i = 1; i < size; ++i)
1408 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1410 classes++;
1411 if (firstunique == -1
1412 && (i == size - 1
1413 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1414 firstunique = i;
1417 /* If we found a tree with unique hash, stop the iteration. */
1418 if (firstunique != -1
1419 /* Also terminate if we run out of iterations or if the number of
1420 equivalence classes is no longer increasing.
1421 For example a cyclic list of trees that are all equivalent will
1422 never have unique entry point; we however do not build such SCCs
1423 in our IL. */
1424 || classes <= last_classes || iterations > 16)
1426 hashval_t scc_hash;
1428 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1429 starting from FIRSTUNIQUE to obtain a stable order. */
1430 if (classes != size && firstunique != -1)
1432 hash_map <tree, hashval_t> map(size*2);
1434 /* Store hash values into a map, so we can associate them with
1435 the reordered SCC. */
1436 for (unsigned i = 0; i < size; ++i)
1437 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1439 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1440 true);
1441 gcc_assert (again.sccstack.length () == size);
1443 memcpy (sccstack.address () + first,
1444 again.sccstack.address (),
1445 sizeof (scc_entry) * size);
1447 /* Update hash values of individual members by hashing in the
1448 index within the stable order. This ensures uniqueness.
1449 Also compute the SCC hash by mixing in all hash values in
1450 the stable order we obtained. */
1451 sccstack[first].hash = *map.get (sccstack[first].t);
1452 scc_hash = sccstack[first].hash;
1453 for (unsigned i = 1; i < size; ++i)
1455 sccstack[first+i].hash
1456 = iterative_hash_hashval_t (i,
1457 *map.get (sccstack[first+i].t));
1458 scc_hash
1459 = iterative_hash_hashval_t (scc_hash,
1460 sccstack[first+i].hash);
1463       /* If we got a unique hash value for each tree, then the sort already
1464 	 ensured an entry-point independent order.  Only compute the final
1465 SCC hash.
1467 If we failed to find the unique entry point, we go by the same
1468 route. We will eventually introduce unwanted hash conflicts. */
1469 else
1471 scc_hash = sccstack[first].hash;
1472 for (unsigned i = 1; i < size; ++i)
1473 scc_hash
1474 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1476 	  /* We cannot guarantee that the hashes will never conflict in a way
1477 	     that makes it impossible to find a unique entry point.  This
1478 	     however should be an extremely rare case.  ICE for now so possible
1479 	     issues are found and evaluated.  */
1480 gcc_checking_assert (classes == size);
1483 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1484 hash into the hash of each element. */
1485 for (unsigned i = 0; i < size; ++i)
1486 sccstack[first+i].hash
1487 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1488 return scc_hash;
1491 last_classes = classes;
1492 iterations++;
1494 /* We failed to identify the entry point; propagate hash values across
1495 the edges. */
1496 hash_map <tree, hashval_t> map(size*2);
1498 for (unsigned i = 0; i < size; ++i)
1499 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1501 for (unsigned i = 0; i < size; i++)
1502 sccstack[first+i].hash
1503 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1505 while (true);
1508 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1509 already in the streamer cache. Main routine called for
1510 each visit of EXPR. */
1512 void
1513 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1514 tree expr, bool ref_p, bool this_ref_p)
1516 /* Handle special cases. */
1517 if (expr == NULL_TREE)
1518 return;
1520 /* Do not DFS walk into indexable trees. */
1521 if (this_ref_p && tree_is_indexable (expr))
1522 return;
1524 /* Check if we already streamed EXPR. */
1525 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1526 return;
1528 worklist w;
1529 w.expr = expr;
1530 w.from_state = from_state;
1531 w.cstate = NULL;
1532 w.ref_p = ref_p;
1533 w.this_ref_p = this_ref_p;
1534 worklist_vec.safe_push (w);
1538 /* Emit the physical representation of tree node EXPR to output block OB.
1539 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1540 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1542 void
1543 lto_output_tree (struct output_block *ob, tree expr,
1544 bool ref_p, bool this_ref_p)
1546 unsigned ix;
1547 bool existed_p;
1549 if (expr == NULL_TREE)
1551 streamer_write_record_start (ob, LTO_null);
1552 return;
1555 if (this_ref_p && tree_is_indexable (expr))
1557 lto_output_tree_ref (ob, expr);
1558 return;
1561 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1562 if (existed_p)
1564 /* If a node has already been streamed out, make sure that
1565 we don't write it more than once. Otherwise, the reader
1566 will instantiate two different nodes for the same object. */
1567 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1568 streamer_write_uhwi (ob, ix);
1569 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1570 lto_tree_code_to_tag (TREE_CODE (expr)));
1571 lto_stats.num_pickle_refs_output++;
1573 else
1575 /* This is the first time we see EXPR, write all reachable
1576 trees to OB. */
1577 static bool in_dfs_walk;
1579       /* Protect against recursion, which would mean a disconnect between
1580 	 the tree edges we walk in the DFS walk and the edges
1581 	 we stream out.  */
1582 gcc_assert (!in_dfs_walk);
1584 /* Start the DFS walk. */
1585 /* Save ob state ... */
1586 /* let's see ... */
1587 in_dfs_walk = true;
1588 DFS (ob, expr, ref_p, this_ref_p, false);
1589 in_dfs_walk = false;
1591 /* Finally append a reference to the tree we were writing.
1592 ??? If expr ended up as a singleton we could have
1593 inlined it here and avoid outputting a reference. */
1594 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1595 gcc_assert (existed_p);
1596 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1597 streamer_write_uhwi (ob, ix);
1598 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1599 lto_tree_code_to_tag (TREE_CODE (expr)));
1600 lto_stats.num_pickle_refs_output++;
1605 /* Output to OB a list of try/catch handlers starting with FIRST. */
1607 static void
1608 output_eh_try_list (struct output_block *ob, eh_catch first)
1610 eh_catch n;
1612 for (n = first; n; n = n->next_catch)
1614 streamer_write_record_start (ob, LTO_eh_catch);
1615 stream_write_tree (ob, n->type_list, true);
1616 stream_write_tree (ob, n->filter_list, true);
1617 stream_write_tree (ob, n->label, true);
1620 streamer_write_record_start (ob, LTO_null);
1624 /* Output EH region R to OB.  R's slot index in the EH region array is
1625    streamed so that the reader can detect EH region sharing.  */
1628 static void
1629 output_eh_region (struct output_block *ob, eh_region r)
1631 enum LTO_tags tag;
1633 if (r == NULL)
1635 streamer_write_record_start (ob, LTO_null);
1636 return;
1639 if (r->type == ERT_CLEANUP)
1640 tag = LTO_ert_cleanup;
1641 else if (r->type == ERT_TRY)
1642 tag = LTO_ert_try;
1643 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1644 tag = LTO_ert_allowed_exceptions;
1645 else if (r->type == ERT_MUST_NOT_THROW)
1646 tag = LTO_ert_must_not_throw;
1647 else
1648 gcc_unreachable ();
1650 streamer_write_record_start (ob, tag);
1651 streamer_write_hwi (ob, r->index);
1653 if (r->outer)
1654 streamer_write_hwi (ob, r->outer->index);
1655 else
1656 streamer_write_zero (ob);
1658 if (r->inner)
1659 streamer_write_hwi (ob, r->inner->index);
1660 else
1661 streamer_write_zero (ob);
1663 if (r->next_peer)
1664 streamer_write_hwi (ob, r->next_peer->index);
1665 else
1666 streamer_write_zero (ob);
1668 if (r->type == ERT_TRY)
1670 output_eh_try_list (ob, r->u.eh_try.first_catch);
1672 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1674 stream_write_tree (ob, r->u.allowed.type_list, true);
1675 stream_write_tree (ob, r->u.allowed.label, true);
1676 streamer_write_uhwi (ob, r->u.allowed.filter);
1678 else if (r->type == ERT_MUST_NOT_THROW)
1680 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1681 bitpack_d bp = bitpack_create (ob->main_stream);
1682 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1683 streamer_write_bitpack (&bp);
1686 if (r->landing_pads)
1687 streamer_write_hwi (ob, r->landing_pads->index);
1688 else
1689 streamer_write_zero (ob);
1693 /* Output landing pad LP to OB. */
1695 static void
1696 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1698 if (lp == NULL)
1700 streamer_write_record_start (ob, LTO_null);
1701 return;
1704 streamer_write_record_start (ob, LTO_eh_landing_pad);
1705 streamer_write_hwi (ob, lp->index);
1706 if (lp->next_lp)
1707 streamer_write_hwi (ob, lp->next_lp->index);
1708 else
1709 streamer_write_zero (ob);
1711 if (lp->region)
1712 streamer_write_hwi (ob, lp->region->index);
1713 else
1714 streamer_write_zero (ob);
1716 stream_write_tree (ob, lp->post_landing_pad, true);
1720 /* Output the existing eh_table to OB. */
1722 static void
1723 output_eh_regions (struct output_block *ob, struct function *fn)
1725 if (fn->eh && fn->eh->region_tree)
1727 unsigned i;
1728 eh_region eh;
1729 eh_landing_pad lp;
1730 tree ttype;
1732 streamer_write_record_start (ob, LTO_eh_table);
1734 /* Emit the index of the root of the EH region tree. */
1735 streamer_write_hwi (ob, fn->eh->region_tree->index);
1737 /* Emit all the EH regions in the region array. */
1738 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1739 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1740 output_eh_region (ob, eh);
1742 /* Emit all landing pads. */
1743 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1744 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1745 output_eh_lp (ob, lp);
1747 /* Emit all the runtime type data. */
1748 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1749 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1750 stream_write_tree (ob, ttype, true);
1752 /* Emit the table of action chains. */
1753 if (targetm.arm_eabi_unwinder)
1755 tree t;
1756 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1757 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1758 stream_write_tree (ob, t, true);
1760 else
1762 uchar c;
1763 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1764 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1765 streamer_write_char_stream (ob->main_stream, c);
1769 /* The LTO_null either terminates the record or indicates that there
1770 are no eh_records at all. */
1771 streamer_write_record_start (ob, LTO_null);
1775 /* Output all of the active ssa names to the ssa_names stream. */
1777 static void
1778 output_ssa_names (struct output_block *ob, struct function *fn)
1780 unsigned int i, len;
1782 len = vec_safe_length (SSANAMES (fn));
1783 streamer_write_uhwi (ob, len);
1785 for (i = 1; i < len; i++)
1787 tree ptr = (*SSANAMES (fn))[i];
1789 if (ptr == NULL_TREE
1790 || SSA_NAME_IN_FREE_LIST (ptr)
1791 || virtual_operand_p (ptr))
1792 continue;
1794 streamer_write_uhwi (ob, i);
1795 streamer_write_char_stream (ob->main_stream,
1796 SSA_NAME_IS_DEFAULT_DEF (ptr));
1797 if (SSA_NAME_VAR (ptr))
1798 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1799 else
1800 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1801 stream_write_tree (ob, TREE_TYPE (ptr), true);
1804 streamer_write_zero (ob);
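/* SSA name indices start at 1, so the trailing zero written above
   unambiguously marks the end of the list for the reader.  */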
1808 /* Output a wide-int. */
1810 static void
1811 streamer_write_wi (struct output_block *ob,
1812 const widest_int &w)
1814 int len = w.get_len ();
1816 streamer_write_uhwi (ob, w.get_precision ());
1817 streamer_write_uhwi (ob, len);
1818 for (int i = 0; i < len; i++)
1819 streamer_write_hwi (ob, w.elt (i));
1823 /* Output the cfg. */
1825 static void
1826 output_cfg (struct output_block *ob, struct function *fn)
1828 struct lto_output_stream *tmp_stream = ob->main_stream;
1829 basic_block bb;
1831 ob->main_stream = ob->cfg_stream;
1833 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1834 profile_status_for_fn (fn));
1836 /* Output the number of the highest basic block. */
1837 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1839 FOR_ALL_BB_FN (bb, fn)
1841 edge_iterator ei;
1842 edge e;
1844 streamer_write_hwi (ob, bb->index);
1846 /* Output the successors and the edge flags. */
1847 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1848 FOR_EACH_EDGE (e, ei, bb->succs)
1850 streamer_write_uhwi (ob, e->dest->index);
1851 streamer_write_hwi (ob, e->probability);
1852 streamer_write_gcov_count (ob, e->count);
1853 streamer_write_uhwi (ob, e->flags);
1857 streamer_write_hwi (ob, -1);
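/* The -1 above terminates the per-block records.  Next stream the chain of
   next_bb indices, again terminated by -1, so the reader can restore the
   original block order.  */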
1859 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1860 while (bb->next_bb)
1862 streamer_write_hwi (ob, bb->next_bb->index);
1863 bb = bb->next_bb;
1866 streamer_write_hwi (ob, -1);
1868 /* ??? The cfgloop interface is tied to cfun. */
1869 gcc_assert (cfun == fn);
1871 /* Output the number of loops. */
1872 streamer_write_uhwi (ob, number_of_loops (fn));
1874 /* Output each loop, skipping the tree root which has number zero. */
1875 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1877 struct loop *loop = get_loop (fn, i);
1879 /* Write the index of the loop header. That's enough to rebuild
1880 the loop tree on the reader side. Stream -1 for an unused
1881 loop entry. */
1882 if (!loop)
1884 streamer_write_hwi (ob, -1);
1885 continue;
1887 else
1888 streamer_write_hwi (ob, loop->header->index);
1890 /* Write everything copy_loop_info copies. */
1891 streamer_write_enum (ob->main_stream,
1892 loop_estimation, EST_LAST, loop->estimate_state);
1893 streamer_write_hwi (ob, loop->any_upper_bound);
1894 if (loop->any_upper_bound)
1895 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1896 streamer_write_hwi (ob, loop->any_estimate);
1897 if (loop->any_estimate)
1898 streamer_write_wi (ob, loop->nb_iterations_estimate);
1900 /* Write OMP SIMD related info. */
1901 streamer_write_hwi (ob, loop->safelen);
1902 streamer_write_hwi (ob, loop->dont_vectorize);
1903 streamer_write_hwi (ob, loop->force_vectorize);
1904 stream_write_tree (ob, loop->simduid, true);
1907 ob->main_stream = tmp_stream;
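/* Note that main_stream is restored above because the CFG was written to its
   own stream, letting produce_asm record its size (header.cfg_size)
   separately in the section header.  */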
1911 /* Create the header in the file using OB. If the section type is for
1912 a function, set FN to the decl for that function. */
1914 void
1915 produce_asm (struct output_block *ob, tree fn)
1917 enum lto_section_type section_type = ob->section_type;
1918 struct lto_function_header header;
1919 char *section_name;
1921 if (section_type == LTO_section_function_body)
1923 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1924 section_name = lto_get_section_name (section_type, name, NULL);
1926 else
1927 section_name = lto_get_section_name (section_type, NULL, NULL);
1929 lto_begin_section (section_name, !flag_wpa);
1930 free (section_name);
1932 /* The entire header stream is computed here. */
1933 memset (&header, 0, sizeof (struct lto_function_header));
1935 /* Write the header. */
1936 header.major_version = LTO_major_version;
1937 header.minor_version = LTO_minor_version;
1939 if (section_type == LTO_section_function_body)
1940 header.cfg_size = ob->cfg_stream->total_size;
1941 header.main_size = ob->main_stream->total_size;
1942 header.string_size = ob->string_stream->total_size;
1943 lto_write_data (&header, sizeof header);
1945 /* Put all of the gimple and the string table out to the asm file as a
1946 block of text. */
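/* The streams are emitted back to back in this order: CFG (function
bodies only), main, then strings; the header above records only their sizes. */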
1947 if (section_type == LTO_section_function_body)
1948 lto_write_stream (ob->cfg_stream);
1949 lto_write_stream (ob->main_stream);
1950 lto_write_stream (ob->string_stream);
1952 lto_end_section ();
1956 /* Output the base body of struct function FN using output block OB. */
1958 static void
1959 output_struct_function_base (struct output_block *ob, struct function *fn)
1961 struct bitpack_d bp;
1962 unsigned i;
1963 tree t;
1965 /* Output the static chain and non-local goto save area. */
1966 stream_write_tree (ob, fn->static_chain_decl, true);
1967 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1969 /* Output all the local variables in the function. */
1970 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1971 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1972 stream_write_tree (ob, t, true);
1974 /* Output current IL state of the function. */
1975 streamer_write_uhwi (ob, fn->curr_properties);
1977 /* Write all the attributes for FN. */
1978 bp = bitpack_create (ob->main_stream);
1979 bp_pack_value (&bp, fn->is_thunk, 1);
1980 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1981 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1982 bp_pack_value (&bp, fn->returns_struct, 1);
1983 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1984 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1985 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1986 bp_pack_value (&bp, fn->after_inlining, 1);
1987 bp_pack_value (&bp, fn->stdarg, 1);
1988 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1989 bp_pack_value (&bp, fn->calls_alloca, 1);
1990 bp_pack_value (&bp, fn->calls_setjmp, 1);
1991 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
1992 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1993 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1994 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1995 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
1997 /* Output the function start and end loci. */
1998 stream_output_location (ob, &bp, fn->function_start_locus);
1999 stream_output_location (ob, &bp, fn->function_end_locus);
2001 streamer_write_bitpack (&bp);
2005 /* Output the body of function NODE->DECL. */
2007 static void
2008 output_function (struct cgraph_node *node)
2010 tree function;
2011 struct function *fn;
2012 basic_block bb;
2013 struct output_block *ob;
2015 function = node->decl;
2016 fn = DECL_STRUCT_FUNCTION (function);
2017 ob = create_output_block (LTO_section_function_body);
2019 clear_line_info (ob);
2020 ob->symbol = node;
2022 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2024 /* Set current_function_decl and cfun. */
2025 push_cfun (fn);
2027 /* Make string 0 be a NULL string. */
2028 streamer_write_char_stream (ob->string_stream, 0);
2030 streamer_write_record_start (ob, LTO_function);
2032 /* Output decls for the result and the parameters. */
2033 stream_write_tree (ob, DECL_RESULT (function), true);
2034 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2036 /* Output DECL_INITIAL for the function, which contains the tree of
2037 lexical scopes. */
2038 stream_write_tree (ob, DECL_INITIAL (function), true);
2040 /* We also stream abstract functions, for which we stream only what is
2041 needed for debug info. */
2042 if (gimple_has_body_p (function))
2044 streamer_write_uhwi (ob, 1);
2045 output_struct_function_base (ob, fn);
2047 /* Output all the SSA names used in the function. */
2048 output_ssa_names (ob, fn);
2050 /* Output any exception handling regions. */
2051 output_eh_regions (ob, fn);
2054 /* We will renumber the statements. The code that does this uses
2055 the same ordering that we use for serializing them so we can use
2056 the same code on the other end and not have to write out the
2057 statement numbers. We do not assign UIDs to PHIs here because
2058 virtual PHIs get re-computed on-the-fly which would make numbers
2059 inconsistent. */
2060 set_gimple_stmt_max_uid (cfun, 0);
2061 FOR_ALL_BB_FN (bb, cfun)
2063 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2064 gsi_next (&gsi))
2066 gphi *stmt = gsi.phi ();
2068 /* Virtual PHIs are not going to be streamed. */
2069 if (!virtual_operand_p (gimple_phi_result (stmt)))
2070 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2072 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2073 gsi_next (&gsi))
2075 gimple stmt = gsi_stmt (gsi);
2076 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2079 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2080 virtual phis now. */
2081 FOR_ALL_BB_FN (bb, cfun)
2083 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2084 gsi_next (&gsi))
2086 gphi *stmt = gsi.phi ();
2087 if (virtual_operand_p (gimple_phi_result (stmt)))
2088 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2092 /* Output the code for the function. */
2093 FOR_ALL_BB_FN (bb, fn)
2094 output_bb (ob, bb, fn);
2096 /* The terminator for this function. */
2097 streamer_write_record_start (ob, LTO_null);
2099 output_cfg (ob, fn);
2101 pop_cfun ();
2103 else
2104 streamer_write_uhwi (ob, 0);
2106 /* Create a section to hold the pickled output of this function. */
2107 produce_asm (ob, function);
2109 destroy_output_block (ob);
2112 /* Output the initializer (constructor) of variable NODE->DECL. */
2114 static void
2115 output_constructor (struct varpool_node *node)
2117 tree var = node->decl;
2118 struct output_block *ob;
2120 ob = create_output_block (LTO_section_function_body);
2122 clear_line_info (ob);
2123 ob->symbol = node;
2125 /* Make string 0 be a NULL string. */
2126 streamer_write_char_stream (ob->string_stream, 0);
2128 /* Output DECL_INITIAL for the variable, which holds its
2129 constructor (the initializer). */
2130 stream_write_tree (ob, DECL_INITIAL (var), true);
2132 /* Create a section to hold the pickled output of this constructor. */
2133 produce_asm (ob, var);
2135 destroy_output_block (ob);
2139 /* Emit toplevel asms. */
2141 void
2142 lto_output_toplevel_asms (void)
2144 struct output_block *ob;
2145 struct asm_node *can;
2146 char *section_name;
2147 struct lto_simple_header_with_strings header;
2149 if (!symtab->first_asm_symbol ())
2150 return;
2152 ob = create_output_block (LTO_section_asm);
2154 /* Make string 0 be a NULL string. */
2155 streamer_write_char_stream (ob->string_stream, 0);
2157 for (can = symtab->first_asm_symbol (); can; can = can->next)
2159 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2160 streamer_write_hwi (ob, can->order);
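/* A NULL string terminates the list of toplevel asms. */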
2163 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2165 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2166 lto_begin_section (section_name, !flag_wpa);
2167 free (section_name);
2169 /* The entire header stream is computed here. */
2170 memset (&header, 0, sizeof (header));
2172 /* Write the header. */
2173 header.major_version = LTO_major_version;
2174 header.minor_version = LTO_minor_version;
2176 header.main_size = ob->main_stream->total_size;
2177 header.string_size = ob->string_stream->total_size;
2178 lto_write_data (&header, sizeof header);
2180 /* Put all of the gimple and the string table out to the asm file as a
2181 block of text. */
2182 lto_write_stream (ob->main_stream);
2183 lto_write_stream (ob->string_stream);
2185 lto_end_section ();
2187 destroy_output_block (ob);
2191 /* Copy the function body or variable constructor of NODE without deserializing. */
2193 static void
2194 copy_function_or_variable (struct symtab_node *node)
2196 tree function = node->decl;
2197 struct lto_file_decl_data *file_data = node->lto_file_data;
2198 const char *data;
2199 size_t len;
2200 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2201 char *section_name =
2202 lto_get_section_name (LTO_section_function_body, name, NULL);
2203 size_t i, j;
2204 struct lto_in_decl_state *in_state;
2205 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2207 lto_begin_section (section_name, !flag_wpa);
2208 free (section_name);
2210 /* We may have renamed the declaration, e.g., a static function. */
2211 name = lto_get_decl_name_mapping (file_data, name);
2213 data = lto_get_section_data (file_data, LTO_section_function_body,
2214 name, &len);
2215 gcc_assert (data);
2217 /* Do a bit copy of the function body. */
2218 lto_write_data (data, len);
2220 /* Copy decls. */
2221 in_state =
2222 lto_get_function_in_decl_state (node->lto_file_data, function);
2223 gcc_assert (in_state);
2225 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2227 size_t n = vec_safe_length (in_state->streams[i]);
2228 vec<tree, va_gc> *trees = in_state->streams[i];
2229 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2231 /* The out state must have the same indices as the in state,
2232 so just copy the vector. All the encoders in the out state
2233 must be empty when we reach here. */
2234 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2235 encoder->trees.reserve_exact (n);
2236 for (j = 0; j < n; j++)
2237 encoder->trees.safe_push ((*trees)[j]);
2240 lto_free_section_data (file_data, LTO_section_function_body, name,
2241 data, len);
2242 lto_end_section ();
2245 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2247 static tree
2248 wrap_refs (tree *tp, int *ws, void *)
2250 tree t = *tp;
2251 if (handled_component_p (t)
2252 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2254 tree decl = TREE_OPERAND (t, 0);
2255 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2256 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2257 build1 (ADDR_EXPR, ptrtype, decl),
2258 build_int_cst (ptrtype, 0));
2259 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
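/* Stop walk_tree from descending into the rewritten reference. */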
2260 *ws = 0;
2262 else if (TREE_CODE (t) == CONSTRUCTOR)
2264 else if (!EXPR_P (t))
2265 *ws = 0;
2266 return NULL_TREE;
2269 /* Main entry point from the pass manager. */
2271 void
2272 lto_output (void)
2274 struct lto_out_decl_state *decl_state;
2275 #ifdef ENABLE_CHECKING
2276 bitmap output = lto_bitmap_alloc ();
2277 #endif
2278 int i, n_nodes;
2279 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2281 /* Initialize the streamer. */
2282 lto_streamer_init ();
2284 n_nodes = lto_symtab_encoder_size (encoder);
2285 /* Process the functions with bodies and the variable initializers. */
2286 for (i = 0; i < n_nodes; i++)
2288 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2289 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2291 if (lto_symtab_encoder_encode_body_p (encoder, node)
2292 && !node->alias)
2294 #ifdef ENABLE_CHECKING
2295 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2296 bitmap_set_bit (output, DECL_UID (node->decl));
2297 #endif
2298 decl_state = lto_new_out_decl_state ();
2299 lto_push_out_decl_state (decl_state);
2300 if (gimple_has_body_p (node->decl) || !flag_wpa
2301 /* Thunks have no body but they may be synthesized
2302 at WPA time. */
2303 || DECL_ARGUMENTS (node->decl))
2304 output_function (node);
2305 else
2306 copy_function_or_variable (node);
2307 gcc_assert (lto_get_out_decl_state () == decl_state);
2308 lto_pop_out_decl_state ();
2309 lto_record_function_out_decl_state (node->decl, decl_state);
2312 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2314 /* Wrap symbol references inside the ctor in a
2315 type-preserving MEM_REF. */
2316 tree ctor = DECL_INITIAL (node->decl);
2317 if (ctor && !in_lto_p)
2318 walk_tree (&ctor, wrap_refs, NULL, NULL);
2319 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2320 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2321 && !node->alias)
2323 timevar_push (TV_IPA_LTO_CTORS_OUT);
2324 #ifdef ENABLE_CHECKING
2325 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2326 bitmap_set_bit (output, DECL_UID (node->decl));
2327 #endif
2328 decl_state = lto_new_out_decl_state ();
2329 lto_push_out_decl_state (decl_state);
2330 if (DECL_INITIAL (node->decl) != error_mark_node
2331 || !flag_wpa)
2332 output_constructor (node);
2333 else
2334 copy_function_or_variable (node);
2335 gcc_assert (lto_get_out_decl_state () == decl_state);
2336 lto_pop_out_decl_state ();
2337 lto_record_function_out_decl_state (node->decl, decl_state);
2338 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2343 /* Emit the callgraph after emitting function bodies. This needs to
2344 be done now to make sure that all the statements in every function
2345 have been renumbered so that edges can be associated with call
2346 statements using the statement UIDs. */
2347 output_symtab ();
2349 output_offload_tables ();
2351 #ifdef ENABLE_CHECKING
2352 lto_bitmap_free (output);
2353 #endif
2356 /* Write each node encoded by ENCODER to OB, as well as those reachable
2357 from it and required for correct representation of its semantics.
2358 Each node in ENCODER must be a global declaration or a type. A node
2359 is written only once, even if it appears multiple times in the
2360 vector. Certain transitively-reachable nodes, such as those
2361 representing expressions, may be duplicated, but such nodes
2362 must not appear in ENCODER itself. */
2364 static void
2365 write_global_stream (struct output_block *ob,
2366 struct lto_tree_ref_encoder *encoder)
2368 tree t;
2369 size_t index;
2370 const size_t size = lto_tree_ref_encoder_size (encoder);
2372 for (index = 0; index < size; index++)
2374 t = lto_tree_ref_encoder_get_tree (encoder, index);
2375 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2376 stream_write_tree (ob, t, false);
2381 /* Write a sequence of indices into the globals vector corresponding
2382 to the trees in ENCODER. These are used by the reader to map the
2383 indices used to refer to global entities within function bodies to
2384 their referents. */
2386 static void
2387 write_global_references (struct output_block *ob,
2388 struct lto_tree_ref_encoder *encoder)
2390 tree t;
2391 uint32_t index;
2392 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2394 /* Write size and slot indexes as 32-bit unsigned numbers. */
2395 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2396 data[0] = size;
2398 for (index = 0; index < size; index++)
2400 uint32_t slot_num;
2402 t = lto_tree_ref_encoder_get_tree (encoder, index);
2403 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2404 gcc_assert (slot_num != (unsigned)-1);
2405 data[index + 1] = slot_num;
2408 lto_write_data (data, sizeof (int32_t) * (size + 1));
2409 free (data);
2413 /* Write all the streams in an lto_out_decl_state STATE using
2414 output block OB. */
2416 void
2417 lto_output_decl_state_streams (struct output_block *ob,
2418 struct lto_out_decl_state *state)
2420 int i;
2422 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2423 write_global_stream (ob, &state->streams[i]);
2427 /* Write all the references in an lto_out_decl_state STATE using
2428 output block OB. */
2430 void
2431 lto_output_decl_state_refs (struct output_block *ob,
2432 struct lto_out_decl_state *state)
2434 unsigned i;
2435 uint32_t ref;
2436 tree decl;
2438 /* Write a reference to FUNCTION_DECL. If there is no function,
2439 write a reference to void_type_node instead. */
2440 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2441 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2442 gcc_assert (ref != (unsigned)-1);
2443 lto_write_data (&ref, sizeof (uint32_t));
2445 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2446 write_global_references (ob, &state->streams[i]);
2450 /* Return the written size of STATE. */
2452 static size_t
2453 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2455 int i;
2456 size_t size;
2458 size = sizeof (int32_t); /* fn_ref. */
2459 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2461 size += sizeof (int32_t); /* vector size. */
2462 size += (lto_tree_ref_encoder_size (&state->streams[i])
2463 * sizeof (int32_t));
2465 return size;
2469 /* Write symbol T to the symbol table section. CACHE holds T's slot
2470 number; SEEN specifies the symbols written so far. */
2472 static void
2473 write_symbol (struct streamer_tree_cache_d *cache,
2474 tree t, hash_set<const char *> *seen, bool alias)
2476 const char *name;
2477 enum gcc_plugin_symbol_kind kind;
2478 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2479 unsigned slot_num;
2480 uint64_t size;
2481 const char *comdat;
2482 unsigned char c;
2484 /* None of the following kinds of symbols are needed in the
2485 symbol table. */
2486 if (!TREE_PUBLIC (t)
2487 || is_builtin_fn (t)
2488 || DECL_ABSTRACT_P (t)
2489 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2490 return;
2491 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2493 gcc_assert (TREE_CODE (t) == VAR_DECL
2494 || TREE_CODE (t) == FUNCTION_DECL);
2496 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2498 /* This behaves like assemble_name_raw in varasm.c, performing the
2499 same name manipulations that ASM_OUTPUT_LABELREF does. */
2500 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2502 if (seen->add (name))
2503 return;
2505 streamer_tree_cache_lookup (cache, t, &slot_num);
2506 gcc_assert (slot_num != (unsigned)-1);
2508 if (DECL_EXTERNAL (t))
2510 if (DECL_WEAK (t))
2511 kind = GCCPK_WEAKUNDEF;
2512 else
2513 kind = GCCPK_UNDEF;
2515 else
2517 if (DECL_WEAK (t))
2518 kind = GCCPK_WEAKDEF;
2519 else if (DECL_COMMON (t))
2520 kind = GCCPK_COMMON;
2521 else
2522 kind = GCCPK_DEF;
2524 /* When something is defined, it should have a node attached. */
2525 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2526 || varpool_node::get (t)->definition);
2527 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2528 || (cgraph_node::get (t)
2529 && cgraph_node::get (t)->definition));
2532 /* Imitate what default_elf_asm_output_external does.
2533 When the symbol is external, we need to output it with DEFAULT visibility
2534 when compiling with -fvisibility=default, but with HIDDEN visibility
2535 when the symbol has attribute ((visibility ("hidden"))) specified.
2536 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2537 right. */
2539 if (DECL_EXTERNAL (t)
2540 && !targetm.binds_local_p (t))
2541 visibility = GCCPV_DEFAULT;
2542 else
2543 switch (DECL_VISIBILITY (t))
2545 case VISIBILITY_DEFAULT:
2546 visibility = GCCPV_DEFAULT;
2547 break;
2548 case VISIBILITY_PROTECTED:
2549 visibility = GCCPV_PROTECTED;
2550 break;
2551 case VISIBILITY_HIDDEN:
2552 visibility = GCCPV_HIDDEN;
2553 break;
2554 case VISIBILITY_INTERNAL:
2555 visibility = GCCPV_INTERNAL;
2556 break;
2559 if (kind == GCCPK_COMMON
2560 && DECL_SIZE_UNIT (t)
2561 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2562 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2563 else
2564 size = 0;
2566 if (DECL_ONE_ONLY (t))
2567 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2568 else
2569 comdat = "";
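/* Each symbol table entry consists of the NUL-terminated assembler name,
the NUL-terminated comdat group, one byte each for the symbol kind and
visibility, the 8-byte size and the 4-byte slot number. */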
2571 lto_write_data (name, strlen (name) + 1);
2572 lto_write_data (comdat, strlen (comdat) + 1);
2573 c = (unsigned char) kind;
2574 lto_write_data (&c, 1);
2575 c = (unsigned char) visibility;
2576 lto_write_data (&c, 1);
2577 lto_write_data (&size, 8);
2578 lto_write_data (&slot_num, 4);
2581 /* Return true if NODE should appear in the plugin symbol table. */
2583 bool
2584 output_symbol_p (symtab_node *node)
2586 struct cgraph_node *cnode;
2587 if (!node->real_symbol_p ())
2588 return false;
2589 /* We keep external functions in the symtab for the sake of inlining
2590 and devirtualization. We do not want to see them in the symbol table
2591 as references unless they are really used. */
2592 cnode = dyn_cast <cgraph_node *> (node);
2593 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2594 && cnode->callers)
2595 return true;
2597 /* Ignore all references from external variable initializers - they are not
2598 really part of the compilation unit until they are used by folding. Some
2599 symbols, like references to external construction vtables, cannot be
2600 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2601 if (!node->definition || DECL_EXTERNAL (node->decl))
2603 int i;
2604 struct ipa_ref *ref;
2605 for (i = 0; node->iterate_referring (i, ref); i++)
2607 if (ref->use == IPA_REF_ALIAS)
2608 continue;
2609 if (is_a <cgraph_node *> (ref->referring))
2610 return true;
2611 if (!DECL_EXTERNAL (ref->referring->decl))
2612 return true;
2614 return false;
2616 return true;
2620 /* Write an IL symbol table to OB for the symbols in OB's
2621 symtab node encoder. */
2623 static void
2624 produce_symtab (struct output_block *ob)
2626 struct streamer_tree_cache_d *cache = ob->writer_cache;
2627 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2628 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2629 lto_symtab_encoder_iterator lsei;
2631 lto_begin_section (section_name, false);
2632 free (section_name);
2634 hash_set<const char *> seen;
2636 /* Write the symbol table.
2637 First write everything defined and then all declarations.
2638 This is necessary to handle cases where we have duplicated symbols. */
2639 for (lsei = lsei_start (encoder);
2640 !lsei_end_p (lsei); lsei_next (&lsei))
2642 symtab_node *node = lsei_node (lsei);
2644 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2645 continue;
2646 write_symbol (cache, node->decl, &seen, false);
2648 for (lsei = lsei_start (encoder);
2649 !lsei_end_p (lsei); lsei_next (&lsei))
2651 symtab_node *node = lsei_node (lsei);
2653 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2654 continue;
2655 write_symbol (cache, node->decl, &seen, false);
2658 lto_end_section ();
2662 /* Init the streamer_mode_table for output, where we collect info on what
2663 machine_mode values have been streamed. */
2664 void
2665 lto_output_init_mode_table (void)
2667 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2671 /* Write the mode table. */
2672 static void
2673 lto_write_mode_table (void)
2675 struct output_block *ob;
2676 ob = create_output_block (LTO_section_mode_table);
2677 bitpack_d bp = bitpack_create (ob->main_stream);
2679 /* Ensure that for GET_MODE_INNER (m) != m we also have
2680 the inner mode marked. */
2681 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2682 if (streamer_mode_table[i])
2684 machine_mode m = (machine_mode) i;
2685 if (GET_MODE_INNER (m) != m)
2686 streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
2688 /* First stream modes that have GET_MODE_INNER (m) == m,
2689 so that we can refer to them afterwards. */
2690 for (int pass = 0; pass < 2; pass++)
2691 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2692 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2694 machine_mode m = (machine_mode) i;
2695 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2696 continue;
2697 bp_pack_value (&bp, m, 8);
2698 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2699 bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2700 bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2701 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2702 bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2703 switch (GET_MODE_CLASS (m))
2705 case MODE_FRACT:
2706 case MODE_UFRACT:
2707 case MODE_ACCUM:
2708 case MODE_UACCUM:
2709 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2710 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2711 break;
2712 case MODE_FLOAT:
2713 case MODE_DECIMAL_FLOAT:
2714 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2715 break;
2716 default:
2717 break;
2719 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
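/* A VOIDmode entry terminates the mode table. */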
2721 bp_pack_value (&bp, VOIDmode, 8);
2723 streamer_write_bitpack (&bp);
2725 char *section_name
2726 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2727 lto_begin_section (section_name, !flag_wpa);
2728 free (section_name);
2730 /* The entire header stream is computed here. */
2731 struct lto_simple_header_with_strings header;
2732 memset (&header, 0, sizeof (header));
2734 /* Write the header. */
2735 header.major_version = LTO_major_version;
2736 header.minor_version = LTO_minor_version;
2738 header.main_size = ob->main_stream->total_size;
2739 header.string_size = ob->string_stream->total_size;
2740 lto_write_data (&header, sizeof header);
2742 /* Put all of the gimple and the string table out to the asm file as a
2743 block of text. */
2744 lto_write_stream (ob->main_stream);
2745 lto_write_stream (ob->string_stream);
2747 lto_end_section ();
2748 destroy_output_block (ob);
2752 /* This pass is run after all of the functions are serialized and all
2753 of the IPA passes have written their serialized forms. This pass
2754 causes the vector of all of the global decls and types used from
2755 this file to be written into a section that can then be read back
2756 in to recover them on the other side. */
2758 void
2759 produce_asm_for_decls (void)
2761 struct lto_out_decl_state *out_state;
2762 struct lto_out_decl_state *fn_out_state;
2763 struct lto_decl_header header;
2764 char *section_name;
2765 struct output_block *ob;
2766 unsigned idx, num_fns;
2767 size_t decl_state_size;
2768 int32_t num_decl_states;
2770 ob = create_output_block (LTO_section_decls);
2772 memset (&header, 0, sizeof (struct lto_decl_header));
2774 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2775 lto_begin_section (section_name, !flag_wpa);
2776 free (section_name);
2778 /* Make string 0 be a NULL string. */
2779 streamer_write_char_stream (ob->string_stream, 0);
2781 gcc_assert (!alias_pairs);
2783 /* Get rid of the global decl state hash tables to save some memory. */
2784 out_state = lto_get_out_decl_state ();
2785 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2786 if (out_state->streams[i].tree_hash_table)
2788 delete out_state->streams[i].tree_hash_table;
2789 out_state->streams[i].tree_hash_table = NULL;
2792 /* Write the global symbols. */
2793 lto_output_decl_state_streams (ob, out_state);
2794 num_fns = lto_function_decl_states.length ();
2795 for (idx = 0; idx < num_fns; idx++)
2797 fn_out_state =
2798 lto_function_decl_states[idx];
2799 lto_output_decl_state_streams (ob, fn_out_state);
2802 header.major_version = LTO_major_version;
2803 header.minor_version = LTO_minor_version;
2805 /* Currently not used. This field would allow us to preallocate
2806 the globals vector, so that it need not be resized as it is extended. */
2807 header.num_nodes = -1;
2809 /* Compute the total size of all decl out states. */
2810 decl_state_size = sizeof (int32_t);
2811 decl_state_size += lto_out_decl_state_written_size (out_state);
2812 for (idx = 0; idx < num_fns; idx++)
2814 fn_out_state =
2815 lto_function_decl_states[idx];
2816 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2818 header.decl_state_size = decl_state_size;
2820 header.main_size = ob->main_stream->total_size;
2821 header.string_size = ob->string_stream->total_size;
2823 lto_write_data (&header, sizeof header);
2825 /* Write the main out-decl state, followed by out-decl states of
2826 functions. */
2827 num_decl_states = num_fns + 1;
2828 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2829 lto_output_decl_state_refs (ob, out_state);
2830 for (idx = 0; idx < num_fns; idx++)
2832 fn_out_state = lto_function_decl_states[idx];
2833 lto_output_decl_state_refs (ob, fn_out_state);
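/* Finally emit the pickled global trees (main stream) and the string table. */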
2836 lto_write_stream (ob->main_stream);
2837 lto_write_stream (ob->string_stream);
2839 lto_end_section ();
2841 /* Write the symbol table. It is used by the linker to determine
2842 dependencies, and thus we can skip it for WPA. */
2843 if (!flag_wpa)
2844 produce_symtab (ob);
2846 /* Write command line opts. */
2847 lto_write_options ();
2849 /* Deallocate memory and clean up. */
2850 for (idx = 0; idx < num_fns; idx++)
2852 fn_out_state =
2853 lto_function_decl_states[idx];
2854 lto_delete_out_decl_state (fn_out_state);
2856 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2857 lto_function_decl_states.release ();
2858 destroy_output_block (ob);
2859 if (lto_stream_offload_p)
2860 lto_write_mode_table ();