/* Write the GIMPLE representation to a file stream.

   Copyright (C) 2009-2017 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
   Re-implemented by Diego Novillo <dnovillo@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-streamer.h"
#include "alias.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "except.h"
#include "lto-symtab.h"
#include "cgraph.h"
#include "cfgloop.h"
#include "builtins.h"
#include "gomp-constants.h"
#include "debug.h"

static void lto_write_tree (struct output_block*, tree, bool);

/* Clear the line info stored in OB.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
  ob->current_sysp = false;
}


/* Create the output block and return it.  SECTION_TYPE is
   LTO_section_function_body or LTO_section_static_initializer.  */

struct output_block *
create_output_block (enum lto_section_type section_type)
{
  struct output_block *ob = XCNEW (struct output_block);

  ob->section_type = section_type;
  ob->decl_state = lto_get_out_decl_state ();
  ob->main_stream = XCNEW (struct lto_output_stream);
  ob->string_stream = XCNEW (struct lto_output_stream);
  ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);

  if (section_type == LTO_section_function_body)
    ob->cfg_stream = XCNEW (struct lto_output_stream);

  clear_line_info (ob);

  ob->string_hash_table = new hash_table<string_slot_hasher> (37);
  gcc_obstack_init (&ob->obstack);

  return ob;
}


/* Destroy the output block OB.  */

void
destroy_output_block (struct output_block *ob)
{
  enum lto_section_type section_type = ob->section_type;

  delete ob->string_hash_table;
  ob->string_hash_table = NULL;

  free (ob->main_stream);
  free (ob->string_stream);
  if (section_type == LTO_section_function_body)
    free (ob->cfg_stream);

  streamer_tree_cache_delete (ob->writer_cache);
  obstack_free (&ob->obstack, NULL);

  free (ob);
}


/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}


/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to the global stream, because they may be used in the type
     definition.  */
  if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_CONTEXT (t))
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  else if (((VAR_P (t) && !TREE_STATIC (t))
            || TREE_CODE (t) == TYPE_DECL
            || TREE_CODE (t) == CONST_DECL
            || TREE_CODE (t) == NAMELIST_DECL)
           && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
           && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
           && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
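
/* In short: trees for which tree_is_indexable returns true are emitted once
   into the global decl/type tables and are referenced from function bodies
   by index (see lto_output_tree_ref below).  Everything rejected above,
   such as function-local decls, DEBUG_EXPR_DECLs, variably modified types
   and their FIELD_DECLs, and IMPORTED_DECLs, is pickled inline at the point
   of use.  */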

/* Output info about new location into bitpack BP.
   After outputting the bitpack, lto_output_location_data has
   to be done to output the actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
                     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
                        loc < RESERVED_LOCATION_COUNT
                        ? loc : RESERVED_LOCATION_COUNT);
  if (loc < RESERVED_LOCATION_COUNT)
    return;

  xloc = expand_location (loc);

  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    {
      bp_pack_string (ob, bp, xloc.file, true);
      bp_pack_value (bp, xloc.sysp, 1);
    }
  ob->current_file = xloc.file;
  ob->current_sysp = xloc.sysp;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
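
/* The encoding above is delta based: a small value in the range
   [0, RESERVED_LOCATION_COUNT] comes first (reserved locations are streamed
   as themselves), then three 1-bit "changed" flags for file, line and
   column, followed only by the fields that actually changed relative to the
   previous location streamed through OB (file name plus system-header bit,
   then line and column as variable-length integers).  The reader is
   expected to mirror this by tracking the same current_* state.  */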


/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* FALLTHRU */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
         lto_output_tree.  */
      gcc_unreachable ();
    }
}


/* Return true if EXPR is a tree node that can be written to disk.  */

static inline bool
lto_is_streamable (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  /* Notice that we reject SSA_NAMEs as well.  We only emit the SSA
     name version in lto_output_tree_ref (see output_ssa_names).  */
  return !is_lang_specific (expr)
         && code != SSA_NAME
         && code != CALL_EXPR
         && code != LANG_TYPE
         && code != MODIFY_EXPR
         && code != INIT_EXPR
         && code != TARGET_EXPR
         && code != BIND_EXPR
         && code != WITH_CLEANUP_EXPR
         && code != STATEMENT_LIST
         && (code == CASE_LABEL_EXPR
             || code == DECL_EXPR
             || TREE_CODE_CLASS (code) != tcc_statement);
}


/* Very rough estimate of the streaming size of the initializer.  If we
   ignored the presence of strings, we could simply just count the number of
   non-indexable tree nodes and the number of references to indexable nodes.
   Strings however may be very large and we do not want to dump them into
   the global stream.

   Count the size of the initializer until the size in DATA is positive.  */
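
/* The estimate below charges roughly 4 bytes per reference to an indexable
   node, TREE_STRING_LENGTH + 8 bytes per STRING_CST, and 16 bytes per other
   pickled node, against the 30-byte budget used by get_symbol_initial_value;
   the walk stops as soon as the budget in DATA goes negative.  */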

static tree
subtract_estimated_size (tree *tp, int *ws, void *data)
{
  long *sum = (long *)data;
  if (tree_is_indexable (*tp))
    {
      /* Indexable tree is one reference to the global stream.
         Guess it may be about 4 bytes.  */
      *sum -= 4;
      *ws = 0;
    }
  /* String table entry + base of tree node needs to be streamed.  */
  if (TREE_CODE (*tp) == STRING_CST)
    *sum -= TREE_STRING_LENGTH (*tp) + 8;
  else
    {
      /* Identifiers are also variable length but should not appear
         naked in a constructor.  */
      gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
      /* We do not really attempt to work out the size of the pickled tree,
         as it is very variable.  Make it bigger than the reference.  */
      *sum -= 16;
    }
  if (*sum < 0)
    return *tp;
  return NULL_TREE;
}


/* For EXPR, look up and return what we want to stream as its DECL_INITIAL.  */

static tree
get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
{
  gcc_checking_assert (DECL_P (expr)
                       && TREE_CODE (expr) != FUNCTION_DECL
                       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);

  /* Handle DECL_INITIAL for symbols.  */
  tree initial = DECL_INITIAL (expr);
  if (VAR_P (expr)
      && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
      && !DECL_IN_CONSTANT_POOL (expr)
      && initial)
    {
      varpool_node *vnode;
      /* Extra section needs about 30 bytes; do not produce it for simple
         scalar values.  */
      if (!(vnode = varpool_node::get (expr))
          || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
        initial = error_mark_node;
      if (initial != error_mark_node)
        {
          long max_size = 30;
          if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
                         NULL))
            initial = error_mark_node;
        }
    }

  return initial;
}


/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value
                       (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_die_ref_for_decl.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *sym;
      unsigned HOST_WIDE_INT off;
      if (debug_info_level > DINFO_LEVEL_NONE
          && debug_hooks->die_ref_for_decl (expr, &sym, &off))
        {
          streamer_write_string (ob, ob->main_stream, sym, true);
          streamer_write_uhwi (ob, off);
        }
      else
        streamer_write_string (ob, ob->main_stream, NULL, true);
    }
}


/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
                    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}


/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
                   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
                       && !(this_ref_p && tree_is_indexable (expr)));

  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
                                              expr, hash, &ix);
  gcc_assert (!exists_p);
  if (TREE_CODE (expr) == INTEGER_CST
      && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
         original type to be materialized by the reader (to implement
         TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
         to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}


class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  vec<scc_entry> sccstack;

private:
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
                            tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
                       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size,
            bool ref_p, bool this_ref_p);

  hash_map<tree, sccs *> sccstate;
  vec<worklist> worklist_vec;
  struct obstack sccstate_obstack;
};
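
/* The DFS walker above implements an iterative, worklist-based variant of
   Tarjan's SCC algorithm over the tree graph rooted at EXPR: each sccs
   carries the usual dfsnum/lowlink pair, worklist_vec replaces the call
   stack, and completed components are collected on sccstack before being
   streamed as LTO_tree_scc records.  */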

/* Emit the physical representation of tree node EXPR to output block OB,
   using depth-first search on the subgraph.  If THIS_REF_P is true, the
   leaves of EXPR are emitted as references via lto_output_tree_ref.
   REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
   this is for a rewalk of a single leaf SCC.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
          bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
        {
          sccs **slot = &sccstate.get_or_insert (expr);
          cstate = *slot;
          if (cstate)
            {
              gcc_checking_assert (from_state);
              if (cstate->dfsnum < from_state->dfsnum)
                from_state->low = MIN (cstate->dfsnum, from_state->low);
              worklist_vec.pop ();
              continue;
            }

          scc_entry e = { expr, 0 };
          /* Not yet visited.  DFS recurse and push it onto the stack.  */
          *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
          sccstack.safe_push (e);
          cstate->dfsnum = next_dfs_num++;
          cstate->low = cstate->dfsnum;
          w.cstate = cstate;

          if (TREE_CODE (expr) == INTEGER_CST
              && !TREE_OVERFLOW (expr))
            DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
          else
            {
              DFS_write_tree_body (ob, expr, cstate, ref_p);

              /* Walk any LTO-specific edges.  */
              if (DECL_P (expr)
                  && TREE_CODE (expr) != FUNCTION_DECL
                  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
                {
                  /* Handle DECL_INITIAL for symbols.  */
                  tree initial
                    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
                                                expr);
                  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
                }
            }
          continue;
        }

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
        {
          unsigned first, size;
          tree x;

          /* If we are re-walking a single leaf SCC just pop it,
             let the earlier worklist item access the sccstack.  */
          if (single_p)
            {
              worklist_vec.pop ();
              continue;
            }

          /* Pop the SCC and compute its size.  */
          first = sccstack.length ();
          do
            {
              x = sccstack[--first].t;
            }
          while (x != expr);
          size = sccstack.length () - first;

          /* No need to compute hashes for LTRANS units, we don't perform
             any merging there.  */
          hashval_t scc_hash = 0;
          unsigned scc_entry_len = 0;
          if (!flag_wpa)
            {
              scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);

              /* Put the entries with the least number of collisions first.  */
              unsigned entry_start = 0;
              scc_entry_len = size + 1;
              for (unsigned i = 0; i < size;)
                {
                  unsigned from = i;
                  for (i = i + 1; i < size
                       && (sccstack[first + i].hash
                           == sccstack[first + from].hash); ++i)
                    ;
                  if (i - from < scc_entry_len)
                    {
                      scc_entry_len = i - from;
                      entry_start = from;
                    }
                }
              for (unsigned i = 0; i < scc_entry_len; ++i)
                std::swap (sccstack[first + i],
                           sccstack[first + entry_start + i]);

              /* We already sorted SCC deterministically in hash_scc.  */

              /* Check that we have only one SCC.
                 Naturally we may have conflicts if the hash function is not
                 strong enough.  Let's see how far this gets.  */
              gcc_checking_assert (scc_entry_len == 1);
            }
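
          /* On the stream, each component then becomes one LTO_tree_scc
             record: the tag, the number of trees in the SCC and the SCC
             hash, followed either by the single pickled tree (size == 1)
             or by the entry-candidate count, then all tree headers, then
             all tree bodies.  The reader relies on this shape to rebuild
             the SCC and, at WPA time, to merge it with identical SCCs from
             other translation units.  */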

          /* Write LTO_tree_scc.  */
          streamer_write_record_start (ob, LTO_tree_scc);
          streamer_write_uhwi (ob, size);
          streamer_write_uhwi (ob, scc_hash);

          /* Write size-1 SCCs without wrapping them inside SCC bundles.
             All INTEGER_CSTs need to be handled this way as we need
             their type to materialize them.  Also builtins are handled
             this way.
             ??? We still wrap these in LTO_tree_scc so at the
             input side we can properly identify the tree we want
             to ultimately return.  */
          if (size == 1)
            lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
          else
            {
              /* Write the size of the SCC entry candidates.  */
              streamer_write_uhwi (ob, scc_entry_len);

              /* Write all headers and populate the streamer cache.  */
              for (unsigned i = 0; i < size; ++i)
                {
                  hashval_t hash = sccstack[first+i].hash;
                  tree t = sccstack[first+i].t;
                  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
                                                              t, hash, NULL);
                  gcc_assert (!exists_p);

                  if (!lto_is_streamable (t))
                    internal_error ("tree code %qs is not supported "
                                    "in LTO streams",
                                    get_tree_code_name (TREE_CODE (t)));

                  /* Write the header, containing everything needed to
                     materialize EXPR on the reading side.  */
                  streamer_write_tree_header (ob, t);
                }

              /* Write the bitpacks and tree references.  */
              for (unsigned i = 0; i < size; ++i)
                {
                  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

                  /* Mark the end of the tree.  */
                  streamer_write_zero (ob);
                }
            }

          /* Finally truncate the vector.  */
          sccstack.truncate (first);

          if (from_state)
            from_state->low = MIN (from_state->low, cstate->low);
          worklist_vec.pop ();
          continue;
        }

      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
        from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}

/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
                          tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
        DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
        DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
          && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
          && anon_aggrname_p (DECL_NAME (expr)))
        ;
      else
        DFS_follow_tree_edge (DECL_NAME (expr));
      if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
          && ! DECL_CONTEXT (expr))
        DFS_follow_tree_edge ((*all_translation_units)[0]);
      else
        DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
         special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
         information for early inlining so drop it on the floor instead of
         ICEing in dwarf2out.c.
         We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
         declarations which should be eliminated by decl merging.  Be sure
         none leaks to this point.  */
      gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
      DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));

      if ((VAR_P (expr)
           || TREE_CODE (expr) == PARM_DECL)
          && DECL_HAS_VALUE_EXPR_P (expr))
        DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (VAR_P (expr))
        DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
        DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
        DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
         reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
         during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
         to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
        DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
        for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
          DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
               || TREE_CODE (expr) == METHOD_TYPE)
        DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
        DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
      DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
        DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
        DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
        if (VAR_OR_FUNCTION_DECL_P (t)
            && DECL_EXTERNAL (t))
          /* We have to stream externals in the block chain as
             non-references.  See also
             tree-streamer-out.c:streamer_write_chain.  */
          DFS_write_tree (ob, expr_state, t, ref_p, false);
        else
          DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
         handle - those that represent inlined function scopes.
         For the rest, drop them on the floor instead of ICEing
         in dwarf2out.c, but keep the notion of whether the block
         is an inlined block by referring to itself for the sake of
         tree_nonartificial_location.  */
      if (inlined_function_outer_scope_p (expr))
        {
          tree ultimate_origin = block_ultimate_origin (expr);
          DFS_follow_tree_edge (ultimate_origin);
        }
      else if (BLOCK_ABSTRACT_ORIGIN (expr))
        DFS_follow_tree_edge (expr);
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
         information for early inlined BLOCKs so drop it on the floor instead
         of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
         streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
         list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
         EXPR's header (see streamer_write_tree_header) because this length
         is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
        DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
         EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
        DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
         and BINFO_VPTR_INDEX; these are used by the C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
        {
          DFS_follow_tree_edge (index);
          DFS_follow_tree_edge (value);
        }
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
        DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}

/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside the current SCC.  MAP, if
   non-NULL, may hold hash values of trees inside the current SCC.  */

static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
{
  inchash::hash hstate;

#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (!SIBLING) \
      hstate.add_int (0); \
    else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
    else if (map) \
      hstate.add_int (*map->get (SIBLING)); \
    else \
      hstate.add_int (1); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hstate.add_int (code);
  if (!TYPE_P (t))
    {
      hstate.add_flag (TREE_SIDE_EFFECTS (t));
      hstate.add_flag (TREE_CONSTANT (t));
      hstate.add_flag (TREE_READONLY (t));
      hstate.add_flag (TREE_PUBLIC (t));
    }
  hstate.add_flag (TREE_ADDRESSABLE (t));
  hstate.add_flag (TREE_THIS_VOLATILE (t));
  if (DECL_P (t))
    hstate.add_flag (DECL_UNSIGNED (t));
  else if (TYPE_P (t))
    hstate.add_flag (TYPE_UNSIGNED (t));
  if (TYPE_P (t))
    hstate.add_flag (TYPE_ARTIFICIAL (t));
  else
    hstate.add_flag (TREE_NO_WARNING (t));
  hstate.add_flag (TREE_NOTHROW (t));
  hstate.add_flag (TREE_STATIC (t));
  hstate.add_flag (TREE_PROTECTED (t));
  hstate.add_flag (TREE_DEPRECATED (t));
  if (code != TREE_BINFO)
    hstate.add_flag (TREE_PRIVATE (t));
  if (TYPE_P (t))
    {
      hstate.add_flag (AGGREGATE_TYPE_P (t)
                       ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
      hstate.add_flag (TYPE_ADDR_SPACE (t));
    }
  else if (code == SSA_NAME)
    hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
  hstate.commit_flag ();

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    hstate.add_wide_int (wi::to_widest (t));

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      hstate.add_flag (r.cl);
      hstate.add_flag (r.sign);
      hstate.add_flag (r.signalling);
      hstate.add_flag (r.canonical);
      hstate.commit_flag ();
      hstate.add_int (r.uexp);
      hstate.add (r.sig, sizeof (r.sig));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      hstate.add_int (f.mode);
      hstate.add_int (f.data.low);
      hstate.add_int (f.data.high);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      hstate.add_hwi (DECL_MODE (t));
      hstate.add_flag (DECL_NONLOCAL (t));
      hstate.add_flag (DECL_VIRTUAL_P (t));
      hstate.add_flag (DECL_IGNORED_P (t));
      hstate.add_flag (DECL_ABSTRACT_P (t));
      hstate.add_flag (DECL_ARTIFICIAL (t));
      hstate.add_flag (DECL_USER_ALIGN (t));
      hstate.add_flag (DECL_PRESERVE_P (t));
      hstate.add_flag (DECL_EXTERNAL (t));
      hstate.add_flag (DECL_GIMPLE_REG_P (t));
      hstate.commit_flag ();
      hstate.add_int (DECL_ALIGN (t));
      if (code == LABEL_DECL)
        {
          hstate.add_int (EH_LANDING_PAD_NR (t));
          hstate.add_int (LABEL_DECL_UID (t));
        }
      else if (code == FIELD_DECL)
        {
          hstate.add_flag (DECL_PACKED (t));
          hstate.add_flag (DECL_NONADDRESSABLE_P (t));
          hstate.add_flag (DECL_PADDING_P (t));
          hstate.add_int (DECL_OFFSET_ALIGN (t));
        }
      else if (code == VAR_DECL)
        {
          hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
          hstate.add_flag (DECL_NONLOCAL_FRAME (t));
        }
      if (code == RESULT_DECL
          || code == PARM_DECL
          || code == VAR_DECL)
        {
          hstate.add_flag (DECL_BY_REFERENCE (t));
          if (code == VAR_DECL
              || code == PARM_DECL)
            hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
        }
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    hstate.add_int (DECL_REGISTER (t));

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      hstate.add_flag (DECL_COMMON (t));
      hstate.add_flag (DECL_DLLIMPORT_P (t));
      hstate.add_flag (DECL_WEAK (t));
      hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
      hstate.add_flag (DECL_COMDAT (t));
      hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
      hstate.add_int (DECL_VISIBILITY (t));
      if (code == VAR_DECL)
        {
          /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
          hstate.add_flag (DECL_HARD_REGISTER (t));
          hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
        }
      if (TREE_CODE (t) == FUNCTION_DECL)
        {
          hstate.add_flag (DECL_FINAL_P (t));
          hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
          hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
        }
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      hstate.add_int (DECL_BUILT_IN_CLASS (t));
      hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
      hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
      hstate.add_flag (DECL_UNINLINABLE (t));
      hstate.add_flag (DECL_POSSIBLY_INLINED (t));
      hstate.add_flag (DECL_IS_NOVOPS (t));
      hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
      hstate.add_flag (DECL_IS_MALLOC (t));
      hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
      hstate.add_flag (DECL_DECLARED_INLINE_P (t));
      hstate.add_flag (DECL_STATIC_CHAIN (t));
      hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
      hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
      hstate.add_flag (DECL_NO_LIMIT_STACK (t));
      hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
      hstate.add_flag (DECL_PURE_P (t));
      hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
      hstate.commit_flag ();
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
        hstate.add_int (DECL_FUNCTION_CODE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      hstate.add_hwi (TYPE_MODE (t));
      hstate.add_flag (TYPE_STRING_FLAG (t));
      /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
         no streaming.  */
      hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
      hstate.add_flag (TYPE_PACKED (t));
      hstate.add_flag (TYPE_RESTRICT (t));
      hstate.add_flag (TYPE_USER_ALIGN (t));
      hstate.add_flag (TYPE_READONLY (t));
      if (RECORD_OR_UNION_TYPE_P (t))
        {
          hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
          hstate.add_flag (TYPE_FINAL_P (t));
        }
      else if (code == ARRAY_TYPE)
        hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
      if (AGGREGATE_TYPE_P (t))
        hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
      hstate.commit_flag ();
      hstate.add_int (TYPE_PRECISION (t));
      hstate.add_int (TYPE_ALIGN (t));
      hstate.add_int (TYPE_EMPTY_P (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
                strlen (TRANSLATION_UNIT_LANGUAGE (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
      /* We don't stream these when passing things to a different target.  */
      && !lto_stream_offload_p)
    hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (code != IDENTIFIER_NODE)
        visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
      visit (VECTOR_CST_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
          && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
          && anon_aggrname_p (DECL_NAME (t)))
        ;
      else
        visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
        ;
      else
        visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
           || code == PARM_DECL)
          && DECL_HAS_VALUE_EXPR_P (t))
        visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
          && DECL_HAS_DEBUG_EXPR_P (t))
        visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
         be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
        visit (DECL_ORIGINAL_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
        visit (DECL_ASSEMBLER_NAME (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_VINDEX (t));
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
        ;
      else
        visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
        visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
        visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
        for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
          visit (f);
      else if (code == FUNCTION_TYPE
               || code == METHOD_TYPE)
        visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
        visit (TYPE_MIN_VALUE_RAW (t));
      visit (TYPE_MAX_VALUE_RAW (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      hstate.add_hwi (TREE_OPERAND_LENGTH (t));
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
        visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
        visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
        visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
         and BINFO_VPTR_INDEX; these are used by the C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      hstate.add_hwi (CONSTRUCTOR_NELTS (t));
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
        {
          visit (index);
          visit (value);
        }
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      HOST_WIDE_INT val;

      hstate.add_hwi (OMP_CLAUSE_CODE (t));
      switch (OMP_CLAUSE_CODE (t))
        {
        case OMP_CLAUSE_DEFAULT:
          val = OMP_CLAUSE_DEFAULT_KIND (t);
          break;
        case OMP_CLAUSE_SCHEDULE:
          val = OMP_CLAUSE_SCHEDULE_KIND (t);
          break;
        case OMP_CLAUSE_DEPEND:
          val = OMP_CLAUSE_DEPEND_KIND (t);
          break;
        case OMP_CLAUSE_MAP:
          val = OMP_CLAUSE_MAP_KIND (t);
          break;
        case OMP_CLAUSE_PROC_BIND:
          val = OMP_CLAUSE_PROC_BIND_KIND (t);
          break;
        case OMP_CLAUSE_REDUCTION:
          val = OMP_CLAUSE_REDUCTION_CODE (t);
          break;
        default:
          val = 0;
          break;
        }
      hstate.add_hwi (val);
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
        visit (OMP_CLAUSE_OPERAND (t, i));
      visit (OMP_CLAUSE_CHAIN (t));
    }

  return hstate.end ();

#undef visit
}

/* Compare two SCC entries by their hash value for qsorting them.  */

int
DFS::scc_entry_compare (const void *p1_, const void *p2_)
{
  const scc_entry *p1 = (const scc_entry *) p1_;
  const scc_entry *p2 = (const scc_entry *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}

/* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
   THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST.  */

hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
               bool ref_p, bool this_ref_p)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get a unique hash for every tree within the SCC and compute
     the hash value of the whole SCC by combining all values together in a
     stable (entry-point independent) order.  This guarantees that the same
     SCC regions within different translation units will get the same hash
     values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC
     hash by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of the SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
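
  /* Concretely, with two members A = "pointer to X" and B = "pointer to Y"
     where X and Y are also in the SCC: A and B start out with equal hashes,
     but one propagation step mixes hash(X) into A and hash(Y) into B, so as
     soon as X and Y hash differently the duplicates disappear and the loop
     below can pick a unique entry point.  */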
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
         the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
        firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
        if (sccstack[first+i-1].hash != sccstack[first+i].hash)
          {
            classes++;
            if (firstunique == -1
                && (i == size - 1
                    || sccstack[first+i+1].hash != sccstack[first+i].hash))
              firstunique = i;
          }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
          /* Also terminate if we run out of iterations or if the number of
             equivalence classes is no longer increasing.
             For example a cyclic list of trees that are all equivalent will
             never have a unique entry point; we however do not build such
             SCCs in our IL.  */
          || classes <= last_classes || iterations > 16)
        {
          hashval_t scc_hash;

          /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
             starting from FIRSTUNIQUE to obtain a stable order.  */
          if (classes != size && firstunique != -1)
            {
              hash_map <tree, hashval_t> map(size*2);

              /* Store hash values into a map, so we can associate them with
                 the reordered SCC.  */
              for (unsigned i = 0; i < size; ++i)
                map.put (sccstack[first+i].t, sccstack[first+i].hash);

              DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
                         true);
              gcc_assert (again.sccstack.length () == size);

              memcpy (sccstack.address () + first,
                      again.sccstack.address (),
                      sizeof (scc_entry) * size);

              /* Update hash values of individual members by hashing in the
                 index within the stable order.  This ensures uniqueness.
                 Also compute the SCC hash by mixing in all hash values in
                 the stable order we obtained.  */
              sccstack[first].hash = *map.get (sccstack[first].t);
              scc_hash = sccstack[first].hash;
              for (unsigned i = 1; i < size; ++i)
                {
                  sccstack[first+i].hash
                    = iterative_hash_hashval_t (i,
                                                *map.get (sccstack[first+i].t));
                  scc_hash
                    = iterative_hash_hashval_t (scc_hash,
                                                sccstack[first+i].hash);
                }
            }
          /* If we got a unique hash value for each tree, then sort already
             ensured entry-point independent order.  Only compute the final
             SCC hash.

             If we failed to find the unique entry point, we go by the same
             route.  We will eventually introduce unwanted hash conflicts.  */
          else
            {
              scc_hash = sccstack[first].hash;
              for (unsigned i = 1; i < size; ++i)
                scc_hash
                  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

              /* We cannot 100% guarantee that the hash won't conflict so as
                 to make it impossible to find a unique hash.  This however
                 should be an extremely rare case.  ICE for now so possible
                 issues are found and evaluated.  */
              gcc_checking_assert (classes == size);
            }

          /* To avoid conflicts across SCCs, iteratively hash the whole SCC
             hash into the hash of each element.  */
          for (unsigned i = 0; i < size; ++i)
            sccstack[first+i].hash
              = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
          return scc_hash;
        }

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
         the edges.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
        map.put (sccstack[first+i].t, sccstack[first+i].hash);

      for (unsigned i = 0; i < size; i++)
        sccstack[first+i].hash
          = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}

/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  */

void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
                     tree expr, bool ref_p, bool this_ref_p)
{
  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
    return;

  worklist w;
  w.expr = expr;
  w.from_state = from_state;
  w.cstate = NULL;
  w.ref_p = ref_p;
  w.this_ref_p = this_ref_p;
  worklist_vec.safe_push (w);
}

/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
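
/* Depending on EXPR this emits one of: an LTO_null record, an indexable
   reference (lto_output_tree_ref), an LTO_tree_pickle_reference into the
   writer cache for something streamed earlier, or a fresh DFS walk that
   streams the containing SCCs followed by a pickle reference to EXPR
   itself.  */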
void
lto_output_tree (struct output_block *ob, tree expr,
                 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
         we don't write it more than once.  Otherwise, the reader
         will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
                           lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
         trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
         what tree edges we walk in the DFS walk and what edges
         we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  */
      /* Save ob state ... */
      /* let's see ... */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
         ??? If expr ended up as a singleton we could have
         inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
                           lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}

/* Output to OB a list of try/catch handlers starting with FIRST.  */

static void
output_eh_try_list (struct output_block *ob, eh_catch first)
{
  eh_catch n;

  for (n = first; n; n = n->next_catch)
    {
      streamer_write_record_start (ob, LTO_eh_catch);
      stream_write_tree (ob, n->type_list, true);
      stream_write_tree (ob, n->filter_list, true);
      stream_write_tree (ob, n->label, true);
    }

  streamer_write_record_start (ob, LTO_null);
}


/* Output EH region R to OB.  */

static void
output_eh_region (struct output_block *ob, eh_region r)
{
  enum LTO_tags tag;

  if (r == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  if (r->type == ERT_CLEANUP)
    tag = LTO_ert_cleanup;
  else if (r->type == ERT_TRY)
    tag = LTO_ert_try;
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    tag = LTO_ert_allowed_exceptions;
  else if (r->type == ERT_MUST_NOT_THROW)
    tag = LTO_ert_must_not_throw;
  else
    gcc_unreachable ();

  streamer_write_record_start (ob, tag);
  streamer_write_hwi (ob, r->index);

  if (r->outer)
    streamer_write_hwi (ob, r->outer->index);
  else
    streamer_write_zero (ob);

  if (r->inner)
    streamer_write_hwi (ob, r->inner->index);
  else
    streamer_write_zero (ob);

  if (r->next_peer)
    streamer_write_hwi (ob, r->next_peer->index);
  else
    streamer_write_zero (ob);

  if (r->type == ERT_TRY)
    {
      output_eh_try_list (ob, r->u.eh_try.first_catch);
    }
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    {
      stream_write_tree (ob, r->u.allowed.type_list, true);
      stream_write_tree (ob, r->u.allowed.label, true);
      streamer_write_uhwi (ob, r->u.allowed.filter);
    }
  else if (r->type == ERT_MUST_NOT_THROW)
    {
      stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
      bitpack_d bp = bitpack_create (ob->main_stream);
      stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
      streamer_write_bitpack (&bp);
    }

  if (r->landing_pads)
    streamer_write_hwi (ob, r->landing_pads->index);
  else
    streamer_write_zero (ob);
}


/* Output landing pad LP to OB.  */

static void
output_eh_lp (struct output_block *ob, eh_landing_pad lp)
{
  if (lp == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  streamer_write_record_start (ob, LTO_eh_landing_pad);
  streamer_write_hwi (ob, lp->index);
  if (lp->next_lp)
    streamer_write_hwi (ob, lp->next_lp->index);
  else
    streamer_write_zero (ob);

  if (lp->region)
    streamer_write_hwi (ob, lp->region->index);
  else
    streamer_write_zero (ob);

  stream_write_tree (ob, lp->post_landing_pad, true);
}


/* Output the existing eh_table to OB.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
        output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
        output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
        stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  */
      if (targetm.arm_eabi_unwinder)
        {
          tree t;
          streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
          FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
            stream_write_tree (ob, t, true);
        }
      else
        {
          uchar c;
          streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
          FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
            streamer_write_char_stream (ob->main_stream, c);
        }
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}

/* Output all of the active ssa names to the ssa_names stream.  */

static void
output_ssa_names (struct output_block *ob, struct function *fn)
{
  unsigned int i, len;

  len = vec_safe_length (SSANAMES (fn));
  streamer_write_uhwi (ob, len);

  for (i = 1; i < len; i++)
    {
      tree ptr = (*SSANAMES (fn))[i];

      if (ptr == NULL_TREE
          || SSA_NAME_IN_FREE_LIST (ptr)
          || virtual_operand_p (ptr)
          /* Simply skip unreleased SSA names.  */
          || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
              && (! SSA_NAME_DEF_STMT (ptr)
                  || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
        continue;

      streamer_write_uhwi (ob, i);
      streamer_write_char_stream (ob->main_stream,
                                  SSA_NAME_IS_DEFAULT_DEF (ptr));
      if (SSA_NAME_VAR (ptr))
        stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
      else
        /* ??? This drops SSA_NAME_IDENTIFIER on the floor.  */
        stream_write_tree (ob, TREE_TYPE (ptr), true);
    }

  streamer_write_zero (ob);
}


/* Output the cfg.  */
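
/* Per basic block this streams the block index, the successor count and,
   for each successor edge, the destination index, probability and flags,
   with -1 terminating the block list; the linear next_bb chain follows,
   then the loop tree as one header-block index (or -1) per loop plus the
   bounds, estimates and OMP SIMD fields that copy_loop_info copies.  */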

static void
output_cfg (struct output_block *ob, struct function *fn)
{
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
                       profile_status_for_fn (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_fn (fn));

  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          streamer_write_uhwi (ob, e->dest->index);
          e->probability.stream_out (ob);
          streamer_write_uhwi (ob, e->flags);
        }
    }

  streamer_write_hwi (ob, -1);

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  streamer_write_hwi (ob, -1);

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      struct loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
         the loop tree on the reader side.  Stream -1 for an unused
         loop entry.  */
      if (!loop)
        {
          streamer_write_hwi (ob, -1);
          continue;
        }
      else
        streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies.  */
      streamer_write_enum (ob->main_stream,
                           loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
        streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
      streamer_write_hwi (ob, loop->any_likely_upper_bound);
      if (loop->any_likely_upper_bound)
        streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
        streamer_write_widest_int (ob, loop->nb_iterations_estimate);

      /* Write OMP SIMD related info.  */
      streamer_write_hwi (ob, loop->safelen);
      streamer_write_hwi (ob, loop->unroll);
      streamer_write_hwi (ob, loop->dont_vectorize);
      streamer_write_hwi (ob, loop->force_vectorize);
      stream_write_tree (ob, loop->simduid, true);
    }

  ob->main_stream = tmp_stream;
}
1944 /* Create the header in the file using OB. If the section type is for
1945 a function, set FN to the decl for that function. */
1947 void
1948 produce_asm (struct output_block *ob, tree fn)
1950 enum lto_section_type section_type = ob->section_type;
1951 struct lto_function_header header;
1952 char *section_name;
1954 if (section_type == LTO_section_function_body)
1956 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1957 section_name = lto_get_section_name (section_type, name, NULL);
1959 else
1960 section_name = lto_get_section_name (section_type, NULL, NULL);
1962 lto_begin_section (section_name, !flag_wpa);
1963 free (section_name);
1965 /* The entire header stream is computed here. */
1966 memset (&header, 0, sizeof (struct lto_function_header));
1968 /* Write the header. */
1969 header.major_version = LTO_major_version;
1970 header.minor_version = LTO_minor_version;
1972 if (section_type == LTO_section_function_body)
1973 header.cfg_size = ob->cfg_stream->total_size;
1974 header.main_size = ob->main_stream->total_size;
1975 header.string_size = ob->string_stream->total_size;
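/* The sizes recorded in the header let the reader split apart the cfg,
   main and string streams that are written back to back below.  */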
1976 lto_write_data (&header, sizeof header);
1978 /* Put all of the gimple and the string table out to the asm file as a
1979 block of text. */
1980 if (section_type == LTO_section_function_body)
1981 lto_write_stream (ob->cfg_stream);
1982 lto_write_stream (ob->main_stream);
1983 lto_write_stream (ob->string_stream);
1985 lto_end_section ();
1989 /* Output the base body of struct function FN using output block OB. */
1991 static void
1992 output_struct_function_base (struct output_block *ob, struct function *fn)
1994 struct bitpack_d bp;
1995 unsigned i;
1996 tree t;
1998 /* Output the static chain and non-local goto save area. */
1999 stream_write_tree (ob, fn->static_chain_decl, true);
2000 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2002 /* Output all the local variables in the function. */
2003 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2004 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2005 stream_write_tree (ob, t, true);
2007 /* Output current IL state of the function. */
2008 streamer_write_uhwi (ob, fn->curr_properties);
2010 /* Write all the attributes for FN. */
2011 bp = bitpack_create (ob->main_stream);
2012 bp_pack_value (&bp, fn->is_thunk, 1);
2013 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2014 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2015 bp_pack_value (&bp, fn->returns_struct, 1);
2016 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2017 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2018 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2019 bp_pack_value (&bp, fn->after_inlining, 1);
2020 bp_pack_value (&bp, fn->stdarg, 1);
2021 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2022 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2023 bp_pack_value (&bp, fn->calls_alloca, 1);
2024 bp_pack_value (&bp, fn->calls_setjmp, 1);
2025 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2026 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2027 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2028 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2029 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2031 /* Output the function start and end loci. */
2032 stream_output_location (ob, &bp, fn->function_start_locus);
2033 stream_output_location (ob, &bp, fn->function_end_locus);
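/* Both locations above are packed into the same bitpack as the flags;
   the bitpack itself is only committed to the stream below.  */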
2035 streamer_write_bitpack (&bp);
2039 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2041 static void
2042 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2044 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2045 if (! BLOCK_SUBBLOCKS (root))
2046 leafs.safe_push (root);
2047 else
2048 collect_block_tree_leafs (root, leafs);
2051 /* Output the body of function NODE->DECL. */
2053 static void
2054 output_function (struct cgraph_node *node)
2056 tree function;
2057 struct function *fn;
2058 basic_block bb;
2059 struct output_block *ob;
2061 function = node->decl;
2062 fn = DECL_STRUCT_FUNCTION (function);
2063 ob = create_output_block (LTO_section_function_body);
2065 clear_line_info (ob);
2066 ob->symbol = node;
2068 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2070 /* Set current_function_decl and cfun. */
2071 push_cfun (fn);
2073 /* Make string 0 be a NULL string. */
2074 streamer_write_char_stream (ob->string_stream, 0);
2076 streamer_write_record_start (ob, LTO_function);
2078 /* Output the result decl and the argument decls. */
2079 stream_write_tree (ob, DECL_RESULT (function), true);
2080 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2082 /* Output debug args if available. */
2083 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2084 if (! debugargs)
2085 streamer_write_uhwi (ob, 0);
2086 else
2088 streamer_write_uhwi (ob, (*debugargs)->length ());
2089 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2090 stream_write_tree (ob, (**debugargs)[i], true);
2093 /* Output DECL_INITIAL for the function, which contains the tree of
2094 lexical scopes. */
2095 stream_write_tree (ob, DECL_INITIAL (function), true);
2096 /* As the tree streamer follows BLOCK_SUPERCONTEXT but does not recurse
2097 into BLOCK_SUBBLOCKS, collect the block tree leafs and stream those. */
2098 auto_vec<tree> block_tree_leafs;
2099 if (DECL_INITIAL (function))
2100 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2101 streamer_write_uhwi (ob, block_tree_leafs.length ());
2102 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2103 stream_write_tree (ob, block_tree_leafs[i], true);
2105 /* We also stream abstract functions, for which we stream only what is
2106 needed for debug info. */
2107 if (gimple_has_body_p (function))
2109 streamer_write_uhwi (ob, 1);
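/* The 1 written above tells the reader that a full body follows; the
   else branch below writes 0 instead, in which case only the declaration
   pieces already streamed are present.  */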
2110 output_struct_function_base (ob, fn);
2112 /* Output all the SSA names used in the function. */
2113 output_ssa_names (ob, fn);
2115 /* Output any exception handling regions. */
2116 output_eh_regions (ob, fn);
2119 /* We will renumber the statements. The code that does this uses
2120 the same ordering that we use for serializing them so we can use
2121 the same code on the other end and not have to write out the
2122 statement numbers. We do not assign UIDs to PHIs here because
2123 virtual PHIs get re-computed on-the-fly which would make numbers
2124 inconsistent. */
2125 set_gimple_stmt_max_uid (cfun, 0);
2126 FOR_ALL_BB_FN (bb, cfun)
2128 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2129 gsi_next (&gsi))
2131 gphi *stmt = gsi.phi ();
2133 /* Virtual PHIs are not going to be streamed. */
2134 if (!virtual_operand_p (gimple_phi_result (stmt)))
2135 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2137 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2138 gsi_next (&gsi))
2140 gimple *stmt = gsi_stmt (gsi);
2141 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2144 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2145 virtual phis now. */
2146 FOR_ALL_BB_FN (bb, cfun)
2148 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2149 gsi_next (&gsi))
2151 gphi *stmt = gsi.phi ();
2152 if (virtual_operand_p (gimple_phi_result (stmt)))
2153 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2157 /* Output the code for the function. */
2158 FOR_ALL_BB_FN (bb, fn)
2159 output_bb (ob, bb, fn);
2161 /* The terminator for this function. */
2162 streamer_write_record_start (ob, LTO_null);
2164 output_cfg (ob, fn);
2166 pop_cfun ();
2168 else
2169 streamer_write_uhwi (ob, 0);
2171 /* Create a section to hold the pickled output of this function. */
2172 produce_asm (ob, function);
2174 destroy_output_block (ob);
2177 /* Output the initializer (constructor) of variable NODE->DECL. */
2179 static void
2180 output_constructor (struct varpool_node *node)
2182 tree var = node->decl;
2183 struct output_block *ob;
2185 ob = create_output_block (LTO_section_function_body);
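/* Variable initializers reuse the LTO_section_function_body section
   type; produce_asm below names the section after the variable's
   assembler name, just as for functions.  */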
2187 clear_line_info (ob);
2188 ob->symbol = node;
2190 /* Make string 0 be a NULL string. */
2191 streamer_write_char_stream (ob->string_stream, 0);
2193 /* Output DECL_INITIAL for the variable, which holds its initializer. */
2195 stream_write_tree (ob, DECL_INITIAL (var), true);
2197 /* Create a section to hold the pickled output of this initializer. */
2198 produce_asm (ob, var);
2200 destroy_output_block (ob);
2204 /* Emit toplevel asms. */
2206 void
2207 lto_output_toplevel_asms (void)
2209 struct output_block *ob;
2210 struct asm_node *can;
2211 char *section_name;
2212 struct lto_simple_header_with_strings header;
2214 if (!symtab->first_asm_symbol ())
2215 return;
2217 ob = create_output_block (LTO_section_asm);
2219 /* Make string 0 be a NULL string. */
2220 streamer_write_char_stream (ob->string_stream, 0);
2222 for (can = symtab->first_asm_symbol (); can; can = can->next)
2224 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2225 streamer_write_hwi (ob, can->order);
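/* A NULL string terminates the list of toplevel asms for the reader.  */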
2228 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2230 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2231 lto_begin_section (section_name, !flag_wpa);
2232 free (section_name);
2234 /* The entire header stream is computed here. */
2235 memset (&header, 0, sizeof (header));
2237 /* Write the header. */
2238 header.major_version = LTO_major_version;
2239 header.minor_version = LTO_minor_version;
2241 header.main_size = ob->main_stream->total_size;
2242 header.string_size = ob->string_stream->total_size;
2243 lto_write_data (&header, sizeof header);
2245 /* Put all of the gimple and the string table out to the asm file as a
2246 block of text. */
2247 lto_write_stream (ob->main_stream);
2248 lto_write_stream (ob->string_stream);
2250 lto_end_section ();
2252 destroy_output_block (ob);
2256 /* Copy the function body or variable constructor of NODE without deserializing. */
2258 static void
2259 copy_function_or_variable (struct symtab_node *node)
2261 tree function = node->decl;
2262 struct lto_file_decl_data *file_data = node->lto_file_data;
2263 const char *data;
2264 size_t len;
2265 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2266 char *section_name =
2267 lto_get_section_name (LTO_section_function_body, name, NULL);
2268 size_t i, j;
2269 struct lto_in_decl_state *in_state;
2270 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2272 lto_begin_section (section_name, false);
2273 free (section_name);
2275 /* We may have renamed the declaration, e.g., a static function. */
2276 name = lto_get_decl_name_mapping (file_data, name);
2278 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2279 name, &len);
2280 gcc_assert (data);
2282 /* Do a bit copy of the function body. */
2283 lto_write_raw_data (data, len);
2285 /* Copy decls. */
2286 in_state =
2287 lto_get_function_in_decl_state (node->lto_file_data, function);
2288 gcc_assert (in_state);
2289 out_state->compressed = in_state->compressed;
2291 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2293 size_t n = vec_safe_length (in_state->streams[i]);
2294 vec<tree, va_gc> *trees = in_state->streams[i];
2295 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2297 /* The out state must have the same indices as the in state, so just
2298 copy the vector. All the encoders in the out state must still be
2299 empty when we reach here. */
2300 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2301 encoder->trees.reserve_exact (n);
2302 for (j = 0; j < n; j++)
2303 encoder->trees.safe_push ((*trees)[j]);
2306 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2307 data, len);
2308 lto_end_section ();
2311 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
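/* For example (illustration only): in an access v.f to a public VAR_DECL v,
   operand 0 is rewritten from v to MEM[(T *)&v] with T the type of v, so the
   reference goes through v's address while the access keeps its type.  */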
2313 static tree
2314 wrap_refs (tree *tp, int *ws, void *)
2316 tree t = *tp;
2317 if (handled_component_p (t)
2318 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2319 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2321 tree decl = TREE_OPERAND (t, 0);
2322 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2323 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2324 build1 (ADDR_EXPR, ptrtype, decl),
2325 build_int_cst (ptrtype, 0));
2326 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2327 *ws = 0;
2329 else if (TREE_CODE (t) == CONSTRUCTOR)
2330 ;
2331 else if (!EXPR_P (t))
2332 *ws = 0;
2333 return NULL_TREE;
2336 /* Main entry point from the pass manager. */
2338 void
2339 lto_output (void)
2341 struct lto_out_decl_state *decl_state;
2342 bitmap output = NULL;
2343 int i, n_nodes;
2344 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2346 if (flag_checking)
2347 output = lto_bitmap_alloc ();
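/* With checking enabled, OUTPUT records the DECL_UID of every body and
   initializer emitted so the asserts below can catch a symbol being
   streamed twice.  */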
2349 /* Initialize the streamer. */
2350 lto_streamer_init ();
2352 n_nodes = lto_symtab_encoder_size (encoder);
2353 /* Process the functions with bodies and the variables with initializers to be streamed. */
2354 for (i = 0; i < n_nodes; i++)
2356 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2357 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2359 if (lto_symtab_encoder_encode_body_p (encoder, node)
2360 && !node->alias
2361 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2363 if (flag_checking)
2365 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2366 bitmap_set_bit (output, DECL_UID (node->decl));
2368 decl_state = lto_new_out_decl_state ();
2369 lto_push_out_decl_state (decl_state);
2370 if (gimple_has_body_p (node->decl) || !flag_wpa
2371 /* Thunks have no body but they may be synthesized
2372 at WPA time. */
2373 || DECL_ARGUMENTS (node->decl))
2374 output_function (node);
2375 else
2376 copy_function_or_variable (node);
2377 gcc_assert (lto_get_out_decl_state () == decl_state);
2378 lto_pop_out_decl_state ();
2379 lto_record_function_out_decl_state (node->decl, decl_state);
2382 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2384 /* Wrap symbol references inside the ctor in a type
2385 preserving MEM_REF. */
2386 tree ctor = DECL_INITIAL (node->decl);
2387 if (ctor && !in_lto_p)
2388 walk_tree (&ctor, wrap_refs, NULL, NULL);
2389 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2390 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2391 && !node->alias)
2393 timevar_push (TV_IPA_LTO_CTORS_OUT);
2394 if (flag_checking)
2396 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2397 bitmap_set_bit (output, DECL_UID (node->decl));
2399 decl_state = lto_new_out_decl_state ();
2400 lto_push_out_decl_state (decl_state);
2401 if (DECL_INITIAL (node->decl) != error_mark_node
2402 || !flag_wpa)
2403 output_constructor (node);
2404 else
2405 copy_function_or_variable (node);
2406 gcc_assert (lto_get_out_decl_state () == decl_state);
2407 lto_pop_out_decl_state ();
2408 lto_record_function_out_decl_state (node->decl, decl_state);
2409 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2414 /* Emit the callgraph after emitting function bodies. This needs to
2415 be done now to make sure that all the statements in every function
2416 have been renumbered so that edges can be associated with call
2417 statements using the statement UIDs. */
2418 output_symtab ();
2420 output_offload_tables ();
2422 if (flag_checking)
2423 lto_bitmap_free (output);
2427 /* Write each node encoded by ENCODER to OB, as well as those reachable
2428 from it and required for correct representation of its semantics.
2429 Each node in ENCODER must be a global declaration or a type. A node
2430 is written only once, even if it appears multiple times in the
2431 vector. Certain transitively-reachable nodes, such as those
2432 representing expressions, may be duplicated, but such nodes
2433 must not appear in ENCODER itself. */
2435 static void
2436 write_global_stream (struct output_block *ob,
2437 struct lto_tree_ref_encoder *encoder)
2439 tree t;
2440 size_t index;
2441 const size_t size = lto_tree_ref_encoder_size (encoder);
2443 for (index = 0; index < size; index++)
2445 t = lto_tree_ref_encoder_get_tree (encoder, index);
2446 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2447 stream_write_tree (ob, t, false);
2452 /* Write a sequence of indices into the globals vector corresponding
2453 to the trees in ENCODER. These are used by the reader to map the
2454 indices used to refer to global entities within function bodies to
2455 their referents. */
2457 static void
2458 write_global_references (struct output_block *ob,
2459 struct lto_tree_ref_encoder *encoder)
2461 tree t;
2462 uint32_t index;
2463 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2465 /* Write size and slot indexes as 32-bit unsigned numbers. */
2466 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2467 data[0] = size;
2469 for (index = 0; index < size; index++)
2471 unsigned slot_num;
2473 t = lto_tree_ref_encoder_get_tree (encoder, index);
2474 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2475 gcc_assert (slot_num != (unsigned)-1);
2476 data[index + 1] = slot_num;
2479 lto_write_data (data, sizeof (int32_t) * (size + 1));
2480 free (data);
2484 /* Write all the streams in an lto_out_decl_state STATE using
2485 output block OB. */
2487 void
2488 lto_output_decl_state_streams (struct output_block *ob,
2489 struct lto_out_decl_state *state)
2491 int i;
2493 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2494 write_global_stream (ob, &state->streams[i]);
2498 /* Write all the references in an lto_out_decl_state STATE using
2499 output block OB. */
2501 void
2502 lto_output_decl_state_refs (struct output_block *ob,
2503 struct lto_out_decl_state *state)
2505 unsigned i;
2506 unsigned ref;
2507 tree decl;
2509 /* Write a reference to FUNCTION_DECL. If there is no function,
2510 write a reference to void_type_node instead. */
2511 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2512 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2513 gcc_assert (ref != (unsigned)-1);
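/* Fold the compression flag of this state into the low bit of the
   reference.  */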
2514 ref = ref * 2 + (state->compressed ? 1 : 0);
2515 lto_write_data (&ref, sizeof (uint32_t));
2517 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2518 write_global_references (ob, &state->streams[i]);
2522 /* Return the written size of STATE. */
2524 static size_t
2525 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2527 int i;
2528 size_t size;
2530 size = sizeof (int32_t); /* fn_ref. */
2531 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2533 size += sizeof (int32_t); /* vector size. */
2534 size += (lto_tree_ref_encoder_size (&state->streams[i])
2535 * sizeof (int32_t));
2537 return size;
2541 /* Write symbol T to the symbol table, looking up its slot number in CACHE.
2542 SEEN holds the names written so far; ALIAS is true when T is an alias. */
2544 static void
2545 write_symbol (struct streamer_tree_cache_d *cache,
2546 tree t, hash_set<const char *> *seen, bool alias)
2548 const char *name;
2549 enum gcc_plugin_symbol_kind kind;
2550 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2551 unsigned slot_num;
2552 uint64_t size;
2553 const char *comdat;
2554 unsigned char c;
2556 /* None of the following kinds of symbols are needed in the
2557 symbol table. */
2558 if (!TREE_PUBLIC (t)
2559 || is_builtin_fn (t)
2560 || DECL_ABSTRACT_P (t)
2561 || (VAR_P (t) && DECL_HARD_REGISTER (t)))
2562 return;
2564 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2566 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2568 /* This behaves like assemble_name_raw in varasm.c, performing the
2569 same name manipulations that ASM_OUTPUT_LABELREF does. */
2570 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2572 if (seen->add (name))
2573 return;
2575 streamer_tree_cache_lookup (cache, t, &slot_num);
2576 gcc_assert (slot_num != (unsigned)-1);
2578 if (DECL_EXTERNAL (t))
2580 if (DECL_WEAK (t))
2581 kind = GCCPK_WEAKUNDEF;
2582 else
2583 kind = GCCPK_UNDEF;
2585 else
2587 if (DECL_WEAK (t))
2588 kind = GCCPK_WEAKDEF;
2589 else if (DECL_COMMON (t))
2590 kind = GCCPK_COMMON;
2591 else
2592 kind = GCCPK_DEF;
2594 /* When something is defined, it should have a symtab node attached. */
2595 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2596 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2597 || (cgraph_node::get (t)
2598 && cgraph_node::get (t)->definition));
2601 /* Imitate what default_elf_asm_output_external does.
2602 When a symbol is external, we need to output it with DEFAULT visibility
2603 when compiling with -fvisibility=default, but with HIDDEN visibility
2604 when the symbol has attribute visibility("hidden") specified.
2605 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2606 right. */
2608 if (DECL_EXTERNAL (t)
2609 && !targetm.binds_local_p (t))
2610 visibility = GCCPV_DEFAULT;
2611 else
2612 switch (DECL_VISIBILITY (t))
2614 case VISIBILITY_DEFAULT:
2615 visibility = GCCPV_DEFAULT;
2616 break;
2617 case VISIBILITY_PROTECTED:
2618 visibility = GCCPV_PROTECTED;
2619 break;
2620 case VISIBILITY_HIDDEN:
2621 visibility = GCCPV_HIDDEN;
2622 break;
2623 case VISIBILITY_INTERNAL:
2624 visibility = GCCPV_INTERNAL;
2625 break;
2628 if (kind == GCCPK_COMMON
2629 && DECL_SIZE_UNIT (t)
2630 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2631 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2632 else
2633 size = 0;
2635 if (DECL_ONE_ONLY (t))
2636 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2637 else
2638 comdat = "";
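/* A symbol table entry is laid out as: the NUL-terminated assembler name,
   the NUL-terminated comdat group, one byte each for the kind and the
   visibility, an 8-byte size and a 4-byte slot number.  */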
2640 lto_write_data (name, strlen (name) + 1);
2641 lto_write_data (comdat, strlen (comdat) + 1);
2642 c = (unsigned char) kind;
2643 lto_write_data (&c, 1);
2644 c = (unsigned char) visibility;
2645 lto_write_data (&c, 1);
2646 lto_write_data (&size, 8);
2647 lto_write_data (&slot_num, 4);
2650 /* Return true if NODE should appear in the plugin symbol table. */
2652 bool
2653 output_symbol_p (symtab_node *node)
2655 struct cgraph_node *cnode;
2656 if (!node->real_symbol_p ())
2657 return false;
2658 /* We keep external functions in the symtab for the sake of inlining
2659 and devirtualization. We do not want to see them in the symbol table
2660 as references unless they are really used. */
2661 cnode = dyn_cast <cgraph_node *> (node);
2662 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2663 && cnode->callers)
2664 return true;
2666 /* Ignore all references from initializers of external variables - they are
2667 not really part of the compilation unit until they are used by folding.
2668 Some symbols, like references to external construction vtables, cannot be
2669 referred to at all; we decide this in can_refer_decl_in_current_unit_p. */
2670 if (!node->definition || DECL_EXTERNAL (node->decl))
2672 int i;
2673 struct ipa_ref *ref;
2674 for (i = 0; node->iterate_referring (i, ref); i++)
2676 if (ref->use == IPA_REF_ALIAS)
2677 continue;
2678 if (is_a <cgraph_node *> (ref->referring))
2679 return true;
2680 if (!DECL_EXTERNAL (ref->referring->decl))
2681 return true;
2683 return false;
2685 return true;
2689 /* Write an IL symbol table to OB, covering the symbols in the
2690 symtab node encoder of OB's decl state. */
2692 static void
2693 produce_symtab (struct output_block *ob)
2695 struct streamer_tree_cache_d *cache = ob->writer_cache;
2696 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2697 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2698 lto_symtab_encoder_iterator lsei;
2700 lto_begin_section (section_name, false);
2701 free (section_name);
2703 hash_set<const char *> seen;
2705 /* Write the symbol table.
2706 First write everything defined and then all declarations.
2707 This is necessary to handle cases where we have duplicated symbols. */
2708 for (lsei = lsei_start (encoder);
2709 !lsei_end_p (lsei); lsei_next (&lsei))
2711 symtab_node *node = lsei_node (lsei);
2713 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2714 continue;
2715 write_symbol (cache, node->decl, &seen, false);
2717 for (lsei = lsei_start (encoder);
2718 !lsei_end_p (lsei); lsei_next (&lsei))
2720 symtab_node *node = lsei_node (lsei);
2722 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2723 continue;
2724 write_symbol (cache, node->decl, &seen, false);
2727 lto_end_section ();
2731 /* Init the streamer_mode_table for output, where we collect info on what
2732 machine_mode values have been streamed. */
2733 void
2734 lto_output_init_mode_table (void)
2736 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2740 /* Write the mode table. */
2741 static void
2742 lto_write_mode_table (void)
2744 struct output_block *ob;
2745 ob = create_output_block (LTO_section_mode_table);
2746 bitpack_d bp = bitpack_create (ob->main_stream);
2748 /* Ensure that for every mode with GET_MODE_INNER (m) != m the inner
2749 mode is marked as well. */
2750 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2751 if (streamer_mode_table[i])
2753 machine_mode m = (machine_mode) i;
2754 machine_mode inner_m = GET_MODE_INNER (m);
2755 if (inner_m != m)
2756 streamer_mode_table[(int) inner_m] = 1;
2758 /* First stream modes that have GET_MODE_INNER (m) == m,
2759 so that we can refer to them afterwards. */
2760 for (int pass = 0; pass < 2; pass++)
2761 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2762 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2764 machine_mode m = (machine_mode) i;
2765 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2766 continue;
2767 bp_pack_value (&bp, m, 8);
2768 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2769 bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2770 bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2771 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2772 bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2773 switch (GET_MODE_CLASS (m))
2775 case MODE_FRACT:
2776 case MODE_UFRACT:
2777 case MODE_ACCUM:
2778 case MODE_UACCUM:
2779 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2780 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2781 break;
2782 case MODE_FLOAT:
2783 case MODE_DECIMAL_FLOAT:
2784 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2785 break;
2786 default:
2787 break;
2789 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
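/* Terminate the mode table with a VOIDmode entry.  */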
2791 bp_pack_value (&bp, VOIDmode, 8);
2793 streamer_write_bitpack (&bp);
2795 char *section_name
2796 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2797 lto_begin_section (section_name, !flag_wpa);
2798 free (section_name);
2800 /* The entire header stream is computed here. */
2801 struct lto_simple_header_with_strings header;
2802 memset (&header, 0, sizeof (header));
2804 /* Write the header. */
2805 header.major_version = LTO_major_version;
2806 header.minor_version = LTO_minor_version;
2808 header.main_size = ob->main_stream->total_size;
2809 header.string_size = ob->string_stream->total_size;
2810 lto_write_data (&header, sizeof header);
2812 /* Put all of the gimple and the string table out to the asm file as a
2813 block of text. */
2814 lto_write_stream (ob->main_stream);
2815 lto_write_stream (ob->string_stream);
2817 lto_end_section ();
2818 destroy_output_block (ob);
2822 /* This pass is run after all of the functions are serialized and all
2823 of the IPA passes have written their serialized forms. This pass
2824 causes the vector of all of the global decls and types used from
2825 this file to be written into a section that can then be read back in to
2826 recover these nodes on the other side. */
2828 void
2829 produce_asm_for_decls (void)
2831 struct lto_out_decl_state *out_state;
2832 struct lto_out_decl_state *fn_out_state;
2833 struct lto_decl_header header;
2834 char *section_name;
2835 struct output_block *ob;
2836 unsigned idx, num_fns;
2837 size_t decl_state_size;
2838 int32_t num_decl_states;
2840 ob = create_output_block (LTO_section_decls);
2842 memset (&header, 0, sizeof (struct lto_decl_header));
2844 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2845 lto_begin_section (section_name, !flag_wpa);
2846 free (section_name);
2848 /* Make string 0 be a NULL string. */
2849 streamer_write_char_stream (ob->string_stream, 0);
2851 gcc_assert (!alias_pairs);
2853 /* Get rid of the global decl state hash tables to save some memory. */
2854 out_state = lto_get_out_decl_state ();
2855 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2856 if (out_state->streams[i].tree_hash_table)
2858 delete out_state->streams[i].tree_hash_table;
2859 out_state->streams[i].tree_hash_table = NULL;
2862 /* Write the global symbols. */
2863 lto_output_decl_state_streams (ob, out_state);
2864 num_fns = lto_function_decl_states.length ();
2865 for (idx = 0; idx < num_fns; idx++)
2867 fn_out_state =
2868 lto_function_decl_states[idx];
2869 lto_output_decl_state_streams (ob, fn_out_state);
2872 header.major_version = LTO_major_version;
2873 header.minor_version = LTO_minor_version;
2875 /* Currently not used. This field would allow us to preallocate
2876 the globals vector, so that it need not be resized as it is extended. */
2877 header.num_nodes = -1;
2879 /* Compute the total size of all decl out states. */
2880 decl_state_size = sizeof (int32_t);
2881 decl_state_size += lto_out_decl_state_written_size (out_state);
2882 for (idx = 0; idx < num_fns; idx++)
2884 fn_out_state =
2885 lto_function_decl_states[idx];
2886 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2888 header.decl_state_size = decl_state_size;
2890 header.main_size = ob->main_stream->total_size;
2891 header.string_size = ob->string_stream->total_size;
2893 lto_write_data (&header, sizeof header);
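/* After the header the section contains the number of decl states, the
   references of the global decl state and of each per-function state,
   and finally the main and string streams.  */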
2895 /* Write the main out-decl state, followed by out-decl states of
2896 functions. */
2897 num_decl_states = num_fns + 1;
2898 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2899 lto_output_decl_state_refs (ob, out_state);
2900 for (idx = 0; idx < num_fns; idx++)
2902 fn_out_state = lto_function_decl_states[idx];
2903 lto_output_decl_state_refs (ob, fn_out_state);
2906 lto_write_stream (ob->main_stream);
2907 lto_write_stream (ob->string_stream);
2909 lto_end_section ();
2911 /* Write the symbol table. It is used by the linker to determine
2912 dependencies, and thus we can skip it for WPA. */
2913 if (!flag_wpa)
2914 produce_symtab (ob);
2916 /* Write command line opts. */
2917 lto_write_options ();
2919 /* Deallocate memory and clean up. */
2920 for (idx = 0; idx < num_fns; idx++)
2922 fn_out_state =
2923 lto_function_decl_states[idx];
2924 lto_delete_out_decl_state (fn_out_state);
2926 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2927 lto_function_decl_states.release ();
2928 destroy_output_block (ob);
2929 if (lto_stream_offload_p)
2930 lto_write_mode_table ();