gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2016 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
45 static void lto_write_tree (struct output_block*, tree, bool);
47 /* Clear the line info stored in DATA_IN. */
49 static void
50 clear_line_info (struct output_block *ob)
52 ob->current_file = NULL;
53 ob->current_line = 0;
54 ob->current_col = 0;
55 ob->current_sysp = false;
59 /* Create the output block and return it. SECTION_TYPE is
60 LTO_section_function_body or LTO_section_static_initializer. */
62 struct output_block *
63 create_output_block (enum lto_section_type section_type)
65 struct output_block *ob = XCNEW (struct output_block);
67 ob->section_type = section_type;
68 ob->decl_state = lto_get_out_decl_state ();
69 ob->main_stream = XCNEW (struct lto_output_stream);
70 ob->string_stream = XCNEW (struct lto_output_stream);
71 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
73 if (section_type == LTO_section_function_body)
74 ob->cfg_stream = XCNEW (struct lto_output_stream);
76 clear_line_info (ob);
78 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
79 gcc_obstack_init (&ob->obstack);
81 return ob;
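/* Illustrative sketch, not part of the original file: the typical lifecycle
   of an output block when streaming a function body.  The helper name
   example_stream_function_body and the elided streaming steps are
   hypothetical; create_output_block, produce_asm and destroy_output_block
   are the routines defined in this file.  */

static void
example_stream_function_body (tree fn_decl)
{
  struct output_block *ob = create_output_block (LTO_section_function_body);

  /* ... stream the function header, GIMPLE bodies and the CFG into
     ob->main_stream and ob->cfg_stream here ...  */

  /* Emit the accumulated streams as an LTO section and free the block.  */
  produce_asm (ob, fn_decl);
  destroy_output_block (ob);
}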
85 /* Destroy the output block OB. */
87 void
88 destroy_output_block (struct output_block *ob)
90 enum lto_section_type section_type = ob->section_type;
92 delete ob->string_hash_table;
93 ob->string_hash_table = NULL;
95 free (ob->main_stream);
96 free (ob->string_stream);
97 if (section_type == LTO_section_function_body)
98 free (ob->cfg_stream);
100 streamer_tree_cache_delete (ob->writer_cache);
101 obstack_free (&ob->obstack, NULL);
103 free (ob);
107 /* Look up NODE in the type table and write the index for it to OB. */
109 static void
110 output_type_ref (struct output_block *ob, tree node)
112 streamer_write_record_start (ob, LTO_type_ref);
113 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
117 /* Return true if tree node T is written to various tables. For these
118 nodes, we sometimes want to write their physical representation
119 (via lto_output_tree), and sometimes we need to emit an index
120 reference into a table (via lto_output_tree_ref). */
122 static bool
123 tree_is_indexable (tree t)
125 /* Parameters and return values of functions of variably modified types
126 must go to global stream, because they may be used in the type
127 definition. */
128 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
129 && DECL_CONTEXT (t))
130 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
131 /* IMPORTED_DECL is put into BLOCK and thus it can never be shared. */
132 else if (TREE_CODE (t) == IMPORTED_DECL)
133 return false;
134 else if (((VAR_P (t) && !TREE_STATIC (t))
135 || TREE_CODE (t) == TYPE_DECL
136 || TREE_CODE (t) == CONST_DECL
137 || TREE_CODE (t) == NAMELIST_DECL)
138 && decl_function_context (t))
139 return false;
140 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
141 return false;
142 /* Variably modified types need to be streamed alongside function
143 bodies because they can refer to local entities. Together with
144 them we have to localize their members as well.
145 ??? In theory that includes non-FIELD_DECLs as well. */
146 else if (TYPE_P (t)
147 && variably_modified_type_p (t, NULL_TREE))
148 return false;
149 else if (TREE_CODE (t) == FIELD_DECL
150 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
151 return false;
152 else
153 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
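/* Illustrative sketch, not part of the original file: how the indexable
   check above is typically consumed.  The helper name example_emit_tree is
   hypothetical; the dispatch mirrors what lto_output_tree does further down
   in this file.  */

static void
example_emit_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (ref_p && tree_is_indexable (expr))
    /* Indexable nodes are emitted as a reference into a table.  */
    lto_output_tree_ref (ob, expr);
  else
    /* Everything else gets its physical representation streamed.  */
    lto_output_tree (ob, expr, ref_p, ref_p);
}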
157 /* Output info about new location into bitpack BP.
158 After outputting bitpack, lto_output_location_data has
159 to be done to output actual data. */
161 void
162 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
163 location_t loc)
165 expanded_location xloc;
167 loc = LOCATION_LOCUS (loc);
168 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
169 loc < RESERVED_LOCATION_COUNT
170 ? loc : RESERVED_LOCATION_COUNT);
171 if (loc < RESERVED_LOCATION_COUNT)
172 return;
174 xloc = expand_location (loc);
176 bp_pack_value (bp, ob->current_file != xloc.file, 1);
177 bp_pack_value (bp, ob->current_line != xloc.line, 1);
178 bp_pack_value (bp, ob->current_col != xloc.column, 1);
180 if (ob->current_file != xloc.file)
182 bp_pack_string (ob, bp, xloc.file, true);
183 bp_pack_value (bp, xloc.sysp, 1);
185 ob->current_file = xloc.file;
186 ob->current_sysp = xloc.sysp;
188 if (ob->current_line != xloc.line)
189 bp_pack_var_len_unsigned (bp, xloc.line);
190 ob->current_line = xloc.line;
192 if (ob->current_col != xloc.column)
193 bp_pack_var_len_unsigned (bp, xloc.column);
194 ob->current_col = xloc.column;
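/* Illustrative sketch, not part of the original file: the caller-side
   pattern for the location streaming above.  The helper name
   example_write_location is hypothetical; the same bitpack_create /
   streamer_write_bitpack bracketing appears in output_eh_region below.  */

static void
example_write_location (struct output_block *ob, location_t loc)
{
  bitpack_d bp = bitpack_create (ob->main_stream);
  lto_output_location (ob, &bp, loc);
  streamer_write_bitpack (&bp);
}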
198 /* If EXPR is an indexable tree node, output a reference to it to
199 output block OB. Otherwise, output the physical representation of
200 EXPR to OB. */
202 static void
203 lto_output_tree_ref (struct output_block *ob, tree expr)
205 enum tree_code code;
207 if (TYPE_P (expr))
209 output_type_ref (ob, expr);
210 return;
213 code = TREE_CODE (expr);
214 switch (code)
216 case SSA_NAME:
217 streamer_write_record_start (ob, LTO_ssa_name_ref);
218 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
219 break;
221 case FIELD_DECL:
222 streamer_write_record_start (ob, LTO_field_decl_ref);
223 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
224 break;
226 case FUNCTION_DECL:
227 streamer_write_record_start (ob, LTO_function_decl_ref);
228 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
229 break;
231 case VAR_DECL:
232 case DEBUG_EXPR_DECL:
233 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
234 /* FALLTHRU */
235 case PARM_DECL:
236 streamer_write_record_start (ob, LTO_global_decl_ref);
237 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
238 break;
240 case CONST_DECL:
241 streamer_write_record_start (ob, LTO_const_decl_ref);
242 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
243 break;
245 case IMPORTED_DECL:
246 gcc_assert (decl_function_context (expr) == NULL);
247 streamer_write_record_start (ob, LTO_imported_decl_ref);
248 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
249 break;
251 case TYPE_DECL:
252 streamer_write_record_start (ob, LTO_type_decl_ref);
253 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
254 break;
256 case NAMELIST_DECL:
257 streamer_write_record_start (ob, LTO_namelist_decl_ref);
258 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
259 break;
261 case NAMESPACE_DECL:
262 streamer_write_record_start (ob, LTO_namespace_decl_ref);
263 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
264 break;
266 case LABEL_DECL:
267 streamer_write_record_start (ob, LTO_label_decl_ref);
268 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
269 break;
271 case RESULT_DECL:
272 streamer_write_record_start (ob, LTO_result_decl_ref);
273 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
274 break;
276 case TRANSLATION_UNIT_DECL:
277 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
278 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
279 break;
281 default:
282 /* No other node is indexable, so it should have been handled by
283 lto_output_tree. */
284 gcc_unreachable ();
289 /* Return true if EXPR is a tree node that can be written to disk. */
291 static inline bool
292 lto_is_streamable (tree expr)
294 enum tree_code code = TREE_CODE (expr);
296 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
297 name version in lto_output_tree_ref (see output_ssa_names). */
298 return !is_lang_specific (expr)
299 && code != SSA_NAME
300 && code != CALL_EXPR
301 && code != LANG_TYPE
302 && code != MODIFY_EXPR
303 && code != INIT_EXPR
304 && code != TARGET_EXPR
305 && code != BIND_EXPR
306 && code != WITH_CLEANUP_EXPR
307 && code != STATEMENT_LIST
308 && (code == CASE_LABEL_EXPR
309 || code == DECL_EXPR
310 || TREE_CODE_CLASS (code) != tcc_statement);
313 /* Very rough estimate of streaming size of the initializer. If we ignored
314 presence of strings, we could simply count the number of non-indexable
315 tree nodes and the number of references to indexable nodes. Strings however
316 may be very large and we do not want to dump them into the global stream.
318 Count the size of the initializer until the budget in DATA goes negative. */
320 static tree
321 subtract_estimated_size (tree *tp, int *ws, void *data)
323 long *sum = (long *)data;
324 if (tree_is_indexable (*tp))
326 /* An indexable tree is one reference into the global stream.
327 Guess it may be about 4 bytes. */
328 *sum -= 4;
329 *ws = 0;
331 /* String table entry + base of tree node needs to be streamed. */
332 if (TREE_CODE (*tp) == STRING_CST)
333 *sum -= TREE_STRING_LENGTH (*tp) + 8;
334 else
336 /* Identifiers are also variable length but should not appear
337 naked in a constructor. */
338 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
339 /* We do not really attempt to work out the size of a pickled tree, as
340 it is very variable. Make it bigger than the reference. */
341 *sum -= 16;
343 if (*sum < 0)
344 return *tp;
345 return NULL_TREE;
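/* Illustrative sketch, not part of the original file: how the budget-based
   walk above is consumed.  The helper name example_initializer_too_large_p
   is hypothetical; get_symbol_initial_value below performs this exact
   walk_tree call with a budget of 30 bytes.  */

static bool
example_initializer_too_large_p (tree initial)
{
  /* Roughly the cost of emitting a separate initializer section.  */
  long max_size = 30;
  /* walk_tree returns a non-NULL tree as soon as the budget goes negative.  */
  return walk_tree (&initial, subtract_estimated_size, (void *) &max_size,
		    NULL) != NULL_TREE;
}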
349 /* For EXPR, look up and return what we want to stream as its DECL_INITIAL. */
351 static tree
352 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
354 gcc_checking_assert (DECL_P (expr)
355 && TREE_CODE (expr) != FUNCTION_DECL
356 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
358 /* Handle DECL_INITIAL for symbols. */
359 tree initial = DECL_INITIAL (expr);
360 if (VAR_P (expr)
361 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
362 && !DECL_IN_CONSTANT_POOL (expr)
363 && initial)
365 varpool_node *vnode;
366 /* Extra section needs about 30 bytes; do not produce it for simple
367 scalar values. */
368 if (!(vnode = varpool_node::get (expr))
369 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
370 initial = error_mark_node;
371 if (initial != error_mark_node)
373 long max_size = 30;
374 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
375 NULL))
376 initial = error_mark_node;
380 return initial;
384 /* Write a physical representation of tree node EXPR to output block
385 OB. If REF_P is true, the leaves of EXPR are emitted as references
386 via lto_output_tree_ref. */
389 static void
390 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
392 /* Pack all the non-pointer fields in EXPR into a bitpack and write
393 the resulting bitpack. */
394 streamer_write_tree_bitfields (ob, expr);
396 /* Write all the pointer fields in EXPR. */
397 streamer_write_tree_body (ob, expr, ref_p);
399 /* Write any LTO-specific data to OB. */
400 if (DECL_P (expr)
401 && TREE_CODE (expr) != FUNCTION_DECL
402 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
404 /* Handle DECL_INITIAL for symbols. */
405 tree initial = get_symbol_initial_value
406 (ob->decl_state->symtab_node_encoder, expr);
407 stream_write_tree (ob, initial, ref_p);
411 /* Write a physical representation of tree node EXPR to output block
412 OB. If REF_P is true, the leaves of EXPR are emitted as references
413 via lto_output_tree_ref. */
416 static void
417 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
419 if (!lto_is_streamable (expr))
420 internal_error ("tree code %qs is not supported in LTO streams",
421 get_tree_code_name (TREE_CODE (expr)));
423 /* Write the header, containing everything needed to materialize
424 EXPR on the reading side. */
425 streamer_write_tree_header (ob, expr);
427 lto_write_tree_1 (ob, expr, ref_p);
429 /* Mark the end of EXPR. */
430 streamer_write_zero (ob);
433 /* Emit the physical representation of tree node EXPR to output block OB.
434 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
435 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
437 static void
438 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
439 bool ref_p, bool this_ref_p)
441 unsigned ix;
443 gcc_checking_assert (expr != NULL_TREE
444 && !(this_ref_p && tree_is_indexable (expr)));
446 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
447 expr, hash, &ix);
448 gcc_assert (!exists_p);
449 if (TREE_CODE (expr) == INTEGER_CST
450 && !TREE_OVERFLOW (expr))
452 /* Shared INTEGER_CST nodes are special because they need their
453 original type to be materialized by the reader (to implement
454 TYPE_CACHED_VALUES). */
455 streamer_write_integer_cst (ob, expr, ref_p);
457 else
459 /* This is the first time we see EXPR, write its fields
460 to OB. */
461 lto_write_tree (ob, expr, ref_p);
465 class DFS
467 public:
468 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
469 bool single_p);
470 ~DFS ();
472 struct scc_entry
474 tree t;
475 hashval_t hash;
477 vec<scc_entry> sccstack;
479 private:
480 struct sccs
482 unsigned int dfsnum;
483 unsigned int low;
485 struct worklist
487 tree expr;
488 sccs *from_state;
489 sccs *cstate;
490 bool ref_p;
491 bool this_ref_p;
494 static int scc_entry_compare (const void *, const void *);
496 void DFS_write_tree_body (struct output_block *ob,
497 tree expr, sccs *expr_state, bool ref_p);
499 void DFS_write_tree (struct output_block *ob, sccs *from_state,
500 tree expr, bool ref_p, bool this_ref_p);
502 hashval_t
503 hash_scc (struct output_block *ob, unsigned first, unsigned size,
504 bool ref_p, bool this_ref_p);
506 hash_map<tree, sccs *> sccstate;
507 vec<worklist> worklist_vec;
508 struct obstack sccstate_obstack;
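/* Illustrative sketch, not part of the original file: the DFS walker is
   driven entirely from its constructor.  Constructing it streams every SCC
   reachable from EXPR that is not yet in the writer cache, which is how
   lto_output_tree below uses it; the wrapper name
   example_stream_reachable_sccs is hypothetical.  */

static void
example_stream_reachable_sccs (struct output_block *ob, tree expr,
			       bool ref_p, bool this_ref_p)
{
  /* single_p is false for a normal top-level walk.  */
  DFS (ob, expr, ref_p, this_ref_p, false);
}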
511 /* Emit the physical representation of tree node EXPR to output block OB,
512 using depth-first search on the subgraph. If THIS_REF_P is true, the
513 leaves of EXPR are emitted as references via lto_output_tree_ref.
514 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
515 this is for a rewalk of a single leaf SCC. */
517 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
518 bool single_p)
520 unsigned int next_dfs_num = 1;
521 sccstack.create (0);
522 gcc_obstack_init (&sccstate_obstack);
523 worklist_vec = vNULL;
524 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
525 while (!worklist_vec.is_empty ())
527 worklist &w = worklist_vec.last ();
528 expr = w.expr;
529 sccs *from_state = w.from_state;
530 sccs *cstate = w.cstate;
531 ref_p = w.ref_p;
532 this_ref_p = w.this_ref_p;
533 if (cstate == NULL)
535 sccs **slot = &sccstate.get_or_insert (expr);
536 cstate = *slot;
537 if (cstate)
539 gcc_checking_assert (from_state);
540 if (cstate->dfsnum < from_state->dfsnum)
541 from_state->low = MIN (cstate->dfsnum, from_state->low);
542 worklist_vec.pop ();
543 continue;
546 scc_entry e = { expr, 0 };
547 /* Not yet visited. DFS recurse and push it onto the stack. */
548 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
549 sccstack.safe_push (e);
550 cstate->dfsnum = next_dfs_num++;
551 cstate->low = cstate->dfsnum;
552 w.cstate = cstate;
554 if (TREE_CODE (expr) == INTEGER_CST
555 && !TREE_OVERFLOW (expr))
556 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
557 else
559 DFS_write_tree_body (ob, expr, cstate, ref_p);
561 /* Walk any LTO-specific edges. */
562 if (DECL_P (expr)
563 && TREE_CODE (expr) != FUNCTION_DECL
564 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
566 /* Handle DECL_INITIAL for symbols. */
567 tree initial
568 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
569 expr);
570 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
573 continue;
576 /* See if we found an SCC. */
577 if (cstate->low == cstate->dfsnum)
579 unsigned first, size;
580 tree x;
582 /* If we are re-walking a single leaf SCC just pop it,
583 and let the earlier worklist item access the sccstack. */
584 if (single_p)
586 worklist_vec.pop ();
587 continue;
590 /* Pop the SCC and compute its size. */
591 first = sccstack.length ();
594 x = sccstack[--first].t;
596 while (x != expr);
597 size = sccstack.length () - first;
599 /* No need to compute hashes for LTRANS units, we don't perform
600 any merging there. */
601 hashval_t scc_hash = 0;
602 unsigned scc_entry_len = 0;
603 if (!flag_wpa)
605 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
607 /* Put the entries with the least number of collisions first. */
608 unsigned entry_start = 0;
609 scc_entry_len = size + 1;
610 for (unsigned i = 0; i < size;)
612 unsigned from = i;
613 for (i = i + 1; i < size
614 && (sccstack[first + i].hash
615 == sccstack[first + from].hash); ++i)
617 if (i - from < scc_entry_len)
619 scc_entry_len = i - from;
620 entry_start = from;
623 for (unsigned i = 0; i < scc_entry_len; ++i)
624 std::swap (sccstack[first + i],
625 sccstack[first + entry_start + i]);
627 /* We already sorted SCC deterministically in hash_scc. */
629 /* Check that we have only one SCC.
630 Naturally we may have conflicts if the hash function is not
631 strong enough. Let's see how far this gets. */
632 gcc_checking_assert (scc_entry_len == 1);
635 /* Write LTO_tree_scc. */
636 streamer_write_record_start (ob, LTO_tree_scc);
637 streamer_write_uhwi (ob, size);
638 streamer_write_uhwi (ob, scc_hash);
640 /* Write size-1 SCCs without wrapping them inside SCC bundles.
641 All INTEGER_CSTs need to be handled this way as we need
642 their type to materialize them. Also builtins are handled
643 this way.
644 ??? We still wrap these in LTO_tree_scc so at the
645 input side we can properly identify the tree we want
646 to ultimately return. */
647 if (size == 1)
648 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
649 else
651 /* Write the size of the SCC entry candidates. */
652 streamer_write_uhwi (ob, scc_entry_len);
654 /* Write all headers and populate the streamer cache. */
655 for (unsigned i = 0; i < size; ++i)
657 hashval_t hash = sccstack[first+i].hash;
658 tree t = sccstack[first+i].t;
659 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
660 t, hash, NULL);
661 gcc_assert (!exists_p);
663 if (!lto_is_streamable (t))
664 internal_error ("tree code %qs is not supported "
665 "in LTO streams",
666 get_tree_code_name (TREE_CODE (t)));
668 /* Write the header, containing everything needed to
669 materialize EXPR on the reading side. */
670 streamer_write_tree_header (ob, t);
673 /* Write the bitpacks and tree references. */
674 for (unsigned i = 0; i < size; ++i)
676 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
678 /* Mark the end of the tree. */
679 streamer_write_zero (ob);
683 /* Finally truncate the vector. */
684 sccstack.truncate (first);
686 if (from_state)
687 from_state->low = MIN (from_state->low, cstate->low);
688 worklist_vec.pop ();
689 continue;
692 gcc_checking_assert (from_state);
693 from_state->low = MIN (from_state->low, cstate->low);
694 if (cstate->dfsnum < from_state->dfsnum)
695 from_state->low = MIN (cstate->dfsnum, from_state->low);
696 worklist_vec.pop ();
698 worklist_vec.release ();
701 DFS::~DFS ()
703 sccstack.release ();
704 obstack_free (&sccstate_obstack, NULL);
707 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
708 DFS recurse for all tree edges originating from it. */
710 void
711 DFS::DFS_write_tree_body (struct output_block *ob,
712 tree expr, sccs *expr_state, bool ref_p)
714 #define DFS_follow_tree_edge(DEST) \
715 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
717 enum tree_code code;
719 code = TREE_CODE (expr);
721 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
723 if (TREE_CODE (expr) != IDENTIFIER_NODE)
724 DFS_follow_tree_edge (TREE_TYPE (expr));
727 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
729 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
730 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
733 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
735 DFS_follow_tree_edge (TREE_REALPART (expr));
736 DFS_follow_tree_edge (TREE_IMAGPART (expr));
739 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
741 /* Drop names that were created for anonymous entities. */
742 if (DECL_NAME (expr)
743 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
744 && anon_aggrname_p (DECL_NAME (expr)))
746 else
747 DFS_follow_tree_edge (DECL_NAME (expr));
748 DFS_follow_tree_edge (DECL_CONTEXT (expr));
751 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
753 DFS_follow_tree_edge (DECL_SIZE (expr));
754 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
756 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
757 special handling in LTO, it must be handled by streamer hooks. */
759 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
761 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
762 for early inlining so drop it on the floor instead of ICEing in
763 dwarf2out.c.
764 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
765 declarations which should be eliminated by decl merging. Be sure none
766 leaks to this point. */
767 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
769 if ((VAR_P (expr)
770 || TREE_CODE (expr) == PARM_DECL)
771 && DECL_HAS_VALUE_EXPR_P (expr))
772 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
773 if (VAR_P (expr))
774 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
777 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
779 if (TREE_CODE (expr) == TYPE_DECL)
780 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
783 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
785 /* Make sure we don't inadvertently set the assembler name. */
786 if (DECL_ASSEMBLER_NAME_SET_P (expr))
787 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
790 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
792 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
793 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
794 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
795 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
796 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
799 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
801 DFS_follow_tree_edge (DECL_VINDEX (expr));
802 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
803 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
804 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
807 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
809 DFS_follow_tree_edge (TYPE_SIZE (expr));
810 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
811 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
812 DFS_follow_tree_edge (TYPE_NAME (expr));
813 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
814 reconstructed during fixup. */
815 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
816 during fixup. */
817 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
818 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
819 /* TYPE_CANONICAL is re-computed during type merging, so no need
820 to follow it here. */
821 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
824 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
826 if (TREE_CODE (expr) == ENUMERAL_TYPE)
827 DFS_follow_tree_edge (TYPE_VALUES (expr));
828 else if (TREE_CODE (expr) == ARRAY_TYPE)
829 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
830 else if (RECORD_OR_UNION_TYPE_P (expr))
831 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
832 DFS_follow_tree_edge (t);
833 else if (TREE_CODE (expr) == FUNCTION_TYPE
834 || TREE_CODE (expr) == METHOD_TYPE)
835 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
837 if (!POINTER_TYPE_P (expr))
838 DFS_follow_tree_edge (TYPE_MINVAL (expr));
839 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
840 if (RECORD_OR_UNION_TYPE_P (expr))
841 DFS_follow_tree_edge (TYPE_BINFO (expr));
844 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
846 DFS_follow_tree_edge (TREE_PURPOSE (expr));
847 DFS_follow_tree_edge (TREE_VALUE (expr));
848 DFS_follow_tree_edge (TREE_CHAIN (expr));
851 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
853 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
854 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
857 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
859 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
860 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
861 DFS_follow_tree_edge (TREE_BLOCK (expr));
864 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
866 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
867 if (VAR_OR_FUNCTION_DECL_P (t)
868 && DECL_EXTERNAL (t))
869 /* We have to stream externals in the block chain as
870 non-references. See also
871 tree-streamer-out.c:streamer_write_chain. */
872 DFS_write_tree (ob, expr_state, t, ref_p, false);
873 else
874 DFS_follow_tree_edge (t);
876 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
878 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
879 handle - those that represent inlined function scopes.
880 For the rest, drop them on the floor instead of ICEing
881 in dwarf2out.c, but keep the notion of whether the block
882 is an inlined block by referring to itself for the sake of
883 tree_nonartificial_location. */
884 if (inlined_function_outer_scope_p (expr))
886 tree ultimate_origin = block_ultimate_origin (expr);
887 DFS_follow_tree_edge (ultimate_origin);
889 else if (BLOCK_ABSTRACT_ORIGIN (expr))
890 DFS_follow_tree_edge (expr);
891 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
892 information for early inlined BLOCKs so drop it on the floor instead
893 of ICEing in dwarf2out.c. */
895 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
896 streaming time. */
898 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
899 list is re-constructed from BLOCK_SUPERCONTEXT. */
902 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
904 unsigned i;
905 tree t;
907 /* Note that the number of BINFO slots has already been emitted in
908 EXPR's header (see streamer_write_tree_header) because this length
909 is needed to build the empty BINFO node on the reader side. */
910 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
911 DFS_follow_tree_edge (t);
912 DFS_follow_tree_edge (BINFO_OFFSET (expr));
913 DFS_follow_tree_edge (BINFO_VTABLE (expr));
914 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
916 /* The number of BINFO_BASE_ACCESSES has already been emitted in
917 EXPR's bitfield section. */
918 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
919 DFS_follow_tree_edge (t);
921 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
922 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
925 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
927 unsigned i;
928 tree index, value;
930 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
932 DFS_follow_tree_edge (index);
933 DFS_follow_tree_edge (value);
937 if (code == OMP_CLAUSE)
939 int i;
940 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
941 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
942 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
945 #undef DFS_follow_tree_edge
948 /* Return a hash value for the tree T.
949 CACHE holds hash values of trees outside the current SCC. MAP, if non-NULL,
950 may hold hash values of trees inside the current SCC. */
952 static hashval_t
953 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
955 inchash::hash hstate;
957 #define visit(SIBLING) \
958 do { \
959 unsigned ix; \
960 if (!SIBLING) \
961 hstate.add_int (0); \
962 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
963 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
964 else if (map) \
965 hstate.add_int (*map->get (SIBLING)); \
966 else \
967 hstate.add_int (1); \
968 } while (0)
970 /* Hash TS_BASE. */
971 enum tree_code code = TREE_CODE (t);
972 hstate.add_int (code);
973 if (!TYPE_P (t))
975 hstate.add_flag (TREE_SIDE_EFFECTS (t));
976 hstate.add_flag (TREE_CONSTANT (t));
977 hstate.add_flag (TREE_READONLY (t));
978 hstate.add_flag (TREE_PUBLIC (t));
980 hstate.add_flag (TREE_ADDRESSABLE (t));
981 hstate.add_flag (TREE_THIS_VOLATILE (t));
982 if (DECL_P (t))
983 hstate.add_flag (DECL_UNSIGNED (t));
984 else if (TYPE_P (t))
985 hstate.add_flag (TYPE_UNSIGNED (t));
986 if (TYPE_P (t))
987 hstate.add_flag (TYPE_ARTIFICIAL (t));
988 else
989 hstate.add_flag (TREE_NO_WARNING (t));
990 hstate.add_flag (TREE_NOTHROW (t));
991 hstate.add_flag (TREE_STATIC (t));
992 hstate.add_flag (TREE_PROTECTED (t));
993 hstate.add_flag (TREE_DEPRECATED (t));
994 if (code != TREE_BINFO)
995 hstate.add_flag (TREE_PRIVATE (t));
996 if (TYPE_P (t))
998 hstate.add_flag (AGGREGATE_TYPE_P (t)
999 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1000 hstate.add_flag (TYPE_ADDR_SPACE (t));
1002 else if (code == SSA_NAME)
1003 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1004 hstate.commit_flag ();
1006 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1008 int i;
1009 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
1010 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
1011 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1012 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
1015 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1017 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1018 hstate.add_flag (r.cl);
1019 hstate.add_flag (r.sign);
1020 hstate.add_flag (r.signalling);
1021 hstate.add_flag (r.canonical);
1022 hstate.commit_flag ();
1023 hstate.add_int (r.uexp);
1024 hstate.add (r.sig, sizeof (r.sig));
1027 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1029 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1030 hstate.add_int (f.mode);
1031 hstate.add_int (f.data.low);
1032 hstate.add_int (f.data.high);
1035 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1037 hstate.add_wide_int (DECL_MODE (t));
1038 hstate.add_flag (DECL_NONLOCAL (t));
1039 hstate.add_flag (DECL_VIRTUAL_P (t));
1040 hstate.add_flag (DECL_IGNORED_P (t));
1041 hstate.add_flag (DECL_ABSTRACT_P (t));
1042 hstate.add_flag (DECL_ARTIFICIAL (t));
1043 hstate.add_flag (DECL_USER_ALIGN (t));
1044 hstate.add_flag (DECL_PRESERVE_P (t));
1045 hstate.add_flag (DECL_EXTERNAL (t));
1046 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1047 hstate.commit_flag ();
1048 hstate.add_int (DECL_ALIGN (t));
1049 if (code == LABEL_DECL)
1051 hstate.add_int (EH_LANDING_PAD_NR (t));
1052 hstate.add_int (LABEL_DECL_UID (t));
1054 else if (code == FIELD_DECL)
1056 hstate.add_flag (DECL_PACKED (t));
1057 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1058 hstate.add_int (DECL_OFFSET_ALIGN (t));
1060 else if (code == VAR_DECL)
1062 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1063 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1065 if (code == RESULT_DECL
1066 || code == PARM_DECL
1067 || code == VAR_DECL)
1069 hstate.add_flag (DECL_BY_REFERENCE (t));
1070 if (code == VAR_DECL
1071 || code == PARM_DECL)
1072 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1074 hstate.commit_flag ();
1077 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1078 hstate.add_int (DECL_REGISTER (t));
1080 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1082 hstate.add_flag (DECL_COMMON (t));
1083 hstate.add_flag (DECL_DLLIMPORT_P (t));
1084 hstate.add_flag (DECL_WEAK (t));
1085 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1086 hstate.add_flag (DECL_COMDAT (t));
1087 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1088 hstate.add_int (DECL_VISIBILITY (t));
1089 if (code == VAR_DECL)
1091 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1092 hstate.add_flag (DECL_HARD_REGISTER (t));
1093 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1095 if (TREE_CODE (t) == FUNCTION_DECL)
1097 hstate.add_flag (DECL_FINAL_P (t));
1098 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1099 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1101 hstate.commit_flag ();
1104 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1106 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1107 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1108 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1109 hstate.add_flag (DECL_UNINLINABLE (t));
1110 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1111 hstate.add_flag (DECL_IS_NOVOPS (t));
1112 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1113 hstate.add_flag (DECL_IS_MALLOC (t));
1114 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1115 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1116 hstate.add_flag (DECL_STATIC_CHAIN (t));
1117 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1118 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1119 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1120 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1121 hstate.add_flag (DECL_PURE_P (t));
1122 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1123 hstate.commit_flag ();
1124 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1125 hstate.add_int (DECL_FUNCTION_CODE (t));
1128 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1130 hstate.add_wide_int (TYPE_MODE (t));
1131 hstate.add_flag (TYPE_STRING_FLAG (t));
1132 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1133 no streaming. */
1134 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1135 hstate.add_flag (TYPE_PACKED (t));
1136 hstate.add_flag (TYPE_RESTRICT (t));
1137 hstate.add_flag (TYPE_USER_ALIGN (t));
1138 hstate.add_flag (TYPE_READONLY (t));
1139 if (RECORD_OR_UNION_TYPE_P (t))
1141 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1142 hstate.add_flag (TYPE_FINAL_P (t));
1144 else if (code == ARRAY_TYPE)
1145 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1146 hstate.commit_flag ();
1147 hstate.add_int (TYPE_PRECISION (t));
1148 hstate.add_int (TYPE_ALIGN (t));
1151 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1152 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1153 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1155 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1156 /* We don't stream these when passing things to a different target. */
1157 && !lto_stream_offload_p)
1158 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1160 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1161 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1163 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1164 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1166 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1167 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1169 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1171 if (code != IDENTIFIER_NODE)
1172 visit (TREE_TYPE (t));
1175 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1176 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1177 visit (VECTOR_CST_ELT (t, i));
1179 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1181 visit (TREE_REALPART (t));
1182 visit (TREE_IMAGPART (t));
1185 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1187 /* Drop names that were created for anonymous entities. */
1188 if (DECL_NAME (t)
1189 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1190 && anon_aggrname_p (DECL_NAME (t)))
1192 else
1193 visit (DECL_NAME (t));
1194 if (DECL_FILE_SCOPE_P (t))
1196 else
1197 visit (DECL_CONTEXT (t));
1200 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1202 visit (DECL_SIZE (t));
1203 visit (DECL_SIZE_UNIT (t));
1204 visit (DECL_ATTRIBUTES (t));
1205 if ((code == VAR_DECL
1206 || code == PARM_DECL)
1207 && DECL_HAS_VALUE_EXPR_P (t))
1208 visit (DECL_VALUE_EXPR (t));
1209 if (code == VAR_DECL
1210 && DECL_HAS_DEBUG_EXPR_P (t))
1211 visit (DECL_DEBUG_EXPR (t));
1212 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1213 be able to call get_symbol_initial_value. */
1216 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1218 if (code == TYPE_DECL)
1219 visit (DECL_ORIGINAL_TYPE (t));
1222 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1224 if (DECL_ASSEMBLER_NAME_SET_P (t))
1225 visit (DECL_ASSEMBLER_NAME (t));
1228 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1230 visit (DECL_FIELD_OFFSET (t));
1231 visit (DECL_BIT_FIELD_TYPE (t));
1232 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1233 visit (DECL_FIELD_BIT_OFFSET (t));
1234 visit (DECL_FCONTEXT (t));
1237 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1239 visit (DECL_VINDEX (t));
1240 visit (DECL_FUNCTION_PERSONALITY (t));
1241 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1242 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1245 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1247 visit (TYPE_SIZE (t));
1248 visit (TYPE_SIZE_UNIT (t));
1249 visit (TYPE_ATTRIBUTES (t));
1250 visit (TYPE_NAME (t));
1251 visit (TYPE_MAIN_VARIANT (t));
1252 if (TYPE_FILE_SCOPE_P (t))
1254 else
1255 visit (TYPE_CONTEXT (t));
1256 visit (TYPE_STUB_DECL (t));
1259 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1261 if (code == ENUMERAL_TYPE)
1262 visit (TYPE_VALUES (t));
1263 else if (code == ARRAY_TYPE)
1264 visit (TYPE_DOMAIN (t));
1265 else if (RECORD_OR_UNION_TYPE_P (t))
1266 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1267 visit (f);
1268 else if (code == FUNCTION_TYPE
1269 || code == METHOD_TYPE)
1270 visit (TYPE_ARG_TYPES (t));
1271 if (!POINTER_TYPE_P (t))
1272 visit (TYPE_MINVAL (t));
1273 visit (TYPE_MAXVAL (t));
1274 if (RECORD_OR_UNION_TYPE_P (t))
1275 visit (TYPE_BINFO (t));
1278 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1280 visit (TREE_PURPOSE (t));
1281 visit (TREE_VALUE (t));
1282 visit (TREE_CHAIN (t));
1285 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1286 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1287 visit (TREE_VEC_ELT (t, i));
1289 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1291 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1292 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1293 visit (TREE_OPERAND (t, i));
1296 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1298 unsigned i;
1299 tree b;
1300 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1301 visit (b);
1302 visit (BINFO_OFFSET (t));
1303 visit (BINFO_VTABLE (t));
1304 visit (BINFO_VPTR_FIELD (t));
1305 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1306 visit (b);
1307 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1308 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1311 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1313 unsigned i;
1314 tree index, value;
1315 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1316 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1318 visit (index);
1319 visit (value);
1323 if (code == OMP_CLAUSE)
1325 int i;
1326 HOST_WIDE_INT val;
1328 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1329 switch (OMP_CLAUSE_CODE (t))
1331 case OMP_CLAUSE_DEFAULT:
1332 val = OMP_CLAUSE_DEFAULT_KIND (t);
1333 break;
1334 case OMP_CLAUSE_SCHEDULE:
1335 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1336 break;
1337 case OMP_CLAUSE_DEPEND:
1338 val = OMP_CLAUSE_DEPEND_KIND (t);
1339 break;
1340 case OMP_CLAUSE_MAP:
1341 val = OMP_CLAUSE_MAP_KIND (t);
1342 break;
1343 case OMP_CLAUSE_PROC_BIND:
1344 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1345 break;
1346 case OMP_CLAUSE_REDUCTION:
1347 val = OMP_CLAUSE_REDUCTION_CODE (t);
1348 break;
1349 default:
1350 val = 0;
1351 break;
1353 hstate.add_wide_int (val);
1354 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1355 visit (OMP_CLAUSE_OPERAND (t, i));
1356 visit (OMP_CLAUSE_CHAIN (t));
1359 return hstate.end ();
1361 #undef visit
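/* Illustrative sketch, not part of the original file: the two ways hash_scc
   below invokes hash_tree.  The first pass hashes each SCC member against
   the writer cache only; later propagation passes also pass a map of the
   current per-member hashes.  The helper name example_hash_scc_member is
   hypothetical.  */

static hashval_t
example_hash_scc_member (struct output_block *ob,
			 hash_map<tree, hashval_t> *map, tree t)
{
  /* MAP is NULL on the initial pass and non-NULL when propagating hash
     values across the SCC's internal edges.  */
  return hash_tree (ob->writer_cache, map, t);
}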
1364 /* Compare two SCC entries by their hash value for qsorting them. */
1366 int
1367 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1369 const scc_entry *p1 = (const scc_entry *) p1_;
1370 const scc_entry *p2 = (const scc_entry *) p2_;
1371 if (p1->hash < p2->hash)
1372 return -1;
1373 else if (p1->hash > p2->hash)
1374 return 1;
1375 return 0;
1378 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1379 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1381 hashval_t
1382 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1383 bool ref_p, bool this_ref_p)
1385 unsigned int last_classes = 0, iterations = 0;
1387 /* Compute hash values for the SCC members. */
1388 for (unsigned i = 0; i < size; ++i)
1389 sccstack[first+i].hash
1390 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1392 if (size == 1)
1393 return sccstack[first].hash;
1395 /* We aim to get unique hash for every tree within SCC and compute hash value
1396 of the whole SCC by combining all values together in a stable (entry-point
1397 independent) order. This guarantees that the same SCC regions within
1398 different translation units will get the same hash values and therefore
1399 will be merged at WPA time.
1401 Often the hashes are already unique. In that case we compute the SCC hash
1402 by combining individual hash values in an increasing order.
1404 If there are duplicates, we seek at least one tree with unique hash (and
1405 pick one with minimal hash and this property). Then we obtain a stable
1406 order by DFS walk starting from this unique tree and then use the index
1407 within this order to make individual hash values unique.
1409 If there is no tree with unique hash, we iteratively propagate the hash
1410 values across the internal edges of SCC. This usually quickly leads
1411 to unique hashes. Consider, for example, an SCC containing two pointers
1412 that are identical except for the types they point to and assume that
1413 these types are also part of the SCC. The propagation will add the
1414 points-to type information into their hash values. */
1417 /* Sort the SCC so we can easily check for uniqueness. */
1418 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1420 unsigned int classes = 1;
1421 int firstunique = -1;
1423 /* Find the tree with lowest unique hash (if it exists) and compute
1424 the number of equivalence classes. */
1425 if (sccstack[first].hash != sccstack[first+1].hash)
1426 firstunique = 0;
1427 for (unsigned i = 1; i < size; ++i)
1428 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1430 classes++;
1431 if (firstunique == -1
1432 && (i == size - 1
1433 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1434 firstunique = i;
1437 /* If we found a tree with unique hash, stop the iteration. */
1438 if (firstunique != -1
1439 /* Also terminate if we run out of iterations or if the number of
1440 equivalence classes is no longer increasing.
1441 For example a cyclic list of trees that are all equivalent will
1442 never have a unique entry point; we however do not build such SCCs
1443 in our IL. */
1444 || classes <= last_classes || iterations > 16)
1446 hashval_t scc_hash;
1448 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1449 starting from FIRSTUNIQUE to obtain a stable order. */
1450 if (classes != size && firstunique != -1)
1452 hash_map <tree, hashval_t> map(size*2);
1454 /* Store hash values into a map, so we can associate them with
1455 the reordered SCC. */
1456 for (unsigned i = 0; i < size; ++i)
1457 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1459 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1460 true);
1461 gcc_assert (again.sccstack.length () == size);
1463 memcpy (sccstack.address () + first,
1464 again.sccstack.address (),
1465 sizeof (scc_entry) * size);
1467 /* Update hash values of individual members by hashing in the
1468 index within the stable order. This ensures uniqueness.
1469 Also compute the SCC hash by mixing in all hash values in
1470 the stable order we obtained. */
1471 sccstack[first].hash = *map.get (sccstack[first].t);
1472 scc_hash = sccstack[first].hash;
1473 for (unsigned i = 1; i < size; ++i)
1475 sccstack[first+i].hash
1476 = iterative_hash_hashval_t (i,
1477 *map.get (sccstack[first+i].t));
1478 scc_hash
1479 = iterative_hash_hashval_t (scc_hash,
1480 sccstack[first+i].hash);
1483 /* If we got a unique hash value for each tree, then the sort already
1484 ensured an entry-point independent order. Only compute the final
1485 SCC hash.
1487 If we failed to find the unique entry point, we go by the same
1488 route. We will eventually introduce unwanted hash conflicts. */
1489 else
1491 scc_hash = sccstack[first].hash;
1492 for (unsigned i = 1; i < size; ++i)
1493 scc_hash
1494 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1496 /* We cannot 100% guarantee that the hashes will not conflict in a way
1497 that makes it impossible to find a unique hash. This however
1498 should be an extremely rare case. ICE for now so possible
1499 issues are found and evaluated. */
1500 gcc_checking_assert (classes == size);
1503 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1504 hash into the hash of each element. */
1505 for (unsigned i = 0; i < size; ++i)
1506 sccstack[first+i].hash
1507 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1508 return scc_hash;
1511 last_classes = classes;
1512 iterations++;
1514 /* We failed to identify the entry point; propagate hash values across
1515 the edges. */
1516 hash_map <tree, hashval_t> map(size*2);
1518 for (unsigned i = 0; i < size; ++i)
1519 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1521 for (unsigned i = 0; i < size; i++)
1522 sccstack[first+i].hash
1523 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1525 while (true);
1528 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1529 already in the streamer cache. Main routine called for
1530 each visit of EXPR. */
1532 void
1533 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1534 tree expr, bool ref_p, bool this_ref_p)
1536 /* Handle special cases. */
1537 if (expr == NULL_TREE)
1538 return;
1540 /* Do not DFS walk into indexable trees. */
1541 if (this_ref_p && tree_is_indexable (expr))
1542 return;
1544 /* Check if we already streamed EXPR. */
1545 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1546 return;
1548 worklist w;
1549 w.expr = expr;
1550 w.from_state = from_state;
1551 w.cstate = NULL;
1552 w.ref_p = ref_p;
1553 w.this_ref_p = this_ref_p;
1554 worklist_vec.safe_push (w);
1558 /* Emit the physical representation of tree node EXPR to output block OB.
1559 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1560 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1562 void
1563 lto_output_tree (struct output_block *ob, tree expr,
1564 bool ref_p, bool this_ref_p)
1566 unsigned ix;
1567 bool existed_p;
1569 if (expr == NULL_TREE)
1571 streamer_write_record_start (ob, LTO_null);
1572 return;
1575 if (this_ref_p && tree_is_indexable (expr))
1577 lto_output_tree_ref (ob, expr);
1578 return;
1581 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1582 if (existed_p)
1584 /* If a node has already been streamed out, make sure that
1585 we don't write it more than once. Otherwise, the reader
1586 will instantiate two different nodes for the same object. */
1587 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1588 streamer_write_uhwi (ob, ix);
1589 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1590 lto_tree_code_to_tag (TREE_CODE (expr)));
1591 lto_stats.num_pickle_refs_output++;
1593 else
1595 /* This is the first time we see EXPR, write all reachable
1596 trees to OB. */
1597 static bool in_dfs_walk;
1599 /* Protect against recursion, which would mean a disconnect between
1600 the tree edges we walk in the DFS walk and the edges
1601 we stream out. */
1602 gcc_assert (!in_dfs_walk);
1604 /* Start the DFS walk. */
1605 /* Save ob state ... */
1606 /* let's see ... */
1607 in_dfs_walk = true;
1608 DFS (ob, expr, ref_p, this_ref_p, false);
1609 in_dfs_walk = false;
1611 /* Finally append a reference to the tree we were writing.
1612 ??? If expr ended up as a singleton we could have
1613 inlined it here and avoid outputting a reference. */
1614 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1615 gcc_assert (existed_p);
1616 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1617 streamer_write_uhwi (ob, ix);
1618 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1619 lto_tree_code_to_tag (TREE_CODE (expr)));
1620 lto_stats.num_pickle_refs_output++;
1625 /* Output to OB a list of try/catch handlers starting with FIRST. */
1627 static void
1628 output_eh_try_list (struct output_block *ob, eh_catch first)
1630 eh_catch n;
1632 for (n = first; n; n = n->next_catch)
1634 streamer_write_record_start (ob, LTO_eh_catch);
1635 stream_write_tree (ob, n->type_list, true);
1636 stream_write_tree (ob, n->filter_list, true);
1637 stream_write_tree (ob, n->label, true);
1640 streamer_write_record_start (ob, LTO_null);
1644 /* Output EH region R to OB. R->index gives the slot the region occupies
1645 in its function's EH->REGION_ARRAY and is emitted so that EH region
1646 sharing can be detected on the reading side. */
1648 static void
1649 output_eh_region (struct output_block *ob, eh_region r)
1651 enum LTO_tags tag;
1653 if (r == NULL)
1655 streamer_write_record_start (ob, LTO_null);
1656 return;
1659 if (r->type == ERT_CLEANUP)
1660 tag = LTO_ert_cleanup;
1661 else if (r->type == ERT_TRY)
1662 tag = LTO_ert_try;
1663 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1664 tag = LTO_ert_allowed_exceptions;
1665 else if (r->type == ERT_MUST_NOT_THROW)
1666 tag = LTO_ert_must_not_throw;
1667 else
1668 gcc_unreachable ();
1670 streamer_write_record_start (ob, tag);
1671 streamer_write_hwi (ob, r->index);
1673 if (r->outer)
1674 streamer_write_hwi (ob, r->outer->index);
1675 else
1676 streamer_write_zero (ob);
1678 if (r->inner)
1679 streamer_write_hwi (ob, r->inner->index);
1680 else
1681 streamer_write_zero (ob);
1683 if (r->next_peer)
1684 streamer_write_hwi (ob, r->next_peer->index);
1685 else
1686 streamer_write_zero (ob);
1688 if (r->type == ERT_TRY)
1690 output_eh_try_list (ob, r->u.eh_try.first_catch);
1692 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1694 stream_write_tree (ob, r->u.allowed.type_list, true);
1695 stream_write_tree (ob, r->u.allowed.label, true);
1696 streamer_write_uhwi (ob, r->u.allowed.filter);
1698 else if (r->type == ERT_MUST_NOT_THROW)
1700 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1701 bitpack_d bp = bitpack_create (ob->main_stream);
1702 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1703 streamer_write_bitpack (&bp);
1706 if (r->landing_pads)
1707 streamer_write_hwi (ob, r->landing_pads->index);
1708 else
1709 streamer_write_zero (ob);
1713 /* Output landing pad LP to OB. */
1715 static void
1716 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1718 if (lp == NULL)
1720 streamer_write_record_start (ob, LTO_null);
1721 return;
1724 streamer_write_record_start (ob, LTO_eh_landing_pad);
1725 streamer_write_hwi (ob, lp->index);
1726 if (lp->next_lp)
1727 streamer_write_hwi (ob, lp->next_lp->index);
1728 else
1729 streamer_write_zero (ob);
1731 if (lp->region)
1732 streamer_write_hwi (ob, lp->region->index);
1733 else
1734 streamer_write_zero (ob);
1736 stream_write_tree (ob, lp->post_landing_pad, true);
1740 /* Output the existing eh_table to OB. */
1742 static void
1743 output_eh_regions (struct output_block *ob, struct function *fn)
1745 if (fn->eh && fn->eh->region_tree)
1747 unsigned i;
1748 eh_region eh;
1749 eh_landing_pad lp;
1750 tree ttype;
1752 streamer_write_record_start (ob, LTO_eh_table);
1754 /* Emit the index of the root of the EH region tree. */
1755 streamer_write_hwi (ob, fn->eh->region_tree->index);
1757 /* Emit all the EH regions in the region array. */
1758 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1759 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1760 output_eh_region (ob, eh);
1762 /* Emit all landing pads. */
1763 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1764 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1765 output_eh_lp (ob, lp);
1767 /* Emit all the runtime type data. */
1768 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1769 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1770 stream_write_tree (ob, ttype, true);
1772 /* Emit the table of action chains. */
1773 if (targetm.arm_eabi_unwinder)
1775 tree t;
1776 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1777 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1778 stream_write_tree (ob, t, true);
1780 else
1782 uchar c;
1783 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1784 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1785 streamer_write_char_stream (ob->main_stream, c);
1789 /* The LTO_null either terminates the record or indicates that there
1790 are no eh_records at all. */
1791 streamer_write_record_start (ob, LTO_null);
1795 /* Output all of the active ssa names to the ssa_names stream. */
1797 static void
1798 output_ssa_names (struct output_block *ob, struct function *fn)
1800 unsigned int i, len;
1802 len = vec_safe_length (SSANAMES (fn));
1803 streamer_write_uhwi (ob, len);
1805 for (i = 1; i < len; i++)
1807 tree ptr = (*SSANAMES (fn))[i];
1809 if (ptr == NULL_TREE
1810 || SSA_NAME_IN_FREE_LIST (ptr)
1811 || virtual_operand_p (ptr)
1812 /* Simply skip unreleased SSA names. */
1813 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1814 && (! SSA_NAME_DEF_STMT (ptr)
1815 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1816 continue;
1818 streamer_write_uhwi (ob, i);
1819 streamer_write_char_stream (ob->main_stream,
1820 SSA_NAME_IS_DEFAULT_DEF (ptr));
1821 if (SSA_NAME_VAR (ptr))
1822 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1823 else
1824 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1825 stream_write_tree (ob, TREE_TYPE (ptr), true);
1828 streamer_write_zero (ob);
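/* Illustrative sketch, not part of the original file: the predicate that
   decides whether output_ssa_names above streams an SSA name, pulled out
   for readability.  The helper name example_ssa_name_streamed_p is
   hypothetical.  */

static bool
example_ssa_name_streamed_p (tree ptr)
{
  return !(ptr == NULL_TREE
	   || SSA_NAME_IN_FREE_LIST (ptr)
	   || virtual_operand_p (ptr)
	   /* Unreleased SSA names without a defining statement in the CFG
	      are simply skipped.  */
	   || (!SSA_NAME_IS_DEFAULT_DEF (ptr)
	       && (!SSA_NAME_DEF_STMT (ptr)
		   || !gimple_bb (SSA_NAME_DEF_STMT (ptr)))));
}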
1833 /* Output the cfg. */
1835 static void
1836 output_cfg (struct output_block *ob, struct function *fn)
1838 struct lto_output_stream *tmp_stream = ob->main_stream;
1839 basic_block bb;
1841 ob->main_stream = ob->cfg_stream;
1843 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1844 profile_status_for_fn (fn));
1846 /* Output the number of the highest basic block. */
1847 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1849 FOR_ALL_BB_FN (bb, fn)
1851 edge_iterator ei;
1852 edge e;
1854 streamer_write_hwi (ob, bb->index);
1856 /* Output the successors and the edge flags. */
1857 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1858 FOR_EACH_EDGE (e, ei, bb->succs)
1860 streamer_write_uhwi (ob, e->dest->index);
1861 streamer_write_hwi (ob, e->probability);
1862 streamer_write_gcov_count (ob, e->count);
1863 streamer_write_uhwi (ob, e->flags);
1867 streamer_write_hwi (ob, -1);
1869 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1870 while (bb->next_bb)
1872 streamer_write_hwi (ob, bb->next_bb->index);
1873 bb = bb->next_bb;
1876 streamer_write_hwi (ob, -1);
1878 /* ??? The cfgloop interface is tied to cfun. */
1879 gcc_assert (cfun == fn);
1881 /* Output the number of loops. */
1882 streamer_write_uhwi (ob, number_of_loops (fn));
1884 /* Output each loop, skipping the tree root which has number zero. */
1885 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1887 struct loop *loop = get_loop (fn, i);
1889 /* Write the index of the loop header. That's enough to rebuild
1890 the loop tree on the reader side. Stream -1 for an unused
1891 loop entry. */
1892 if (!loop)
1894 streamer_write_hwi (ob, -1);
1895 continue;
1897 else
1898 streamer_write_hwi (ob, loop->header->index);
1900 /* Write everything copy_loop_info copies. */
1901 streamer_write_enum (ob->main_stream,
1902 loop_estimation, EST_LAST, loop->estimate_state);
1903 streamer_write_hwi (ob, loop->any_upper_bound);
1904 if (loop->any_upper_bound)
1905 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1906 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1907 if (loop->any_likely_upper_bound)
1908 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1909 streamer_write_hwi (ob, loop->any_estimate);
1910 if (loop->any_estimate)
1911 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1913 /* Write OMP SIMD related info. */
1914 streamer_write_hwi (ob, loop->safelen);
1915 streamer_write_hwi (ob, loop->dont_vectorize);
1916 streamer_write_hwi (ob, loop->force_vectorize);
1917 stream_write_tree (ob, loop->simduid, true);
1920 ob->main_stream = tmp_stream;
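/* Summary of the CFG stream emitted above: the profile status enum, the
   last_basic_block_for_fn value, then for every basic block its index
   followed by the successor count and per-edge records of destination
   index, probability, gcov count and flags; a -1 index terminates the
   block list.  Next comes the chain of block indices in next_bb order,
   again terminated by -1, and finally the loop tree: the number of loops
   and, for each non-root loop, the header block index (or -1 for an unused
   entry) plus the bounds, estimates and OMP SIMD fields that
   copy_loop_info copies.  */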
1924 /* Create the header in the file using OB. If the section type is for
1925 a function, FN must be the decl for that function. */
1927 void
1928 produce_asm (struct output_block *ob, tree fn)
1930 enum lto_section_type section_type = ob->section_type;
1931 struct lto_function_header header;
1932 char *section_name;
1934 if (section_type == LTO_section_function_body)
1936 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1937 section_name = lto_get_section_name (section_type, name, NULL);
1939 else
1940 section_name = lto_get_section_name (section_type, NULL, NULL);
1942 lto_begin_section (section_name, !flag_wpa);
1943 free (section_name);
1945 /* The entire header stream is computed here. */
1946 memset (&header, 0, sizeof (struct lto_function_header));
1948 /* Write the header. */
1949 header.major_version = LTO_major_version;
1950 header.minor_version = LTO_minor_version;
1952 if (section_type == LTO_section_function_body)
1953 header.cfg_size = ob->cfg_stream->total_size;
1954 header.main_size = ob->main_stream->total_size;
1955 header.string_size = ob->string_stream->total_size;
1956 lto_write_data (&header, sizeof header);
1958 /* Put all of the gimple and the string table out to the asm file as a
1959 block of text. */
1960 if (section_type == LTO_section_function_body)
1961 lto_write_stream (ob->cfg_stream);
1962 lto_write_stream (ob->main_stream);
1963 lto_write_stream (ob->string_stream);
1965 lto_end_section ();
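/* The section produced above therefore consists of an lto_function_header
   giving the sizes of the individual streams, followed by the raw bytes of
   the CFG stream (function body sections only), the main stream and the
   string stream, in that order.  */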
1969 /* Output the base body of struct function FN using output block OB. */
1971 static void
1972 output_struct_function_base (struct output_block *ob, struct function *fn)
1974 struct bitpack_d bp;
1975 unsigned i;
1976 tree t;
1978 /* Output the static chain and non-local goto save area. */
1979 stream_write_tree (ob, fn->static_chain_decl, true);
1980 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1982 /* Output all the local variables in the function. */
1983 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1984 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1985 stream_write_tree (ob, t, true);
1987 /* Output current IL state of the function. */
1988 streamer_write_uhwi (ob, fn->curr_properties);
1990 /* Write all the attributes for FN. */
1991 bp = bitpack_create (ob->main_stream);
1992 bp_pack_value (&bp, fn->is_thunk, 1);
1993 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1994 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1995 bp_pack_value (&bp, fn->returns_struct, 1);
1996 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1997 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1998 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1999 bp_pack_value (&bp, fn->after_inlining, 1);
2000 bp_pack_value (&bp, fn->stdarg, 1);
2001 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2002 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2003 bp_pack_value (&bp, fn->calls_alloca, 1);
2004 bp_pack_value (&bp, fn->calls_setjmp, 1);
2005 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2006 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2007 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2008 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2009 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2011 /* Output the function start and end loci. */
2012 stream_output_location (ob, &bp, fn->function_start_locus);
2013 stream_output_location (ob, &bp, fn->function_end_locus);
2015 streamer_write_bitpack (&bp);
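/* The reader is assumed to unpack the bitpack above in exactly the same
   order and with the same widths: the one-bit flags first, then the two
   8-bit va_list sizes and last_clique (sizeof (short) * 8 bits, 16 on
   typical hosts), with the function start and end locations streamed into
   the same bitpack via stream_output_location before it is flushed.  */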
2019 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2021 static void
2022 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2024 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2025 if (! BLOCK_SUBBLOCKS (root))
2026 leafs.safe_push (root);
2027 else
2028 collect_block_tree_leafs (root, leafs);
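/* For example (hypothetical block tree): if ROOT has subblocks A and B,
   where A has subblocks A1 and A2 and B has none, the walk above pushes
   A1, A2 and B into LEAFS.  The recursive call passes the subblock itself
   because the function strips one BLOCK_SUBBLOCKS level on entry.  */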
2031 /* Output the body of function NODE->DECL. */
2033 static void
2034 output_function (struct cgraph_node *node)
2036 tree function;
2037 struct function *fn;
2038 basic_block bb;
2039 struct output_block *ob;
2041 function = node->decl;
2042 fn = DECL_STRUCT_FUNCTION (function);
2043 ob = create_output_block (LTO_section_function_body);
2045 clear_line_info (ob);
2046 ob->symbol = node;
2048 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2050 /* Set current_function_decl and cfun. */
2051 push_cfun (fn);
2053 /* Make string 0 be a NULL string. */
2054 streamer_write_char_stream (ob->string_stream, 0);
2056 streamer_write_record_start (ob, LTO_function);
2058 /* Output decls for parameters and args. */
2059 stream_write_tree (ob, DECL_RESULT (function), true);
2060 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2062 /* Output DECL_INITIAL for the function, which contains the tree of
2063 lexical scopes. */
2064 stream_write_tree (ob, DECL_INITIAL (function), true);
2065 /* As we do not recurse into BLOCK_SUBBLOCKS but only into BLOCK_SUPERCONTEXT,
2066 collect the block tree leaves and stream those. */
2067 auto_vec<tree> block_tree_leafs;
2068 if (DECL_INITIAL (function))
2069 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2070 streamer_write_uhwi (ob, block_tree_leafs.length ());
2071 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2072 stream_write_tree (ob, block_tree_leafs[i], true);
2074 /* We also stream abstract functions, for which we stream only what is
2075 needed for debug info. */
2076 if (gimple_has_body_p (function))
2078 streamer_write_uhwi (ob, 1);
2079 output_struct_function_base (ob, fn);
2081 /* Output all the SSA names used in the function. */
2082 output_ssa_names (ob, fn);
2084 /* Output any exception handling regions. */
2085 output_eh_regions (ob, fn);
2088 /* We will renumber the statements. The code that does this uses
2089 the same ordering that we use for serializing them so we can use
2090 the same code on the other end and not have to write out the
2091 statement numbers. We do not assign UIDs to PHIs here because
2092 virtual PHIs get recomputed on the fly, which would make numbers
2093 inconsistent. */
2094 set_gimple_stmt_max_uid (cfun, 0);
2095 FOR_ALL_BB_FN (bb, cfun)
2097 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2098 gsi_next (&gsi))
2100 gphi *stmt = gsi.phi ();
2102 /* Virtual PHIs are not going to be streamed. */
2103 if (!virtual_operand_p (gimple_phi_result (stmt)))
2104 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2106 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2107 gsi_next (&gsi))
2109 gimple *stmt = gsi_stmt (gsi);
2110 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2113 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2114 virtual phis now. */
2115 FOR_ALL_BB_FN (bb, cfun)
2117 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2118 gsi_next (&gsi))
2120 gphi *stmt = gsi.phi ();
2121 if (virtual_operand_p (gimple_phi_result (stmt)))
2122 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2126 /* Output the code for the function. */
2127 FOR_ALL_BB_FN (bb, fn)
2128 output_bb (ob, bb, fn);
2130 /* The terminator for this function. */
2131 streamer_write_record_start (ob, LTO_null);
2133 output_cfg (ob, fn);
2135 pop_cfun ();
2137 else
2138 streamer_write_uhwi (ob, 0);
2140 /* Create a section to hold the pickled output of this function. */
2141 produce_asm (ob, function);
2143 destroy_output_block (ob);
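/* Overall layout of a function body section as written above: an
   LTO_function record, the DECL_RESULT tree, the DECL_ARGUMENTS chain,
   DECL_INITIAL with the streamed block tree leaves, then a uhwi flag that
   is 1 when a real body follows (struct function bits, SSA names, EH
   regions, the per-basic-block gimple terminated by LTO_null, and the CFG
   stream) or 0 for abstract functions streamed only for debug info.
   produce_asm then wraps all of this in a section header.  */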
2146 /* Output the initializer of variable NODE->DECL. */
2148 static void
2149 output_constructor (struct varpool_node *node)
2151 tree var = node->decl;
2152 struct output_block *ob;
2154 ob = create_output_block (LTO_section_function_body);
2156 clear_line_info (ob);
2157 ob->symbol = node;
2159 /* Make string 0 be a NULL string. */
2160 streamer_write_char_stream (ob->string_stream, 0);
2162 /* Output DECL_INITIAL for the variable, which holds the
2163 constructor (initializer). */
2164 stream_write_tree (ob, DECL_INITIAL (var), true);
2166 /* Create a section to hold the pickled output of this function. */
2167 produce_asm (ob, var);
2169 destroy_output_block (ob);
2173 /* Emit toplevel asms. */
2175 void
2176 lto_output_toplevel_asms (void)
2178 struct output_block *ob;
2179 struct asm_node *can;
2180 char *section_name;
2181 struct lto_simple_header_with_strings header;
2183 if (!symtab->first_asm_symbol ())
2184 return;
2186 ob = create_output_block (LTO_section_asm);
2188 /* Make string 0 be a NULL string. */
2189 streamer_write_char_stream (ob->string_stream, 0);
2191 for (can = symtab->first_asm_symbol (); can; can = can->next)
2193 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2194 streamer_write_hwi (ob, can->order);
2197 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2199 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2200 lto_begin_section (section_name, !flag_wpa);
2201 free (section_name);
2203 /* The entire header stream is computed here. */
2204 memset (&header, 0, sizeof (header));
2206 /* Write the header. */
2207 header.major_version = LTO_major_version;
2208 header.minor_version = LTO_minor_version;
2210 header.main_size = ob->main_stream->total_size;
2211 header.string_size = ob->string_stream->total_size;
2212 lto_write_data (&header, sizeof header);
2214 /* Put all of the gimple and the string table out to the asm file as a
2215 block of text. */
2216 lto_write_stream (ob->main_stream);
2217 lto_write_stream (ob->string_stream);
2219 lto_end_section ();
2221 destroy_output_block (ob);
2225 /* Copy the function body or variable constructor of NODE without deserializing. */
2227 static void
2228 copy_function_or_variable (struct symtab_node *node)
2230 tree function = node->decl;
2231 struct lto_file_decl_data *file_data = node->lto_file_data;
2232 const char *data;
2233 size_t len;
2234 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2235 char *section_name =
2236 lto_get_section_name (LTO_section_function_body, name, NULL);
2237 size_t i, j;
2238 struct lto_in_decl_state *in_state;
2239 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2241 lto_begin_section (section_name, false);
2242 free (section_name);
2244 /* We may have renamed the declaration, e.g., a static function. */
2245 name = lto_get_decl_name_mapping (file_data, name);
2247 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2248 name, &len);
2249 gcc_assert (data);
2251 /* Do a bit copy of the function body. */
2252 lto_write_raw_data (data, len);
2254 /* Copy decls. */
2255 in_state =
2256 lto_get_function_in_decl_state (node->lto_file_data, function);
2257 gcc_assert (in_state);
2258 out_state->compressed = in_state->compressed;
2260 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2262 size_t n = vec_safe_length (in_state->streams[i]);
2263 vec<tree, va_gc> *trees = in_state->streams[i];
2264 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2266 /* The out state must have the same indices as the in state.
2267 So just copy the vector. All the encoders in the in state
2268 must be empty when we reach here. */
2269 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2270 encoder->trees.reserve_exact (n);
2271 for (j = 0; j < n; j++)
2272 encoder->trees.safe_push ((*trees)[j]);
2275 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2276 data, len);
2277 lto_end_section ();
2280 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2282 static tree
2283 wrap_refs (tree *tp, int *ws, void *)
2285 tree t = *tp;
2286 if (handled_component_p (t)
2287 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2288 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2290 tree decl = TREE_OPERAND (t, 0);
2291 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2292 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2293 build1 (ADDR_EXPR, ptrtype, decl),
2294 build_int_cst (ptrtype, 0));
2295 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2296 *ws = 0;
2298 else if (TREE_CODE (t) == CONSTRUCTOR)
2300 else if (!EXPR_P (t))
2301 *ws = 0;
2302 return NULL_TREE;
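/* Conceptually, for a public variable "v" the walk above rewrites a
   reference such as v.field (a handled component whose base is the
   VAR_DECL) so that its base becomes MEM_REF <&v, 0> of the variable's
   type, preserving TREE_THIS_VOLATILE.  The names here are purely
   illustrative; the exact tree dump syntax may differ.  */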
2305 /* Main entry point from the pass manager. */
2307 void
2308 lto_output (void)
2310 struct lto_out_decl_state *decl_state;
2311 bitmap output = NULL;
2312 int i, n_nodes;
2313 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2315 if (flag_checking)
2316 output = lto_bitmap_alloc ();
2318 /* Initialize the streamer. */
2319 lto_streamer_init ();
2321 n_nodes = lto_symtab_encoder_size (encoder);
2322 /* Process only the functions with bodies. */
2323 for (i = 0; i < n_nodes; i++)
2325 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2326 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2328 if (lto_symtab_encoder_encode_body_p (encoder, node)
2329 && !node->alias
2330 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2332 if (flag_checking)
2334 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2335 bitmap_set_bit (output, DECL_UID (node->decl));
2337 decl_state = lto_new_out_decl_state ();
2338 lto_push_out_decl_state (decl_state);
2339 if (gimple_has_body_p (node->decl) || !flag_wpa
2340 /* Thunks have no body but they may be synthesized
2341 at WPA time. */
2342 || DECL_ARGUMENTS (node->decl))
2343 output_function (node);
2344 else
2345 copy_function_or_variable (node);
2346 gcc_assert (lto_get_out_decl_state () == decl_state);
2347 lto_pop_out_decl_state ();
2348 lto_record_function_out_decl_state (node->decl, decl_state);
2351 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2353 /* Wrap symbol references inside the ctor in a type
2354 preserving MEM_REF. */
2355 tree ctor = DECL_INITIAL (node->decl);
2356 if (ctor && !in_lto_p)
2357 walk_tree (&ctor, wrap_refs, NULL, NULL);
2358 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2359 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2360 && !node->alias)
2362 timevar_push (TV_IPA_LTO_CTORS_OUT);
2363 if (flag_checking)
2365 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2366 bitmap_set_bit (output, DECL_UID (node->decl));
2368 decl_state = lto_new_out_decl_state ();
2369 lto_push_out_decl_state (decl_state);
2370 if (DECL_INITIAL (node->decl) != error_mark_node
2371 || !flag_wpa)
2372 output_constructor (node);
2373 else
2374 copy_function_or_variable (node);
2375 gcc_assert (lto_get_out_decl_state () == decl_state);
2376 lto_pop_out_decl_state ();
2377 lto_record_function_out_decl_state (node->decl, decl_state);
2378 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2383 /* Emit the callgraph after emitting function bodies. This needs to
2384 be done now to make sure that all the statements in every function
2385 have been renumbered so that edges can be associated with call
2386 statements using the statement UIDs. */
2387 output_symtab ();
2389 output_offload_tables ();
2391 #if CHECKING_P
2392 lto_bitmap_free (output);
2393 #endif
2396 /* Write each node encoded by ENCODER to OB, as well as those reachable
2397 from it and required for correct representation of its semantics.
2398 Each node in ENCODER must be a global declaration or a type. A node
2399 is written only once, even if it appears multiple times in the
2400 vector. Certain transitively-reachable nodes, such as those
2401 representing expressions, may be duplicated, but such nodes
2402 must not appear in ENCODER itself. */
2404 static void
2405 write_global_stream (struct output_block *ob,
2406 struct lto_tree_ref_encoder *encoder)
2408 tree t;
2409 size_t index;
2410 const size_t size = lto_tree_ref_encoder_size (encoder);
2412 for (index = 0; index < size; index++)
2414 t = lto_tree_ref_encoder_get_tree (encoder, index);
2415 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2416 stream_write_tree (ob, t, false);
2421 /* Write a sequence of indices into the globals vector corresponding
2422 to the trees in ENCODER. These are used by the reader to map the
2423 indices used to refer to global entities within function bodies to
2424 their referents. */
2426 static void
2427 write_global_references (struct output_block *ob,
2428 struct lto_tree_ref_encoder *encoder)
2430 tree t;
2431 uint32_t index;
2432 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2434 /* Write size and slot indexes as 32-bit unsigned numbers. */
2435 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2436 data[0] = size;
2438 for (index = 0; index < size; index++)
2440 unsigned slot_num;
2442 t = lto_tree_ref_encoder_get_tree (encoder, index);
2443 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2444 gcc_assert (slot_num != (unsigned)-1);
2445 data[index + 1] = slot_num;
2448 lto_write_data (data, sizeof (int32_t) * (size + 1));
2449 free (data);
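/* The block written above is simply a 32-bit element count followed by
   that many 32-bit tree cache slot numbers; sizeof (int32_t) is used for
   the write but matches the uint32_t elements in size.  */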
2453 /* Write all the streams in an lto_out_decl_state STATE using
2454 output block OB and output stream OUT_STREAM. */
2456 void
2457 lto_output_decl_state_streams (struct output_block *ob,
2458 struct lto_out_decl_state *state)
2460 int i;
2462 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2463 write_global_stream (ob, &state->streams[i]);
2467 /* Write all the references in an lto_out_decl_state STATE using
2468 output block OB and output stream OUT_STREAM. */
2470 void
2471 lto_output_decl_state_refs (struct output_block *ob,
2472 struct lto_out_decl_state *state)
2474 unsigned i;
2475 unsigned ref;
2476 tree decl;
2478 /* Write a reference to the FUNCTION_DECL. If there is no function,
2479 write a reference to void_type_node instead. */
2480 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2481 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2482 gcc_assert (ref != (unsigned)-1);
2483 ref = ref * 2 + (state->compressed ? 1 : 0);
2484 lto_write_data (&ref, sizeof (uint32_t));
2486 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2487 write_global_references (ob, &state->streams[i]);
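/* The fn_decl reference above is encoded as slot * 2 + compressed, so a
   reader-side decode would presumably look like the following sketch
   (hypothetical code, not the actual lto-streamer-in.c implementation):

     uint32_t ref = <next 4 bytes of the section>;
     bool compressed = (ref & 1) != 0;
     unsigned fn_slot = ref >> 1;

   followed by LTO_N_DECL_STREAMS blocks, each a 32-bit count and that many
   32-bit slot indices (see write_global_references).  */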
2491 /* Return the written size of STATE. */
2493 static size_t
2494 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2496 int i;
2497 size_t size;
2499 size = sizeof (int32_t); /* fn_ref. */
2500 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2502 size += sizeof (int32_t); /* vector size. */
2503 size += (lto_tree_ref_encoder_size (&state->streams[i])
2504 * sizeof (int32_t));
2506 return size;
2510 /* Write symbol T to the symbol table section, using CACHE to look up its
2511 slot. SEEN specifies symbols written so far; ALIAS is true for aliases. */
2513 static void
2514 write_symbol (struct streamer_tree_cache_d *cache,
2515 tree t, hash_set<const char *> *seen, bool alias)
2517 const char *name;
2518 enum gcc_plugin_symbol_kind kind;
2519 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2520 unsigned slot_num;
2521 uint64_t size;
2522 const char *comdat;
2523 unsigned char c;
2525 /* None of the following kinds of symbols are needed in the
2526 symbol table. */
2527 if (!TREE_PUBLIC (t)
2528 || is_builtin_fn (t)
2529 || DECL_ABSTRACT_P (t)
2530 || (VAR_P (t) && DECL_HARD_REGISTER (t)))
2531 return;
2533 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2535 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2537 /* This behaves like assemble_name_raw in varasm.c, performing the
2538 same name manipulations that ASM_OUTPUT_LABELREF does. */
2539 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2541 if (seen->add (name))
2542 return;
2544 streamer_tree_cache_lookup (cache, t, &slot_num);
2545 gcc_assert (slot_num != (unsigned)-1);
2547 if (DECL_EXTERNAL (t))
2549 if (DECL_WEAK (t))
2550 kind = GCCPK_WEAKUNDEF;
2551 else
2552 kind = GCCPK_UNDEF;
2554 else
2556 if (DECL_WEAK (t))
2557 kind = GCCPK_WEAKDEF;
2558 else if (DECL_COMMON (t))
2559 kind = GCCPK_COMMON;
2560 else
2561 kind = GCCPK_DEF;
2563 /* When something is defined, it should have a node attached. */
2564 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2565 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2566 || (cgraph_node::get (t)
2567 && cgraph_node::get (t)->definition));
2570 /* Imitate what default_elf_asm_output_external does.
2571 When a symbol is external, we need to output it with DEFAULT visibility
2572 when compiling with -fvisibility=default, but with HIDDEN visibility
2573 when the symbol has attribute (visibility("hidden")) specified.
2574 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2575 right. */
2577 if (DECL_EXTERNAL (t)
2578 && !targetm.binds_local_p (t))
2579 visibility = GCCPV_DEFAULT;
2580 else
2581 switch (DECL_VISIBILITY (t))
2583 case VISIBILITY_DEFAULT:
2584 visibility = GCCPV_DEFAULT;
2585 break;
2586 case VISIBILITY_PROTECTED:
2587 visibility = GCCPV_PROTECTED;
2588 break;
2589 case VISIBILITY_HIDDEN:
2590 visibility = GCCPV_HIDDEN;
2591 break;
2592 case VISIBILITY_INTERNAL:
2593 visibility = GCCPV_INTERNAL;
2594 break;
2597 if (kind == GCCPK_COMMON
2598 && DECL_SIZE_UNIT (t)
2599 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2600 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2601 else
2602 size = 0;
2604 if (DECL_ONE_ONLY (t))
2605 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2606 else
2607 comdat = "";
2609 lto_write_data (name, strlen (name) + 1);
2610 lto_write_data (comdat, strlen (comdat) + 1);
2611 c = (unsigned char) kind;
2612 lto_write_data (&c, 1);
2613 c = (unsigned char) visibility;
2614 lto_write_data (&c, 1);
2615 lto_write_data (&size, 8);
2616 lto_write_data (&slot_num, 4);
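/* Each symbol table entry written above is, in order: the NUL-terminated
   assembler name, the NUL-terminated comdat group (empty string if none),
   one byte each for the plugin symbol kind and visibility, an 8-byte size
   and a 4-byte tree cache slot number.  */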
2619 /* Return true if NODE should appear in the plugin symbol table. */
2621 bool
2622 output_symbol_p (symtab_node *node)
2624 struct cgraph_node *cnode;
2625 if (!node->real_symbol_p ())
2626 return false;
2627 /* We keep external functions in symtab for the sake of inlining
2628 and devirtualization. We do not want to see them in the symbol table as
2629 references unless they are really used. */
2630 cnode = dyn_cast <cgraph_node *> (node);
2631 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2632 && cnode->callers)
2633 return true;
2635 /* Ignore all references from the initializers of external vars - they are
2636 not really part of the compilation unit until they are used by folding.
2637 Some symbols, like references to external construction vtables, cannot be
2638 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2639 if (!node->definition || DECL_EXTERNAL (node->decl))
2641 int i;
2642 struct ipa_ref *ref;
2643 for (i = 0; node->iterate_referring (i, ref); i++)
2645 if (ref->use == IPA_REF_ALIAS)
2646 continue;
2647 if (is_a <cgraph_node *> (ref->referring))
2648 return true;
2649 if (!DECL_EXTERNAL (ref->referring->decl))
2650 return true;
2652 return false;
2654 return true;
2658 /* Write an IL symbol table to OB. The symbols written are those recorded
2659 in the symtab node encoder of OB's decl state. */
2661 static void
2662 produce_symtab (struct output_block *ob)
2664 struct streamer_tree_cache_d *cache = ob->writer_cache;
2665 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2666 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2667 lto_symtab_encoder_iterator lsei;
2669 lto_begin_section (section_name, false);
2670 free (section_name);
2672 hash_set<const char *> seen;
2674 /* Write the symbol table.
2675 First write everything defined and then all declarations.
2676 This is necessary to handle cases where we have duplicated symbols. */
2677 for (lsei = lsei_start (encoder);
2678 !lsei_end_p (lsei); lsei_next (&lsei))
2680 symtab_node *node = lsei_node (lsei);
2682 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2683 continue;
2684 write_symbol (cache, node->decl, &seen, false);
2686 for (lsei = lsei_start (encoder);
2687 !lsei_end_p (lsei); lsei_next (&lsei))
2689 symtab_node *node = lsei_node (lsei);
2691 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2692 continue;
2693 write_symbol (cache, node->decl, &seen, false);
2696 lto_end_section ();
2700 /* Init the streamer_mode_table for output, where we collect info on what
2701 machine_mode values have been streamed. */
2702 void
2703 lto_output_init_mode_table (void)
2705 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2709 /* Write the mode table. */
2710 static void
2711 lto_write_mode_table (void)
2713 struct output_block *ob;
2714 ob = create_output_block (LTO_section_mode_table);
2715 bitpack_d bp = bitpack_create (ob->main_stream);
2717 /* Ensure that for GET_MODE_INNER (m) != m the inner mode
2718 is also marked. */
2719 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2720 if (streamer_mode_table[i])
2722 machine_mode m = (machine_mode) i;
2723 machine_mode inner_m = GET_MODE_INNER (m);
2724 if (inner_m != m)
2725 streamer_mode_table[(int) inner_m] = 1;
2727 /* First stream modes that have GET_MODE_INNER (m) == m,
2728 so that we can refer to them afterwards. */
2729 for (int pass = 0; pass < 2; pass++)
2730 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2731 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2733 machine_mode m = (machine_mode) i;
2734 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2735 continue;
2736 bp_pack_value (&bp, m, 8);
2737 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2738 bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2739 bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2740 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2741 bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2742 switch (GET_MODE_CLASS (m))
2744 case MODE_FRACT:
2745 case MODE_UFRACT:
2746 case MODE_ACCUM:
2747 case MODE_UACCUM:
2748 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2749 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2750 break;
2751 case MODE_FLOAT:
2752 case MODE_DECIMAL_FLOAT:
2753 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2754 break;
2755 default:
2756 break;
2758 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2760 bp_pack_value (&bp, VOIDmode, 8);
2762 streamer_write_bitpack (&bp);
2764 char *section_name
2765 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2766 lto_begin_section (section_name, !flag_wpa);
2767 free (section_name);
2769 /* The entire header stream is computed here. */
2770 struct lto_simple_header_with_strings header;
2771 memset (&header, 0, sizeof (header));
2773 /* Write the header. */
2774 header.major_version = LTO_major_version;
2775 header.minor_version = LTO_minor_version;
2777 header.main_size = ob->main_stream->total_size;
2778 header.string_size = ob->string_stream->total_size;
2779 lto_write_data (&header, sizeof header);
2781 /* Put all of the gimple and the string table out to the asm file as a
2782 block of text. */
2783 lto_write_stream (ob->main_stream);
2784 lto_write_stream (ob->string_stream);
2786 lto_end_section ();
2787 destroy_output_block (ob);
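/* Per-mode record layout used above: an 8-bit mode number, the mode class
   enum, 8-bit size, 16-bit precision, 8-bit inner mode, 8-bit nunits, then
   IBIT/FBIT for the fixed-point classes or the real format name for the
   float classes, and finally the mode name string; a VOIDmode entry
   terminates the table.  Modes with GET_MODE_INNER (m) == m are streamed
   in the first pass so that later records can refer to their inner
   mode.  */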
2791 /* This pass is run after all of the functions are serialized and all
2792 of the IPA passes have written their serialized forms. This pass
2793 causes the vector of all of the global decls and types used from
2794 this file to be written into a section that can then be read back in
2795 to recover them on the other side. */
2797 void
2798 produce_asm_for_decls (void)
2800 struct lto_out_decl_state *out_state;
2801 struct lto_out_decl_state *fn_out_state;
2802 struct lto_decl_header header;
2803 char *section_name;
2804 struct output_block *ob;
2805 unsigned idx, num_fns;
2806 size_t decl_state_size;
2807 int32_t num_decl_states;
2809 ob = create_output_block (LTO_section_decls);
2811 memset (&header, 0, sizeof (struct lto_decl_header));
2813 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2814 lto_begin_section (section_name, !flag_wpa);
2815 free (section_name);
2817 /* Make string 0 be a NULL string. */
2818 streamer_write_char_stream (ob->string_stream, 0);
2820 gcc_assert (!alias_pairs);
2822 /* Get rid of the global decl state hash tables to save some memory. */
2823 out_state = lto_get_out_decl_state ();
2824 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2825 if (out_state->streams[i].tree_hash_table)
2827 delete out_state->streams[i].tree_hash_table;
2828 out_state->streams[i].tree_hash_table = NULL;
2831 /* Write the global symbols. */
2832 lto_output_decl_state_streams (ob, out_state);
2833 num_fns = lto_function_decl_states.length ();
2834 for (idx = 0; idx < num_fns; idx++)
2836 fn_out_state =
2837 lto_function_decl_states[idx];
2838 lto_output_decl_state_streams (ob, fn_out_state);
2841 header.major_version = LTO_major_version;
2842 header.minor_version = LTO_minor_version;
2844 /* Currently not used. This field would allow us to preallocate
2845 the globals vector, so that it need not be resized as it is extended. */
2846 header.num_nodes = -1;
2848 /* Compute the total size of all decl out states. */
2849 decl_state_size = sizeof (int32_t);
2850 decl_state_size += lto_out_decl_state_written_size (out_state);
2851 for (idx = 0; idx < num_fns; idx++)
2853 fn_out_state =
2854 lto_function_decl_states[idx];
2855 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2857 header.decl_state_size = decl_state_size;
2859 header.main_size = ob->main_stream->total_size;
2860 header.string_size = ob->string_stream->total_size;
2862 lto_write_data (&header, sizeof header);
2864 /* Write the main out-decl state, followed by out-decl states of
2865 functions. */
2866 num_decl_states = num_fns + 1;
2867 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2868 lto_output_decl_state_refs (ob, out_state);
2869 for (idx = 0; idx < num_fns; idx++)
2871 fn_out_state = lto_function_decl_states[idx];
2872 lto_output_decl_state_refs (ob, fn_out_state);
2875 lto_write_stream (ob->main_stream);
2876 lto_write_stream (ob->string_stream);
2878 lto_end_section ();
2880 /* Write the symbol table. It is used by the linker to determine
2881 dependencies, and thus we can skip it for WPA. */
2882 if (!flag_wpa)
2883 produce_symtab (ob);
2885 /* Write command line opts. */
2886 lto_write_options ();
2888 /* Deallocate memory and clean up. */
2889 for (idx = 0; idx < num_fns; idx++)
2891 fn_out_state =
2892 lto_function_decl_states[idx];
2893 lto_delete_out_decl_state (fn_out_state);
2895 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2896 lto_function_decl_states.release ();
2897 destroy_output_block (ob);
2898 if (lto_stream_offload_p)
2899 lto_write_mode_table ();