gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
45 static void lto_write_tree (struct output_block*, tree, bool);
47 /* Clear the line info stored in DATA_IN. */
49 static void
50 clear_line_info (struct output_block *ob)
52 ob->current_file = NULL;
53 ob->current_line = 0;
54 ob->current_col = 0;
55 ob->current_sysp = false;
59 /* Create the output block and return it. SECTION_TYPE is
60    LTO_section_function_body or LTO_section_static_initializer. */
62 struct output_block *
63 create_output_block (enum lto_section_type section_type)
65 struct output_block *ob = XCNEW (struct output_block);
67 ob->section_type = section_type;
68 ob->decl_state = lto_get_out_decl_state ();
69 ob->main_stream = XCNEW (struct lto_output_stream);
70 ob->string_stream = XCNEW (struct lto_output_stream);
71 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
73 if (section_type == LTO_section_function_body)
74 ob->cfg_stream = XCNEW (struct lto_output_stream);
76 clear_line_info (ob);
78 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
79 gcc_obstack_init (&ob->obstack);
81 return ob;
85 /* Destroy the output block OB. */
87 void
88 destroy_output_block (struct output_block *ob)
90 enum lto_section_type section_type = ob->section_type;
92 delete ob->string_hash_table;
93 ob->string_hash_table = NULL;
95 free (ob->main_stream);
96 free (ob->string_stream);
97 if (section_type == LTO_section_function_body)
98 free (ob->cfg_stream);
100 streamer_tree_cache_delete (ob->writer_cache);
101 obstack_free (&ob->obstack, NULL);
103 free (ob);
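/* For illustration only, a rough sketch of the usual life cycle of an
   output block, modelled on output_function further below (simplified,
   details omitted):

     struct output_block *ob = create_output_block (LTO_section_function_body);
     ob->symbol = node;
     streamer_write_char_stream (ob->string_stream, 0);
     ... write records into ob->main_stream and ob->cfg_stream ...
     produce_asm (ob, node->decl);
     destroy_output_block (ob);  */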
107 /* Look up NODE in the type table and write the index for it to OB. */
109 static void
110 output_type_ref (struct output_block *ob, tree node)
112 streamer_write_record_start (ob, LTO_type_ref);
113 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
117 /* Return true if tree node T is written to various tables. For these
118    nodes, we sometimes want to write their physical representation
119 (via lto_output_tree), and sometimes we need to emit an index
120 reference into a table (via lto_output_tree_ref). */
122 static bool
123 tree_is_indexable (tree t)
125 /* Parameters and return values of functions of variably modified types
126 must go to global stream, because they may be used in the type
127 definition. */
128 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
129 && DECL_CONTEXT (t))
130 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
131 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
132 else if (TREE_CODE (t) == IMPORTED_DECL)
133 return false;
134 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
135 || TREE_CODE (t) == TYPE_DECL
136 || TREE_CODE (t) == CONST_DECL
137 || TREE_CODE (t) == NAMELIST_DECL)
138 && decl_function_context (t))
139 return false;
140 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
141 return false;
142 /* Variably modified types need to be streamed alongside function
143 bodies because they can refer to local entities. Together with
144 them we have to localize their members as well.
145 ??? In theory that includes non-FIELD_DECLs as well. */
146 else if (TYPE_P (t)
147 && variably_modified_type_p (t, NULL_TREE))
148 return false;
149 else if (TREE_CODE (t) == FIELD_DECL
150 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
151 return false;
152 else
153 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
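/* A few concrete consequences of the rules above, for illustration:
   file-scope variables and static locals remain indexable and are
   emitted as references into the global decl tables; automatic locals,
   local TYPE_DECLs/CONST_DECLs and DEBUG_EXPR_DECLs are not indexable
   and are streamed together with the function body; variably modified
   types and their FIELD_DECLs likewise stay with the body that uses
   them.  */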
157 /* Output info about the new location LOC into bitpack BP.
158    After outputting the bitpack, lto_output_location_data has
159    to be called to output the actual data. */
161 void
162 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
163 location_t loc)
165 expanded_location xloc;
167 loc = LOCATION_LOCUS (loc);
168 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
169 loc < RESERVED_LOCATION_COUNT
170 ? loc : RESERVED_LOCATION_COUNT);
171 if (loc < RESERVED_LOCATION_COUNT)
172 return;
174 xloc = expand_location (loc);
176 bp_pack_value (bp, ob->current_file != xloc.file, 1);
177 bp_pack_value (bp, ob->current_line != xloc.line, 1);
178 bp_pack_value (bp, ob->current_col != xloc.column, 1);
180 if (ob->current_file != xloc.file)
182 bp_pack_string (ob, bp, xloc.file, true);
183 bp_pack_value (bp, xloc.sysp, 1);
185 ob->current_file = xloc.file;
186 ob->current_sysp = xloc.sysp;
188 if (ob->current_line != xloc.line)
189 bp_pack_var_len_unsigned (bp, xloc.line);
190 ob->current_line = xloc.line;
192 if (ob->current_col != xloc.column)
193 bp_pack_var_len_unsigned (bp, xloc.column);
194 ob->current_col = xloc.column;
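/* For illustration, the encoding above is a delta against the state
   cached in OB: a location with the same file, line and column as the
   previously streamed one costs only the reserved-range marker plus
   three zero "changed" bits, while a location in a new file additionally
   streams the file name, its system-header flag and, if they changed,
   the line and column.  Reserved locations such as UNKNOWN_LOCATION are
   encoded entirely in the initial range marker.  */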
198 /* If EXPR is an indexable tree node, output a reference to it to
199 output block OB. Otherwise, output the physical representation of
200 EXPR to OB. */
202 static void
203 lto_output_tree_ref (struct output_block *ob, tree expr)
205 enum tree_code code;
207 if (TYPE_P (expr))
209 output_type_ref (ob, expr);
210 return;
213 code = TREE_CODE (expr);
214 switch (code)
216 case SSA_NAME:
217 streamer_write_record_start (ob, LTO_ssa_name_ref);
218 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
219 break;
221 case FIELD_DECL:
222 streamer_write_record_start (ob, LTO_field_decl_ref);
223 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
224 break;
226 case FUNCTION_DECL:
227 streamer_write_record_start (ob, LTO_function_decl_ref);
228 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
229 break;
231 case VAR_DECL:
232 case DEBUG_EXPR_DECL:
233 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
234 case PARM_DECL:
235 streamer_write_record_start (ob, LTO_global_decl_ref);
236 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
237 break;
239 case CONST_DECL:
240 streamer_write_record_start (ob, LTO_const_decl_ref);
241 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
244 case IMPORTED_DECL:
245 gcc_assert (decl_function_context (expr) == NULL);
246 streamer_write_record_start (ob, LTO_imported_decl_ref);
247 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
248 break;
250 case TYPE_DECL:
251 streamer_write_record_start (ob, LTO_type_decl_ref);
252 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
253 break;
255 case NAMELIST_DECL:
256 streamer_write_record_start (ob, LTO_namelist_decl_ref);
257 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
258 break;
260 case NAMESPACE_DECL:
261 streamer_write_record_start (ob, LTO_namespace_decl_ref);
262 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
263 break;
265 case LABEL_DECL:
266 streamer_write_record_start (ob, LTO_label_decl_ref);
267 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
268 break;
270 case RESULT_DECL:
271 streamer_write_record_start (ob, LTO_result_decl_ref);
272 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
273 break;
275 case TRANSLATION_UNIT_DECL:
276 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
277 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278 break;
280 default:
281 /* No other node is indexable, so it should have been handled by
282 lto_output_tree. */
283 gcc_unreachable ();
288 /* Return true if EXPR is a tree node that can be written to disk. */
290 static inline bool
291 lto_is_streamable (tree expr)
293 enum tree_code code = TREE_CODE (expr);
295 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
296 name version in lto_output_tree_ref (see output_ssa_names). */
297 return !is_lang_specific (expr)
298 && code != SSA_NAME
299 && code != CALL_EXPR
300 && code != LANG_TYPE
301 && code != MODIFY_EXPR
302 && code != INIT_EXPR
303 && code != TARGET_EXPR
304 && code != BIND_EXPR
305 && code != WITH_CLEANUP_EXPR
306 && code != STATEMENT_LIST
307 && (code == CASE_LABEL_EXPR
308 || code == DECL_EXPR
309 || TREE_CODE_CLASS (code) != tcc_statement);
313 /* For symbol EXPR, look up and return the value we want to stream as its DECL_INITIAL. */
315 static tree
316 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
318 gcc_checking_assert (DECL_P (expr)
319 && TREE_CODE (expr) != FUNCTION_DECL
320 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
322 /* Handle DECL_INITIAL for symbols. */
323 tree initial = DECL_INITIAL (expr);
324 if (TREE_CODE (expr) == VAR_DECL
325 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
326 && !DECL_IN_CONSTANT_POOL (expr)
327 && initial)
329 varpool_node *vnode;
330 /* Extra section needs about 30 bytes; do not produce it for simple
331 scalar values. */
332 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
333 || !(vnode = varpool_node::get (expr))
334 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
335 initial = error_mark_node;
338 return initial;
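/* For illustration: with the rules above a simple scalar initializer,
   say the 42 in "static int x = 42;", is kept and streamed inline with
   the decl (provided the encoder wants the initializer at all), whereas
   a CONSTRUCTOR initializer is replaced by error_mark_node here and left
   to the separately emitted initializer data for the variable.  */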
342 /* Write a physical representation of tree node EXPR to output block
343    OB. If REF_P is true, the leaves of EXPR are emitted as references
344    via lto_output_tree_ref. */
347 static void
348 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
350 /* Pack all the non-pointer fields in EXPR into a bitpack and write
351 the resulting bitpack. */
352 streamer_write_tree_bitfields (ob, expr);
354 /* Write all the pointer fields in EXPR. */
355 streamer_write_tree_body (ob, expr, ref_p);
357 /* Write any LTO-specific data to OB. */
358 if (DECL_P (expr)
359 && TREE_CODE (expr) != FUNCTION_DECL
360 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
362 /* Handle DECL_INITIAL for symbols. */
363 tree initial = get_symbol_initial_value
364 (ob->decl_state->symtab_node_encoder, expr);
365 stream_write_tree (ob, initial, ref_p);
369 /* Write a physical representation of tree node EXPR to output block
370    OB. If REF_P is true, the leaves of EXPR are emitted as references
371    via lto_output_tree_ref. */
374 static void
375 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
377 if (!lto_is_streamable (expr))
378 internal_error ("tree code %qs is not supported in LTO streams",
379 get_tree_code_name (TREE_CODE (expr)));
381 /* Write the header, containing everything needed to materialize
382 EXPR on the reading side. */
383 streamer_write_tree_header (ob, expr);
385 lto_write_tree_1 (ob, expr, ref_p);
387 /* Mark the end of EXPR. */
388 streamer_write_zero (ob);
391 /* Emit the physical representation of tree node EXPR to output block OB.
392 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
393 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
395 static void
396 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
397 bool ref_p, bool this_ref_p)
399 unsigned ix;
401 gcc_checking_assert (expr != NULL_TREE
402 && !(this_ref_p && tree_is_indexable (expr)));
404 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
405 expr, hash, &ix);
406 gcc_assert (!exists_p);
407 if (streamer_handle_as_builtin_p (expr))
409 /* MD and NORMAL builtins do not need to be written out
410 completely as they are always instantiated by the
411 compiler on startup. The only builtins that need to
412 be written out are BUILT_IN_FRONTEND. For all other
413 builtins, we simply write the class and code. */
414 streamer_write_builtin (ob, expr);
416 else if (TREE_CODE (expr) == INTEGER_CST
417 && !TREE_OVERFLOW (expr))
419 /* Shared INTEGER_CST nodes are special because they need their
420 original type to be materialized by the reader (to implement
421 TYPE_CACHED_VALUES). */
422 streamer_write_integer_cst (ob, expr, ref_p);
424 else
426 /* This is the first time we see EXPR, write its fields
427 to OB. */
428 lto_write_tree (ob, expr, ref_p);
432 class DFS
434 public:
435 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
436 bool single_p);
437 ~DFS ();
439 struct scc_entry
441 tree t;
442 hashval_t hash;
444 vec<scc_entry> sccstack;
446 private:
447 struct sccs
449 unsigned int dfsnum;
450 unsigned int low;
452 struct worklist
454 tree expr;
455 sccs *from_state;
456 sccs *cstate;
457 bool ref_p;
458 bool this_ref_p;
461 static int scc_entry_compare (const void *, const void *);
463 void DFS_write_tree_body (struct output_block *ob,
464 tree expr, sccs *expr_state, bool ref_p);
466 void DFS_write_tree (struct output_block *ob, sccs *from_state,
467 tree expr, bool ref_p, bool this_ref_p);
469 hashval_t
470 hash_scc (struct output_block *ob, unsigned first, unsigned size,
471 bool ref_p, bool this_ref_p);
473 hash_map<tree, sccs *> sccstate;
474 vec<worklist> worklist_vec;
475 struct obstack sccstate_obstack;
478 /* Emit the physical representation of tree node EXPR to output block OB,
479 using depth-first search on the subgraph. If THIS_REF_P is true, the
480 leaves of EXPR are emitted as references via lto_output_tree_ref.
481 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
482 this is for a rewalk of a single leaf SCC. */
484 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
485 bool single_p)
487 unsigned int next_dfs_num = 1;
488 sccstack.create (0);
489 gcc_obstack_init (&sccstate_obstack);
490 worklist_vec = vNULL;
491 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
492 while (!worklist_vec.is_empty ())
494 worklist &w = worklist_vec.last ();
495 expr = w.expr;
496 sccs *from_state = w.from_state;
497 sccs *cstate = w.cstate;
498 ref_p = w.ref_p;
499 this_ref_p = w.this_ref_p;
500 if (cstate == NULL)
502 sccs **slot = &sccstate.get_or_insert (expr);
503 cstate = *slot;
504 if (cstate)
506 gcc_checking_assert (from_state);
507 if (cstate->dfsnum < from_state->dfsnum)
508 from_state->low = MIN (cstate->dfsnum, from_state->low);
509 worklist_vec.pop ();
510 continue;
513 scc_entry e = { expr, 0 };
514 /* Not yet visited. DFS recurse and push it onto the stack. */
515 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
516 sccstack.safe_push (e);
517 cstate->dfsnum = next_dfs_num++;
518 cstate->low = cstate->dfsnum;
519 w.cstate = cstate;
521 if (streamer_handle_as_builtin_p (expr))
523 else if (TREE_CODE (expr) == INTEGER_CST
524 && !TREE_OVERFLOW (expr))
525 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
526 else
528 DFS_write_tree_body (ob, expr, cstate, ref_p);
530 /* Walk any LTO-specific edges. */
531 if (DECL_P (expr)
532 && TREE_CODE (expr) != FUNCTION_DECL
533 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
535 /* Handle DECL_INITIAL for symbols. */
536 tree initial
537 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
538 expr);
539 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
542 continue;
545 /* See if we found an SCC. */
546 if (cstate->low == cstate->dfsnum)
548 unsigned first, size;
549 tree x;
551 /* If we are re-walking a single leaf SCC just pop it,
552    and let the earlier worklist item access the sccstack. */
553 if (single_p)
555 worklist_vec.pop ();
556 continue;
559 /* Pop the SCC and compute its size. */
560 first = sccstack.length ();
563 x = sccstack[--first].t;
565 while (x != expr);
566 size = sccstack.length () - first;
568 /* No need to compute hashes for LTRANS units, we don't perform
569 any merging there. */
570 hashval_t scc_hash = 0;
571 unsigned scc_entry_len = 0;
572 if (!flag_wpa)
574 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
576    /* Put the entries with the fewest collisions first. */
577 unsigned entry_start = 0;
578 scc_entry_len = size + 1;
579 for (unsigned i = 0; i < size;)
581 unsigned from = i;
582 for (i = i + 1; i < size
583 && (sccstack[first + i].hash
584 == sccstack[first + from].hash); ++i)
586 if (i - from < scc_entry_len)
588 scc_entry_len = i - from;
589 entry_start = from;
592 for (unsigned i = 0; i < scc_entry_len; ++i)
593 std::swap (sccstack[first + i],
594 sccstack[first + entry_start + i]);
596 /* We already sorted SCC deterministically in hash_scc. */
598 /* Check that we have only one SCC.
599    Naturally we may have conflicts if the hash function is not
600    strong enough. Let's see how far this gets. */
601 gcc_checking_assert (scc_entry_len == 1);
604 /* Write LTO_tree_scc. */
605 streamer_write_record_start (ob, LTO_tree_scc);
606 streamer_write_uhwi (ob, size);
607 streamer_write_uhwi (ob, scc_hash);
609 /* Write size-1 SCCs without wrapping them inside SCC bundles.
610 All INTEGER_CSTs need to be handled this way as we need
611 their type to materialize them. Also builtins are handled
612 this way.
613 ??? We still wrap these in LTO_tree_scc so at the
614 input side we can properly identify the tree we want
615    to ultimately return. */
616 if (size == 1)
617 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
618 else
620 /* Write the size of the SCC entry candidates. */
621 streamer_write_uhwi (ob, scc_entry_len);
623 /* Write all headers and populate the streamer cache. */
624 for (unsigned i = 0; i < size; ++i)
626 hashval_t hash = sccstack[first+i].hash;
627 tree t = sccstack[first+i].t;
628 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
629 t, hash, NULL);
630 gcc_assert (!exists_p);
632 if (!lto_is_streamable (t))
633 internal_error ("tree code %qs is not supported "
634 "in LTO streams",
635 get_tree_code_name (TREE_CODE (t)));
637 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
639 /* Write the header, containing everything needed to
640 materialize EXPR on the reading side. */
641 streamer_write_tree_header (ob, t);
644 /* Write the bitpacks and tree references. */
645 for (unsigned i = 0; i < size; ++i)
647 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
649 /* Mark the end of the tree. */
650 streamer_write_zero (ob);
654 /* Finally truncate the vector. */
655 sccstack.truncate (first);
657 if (from_state)
658 from_state->low = MIN (from_state->low, cstate->low);
659 worklist_vec.pop ();
660 continue;
663 gcc_checking_assert (from_state);
664 from_state->low = MIN (from_state->low, cstate->low);
665 if (cstate->dfsnum < from_state->dfsnum)
666 from_state->low = MIN (cstate->dfsnum, from_state->low);
667 worklist_vec.pop ();
669 worklist_vec.release ();
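/* For illustration, the record emitted above for one SCC has roughly
   this shape (simplified):

     LTO_tree_scc
     uhwi size             number of trees in the SCC
     uhwi scc_hash         0 when flag_wpa (writing LTRANS units)
     if size == 1:
       the single tree, via lto_output_tree_1
     else:
       uhwi scc_entry_len   number of entry candidates
       size tree headers    streamer_write_tree_header for each member
       size tree bodies     bitpack and pointer fields, each 0-terminated  */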
672 DFS::~DFS ()
674 sccstack.release ();
675 obstack_free (&sccstate_obstack, NULL);
678 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
679 DFS recurse for all tree edges originating from it. */
681 void
682 DFS::DFS_write_tree_body (struct output_block *ob,
683 tree expr, sccs *expr_state, bool ref_p)
685 #define DFS_follow_tree_edge(DEST) \
686 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
688 enum tree_code code;
690 code = TREE_CODE (expr);
692 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
694 if (TREE_CODE (expr) != IDENTIFIER_NODE)
695 DFS_follow_tree_edge (TREE_TYPE (expr));
698 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
700 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
701 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
704 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
706 DFS_follow_tree_edge (TREE_REALPART (expr));
707 DFS_follow_tree_edge (TREE_IMAGPART (expr));
710 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
712 /* Drop names that were created for anonymous entities. */
713 if (DECL_NAME (expr)
714 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
715 && anon_aggrname_p (DECL_NAME (expr)))
717 else
718 DFS_follow_tree_edge (DECL_NAME (expr));
719 DFS_follow_tree_edge (DECL_CONTEXT (expr));
722 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
724 DFS_follow_tree_edge (DECL_SIZE (expr));
725 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
727 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
728 special handling in LTO, it must be handled by streamer hooks. */
730 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
732 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
733 for early inlining so drop it on the floor instead of ICEing in
734 dwarf2out.c. */
736 if ((TREE_CODE (expr) == VAR_DECL
737 || TREE_CODE (expr) == PARM_DECL)
738 && DECL_HAS_VALUE_EXPR_P (expr))
739 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
740 if (TREE_CODE (expr) == VAR_DECL)
741 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
744 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
746 if (TREE_CODE (expr) == TYPE_DECL)
747 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
750 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
752 /* Make sure we don't inadvertently set the assembler name. */
753 if (DECL_ASSEMBLER_NAME_SET_P (expr))
754 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
757 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
759 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
760 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
761 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
762 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
763 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
766 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
768 DFS_follow_tree_edge (DECL_VINDEX (expr));
769 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
770 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
771 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
774 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
776 DFS_follow_tree_edge (TYPE_SIZE (expr));
777 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
778 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
779 DFS_follow_tree_edge (TYPE_NAME (expr));
780 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
781 reconstructed during fixup. */
782 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
783 during fixup. */
784 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
785 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
786 /* TYPE_CANONICAL is re-computed during type merging, so no need
787 to follow it here. */
788 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
791 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
793 if (TREE_CODE (expr) == ENUMERAL_TYPE)
794 DFS_follow_tree_edge (TYPE_VALUES (expr));
795 else if (TREE_CODE (expr) == ARRAY_TYPE)
796 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
797 else if (RECORD_OR_UNION_TYPE_P (expr))
798 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
799 DFS_follow_tree_edge (t);
800 else if (TREE_CODE (expr) == FUNCTION_TYPE
801 || TREE_CODE (expr) == METHOD_TYPE)
802 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
804 if (!POINTER_TYPE_P (expr))
805 DFS_follow_tree_edge (TYPE_MINVAL (expr));
806 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
807 if (RECORD_OR_UNION_TYPE_P (expr))
808 DFS_follow_tree_edge (TYPE_BINFO (expr));
811 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
813 DFS_follow_tree_edge (TREE_PURPOSE (expr));
814 DFS_follow_tree_edge (TREE_VALUE (expr));
815 DFS_follow_tree_edge (TREE_CHAIN (expr));
818 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
820 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
821 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
824 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
826 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
827 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
828 DFS_follow_tree_edge (TREE_BLOCK (expr));
831 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
833 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
834 if (VAR_OR_FUNCTION_DECL_P (t)
835 && DECL_EXTERNAL (t))
836 /* We have to stream externals in the block chain as
837 non-references. See also
838 tree-streamer-out.c:streamer_write_chain. */
839 DFS_write_tree (ob, expr_state, t, ref_p, false);
840 else
841 DFS_follow_tree_edge (t);
843 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
845 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
846 handle - those that represent inlined function scopes.
847    For the rest, drop them on the floor instead of ICEing
848 in dwarf2out.c. */
849 if (inlined_function_outer_scope_p (expr))
851 tree ultimate_origin = block_ultimate_origin (expr);
852 DFS_follow_tree_edge (ultimate_origin);
854 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
855 information for early inlined BLOCKs so drop it on the floor instead
856 of ICEing in dwarf2out.c. */
858    /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
859 streaming time. */
861    /* Do not output BLOCK_SUBBLOCKS. Instead, on stream-in this
862 list is re-constructed from BLOCK_SUPERCONTEXT. */
865 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
867 unsigned i;
868 tree t;
870 /* Note that the number of BINFO slots has already been emitted in
871 EXPR's header (see streamer_write_tree_header) because this length
872 is needed to build the empty BINFO node on the reader side. */
873 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
874 DFS_follow_tree_edge (t);
875 DFS_follow_tree_edge (BINFO_OFFSET (expr));
876 DFS_follow_tree_edge (BINFO_VTABLE (expr));
877 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
879 /* The number of BINFO_BASE_ACCESSES has already been emitted in
880 EXPR's bitfield section. */
881 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
882 DFS_follow_tree_edge (t);
884 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
885 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
888 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
890 unsigned i;
891 tree index, value;
893 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
895 DFS_follow_tree_edge (index);
896 DFS_follow_tree_edge (value);
900 if (code == OMP_CLAUSE)
902 int i;
903 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
904 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
905 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
908 #undef DFS_follow_tree_edge
911 /* Return a hash value for the tree T.
912    CACHE holds hash values of trees outside the current SCC. MAP, if non-NULL,
913    may hold hash values of trees inside the current SCC. */
915 static hashval_t
916 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
918 inchash::hash hstate;
920 #define visit(SIBLING) \
921 do { \
922 unsigned ix; \
923 if (!SIBLING) \
924 hstate.add_int (0); \
925 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
926 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
927 else if (map) \
928 hstate.add_int (*map->get (SIBLING)); \
929 else \
930 hstate.add_int (1); \
931 } while (0)
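/* For illustration, the macro above hashes a sibling as follows: a NULL
   sibling contributes 0; a tree whose hash is already in the streamer
   cache (i.e. outside the current SCC) contributes that cached hash;
   otherwise the tentative hash from MAP is used when MAP is non-NULL,
   and the constant 1 when it is not.  */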
933 /* Hash TS_BASE. */
934 enum tree_code code = TREE_CODE (t);
935 hstate.add_int (code);
936 if (!TYPE_P (t))
938 hstate.add_flag (TREE_SIDE_EFFECTS (t));
939 hstate.add_flag (TREE_CONSTANT (t));
940 hstate.add_flag (TREE_READONLY (t));
941 hstate.add_flag (TREE_PUBLIC (t));
943 hstate.add_flag (TREE_ADDRESSABLE (t));
944 hstate.add_flag (TREE_THIS_VOLATILE (t));
945 if (DECL_P (t))
946 hstate.add_flag (DECL_UNSIGNED (t));
947 else if (TYPE_P (t))
948 hstate.add_flag (TYPE_UNSIGNED (t));
949 if (TYPE_P (t))
950 hstate.add_flag (TYPE_ARTIFICIAL (t));
951 else
952 hstate.add_flag (TREE_NO_WARNING (t));
953 hstate.add_flag (TREE_NOTHROW (t));
954 hstate.add_flag (TREE_STATIC (t));
955 hstate.add_flag (TREE_PROTECTED (t));
956 hstate.add_flag (TREE_DEPRECATED (t));
957 if (code != TREE_BINFO)
958 hstate.add_flag (TREE_PRIVATE (t));
959 if (TYPE_P (t))
961 hstate.add_flag (AGGREGATE_TYPE_P (t)
962 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
963 hstate.add_flag (TYPE_ADDR_SPACE (t));
965 else if (code == SSA_NAME)
966 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
967 hstate.commit_flag ();
969 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
971 int i;
972 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
973 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
974 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
975 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
978 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
980 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
981 hstate.add_flag (r.cl);
982 hstate.add_flag (r.sign);
983 hstate.add_flag (r.signalling);
984 hstate.add_flag (r.canonical);
985 hstate.commit_flag ();
986 hstate.add_int (r.uexp);
987 hstate.add (r.sig, sizeof (r.sig));
990 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
992 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
993 hstate.add_int (f.mode);
994 hstate.add_int (f.data.low);
995 hstate.add_int (f.data.high);
998 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1000 hstate.add_wide_int (DECL_MODE (t));
1001 hstate.add_flag (DECL_NONLOCAL (t));
1002 hstate.add_flag (DECL_VIRTUAL_P (t));
1003 hstate.add_flag (DECL_IGNORED_P (t));
1004 hstate.add_flag (DECL_ABSTRACT_P (t));
1005 hstate.add_flag (DECL_ARTIFICIAL (t));
1006 hstate.add_flag (DECL_USER_ALIGN (t));
1007 hstate.add_flag (DECL_PRESERVE_P (t));
1008 hstate.add_flag (DECL_EXTERNAL (t));
1009 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1010 hstate.commit_flag ();
1011 hstate.add_int (DECL_ALIGN (t));
1012 if (code == LABEL_DECL)
1014 hstate.add_int (EH_LANDING_PAD_NR (t));
1015 hstate.add_int (LABEL_DECL_UID (t));
1017 else if (code == FIELD_DECL)
1019 hstate.add_flag (DECL_PACKED (t));
1020 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1021 hstate.add_int (DECL_OFFSET_ALIGN (t));
1023 else if (code == VAR_DECL)
1025 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1026 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1028 if (code == RESULT_DECL
1029 || code == PARM_DECL
1030 || code == VAR_DECL)
1032 hstate.add_flag (DECL_BY_REFERENCE (t));
1033 if (code == VAR_DECL
1034 || code == PARM_DECL)
1035 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1037 hstate.commit_flag ();
1040 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1041 hstate.add_int (DECL_REGISTER (t));
1043 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1045 hstate.add_flag (DECL_COMMON (t));
1046 hstate.add_flag (DECL_DLLIMPORT_P (t));
1047 hstate.add_flag (DECL_WEAK (t));
1048 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1049 hstate.add_flag (DECL_COMDAT (t));
1050 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1051 hstate.add_int (DECL_VISIBILITY (t));
1052 if (code == VAR_DECL)
1054 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1055 hstate.add_flag (DECL_HARD_REGISTER (t));
1056 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1058 if (TREE_CODE (t) == FUNCTION_DECL)
1060 hstate.add_flag (DECL_FINAL_P (t));
1061 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1062 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1064 hstate.commit_flag ();
1067 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1069 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1070 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1071 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1072 hstate.add_flag (DECL_UNINLINABLE (t));
1073 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1074 hstate.add_flag (DECL_IS_NOVOPS (t));
1075 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1076 hstate.add_flag (DECL_IS_MALLOC (t));
1077 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1078 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1079 hstate.add_flag (DECL_STATIC_CHAIN (t));
1080 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1081 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1082 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1083 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1084 hstate.add_flag (DECL_PURE_P (t));
1085 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1086 hstate.commit_flag ();
1087 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1088 hstate.add_int (DECL_FUNCTION_CODE (t));
1091 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1093 hstate.add_wide_int (TYPE_MODE (t));
1094 hstate.add_flag (TYPE_STRING_FLAG (t));
1095       /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1096 no streaming. */
1097 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1098 hstate.add_flag (TYPE_PACKED (t));
1099 hstate.add_flag (TYPE_RESTRICT (t));
1100 hstate.add_flag (TYPE_USER_ALIGN (t));
1101 hstate.add_flag (TYPE_READONLY (t));
1102 if (RECORD_OR_UNION_TYPE_P (t))
1104 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1105 hstate.add_flag (TYPE_FINAL_P (t));
1107 else if (code == ARRAY_TYPE)
1108 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1109 hstate.commit_flag ();
1110 hstate.add_int (TYPE_PRECISION (t));
1111 hstate.add_int (TYPE_ALIGN (t));
1114 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1115 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1116 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1118 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1119 /* We don't stream these when passing things to a different target. */
1120 && !lto_stream_offload_p)
1121 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1123 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1124 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1126 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1127 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1129 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1130 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1132 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1134 if (code != IDENTIFIER_NODE)
1135 visit (TREE_TYPE (t));
1138 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1139 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1140 visit (VECTOR_CST_ELT (t, i));
1142 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1144 visit (TREE_REALPART (t));
1145 visit (TREE_IMAGPART (t));
1148 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1150 /* Drop names that were created for anonymous entities. */
1151 if (DECL_NAME (t)
1152 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1153 && anon_aggrname_p (DECL_NAME (t)))
1155 else
1156 visit (DECL_NAME (t));
1157 if (DECL_FILE_SCOPE_P (t))
1159 else
1160 visit (DECL_CONTEXT (t));
1163 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1165 visit (DECL_SIZE (t));
1166 visit (DECL_SIZE_UNIT (t));
1167 visit (DECL_ATTRIBUTES (t));
1168 if ((code == VAR_DECL
1169 || code == PARM_DECL)
1170 && DECL_HAS_VALUE_EXPR_P (t))
1171 visit (DECL_VALUE_EXPR (t));
1172 if (code == VAR_DECL
1173 && DECL_HAS_DEBUG_EXPR_P (t))
1174 visit (DECL_DEBUG_EXPR (t));
1175 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1176 be able to call get_symbol_initial_value. */
1179 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1181 if (code == TYPE_DECL)
1182 visit (DECL_ORIGINAL_TYPE (t));
1185 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1187 if (DECL_ASSEMBLER_NAME_SET_P (t))
1188 visit (DECL_ASSEMBLER_NAME (t));
1191 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1193 visit (DECL_FIELD_OFFSET (t));
1194 visit (DECL_BIT_FIELD_TYPE (t));
1195 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1196 visit (DECL_FIELD_BIT_OFFSET (t));
1197 visit (DECL_FCONTEXT (t));
1200 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1202 visit (DECL_VINDEX (t));
1203 visit (DECL_FUNCTION_PERSONALITY (t));
1204 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1205 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1208 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1210 visit (TYPE_SIZE (t));
1211 visit (TYPE_SIZE_UNIT (t));
1212 visit (TYPE_ATTRIBUTES (t));
1213 visit (TYPE_NAME (t));
1214 visit (TYPE_MAIN_VARIANT (t));
1215 if (TYPE_FILE_SCOPE_P (t))
1217 else
1218 visit (TYPE_CONTEXT (t));
1219 visit (TYPE_STUB_DECL (t));
1222 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1224 if (code == ENUMERAL_TYPE)
1225 visit (TYPE_VALUES (t));
1226 else if (code == ARRAY_TYPE)
1227 visit (TYPE_DOMAIN (t));
1228 else if (RECORD_OR_UNION_TYPE_P (t))
1229 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1230 visit (f);
1231 else if (code == FUNCTION_TYPE
1232 || code == METHOD_TYPE)
1233 visit (TYPE_ARG_TYPES (t));
1234 if (!POINTER_TYPE_P (t))
1235 visit (TYPE_MINVAL (t));
1236 visit (TYPE_MAXVAL (t));
1237 if (RECORD_OR_UNION_TYPE_P (t))
1238 visit (TYPE_BINFO (t));
1241 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1243 visit (TREE_PURPOSE (t));
1244 visit (TREE_VALUE (t));
1245 visit (TREE_CHAIN (t));
1248 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1249 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1250 visit (TREE_VEC_ELT (t, i));
1252 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1254 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1255 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1256 visit (TREE_OPERAND (t, i));
1259 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1261 unsigned i;
1262 tree b;
1263 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1264 visit (b);
1265 visit (BINFO_OFFSET (t));
1266 visit (BINFO_VTABLE (t));
1267 visit (BINFO_VPTR_FIELD (t));
1268 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1269 visit (b);
1270 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1271 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1274 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1276 unsigned i;
1277 tree index, value;
1278 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1279 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1281 visit (index);
1282 visit (value);
1286 if (code == OMP_CLAUSE)
1288 int i;
1289 HOST_WIDE_INT val;
1291 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1292 switch (OMP_CLAUSE_CODE (t))
1294 case OMP_CLAUSE_DEFAULT:
1295 val = OMP_CLAUSE_DEFAULT_KIND (t);
1296 break;
1297 case OMP_CLAUSE_SCHEDULE:
1298 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1299 break;
1300 case OMP_CLAUSE_DEPEND:
1301 val = OMP_CLAUSE_DEPEND_KIND (t);
1302 break;
1303 case OMP_CLAUSE_MAP:
1304 val = OMP_CLAUSE_MAP_KIND (t);
1305 break;
1306 case OMP_CLAUSE_PROC_BIND:
1307 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1308 break;
1309 case OMP_CLAUSE_REDUCTION:
1310 val = OMP_CLAUSE_REDUCTION_CODE (t);
1311 break;
1312 default:
1313 val = 0;
1314 break;
1316 hstate.add_wide_int (val);
1317 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1318 visit (OMP_CLAUSE_OPERAND (t, i));
1319 visit (OMP_CLAUSE_CHAIN (t));
1322 return hstate.end ();
1324 #undef visit
1327 /* Compare two SCC entries by their hash value for qsorting them. */
1330 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1332 const scc_entry *p1 = (const scc_entry *) p1_;
1333 const scc_entry *p2 = (const scc_entry *) p2_;
1334 if (p1->hash < p2->hash)
1335 return -1;
1336 else if (p1->hash > p2->hash)
1337 return 1;
1338 return 0;
1341 /* Return a hash value for the SCC of SIZE entries starting at FIRST on the SCC stack.
1342 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1344 hashval_t
1345 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1346 bool ref_p, bool this_ref_p)
1348 unsigned int last_classes = 0, iterations = 0;
1350 /* Compute hash values for the SCC members. */
1351 for (unsigned i = 0; i < size; ++i)
1352 sccstack[first+i].hash
1353 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1355 if (size == 1)
1356 return sccstack[first].hash;
1358   /* We aim to get a unique hash for every tree within the SCC and compute the
1359      hash value of the whole SCC by combining all values together in a stable (entry-point
1360 independent) order. This guarantees that the same SCC regions within
1361 different translation units will get the same hash values and therefore
1362 will be merged at WPA time.
1364 Often the hashes are already unique. In that case we compute the SCC hash
1365 by combining individual hash values in an increasing order.
1367      If there are duplicates, we seek at least one tree with a unique hash (and
1368      pick the one with the minimal such hash). Then we obtain a stable
1369 order by DFS walk starting from this unique tree and then use the index
1370 within this order to make individual hash values unique.
1372 If there is no tree with unique hash, we iteratively propagate the hash
1373 values across the internal edges of SCC. This usually quickly leads
1374 to unique hashes. Consider, for example, an SCC containing two pointers
1375 that are identical except for the types they point to and assume that
1376 these types are also part of the SCC. The propagation will add the
1377 points-to type information into their hash values. */
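/* A small worked example of the scheme above, with made-up hash values:
   take an SCC {A, B, C} where A and B both hash to 7 and C hashes to 3.
   C has the lowest unique hash, so a DFS restarted from C yields a
   stable order, say C, A, B; the member hashes then become hash(C),
   mix(1, hash(A)) and mix(2, hash(B)), which are all distinct, and the
   SCC hash combines them in that order.  */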
1380 /* Sort the SCC so we can easily check for uniqueness. */
1381 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1383 unsigned int classes = 1;
1384 int firstunique = -1;
1386 /* Find the tree with lowest unique hash (if it exists) and compute
1387 the number of equivalence classes. */
1388 if (sccstack[first].hash != sccstack[first+1].hash)
1389 firstunique = 0;
1390 for (unsigned i = 1; i < size; ++i)
1391 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1393 classes++;
1394 if (firstunique == -1
1395 && (i == size - 1
1396 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1397 firstunique = i;
1400 /* If we found a tree with unique hash, stop the iteration. */
1401 if (firstunique != -1
1402 /* Also terminate if we run out of iterations or if the number of
1403 equivalence classes is no longer increasing.
1404	     For example, a cyclic list of trees that are all equivalent will
1405	     never have a unique entry point; however, we do not build such SCCs
1406	     in our IL. */
1407 || classes <= last_classes || iterations > 16)
1409 hashval_t scc_hash;
1411 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1412 starting from FIRSTUNIQUE to obtain a stable order. */
1413 if (classes != size && firstunique != -1)
1415 hash_map <tree, hashval_t> map(size*2);
1417 /* Store hash values into a map, so we can associate them with
1418 the reordered SCC. */
1419 for (unsigned i = 0; i < size; ++i)
1420 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1422 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1423 true);
1424 gcc_assert (again.sccstack.length () == size);
1426 memcpy (sccstack.address () + first,
1427 again.sccstack.address (),
1428 sizeof (scc_entry) * size);
1430 /* Update hash values of individual members by hashing in the
1431 index within the stable order. This ensures uniqueness.
1432 Also compute the SCC hash by mixing in all hash values in
1433 the stable order we obtained. */
1434 sccstack[first].hash = *map.get (sccstack[first].t);
1435 scc_hash = sccstack[first].hash;
1436 for (unsigned i = 1; i < size; ++i)
1438 sccstack[first+i].hash
1439 = iterative_hash_hashval_t (i,
1440 *map.get (sccstack[first+i].t));
1441 scc_hash
1442 = iterative_hash_hashval_t (scc_hash,
1443 sccstack[first+i].hash);
1446	  /* If we got a unique hash value for each tree, then the sort already
1447	     ensured an entry-point-independent order. Only compute the final
1448	     SCC hash.
1450	     If we failed to find a unique entry point, we go by the same
1451	     route. We will eventually introduce unwanted hash conflicts. */
1452 else
1454 scc_hash = sccstack[first].hash;
1455 for (unsigned i = 1; i < size; ++i)
1456 scc_hash
1457 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1459	      /* We cannot fully guarantee that the hashes won't conflict in a
1460	         way that makes it impossible to find a unique hash for each
1461	         tree. This, however, should be an extremely rare case. ICE
1462	         for now so that possible issues are found and evaluated. */
1463 gcc_checking_assert (classes == size);
1466 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1467 hash into the hash of each element. */
1468 for (unsigned i = 0; i < size; ++i)
1469 sccstack[first+i].hash
1470 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1471 return scc_hash;
1474 last_classes = classes;
1475 iterations++;
1477 /* We failed to identify the entry point; propagate hash values across
1478 the edges. */
1479 hash_map <tree, hashval_t> map(size*2);
1481 for (unsigned i = 0; i < size; ++i)
1482 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1484 for (unsigned i = 0; i < size; i++)
1485 sccstack[first+i].hash
1486 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1488 while (true);
1491 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1492 already in the streamer cache. Main routine called for
1493 each visit of EXPR. */
1495 void
1496 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1497 tree expr, bool ref_p, bool this_ref_p)
1499 /* Handle special cases. */
1500 if (expr == NULL_TREE)
1501 return;
1503 /* Do not DFS walk into indexable trees. */
1504 if (this_ref_p && tree_is_indexable (expr))
1505 return;
1507 /* Check if we already streamed EXPR. */
1508 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1509 return;
1511 worklist w;
1512 w.expr = expr;
1513 w.from_state = from_state;
1514 w.cstate = NULL;
1515 w.ref_p = ref_p;
1516 w.this_ref_p = this_ref_p;
1517 worklist_vec.safe_push (w);
1521 /* Emit the physical representation of tree node EXPR to output block OB.
1522 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1523 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1525 void
1526 lto_output_tree (struct output_block *ob, tree expr,
1527 bool ref_p, bool this_ref_p)
1529 unsigned ix;
1530 bool existed_p;
1532 if (expr == NULL_TREE)
1534 streamer_write_record_start (ob, LTO_null);
1535 return;
1538 if (this_ref_p && tree_is_indexable (expr))
1540 lto_output_tree_ref (ob, expr);
1541 return;
1544 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1545 if (existed_p)
1547 /* If a node has already been streamed out, make sure that
1548 we don't write it more than once. Otherwise, the reader
1549 will instantiate two different nodes for the same object. */
1550 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1551 streamer_write_uhwi (ob, ix);
1552 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1553 lto_tree_code_to_tag (TREE_CODE (expr)));
1554 lto_stats.num_pickle_refs_output++;
1556 else
1558 /* This is the first time we see EXPR, write all reachable
1559 trees to OB. */
1560 static bool in_dfs_walk;
1562       /* Protect against recursion, which would mean a disconnect between
1563          the tree edges we walk in the DFS walk and the edges
1564          we stream out. */
1565 gcc_assert (!in_dfs_walk);
1567       /* Start the DFS walk. */
1570 in_dfs_walk = true;
1571 DFS (ob, expr, ref_p, this_ref_p, false);
1572 in_dfs_walk = false;
1574 /* Finally append a reference to the tree we were writing.
1575 ??? If expr ended up as a singleton we could have
1576          inlined it here and avoided outputting a reference. */
1577 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1578 gcc_assert (existed_p);
1579 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1580 streamer_write_uhwi (ob, ix);
1581 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1582 lto_tree_code_to_tag (TREE_CODE (expr)));
1583 lto_stats.num_pickle_refs_output++;
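/* To summarize the dispatch above, for illustration: an indexable tree
   (see tree_is_indexable) is written as a small reference into one of
   the decl/type tables; a tree already present in the writer cache is
   written as an LTO_tree_pickle_reference to its cache index; anything
   else triggers the DFS walk, which streams every not-yet-streamed tree
   reachable from EXPR as LTO_tree_scc records, after which EXPR itself
   is emitted as a pickle reference to the cache entry the walk created.  */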
1588 /* Output to OB a list of try/catch handlers starting with FIRST. */
1590 static void
1591 output_eh_try_list (struct output_block *ob, eh_catch first)
1593 eh_catch n;
1595 for (n = first; n; n = n->next_catch)
1597 streamer_write_record_start (ob, LTO_eh_catch);
1598 stream_write_tree (ob, n->type_list, true);
1599 stream_write_tree (ob, n->filter_list, true);
1600 stream_write_tree (ob, n->label, true);
1603 streamer_write_record_start (ob, LTO_null);
1607 /* Output EH region R to OB, or an LTO_null record if R is NULL. */
1611 static void
1612 output_eh_region (struct output_block *ob, eh_region r)
1614 enum LTO_tags tag;
1616 if (r == NULL)
1618 streamer_write_record_start (ob, LTO_null);
1619 return;
1622 if (r->type == ERT_CLEANUP)
1623 tag = LTO_ert_cleanup;
1624 else if (r->type == ERT_TRY)
1625 tag = LTO_ert_try;
1626 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1627 tag = LTO_ert_allowed_exceptions;
1628 else if (r->type == ERT_MUST_NOT_THROW)
1629 tag = LTO_ert_must_not_throw;
1630 else
1631 gcc_unreachable ();
1633 streamer_write_record_start (ob, tag);
1634 streamer_write_hwi (ob, r->index);
1636 if (r->outer)
1637 streamer_write_hwi (ob, r->outer->index);
1638 else
1639 streamer_write_zero (ob);
1641 if (r->inner)
1642 streamer_write_hwi (ob, r->inner->index);
1643 else
1644 streamer_write_zero (ob);
1646 if (r->next_peer)
1647 streamer_write_hwi (ob, r->next_peer->index);
1648 else
1649 streamer_write_zero (ob);
1651 if (r->type == ERT_TRY)
1653 output_eh_try_list (ob, r->u.eh_try.first_catch);
1655 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1657 stream_write_tree (ob, r->u.allowed.type_list, true);
1658 stream_write_tree (ob, r->u.allowed.label, true);
1659 streamer_write_uhwi (ob, r->u.allowed.filter);
1661 else if (r->type == ERT_MUST_NOT_THROW)
1663 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1664 bitpack_d bp = bitpack_create (ob->main_stream);
1665 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1666 streamer_write_bitpack (&bp);
1669 if (r->landing_pads)
1670 streamer_write_hwi (ob, r->landing_pads->index);
1671 else
1672 streamer_write_zero (ob);
1676 /* Output landing pad LP to OB. */
1678 static void
1679 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1681 if (lp == NULL)
1683 streamer_write_record_start (ob, LTO_null);
1684 return;
1687 streamer_write_record_start (ob, LTO_eh_landing_pad);
1688 streamer_write_hwi (ob, lp->index);
1689 if (lp->next_lp)
1690 streamer_write_hwi (ob, lp->next_lp->index);
1691 else
1692 streamer_write_zero (ob);
1694 if (lp->region)
1695 streamer_write_hwi (ob, lp->region->index);
1696 else
1697 streamer_write_zero (ob);
1699 stream_write_tree (ob, lp->post_landing_pad, true);
1703 /* Output the existing eh_table to OB. */
1705 static void
1706 output_eh_regions (struct output_block *ob, struct function *fn)
1708 if (fn->eh && fn->eh->region_tree)
1710 unsigned i;
1711 eh_region eh;
1712 eh_landing_pad lp;
1713 tree ttype;
1715 streamer_write_record_start (ob, LTO_eh_table);
1717 /* Emit the index of the root of the EH region tree. */
1718 streamer_write_hwi (ob, fn->eh->region_tree->index);
1720 /* Emit all the EH regions in the region array. */
1721 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1722 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1723 output_eh_region (ob, eh);
1725 /* Emit all landing pads. */
1726 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1727 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1728 output_eh_lp (ob, lp);
1730 /* Emit all the runtime type data. */
1731 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1732 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1733 stream_write_tree (ob, ttype, true);
1735 /* Emit the table of action chains. */
1736 if (targetm.arm_eabi_unwinder)
1738 tree t;
1739 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1740 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1741 stream_write_tree (ob, t, true);
1743 else
1745 uchar c;
1746 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1747 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1748 streamer_write_char_stream (ob->main_stream, c);
1752 /* The LTO_null either terminates the record or indicates that there
1753 are no eh_records at all. */
1754 streamer_write_record_start (ob, LTO_null);
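/* For illustration, the EH table record written above has roughly this
   shape (simplified):

     LTO_eh_table
     hwi  index of the region tree root
     hwi  N, then N eh_region records (LTO_ert_* or LTO_null)
     hwi  M, then M landing-pad records (LTO_eh_landing_pad or LTO_null)
     hwi  K, then K runtime type trees
     hwi  L, then the action/ehspec data (trees for the ARM EABI
          unwinder, raw bytes otherwise)
     LTO_null   terminator

   A function without an EH region tree emits only the trailing
   LTO_null.  */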
1758 /* Output all of the active ssa names to the ssa_names stream. */
1760 static void
1761 output_ssa_names (struct output_block *ob, struct function *fn)
1763 unsigned int i, len;
1765 len = vec_safe_length (SSANAMES (fn));
1766 streamer_write_uhwi (ob, len);
1768 for (i = 1; i < len; i++)
1770 tree ptr = (*SSANAMES (fn))[i];
1772 if (ptr == NULL_TREE
1773 || SSA_NAME_IN_FREE_LIST (ptr)
1774 || virtual_operand_p (ptr))
1775 continue;
1777 streamer_write_uhwi (ob, i);
1778 streamer_write_char_stream (ob->main_stream,
1779 SSA_NAME_IS_DEFAULT_DEF (ptr));
1780 if (SSA_NAME_VAR (ptr))
1781 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1782 else
1783 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1784 stream_write_tree (ob, TREE_TYPE (ptr), true);
1787 streamer_write_zero (ob);
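/* For illustration: the stream produced above is the total length of the
   SSA-name vector followed by one (index, default-def flag, var or type)
   record per live non-virtual SSA name, terminated by a zero index.  */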
1791 /* Output a wide-int. */
1793 static void
1794 streamer_write_wi (struct output_block *ob,
1795 const widest_int &w)
1797 int len = w.get_len ();
1799 streamer_write_uhwi (ob, w.get_precision ());
1800 streamer_write_uhwi (ob, len);
1801 for (int i = 0; i < len; i++)
1802 streamer_write_hwi (ob, w.elt (i));
1806 /* Output the cfg. */
1808 static void
1809 output_cfg (struct output_block *ob, struct function *fn)
1811 struct lto_output_stream *tmp_stream = ob->main_stream;
1812 basic_block bb;
1814 ob->main_stream = ob->cfg_stream;
1816 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1817 profile_status_for_fn (fn));
1819 /* Output the number of the highest basic block. */
1820 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1822 FOR_ALL_BB_FN (bb, fn)
1824 edge_iterator ei;
1825 edge e;
1827 streamer_write_hwi (ob, bb->index);
1829 /* Output the successors and the edge flags. */
1830 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1831 FOR_EACH_EDGE (e, ei, bb->succs)
1833 streamer_write_uhwi (ob, e->dest->index);
1834 streamer_write_hwi (ob, e->probability);
1835 streamer_write_gcov_count (ob, e->count);
1836 streamer_write_uhwi (ob, e->flags);
1840 streamer_write_hwi (ob, -1);
1842 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1843 while (bb->next_bb)
1845 streamer_write_hwi (ob, bb->next_bb->index);
1846 bb = bb->next_bb;
1849 streamer_write_hwi (ob, -1);
1851 /* ??? The cfgloop interface is tied to cfun. */
1852 gcc_assert (cfun == fn);
1854 /* Output the number of loops. */
1855 streamer_write_uhwi (ob, number_of_loops (fn));
1857 /* Output each loop, skipping the tree root which has number zero. */
1858 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1860 struct loop *loop = get_loop (fn, i);
1862 /* Write the index of the loop header. That's enough to rebuild
1863 the loop tree on the reader side. Stream -1 for an unused
1864 loop entry. */
1865 if (!loop)
1867 streamer_write_hwi (ob, -1);
1868 continue;
1870 else
1871 streamer_write_hwi (ob, loop->header->index);
1873 /* Write everything copy_loop_info copies. */
1874 streamer_write_enum (ob->main_stream,
1875 loop_estimation, EST_LAST, loop->estimate_state);
1876 streamer_write_hwi (ob, loop->any_upper_bound);
1877 if (loop->any_upper_bound)
1878 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1879 streamer_write_hwi (ob, loop->any_estimate);
1880 if (loop->any_estimate)
1881 streamer_write_wi (ob, loop->nb_iterations_estimate);
1883 /* Write OMP SIMD related info. */
1884 streamer_write_hwi (ob, loop->safelen);
1885 streamer_write_hwi (ob, loop->dont_vectorize);
1886 streamer_write_hwi (ob, loop->force_vectorize);
1887 stream_write_tree (ob, loop->simduid, true);
1890 ob->main_stream = tmp_stream;
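/* For illustration, the CFG stream written above contains: the profile
   status, the highest basic-block index, then for every block its index,
   its successor count and one (dest index, probability, count, flags)
   tuple per outgoing edge; a -1 marker ends the block list, followed by
   the chain of next_bb indices ending in -1, and finally the loop tree:
   the number of loops and, per loop, the header block index (or -1 for
   an unused slot) plus the bounds, estimates and OMP-SIMD fields that
   copy_loop_info preserves.  */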
1894 /* Create the section header in the file using OB. If the section type
1895    is for a function body, FN is the decl for that function. */
1897 void
1898 produce_asm (struct output_block *ob, tree fn)
1900 enum lto_section_type section_type = ob->section_type;
1901 struct lto_function_header header;
1902 char *section_name;
1904 if (section_type == LTO_section_function_body)
1906 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1907 section_name = lto_get_section_name (section_type, name, NULL);
1909 else
1910 section_name = lto_get_section_name (section_type, NULL, NULL);
1912 lto_begin_section (section_name, !flag_wpa);
1913 free (section_name);
1915   /* The entire header is computed here and written out below. */
1916 memset (&header, 0, sizeof (struct lto_function_header));
1918 /* Write the header. */
1919 header.major_version = LTO_major_version;
1920 header.minor_version = LTO_minor_version;
1922 if (section_type == LTO_section_function_body)
1923 header.cfg_size = ob->cfg_stream->total_size;
1924 header.main_size = ob->main_stream->total_size;
1925 header.string_size = ob->string_stream->total_size;
1926 lto_write_data (&header, sizeof header);
1928 /* Put all of the gimple and the string table out to the asm file as a
1929 block of text. */
1930 if (section_type == LTO_section_function_body)
1931 lto_write_stream (ob->cfg_stream);
1932 lto_write_stream (ob->main_stream);
1933 lto_write_stream (ob->string_stream);
1935 lto_end_section ();
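/* Schematically, a section emitted by produce_asm is laid out as follows
   (a sketch derived from the code above): the lto_function_header with
   version numbers and stream sizes, then the cfg stream (function-body
   sections only), the main stream and the string stream.  */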
1939 /* Output the base body of struct function FN using output block OB. */
1941 static void
1942 output_struct_function_base (struct output_block *ob, struct function *fn)
1944 struct bitpack_d bp;
1945 unsigned i;
1946 tree t;
1948 /* Output the static chain and non-local goto save area. */
1949 stream_write_tree (ob, fn->static_chain_decl, true);
1950 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1952 /* Output all the local variables in the function. */
1953 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1954 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1955 stream_write_tree (ob, t, true);
1957 /* Output current IL state of the function. */
1958 streamer_write_uhwi (ob, fn->curr_properties);
1960 /* Write all the attributes for FN. */
1961 bp = bitpack_create (ob->main_stream);
1962 bp_pack_value (&bp, fn->is_thunk, 1);
1963 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1964 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1965 bp_pack_value (&bp, fn->returns_struct, 1);
1966 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1967 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1968 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1969 bp_pack_value (&bp, fn->after_inlining, 1);
1970 bp_pack_value (&bp, fn->stdarg, 1);
1971 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1972 bp_pack_value (&bp, fn->calls_alloca, 1);
1973 bp_pack_value (&bp, fn->calls_setjmp, 1);
1974 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
1975 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1976 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1977 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1978 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
1980 /* Output the function start and end loci. */
1981 stream_output_location (ob, &bp, fn->function_start_locus);
1982 stream_output_location (ob, &bp, fn->function_end_locus);
1984 streamer_write_bitpack (&bp);
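/* For reference, the data above is streamed in this order: the static
   chain decl, the non-local goto save area, the local decl count followed
   by the decls themselves, curr_properties, and a single bitpack holding
   the flag bits, the va_list/clique values and the function start/end
   locations.  */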
1988 /* Output the body of function NODE->DECL. */
1990 static void
1991 output_function (struct cgraph_node *node)
1993 tree function;
1994 struct function *fn;
1995 basic_block bb;
1996 struct output_block *ob;
1998 function = node->decl;
1999 fn = DECL_STRUCT_FUNCTION (function);
2000 ob = create_output_block (LTO_section_function_body);
2002 clear_line_info (ob);
2003 ob->symbol = node;
2005 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2007 /* Set current_function_decl and cfun. */
2008 push_cfun (fn);
2010 /* Make string 0 be a NULL string. */
2011 streamer_write_char_stream (ob->string_stream, 0);
2013 streamer_write_record_start (ob, LTO_function);
2015 /* Output the decl for the result and the chain of argument decls. */
2016 stream_write_tree (ob, DECL_RESULT (function), true);
2017 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2019 /* Output DECL_INITIAL for the function, which contains the tree of
2020 lexical scopes. */
2021 stream_write_tree (ob, DECL_INITIAL (function), true);
2023 /* We also stream abstract functions, for which we stream only the
2024 information needed for debug info. */
2025 if (gimple_has_body_p (function))
2027 streamer_write_uhwi (ob, 1);
2028 output_struct_function_base (ob, fn);
2030 /* Output all the SSA names used in the function. */
2031 output_ssa_names (ob, fn);
2033 /* Output any exception handling regions. */
2034 output_eh_regions (ob, fn);
2037 /* We will renumber the statements. The code that does this uses
2038 the same ordering that we use for serializing them so we can use
2039 the same code on the other end and not have to write out the
2040 statement numbers. We do not assign UIDs to PHIs here because
2041 virtual PHIs get re-computed on-the-fly which would make numbers
2042 inconsistent. */
2043 set_gimple_stmt_max_uid (cfun, 0);
2044 FOR_ALL_BB_FN (bb, cfun)
2046 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2047 gsi_next (&gsi))
2049 gphi *stmt = gsi.phi ();
2051 /* Virtual PHIs are not going to be streamed. */
2052 if (!virtual_operand_p (gimple_phi_result (stmt)))
2053 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2055 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2056 gsi_next (&gsi))
2058 gimple *stmt = gsi_stmt (gsi);
2059 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2062 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2063 virtual phis now. */
2064 FOR_ALL_BB_FN (bb, cfun)
2066 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2067 gsi_next (&gsi))
2069 gphi *stmt = gsi.phi ();
2070 if (virtual_operand_p (gimple_phi_result (stmt)))
2071 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2075 /* Output the code for the function. */
2076 FOR_ALL_BB_FN (bb, fn)
2077 output_bb (ob, bb, fn);
2079 /* The terminator for this function. */
2080 streamer_write_record_start (ob, LTO_null);
2082 output_cfg (ob, fn);
2084 pop_cfun ();
2086 else
2087 streamer_write_uhwi (ob, 0);
2089 /* Create a section to hold the pickled output of this function. */
2090 produce_asm (ob, function);
2092 destroy_output_block (ob);
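/* Putting it together, the main stream of a function-body section written
   above is roughly: the LTO_function record, DECL_RESULT, the
   DECL_ARGUMENTS chain and DECL_INITIAL, then a 1/0 flag saying whether a
   body follows; if it does, the struct function base data, the SSA names,
   the EH regions, the gimple of each basic block and a final LTO_null
   record, with the CFG going to the separate cfg stream.  */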
2095 /* Output the initializer (constructor) of variable NODE->DECL. */
2097 static void
2098 output_constructor (struct varpool_node *node)
2100 tree var = node->decl;
2101 struct output_block *ob;
2103 ob = create_output_block (LTO_section_function_body);
2105 clear_line_info (ob);
2106 ob->symbol = node;
2108 /* Make string 0 be a NULL string. */
2109 streamer_write_char_stream (ob->string_stream, 0);
2111 /* Output DECL_INITIAL for the variable, which contains the
2112 initializer (constructor). */
2113 stream_write_tree (ob, DECL_INITIAL (var), true);
2115 /* Create a section to hold the pickled output of this initializer. */
2116 produce_asm (ob, var);
2118 destroy_output_block (ob);
2122 /* Emit toplevel asms. */
2124 void
2125 lto_output_toplevel_asms (void)
2127 struct output_block *ob;
2128 struct asm_node *can;
2129 char *section_name;
2130 struct lto_simple_header_with_strings header;
2132 if (!symtab->first_asm_symbol ())
2133 return;
2135 ob = create_output_block (LTO_section_asm);
2137 /* Make string 0 be a NULL string. */
2138 streamer_write_char_stream (ob->string_stream, 0);
2140 for (can = symtab->first_asm_symbol (); can; can = can->next)
2142 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2143 streamer_write_hwi (ob, can->order);
2146 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2148 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2149 lto_begin_section (section_name, !flag_wpa);
2150 free (section_name);
2152 /* The entire header stream is computed here. */
2153 memset (&header, 0, sizeof (header));
2155 /* Write the header. */
2156 header.major_version = LTO_major_version;
2157 header.minor_version = LTO_minor_version;
2159 header.main_size = ob->main_stream->total_size;
2160 header.string_size = ob->string_stream->total_size;
2161 lto_write_data (&header, sizeof header);
2163 /* Put all of the gimple and the string table out to the asm file as a
2164 block of text. */
2165 lto_write_stream (ob->main_stream);
2166 lto_write_stream (ob->string_stream);
2168 lto_end_section ();
2170 destroy_output_block (ob);
2174 /* Copy the function body or variable constructor of NODE without deserializing. */
2176 static void
2177 copy_function_or_variable (struct symtab_node *node)
2179 tree function = node->decl;
2180 struct lto_file_decl_data *file_data = node->lto_file_data;
2181 const char *data;
2182 size_t len;
2183 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2184 char *section_name =
2185 lto_get_section_name (LTO_section_function_body, name, NULL);
2186 size_t i, j;
2187 struct lto_in_decl_state *in_state;
2188 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2190 lto_begin_section (section_name, !flag_wpa);
2191 free (section_name);
2193 /* We may have renamed the declaration, e.g., a static function. */
2194 name = lto_get_decl_name_mapping (file_data, name);
2196 data = lto_get_section_data (file_data, LTO_section_function_body,
2197 name, &len);
2198 gcc_assert (data);
2200 /* Do a bit copy of the function body. */
2201 lto_write_data (data, len);
2203 /* Copy decls. */
2204 in_state =
2205 lto_get_function_in_decl_state (node->lto_file_data, function);
2206 gcc_assert (in_state);
2208 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2210 size_t n = vec_safe_length (in_state->streams[i]);
2211 vec<tree, va_gc> *trees = in_state->streams[i];
2212 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2214 /* The out state must have the same indices as the in state, so just
2215 copy the vector. All the encoders in the out state must be empty
2216 when we reach here. */
2217 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2218 encoder->trees.reserve_exact (n);
2219 for (j = 0; j < n; j++)
2220 encoder->trees.safe_push ((*trees)[j]);
2223 lto_free_section_data (file_data, LTO_section_function_body, name,
2224 data, len);
2225 lto_end_section ();
2228 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2230 static tree
2231 wrap_refs (tree *tp, int *ws, void *)
2233 tree t = *tp;
2234 if (handled_component_p (t)
2235 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2237 tree decl = TREE_OPERAND (t, 0);
2238 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2239 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2240 build1 (ADDR_EXPR, ptrtype, decl),
2241 build_int_cst (ptrtype, 0));
2242 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2243 *ws = 0;
2245 else if (TREE_CODE (t) == CONSTRUCTOR)
2247 else if (!EXPR_P (t))
2248 *ws = 0;
2249 return NULL_TREE;
2252 /* Main entry point from the pass manager. */
2254 void
2255 lto_output (void)
2257 struct lto_out_decl_state *decl_state;
2258 bitmap output = NULL;
2259 int i, n_nodes;
2260 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2262 if (flag_checking)
2263 output = lto_bitmap_alloc ();
2265 /* Initialize the streamer. */
2266 lto_streamer_init ();
2268 n_nodes = lto_symtab_encoder_size (encoder);
2269 /* Process the functions with bodies and the variables with initializers. */
2270 for (i = 0; i < n_nodes; i++)
2272 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2273 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2275 if (lto_symtab_encoder_encode_body_p (encoder, node)
2276 && !node->alias)
2278 if (flag_checking)
2280 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2281 bitmap_set_bit (output, DECL_UID (node->decl));
2283 decl_state = lto_new_out_decl_state ();
2284 lto_push_out_decl_state (decl_state);
2285 if (gimple_has_body_p (node->decl) || !flag_wpa
2286 /* Thunks have no body but they may be synthesized
2287 at WPA time. */
2288 || DECL_ARGUMENTS (node->decl))
2289 output_function (node);
2290 else
2291 copy_function_or_variable (node);
2292 gcc_assert (lto_get_out_decl_state () == decl_state);
2293 lto_pop_out_decl_state ();
2294 lto_record_function_out_decl_state (node->decl, decl_state);
2297 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2299 /* Wrap symbol references inside the ctor in a type
2300 preserving MEM_REF. */
2301 tree ctor = DECL_INITIAL (node->decl);
2302 if (ctor && !in_lto_p)
2303 walk_tree (&ctor, wrap_refs, NULL, NULL);
2304 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2305 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2306 && !node->alias)
2308 timevar_push (TV_IPA_LTO_CTORS_OUT);
2309 if (flag_checking)
2311 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2312 bitmap_set_bit (output, DECL_UID (node->decl));
2314 decl_state = lto_new_out_decl_state ();
2315 lto_push_out_decl_state (decl_state);
2316 if (DECL_INITIAL (node->decl) != error_mark_node
2317 || !flag_wpa)
2318 output_constructor (node);
2319 else
2320 copy_function_or_variable (node);
2321 gcc_assert (lto_get_out_decl_state () == decl_state);
2322 lto_pop_out_decl_state ();
2323 lto_record_function_out_decl_state (node->decl, decl_state);
2324 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2329 /* Emit the callgraph after emitting function bodies. This needs to
2330 be done now to make sure that all the statements in every function
2331 have been renumbered so that edges can be associated with call
2332 statements using the statement UIDs. */
2333 output_symtab ();
2335 output_offload_tables ();
2337 if (flag_checking)
2338 lto_bitmap_free (output);
2342 /* Write each node encoded by ENCODER to OB, as well as those reachable
2343 from it and required for correct representation of its semantics.
2344 Each node in ENCODER must be a global declaration or a type. A node
2345 is written only once, even if it appears multiple times in the
2346 vector. Certain transitively-reachable nodes, such as those
2347 representing expressions, may be duplicated, but such nodes
2348 must not appear in ENCODER itself. */
2350 static void
2351 write_global_stream (struct output_block *ob,
2352 struct lto_tree_ref_encoder *encoder)
2354 tree t;
2355 size_t index;
2356 const size_t size = lto_tree_ref_encoder_size (encoder);
2358 for (index = 0; index < size; index++)
2360 t = lto_tree_ref_encoder_get_tree (encoder, index);
2361 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2362 stream_write_tree (ob, t, false);
2367 /* Write a sequence of indices into the globals vector corresponding
2368 to the trees in ENCODER. These are used by the reader to map the
2369 indices used to refer to global entities within function bodies to
2370 their referents. */
2372 static void
2373 write_global_references (struct output_block *ob,
2374 struct lto_tree_ref_encoder *encoder)
2376 tree t;
2377 uint32_t index;
2378 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2380 /* Write size and slot indexes as 32-bit unsigned numbers. */
2381 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2382 data[0] = size;
2384 for (index = 0; index < size; index++)
2386 unsigned slot_num;
2388 t = lto_tree_ref_encoder_get_tree (encoder, index);
2389 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2390 gcc_assert (slot_num != (unsigned)-1);
2391 data[index + 1] = slot_num;
2394 lto_write_data (data, sizeof (uint32_t) * (size + 1));
2395 free (data);
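/* Each reference block written above is thus a flat array of 32-bit
   values: the element count followed by one writer-cache slot index per
   tree in the encoder.  */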
2399 /* Write all the streams in an lto_out_decl_state STATE using
2400 output block OB. */
2402 void
2403 lto_output_decl_state_streams (struct output_block *ob,
2404 struct lto_out_decl_state *state)
2406 int i;
2408 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2409 write_global_stream (ob, &state->streams[i]);
2413 /* Write all the references in an lto_out_decl_state STATE using
2414 output block OB. */
2416 void
2417 lto_output_decl_state_refs (struct output_block *ob,
2418 struct lto_out_decl_state *state)
2420 unsigned i;
2421 unsigned ref;
2422 tree decl;
2424 /* Write a reference to the FUNCTION_DECL. If there is no function,
2425 write a reference to void_type_node. */
2426 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2427 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2428 gcc_assert (ref != (unsigned)-1);
2429 lto_write_data (&ref, sizeof (uint32_t));
2431 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2432 write_global_references (ob, &state->streams[i]);
2436 /* Return the written size of STATE. */
2438 static size_t
2439 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2441 int i;
2442 size_t size;
2444 size = sizeof (int32_t); /* fn_ref. */
2445 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2447 size += sizeof (int32_t); /* vector size. */
2448 size += (lto_tree_ref_encoder_size (&state->streams[i])
2449 * sizeof (int32_t));
2451 return size;
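/* As a worked example (illustrative numbers only): the fn_ref costs 4
   bytes and each of the LTO_N_DECL_STREAMS streams costs 4 bytes for its
   length plus 4 bytes per tree, so a stream holding 10 trees contributes
   4 + 10 * 4 = 44 bytes to the total.  */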
2455 /* Write symbol T, looked up in CACHE, to the current section. SEEN
2456 holds the assembler names already written so far. */
2458 static void
2459 write_symbol (struct streamer_tree_cache_d *cache,
2460 tree t, hash_set<const char *> *seen, bool alias)
2462 const char *name;
2463 enum gcc_plugin_symbol_kind kind;
2464 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2465 unsigned slot_num;
2466 uint64_t size;
2467 const char *comdat;
2468 unsigned char c;
2470 /* None of the following kinds of symbols are needed in the
2471 symbol table. */
2472 if (!TREE_PUBLIC (t)
2473 || is_builtin_fn (t)
2474 || DECL_ABSTRACT_P (t)
2475 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2476 return;
2477 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2479 gcc_assert (TREE_CODE (t) == VAR_DECL
2480 || TREE_CODE (t) == FUNCTION_DECL);
2482 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2484 /* This behaves like assemble_name_raw in varasm.c, performing the
2485 same name manipulations that ASM_OUTPUT_LABELREF does. */
2486 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2488 if (seen->add (name))
2489 return;
2491 streamer_tree_cache_lookup (cache, t, &slot_num);
2492 gcc_assert (slot_num != (unsigned)-1);
2494 if (DECL_EXTERNAL (t))
2496 if (DECL_WEAK (t))
2497 kind = GCCPK_WEAKUNDEF;
2498 else
2499 kind = GCCPK_UNDEF;
2501 else
2503 if (DECL_WEAK (t))
2504 kind = GCCPK_WEAKDEF;
2505 else if (DECL_COMMON (t))
2506 kind = GCCPK_COMMON;
2507 else
2508 kind = GCCPK_DEF;
2510 /* When something is defined, it should have a node attached. */
2511 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2512 || varpool_node::get (t)->definition);
2513 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2514 || (cgraph_node::get (t)
2515 && cgraph_node::get (t)->definition));
2518 /* Imitate what default_elf_asm_output_external does.
2519 When a symbol is external, we need to output it with DEFAULT visibility
2520 when compiling with -fvisibility=default, but with HIDDEN visibility
2521 when the symbol has the visibility("hidden") attribute specified.
2522 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2523 right. */
2525 if (DECL_EXTERNAL (t)
2526 && !targetm.binds_local_p (t))
2527 visibility = GCCPV_DEFAULT;
2528 else
2529 switch (DECL_VISIBILITY (t))
2531 case VISIBILITY_DEFAULT:
2532 visibility = GCCPV_DEFAULT;
2533 break;
2534 case VISIBILITY_PROTECTED:
2535 visibility = GCCPV_PROTECTED;
2536 break;
2537 case VISIBILITY_HIDDEN:
2538 visibility = GCCPV_HIDDEN;
2539 break;
2540 case VISIBILITY_INTERNAL:
2541 visibility = GCCPV_INTERNAL;
2542 break;
2545 if (kind == GCCPK_COMMON
2546 && DECL_SIZE_UNIT (t)
2547 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2548 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2549 else
2550 size = 0;
2552 if (DECL_ONE_ONLY (t))
2553 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2554 else
2555 comdat = "";
2557 lto_write_data (name, strlen (name) + 1);
2558 lto_write_data (comdat, strlen (comdat) + 1);
2559 c = (unsigned char) kind;
2560 lto_write_data (&c, 1);
2561 c = (unsigned char) visibility;
2562 lto_write_data (&c, 1);
2563 lto_write_data (&size, 8);
2564 lto_write_data (&slot_num, 4);
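/* Each plugin symbol table entry written above is, in order: the
   NUL-terminated assembler name, the NUL-terminated comdat group name
   (an empty string if there is none), one byte of symbol kind, one byte
   of visibility, 8 bytes of size and 4 bytes of writer-cache slot
   number.  */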
2567 /* Return true if NODE should appear in the plugin symbol table. */
2569 bool
2570 output_symbol_p (symtab_node *node)
2572 struct cgraph_node *cnode;
2573 if (!node->real_symbol_p ())
2574 return false;
2575 /* We keep external functions in the symtab for the sake of inlining
2576 and devirtualization. We do not want to see them in the symbol table as
2577 references unless they are really used. */
2578 cnode = dyn_cast <cgraph_node *> (node);
2579 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2580 && cnode->callers)
2581 return true;
2583 /* Ignore all references from external var initializers - they are not really
2584 part of the compilation unit until they are used by folding. Some symbols,
2585 like references to external construction vtables, cannot be referred to at all.
2586 We decide this in can_refer_decl_in_current_unit_p. */
2587 if (!node->definition || DECL_EXTERNAL (node->decl))
2589 int i;
2590 struct ipa_ref *ref;
2591 for (i = 0; node->iterate_referring (i, ref); i++)
2593 if (ref->use == IPA_REF_ALIAS)
2594 continue;
2595 if (is_a <cgraph_node *> (ref->referring))
2596 return true;
2597 if (!DECL_EXTERNAL (ref->referring->decl))
2598 return true;
2600 return false;
2602 return true;
2606 /* Write an IL symbol table to OB. The symbols written are those
2607 recorded in OB's symtab node encoder. */
2609 static void
2610 produce_symtab (struct output_block *ob)
2612 struct streamer_tree_cache_d *cache = ob->writer_cache;
2613 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2614 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2615 lto_symtab_encoder_iterator lsei;
2617 lto_begin_section (section_name, false);
2618 free (section_name);
2620 hash_set<const char *> seen;
2622 /* Write the symbol table.
2623 First write everything defined and then all declarations.
2624 This is necessary to handle cases where we have duplicated symbols. */
2625 for (lsei = lsei_start (encoder);
2626 !lsei_end_p (lsei); lsei_next (&lsei))
2628 symtab_node *node = lsei_node (lsei);
2630 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2631 continue;
2632 write_symbol (cache, node->decl, &seen, false);
2634 for (lsei = lsei_start (encoder);
2635 !lsei_end_p (lsei); lsei_next (&lsei))
2637 symtab_node *node = lsei_node (lsei);
2639 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2640 continue;
2641 write_symbol (cache, node->decl, &seen, false);
2644 lto_end_section ();
2648 /* Init the streamer_mode_table for output, where we collect info on what
2649 machine_mode values have been streamed. */
2650 void
2651 lto_output_init_mode_table (void)
2653 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2657 /* Write the mode table. */
2658 static void
2659 lto_write_mode_table (void)
2661 struct output_block *ob;
2662 ob = create_output_block (LTO_section_mode_table);
2663 bitpack_d bp = bitpack_create (ob->main_stream);
2665 /* Ensure that for GET_MODE_INNER (m) != m we also have
2666 the inner mode marked. */
2667 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2668 if (streamer_mode_table[i])
2670 machine_mode m = (machine_mode) i;
2671 if (GET_MODE_INNER (m) != m)
2672 streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
2674 /* First stream modes that have GET_MODE_INNER (m) == m,
2675 so that we can refer to them afterwards. */
2676 for (int pass = 0; pass < 2; pass++)
2677 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2678 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2680 machine_mode m = (machine_mode) i;
2681 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2682 continue;
2683 bp_pack_value (&bp, m, 8);
2684 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2685 bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2686 bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2687 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2688 bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2689 switch (GET_MODE_CLASS (m))
2691 case MODE_FRACT:
2692 case MODE_UFRACT:
2693 case MODE_ACCUM:
2694 case MODE_UACCUM:
2695 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2696 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2697 break;
2698 case MODE_FLOAT:
2699 case MODE_DECIMAL_FLOAT:
2700 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2701 break;
2702 default:
2703 break;
2705 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2707 bp_pack_value (&bp, VOIDmode, 8);
2709 streamer_write_bitpack (&bp);
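/* Sketch of a single mode record in the bitpack above: 8 bits of mode
   number, the mode class, 8 bits of size, 16 bits of precision, 8 bits
   of inner mode, 8 bits of nunits, class-specific data (ibit/fbit for
   the fixed-point classes, the real format name for the float classes)
   and finally the mode name string.  The table ends with a VOIDmode
   entry.  */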
2711 char *section_name
2712 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2713 lto_begin_section (section_name, !flag_wpa);
2714 free (section_name);
2716 /* The entire header stream is computed here. */
2717 struct lto_simple_header_with_strings header;
2718 memset (&header, 0, sizeof (header));
2720 /* Write the header. */
2721 header.major_version = LTO_major_version;
2722 header.minor_version = LTO_minor_version;
2724 header.main_size = ob->main_stream->total_size;
2725 header.string_size = ob->string_stream->total_size;
2726 lto_write_data (&header, sizeof header);
2728 /* Put all of the gimple and the string table out to the asm file as a
2729 block of text. */
2730 lto_write_stream (ob->main_stream);
2731 lto_write_stream (ob->string_stream);
2733 lto_end_section ();
2734 destroy_output_block (ob);
2738 /* This pass is run after all of the functions are serialized and all
2739 of the IPA passes have written their serialized forms. This pass
2740 causes the vector of all of the global decls and types used from
2741 this file to be written into a section that can then be read back
2742 to recover these on the other side. */
2744 void
2745 produce_asm_for_decls (void)
2747 struct lto_out_decl_state *out_state;
2748 struct lto_out_decl_state *fn_out_state;
2749 struct lto_decl_header header;
2750 char *section_name;
2751 struct output_block *ob;
2752 unsigned idx, num_fns;
2753 size_t decl_state_size;
2754 int32_t num_decl_states;
2756 ob = create_output_block (LTO_section_decls);
2758 memset (&header, 0, sizeof (struct lto_decl_header));
2760 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2761 lto_begin_section (section_name, !flag_wpa);
2762 free (section_name);
2764 /* Make string 0 be a NULL string. */
2765 streamer_write_char_stream (ob->string_stream, 0);
2767 gcc_assert (!alias_pairs);
2769 /* Get rid of the global decl state hash tables to save some memory. */
2770 out_state = lto_get_out_decl_state ();
2771 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2772 if (out_state->streams[i].tree_hash_table)
2774 delete out_state->streams[i].tree_hash_table;
2775 out_state->streams[i].tree_hash_table = NULL;
2778 /* Write the global symbols. */
2779 lto_output_decl_state_streams (ob, out_state);
2780 num_fns = lto_function_decl_states.length ();
2781 for (idx = 0; idx < num_fns; idx++)
2783 fn_out_state =
2784 lto_function_decl_states[idx];
2785 lto_output_decl_state_streams (ob, fn_out_state);
2788 header.major_version = LTO_major_version;
2789 header.minor_version = LTO_minor_version;
2791 /* Currently not used. This field would allow us to preallocate
2792 the globals vector, so that it need not be resized as it is extended. */
2793 header.num_nodes = -1;
2795 /* Compute the total size of all decl out states. */
2796 decl_state_size = sizeof (int32_t);
2797 decl_state_size += lto_out_decl_state_written_size (out_state);
2798 for (idx = 0; idx < num_fns; idx++)
2800 fn_out_state =
2801 lto_function_decl_states[idx];
2802 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2804 header.decl_state_size = decl_state_size;
2806 header.main_size = ob->main_stream->total_size;
2807 header.string_size = ob->string_stream->total_size;
2809 lto_write_data (&header, sizeof header);
2811 /* Write the main out-decl state, followed by out-decl states of
2812 functions. */
2813 num_decl_states = num_fns + 1;
2814 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2815 lto_output_decl_state_refs (ob, out_state);
2816 for (idx = 0; idx < num_fns; idx++)
2818 fn_out_state = lto_function_decl_states[idx];
2819 lto_output_decl_state_refs (ob, fn_out_state);
2822 lto_write_stream (ob->main_stream);
2823 lto_write_stream (ob->string_stream);
2825 lto_end_section ();
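/* In summary, the decls section written above consists of the
   lto_decl_header, a 32-bit count of decl states, the references of the
   global decl state followed by those of each per-function state, and
   then the main and string streams.  */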
2827 /* Write the symbol table. It is used by the linker to determine dependencies,
2828 and thus we can skip it for WPA. */
2829 if (!flag_wpa)
2830 produce_symtab (ob);
2832 /* Write command line opts. */
2833 lto_write_options ();
2835 /* Deallocate memory and clean up. */
2836 for (idx = 0; idx < num_fns; idx++)
2838 fn_out_state =
2839 lto_function_decl_states[idx];
2840 lto_delete_out_decl_state (fn_out_state);
2842 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2843 lto_function_decl_states.release ();
2844 destroy_output_block (ob);
2845 if (lto_stream_offload_p)
2846 lto_write_mode_table ();