remove unused defines from sendmsg.c
[official-gcc.git] / gcc / lto-streamer-out.c
blobb6bc515201d8a4682d22d22778e6bced225aa38f
1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2015 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "rtl.h"
30 #include "ssa.h"
31 #include "alias.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "flags.h"
35 #include "insn-config.h"
36 #include "expmed.h"
37 #include "dojump.h"
38 #include "explow.h"
39 #include "calls.h"
40 #include "emit-rtl.h"
41 #include "varasm.h"
42 #include "stmt.h"
43 #include "expr.h"
44 #include "params.h"
45 #include "internal-fn.h"
46 #include "gimple-iterator.h"
47 #include "tree-pass.h"
48 #include "diagnostic-core.h"
49 #include "except.h"
50 #include "lto-symtab.h"
51 #include "cgraph.h"
52 #include "target.h"
53 #include "gimple-streamer.h"
54 #include "cfgloop.h"
55 #include "builtins.h"
56 #include "gomp-constants.h"
59 static void lto_write_tree (struct output_block*, tree, bool);
61 /* Clear the line info stored in DATA_IN. */
63 static void
64 clear_line_info (struct output_block *ob)
66 ob->current_file = NULL;
67 ob->current_line = 0;
68 ob->current_col = 0;
72 /* Create the output block and return it. SECTION_TYPE is
73 LTO_section_function_body or LTO_static_initializer. */
75 struct output_block *
76 create_output_block (enum lto_section_type section_type)
78 struct output_block *ob = XCNEW (struct output_block);
80 ob->section_type = section_type;
81 ob->decl_state = lto_get_out_decl_state ();
82 ob->main_stream = XCNEW (struct lto_output_stream);
83 ob->string_stream = XCNEW (struct lto_output_stream);
84 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
86 if (section_type == LTO_section_function_body)
87 ob->cfg_stream = XCNEW (struct lto_output_stream);
89 clear_line_info (ob);
91 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
92 gcc_obstack_init (&ob->obstack);
94 return ob;
98 /* Destroy the output block OB. */
100 void
101 destroy_output_block (struct output_block *ob)
103 enum lto_section_type section_type = ob->section_type;
105 delete ob->string_hash_table;
106 ob->string_hash_table = NULL;
108 free (ob->main_stream);
109 free (ob->string_stream);
110 if (section_type == LTO_section_function_body)
111 free (ob->cfg_stream);
113 streamer_tree_cache_delete (ob->writer_cache);
114 obstack_free (&ob->obstack, NULL);
116 free (ob);
120 /* Look up NODE in the type table and write the index for it to OB. */
122 static void
123 output_type_ref (struct output_block *ob, tree node)
125 streamer_write_record_start (ob, LTO_type_ref);
126 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
130 /* Return true if tree node T is written to various tables. For these
131 nodes, we sometimes want to write their phyiscal representation
132 (via lto_output_tree), and sometimes we need to emit an index
133 reference into a table (via lto_output_tree_ref). */
135 static bool
136 tree_is_indexable (tree t)
138 /* Parameters and return values of functions of variably modified types
139 must go to global stream, because they may be used in the type
140 definition. */
141 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
142 && DECL_CONTEXT (t))
143 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
144 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
145 else if (TREE_CODE (t) == IMPORTED_DECL)
146 return false;
147 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
148 || TREE_CODE (t) == TYPE_DECL
149 || TREE_CODE (t) == CONST_DECL
150 || TREE_CODE (t) == NAMELIST_DECL)
151 && decl_function_context (t))
152 return false;
153 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
154 return false;
155 /* Variably modified types need to be streamed alongside function
156 bodies because they can refer to local entities. Together with
157 them we have to localize their members as well.
158 ??? In theory that includes non-FIELD_DECLs as well. */
159 else if (TYPE_P (t)
160 && variably_modified_type_p (t, NULL_TREE))
161 return false;
162 else if (TREE_CODE (t) == FIELD_DECL
163 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
164 return false;
165 else
166 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
170 /* Output info about new location into bitpack BP.
171 After outputting bitpack, lto_output_location_data has
172 to be done to output actual data. */
174 void
175 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
176 location_t loc)
178 expanded_location xloc;
180 loc = LOCATION_LOCUS (loc);
181 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
182 loc < RESERVED_LOCATION_COUNT
183 ? loc : RESERVED_LOCATION_COUNT);
184 if (loc < RESERVED_LOCATION_COUNT)
185 return;
187 xloc = expand_location (loc);
189 bp_pack_value (bp, ob->current_file != xloc.file, 1);
190 bp_pack_value (bp, ob->current_line != xloc.line, 1);
191 bp_pack_value (bp, ob->current_col != xloc.column, 1);
193 if (ob->current_file != xloc.file)
195 bp_pack_string (ob, bp, xloc.file, true);
196 bp_pack_value (bp, xloc.sysp, 1);
198 ob->current_file = xloc.file;
199 ob->current_sysp = xloc.sysp;
201 if (ob->current_line != xloc.line)
202 bp_pack_var_len_unsigned (bp, xloc.line);
203 ob->current_line = xloc.line;
205 if (ob->current_col != xloc.column)
206 bp_pack_var_len_unsigned (bp, xloc.column);
207 ob->current_col = xloc.column;
211 /* If EXPR is an indexable tree node, output a reference to it to
212 output block OB. Otherwise, output the physical representation of
213 EXPR to OB. */
215 static void
216 lto_output_tree_ref (struct output_block *ob, tree expr)
218 enum tree_code code;
220 if (TYPE_P (expr))
222 output_type_ref (ob, expr);
223 return;
226 code = TREE_CODE (expr);
227 switch (code)
229 case SSA_NAME:
230 streamer_write_record_start (ob, LTO_ssa_name_ref);
231 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
232 break;
234 case FIELD_DECL:
235 streamer_write_record_start (ob, LTO_field_decl_ref);
236 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
237 break;
239 case FUNCTION_DECL:
240 streamer_write_record_start (ob, LTO_function_decl_ref);
241 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
242 break;
244 case VAR_DECL:
245 case DEBUG_EXPR_DECL:
246 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
247 case PARM_DECL:
248 streamer_write_record_start (ob, LTO_global_decl_ref);
249 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
250 break;
252 case CONST_DECL:
253 streamer_write_record_start (ob, LTO_const_decl_ref);
254 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
255 break;
257 case IMPORTED_DECL:
258 gcc_assert (decl_function_context (expr) == NULL);
259 streamer_write_record_start (ob, LTO_imported_decl_ref);
260 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
261 break;
263 case TYPE_DECL:
264 streamer_write_record_start (ob, LTO_type_decl_ref);
265 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
266 break;
268 case NAMELIST_DECL:
269 streamer_write_record_start (ob, LTO_namelist_decl_ref);
270 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
271 break;
273 case NAMESPACE_DECL:
274 streamer_write_record_start (ob, LTO_namespace_decl_ref);
275 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
276 break;
278 case LABEL_DECL:
279 streamer_write_record_start (ob, LTO_label_decl_ref);
280 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
281 break;
283 case RESULT_DECL:
284 streamer_write_record_start (ob, LTO_result_decl_ref);
285 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
286 break;
288 case TRANSLATION_UNIT_DECL:
289 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
290 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
291 break;
293 default:
294 /* No other node is indexable, so it should have been handled by
295 lto_output_tree. */
296 gcc_unreachable ();
301 /* Return true if EXPR is a tree node that can be written to disk. */
303 static inline bool
304 lto_is_streamable (tree expr)
306 enum tree_code code = TREE_CODE (expr);
308 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
309 name version in lto_output_tree_ref (see output_ssa_names). */
310 return !is_lang_specific (expr)
311 && code != SSA_NAME
312 && code != CALL_EXPR
313 && code != LANG_TYPE
314 && code != MODIFY_EXPR
315 && code != INIT_EXPR
316 && code != TARGET_EXPR
317 && code != BIND_EXPR
318 && code != WITH_CLEANUP_EXPR
319 && code != STATEMENT_LIST
320 && (code == CASE_LABEL_EXPR
321 || code == DECL_EXPR
322 || TREE_CODE_CLASS (code) != tcc_statement);
326 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
328 static tree
329 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
331 gcc_checking_assert (DECL_P (expr)
332 && TREE_CODE (expr) != FUNCTION_DECL
333 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
335 /* Handle DECL_INITIAL for symbols. */
336 tree initial = DECL_INITIAL (expr);
337 if (TREE_CODE (expr) == VAR_DECL
338 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
339 && !DECL_IN_CONSTANT_POOL (expr)
340 && initial)
342 varpool_node *vnode;
343 /* Extra section needs about 30 bytes; do not produce it for simple
344 scalar values. */
345 if (TREE_CODE (DECL_INITIAL (expr)) == CONSTRUCTOR
346 || !(vnode = varpool_node::get (expr))
347 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
348 initial = error_mark_node;
351 return initial;
355 /* Write a physical representation of tree node EXPR to output block
356 OB. If REF_P is true, the leaves of EXPR are emitted as references
357 via lto_output_tree_ref. IX is the index into the streamer cache
358 where EXPR is stored. */
360 static void
361 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
363 /* Pack all the non-pointer fields in EXPR into a bitpack and write
364 the resulting bitpack. */
365 streamer_write_tree_bitfields (ob, expr);
367 /* Write all the pointer fields in EXPR. */
368 streamer_write_tree_body (ob, expr, ref_p);
370 /* Write any LTO-specific data to OB. */
371 if (DECL_P (expr)
372 && TREE_CODE (expr) != FUNCTION_DECL
373 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
375 /* Handle DECL_INITIAL for symbols. */
376 tree initial = get_symbol_initial_value
377 (ob->decl_state->symtab_node_encoder, expr);
378 stream_write_tree (ob, initial, ref_p);
382 /* Write a physical representation of tree node EXPR to output block
383 OB. If REF_P is true, the leaves of EXPR are emitted as references
384 via lto_output_tree_ref. IX is the index into the streamer cache
385 where EXPR is stored. */
387 static void
388 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
390 if (!lto_is_streamable (expr))
391 internal_error ("tree code %qs is not supported in LTO streams",
392 get_tree_code_name (TREE_CODE (expr)));
394 /* Write the header, containing everything needed to materialize
395 EXPR on the reading side. */
396 streamer_write_tree_header (ob, expr);
398 lto_write_tree_1 (ob, expr, ref_p);
400 /* Mark the end of EXPR. */
401 streamer_write_zero (ob);
404 /* Emit the physical representation of tree node EXPR to output block OB,
405 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
406 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
408 static void
409 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
410 bool ref_p, bool this_ref_p)
412 unsigned ix;
414 gcc_checking_assert (expr != NULL_TREE
415 && !(this_ref_p && tree_is_indexable (expr)));
417 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
418 expr, hash, &ix);
419 gcc_assert (!exists_p);
420 if (streamer_handle_as_builtin_p (expr))
422 /* MD and NORMAL builtins do not need to be written out
423 completely as they are always instantiated by the
424 compiler on startup. The only builtins that need to
425 be written out are BUILT_IN_FRONTEND. For all other
426 builtins, we simply write the class and code. */
427 streamer_write_builtin (ob, expr);
429 else if (TREE_CODE (expr) == INTEGER_CST
430 && !TREE_OVERFLOW (expr))
432 /* Shared INTEGER_CST nodes are special because they need their
433 original type to be materialized by the reader (to implement
434 TYPE_CACHED_VALUES). */
435 streamer_write_integer_cst (ob, expr, ref_p);
437 else
439 /* This is the first time we see EXPR, write its fields
440 to OB. */
441 lto_write_tree (ob, expr, ref_p);
445 class DFS
447 public:
448 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
449 bool single_p);
450 ~DFS ();
452 struct scc_entry
454 tree t;
455 hashval_t hash;
457 vec<scc_entry> sccstack;
459 private:
460 struct sccs
462 unsigned int dfsnum;
463 unsigned int low;
465 struct worklist
467 tree expr;
468 sccs *from_state;
469 sccs *cstate;
470 bool ref_p;
471 bool this_ref_p;
474 static int scc_entry_compare (const void *, const void *);
476 void DFS_write_tree_body (struct output_block *ob,
477 tree expr, sccs *expr_state, bool ref_p);
479 void DFS_write_tree (struct output_block *ob, sccs *from_state,
480 tree expr, bool ref_p, bool this_ref_p);
482 hashval_t
483 hash_scc (struct output_block *ob, unsigned first, unsigned size,
484 bool ref_p, bool this_ref_p);
486 hash_map<tree, sccs *> sccstate;
487 vec<worklist> worklist_vec;
488 struct obstack sccstate_obstack;
491 /* Emit the physical representation of tree node EXPR to output block OB,
492 using depth-first search on the subgraph. If THIS_REF_P is true, the
493 leaves of EXPR are emitted as references via lto_output_tree_ref.
494 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
495 this is for a rewalk of a single leaf SCC. */
497 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
498 bool single_p)
500 unsigned int next_dfs_num = 1;
501 sccstack.create (0);
502 gcc_obstack_init (&sccstate_obstack);
503 worklist_vec = vNULL;
504 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
505 while (!worklist_vec.is_empty ())
507 worklist &w = worklist_vec.last ();
508 expr = w.expr;
509 sccs *from_state = w.from_state;
510 sccs *cstate = w.cstate;
511 ref_p = w.ref_p;
512 this_ref_p = w.this_ref_p;
513 if (cstate == NULL)
515 sccs **slot = &sccstate.get_or_insert (expr);
516 cstate = *slot;
517 if (cstate)
519 gcc_checking_assert (from_state);
520 if (cstate->dfsnum < from_state->dfsnum)
521 from_state->low = MIN (cstate->dfsnum, from_state->low);
522 worklist_vec.pop ();
523 continue;
526 scc_entry e = { expr, 0 };
527 /* Not yet visited. DFS recurse and push it onto the stack. */
528 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
529 sccstack.safe_push (e);
530 cstate->dfsnum = next_dfs_num++;
531 cstate->low = cstate->dfsnum;
532 w.cstate = cstate;
534 if (streamer_handle_as_builtin_p (expr))
536 else if (TREE_CODE (expr) == INTEGER_CST
537 && !TREE_OVERFLOW (expr))
538 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
539 else
541 DFS_write_tree_body (ob, expr, cstate, ref_p);
543 /* Walk any LTO-specific edges. */
544 if (DECL_P (expr)
545 && TREE_CODE (expr) != FUNCTION_DECL
546 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
548 /* Handle DECL_INITIAL for symbols. */
549 tree initial
550 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
551 expr);
552 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
555 continue;
558 /* See if we found an SCC. */
559 if (cstate->low == cstate->dfsnum)
561 unsigned first, size;
562 tree x;
564 /* If we are re-walking a single leaf SCC just pop it,
565 let earlier worklist item access the sccstack. */
566 if (single_p)
568 worklist_vec.pop ();
569 continue;
572 /* Pop the SCC and compute its size. */
573 first = sccstack.length ();
576 x = sccstack[--first].t;
578 while (x != expr);
579 size = sccstack.length () - first;
581 /* No need to compute hashes for LTRANS units, we don't perform
582 any merging there. */
583 hashval_t scc_hash = 0;
584 unsigned scc_entry_len = 0;
585 if (!flag_wpa)
587 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
589 /* Put the entries with the least number of collisions first. */
590 unsigned entry_start = 0;
591 scc_entry_len = size + 1;
592 for (unsigned i = 0; i < size;)
594 unsigned from = i;
595 for (i = i + 1; i < size
596 && (sccstack[first + i].hash
597 == sccstack[first + from].hash); ++i)
599 if (i - from < scc_entry_len)
601 scc_entry_len = i - from;
602 entry_start = from;
605 for (unsigned i = 0; i < scc_entry_len; ++i)
606 std::swap (sccstack[first + i],
607 sccstack[first + entry_start + i]);
609 if (scc_entry_len == 1)
610 ; /* We already sorted SCC deterministically in hash_scc. */
611 else
612 /* Check that we have only one SCC.
613 Naturally we may have conflicts if hash function is not
614 strong enough. Lets see how far this gets. */
616 #ifdef ENABLE_CHECKING
617 gcc_unreachable ();
618 #endif
622 /* Write LTO_tree_scc. */
623 streamer_write_record_start (ob, LTO_tree_scc);
624 streamer_write_uhwi (ob, size);
625 streamer_write_uhwi (ob, scc_hash);
627 /* Write size-1 SCCs without wrapping them inside SCC bundles.
628 All INTEGER_CSTs need to be handled this way as we need
629 their type to materialize them. Also builtins are handled
630 this way.
631 ??? We still wrap these in LTO_tree_scc so at the
632 input side we can properly identify the tree we want
633 to ultimatively return. */
634 if (size == 1)
635 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
636 else
638 /* Write the size of the SCC entry candidates. */
639 streamer_write_uhwi (ob, scc_entry_len);
641 /* Write all headers and populate the streamer cache. */
642 for (unsigned i = 0; i < size; ++i)
644 hashval_t hash = sccstack[first+i].hash;
645 tree t = sccstack[first+i].t;
646 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
647 t, hash, NULL);
648 gcc_assert (!exists_p);
650 if (!lto_is_streamable (t))
651 internal_error ("tree code %qs is not supported "
652 "in LTO streams",
653 get_tree_code_name (TREE_CODE (t)));
655 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
657 /* Write the header, containing everything needed to
658 materialize EXPR on the reading side. */
659 streamer_write_tree_header (ob, t);
662 /* Write the bitpacks and tree references. */
663 for (unsigned i = 0; i < size; ++i)
665 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
667 /* Mark the end of the tree. */
668 streamer_write_zero (ob);
672 /* Finally truncate the vector. */
673 sccstack.truncate (first);
675 if (from_state)
676 from_state->low = MIN (from_state->low, cstate->low);
677 worklist_vec.pop ();
678 continue;
681 gcc_checking_assert (from_state);
682 from_state->low = MIN (from_state->low, cstate->low);
683 if (cstate->dfsnum < from_state->dfsnum)
684 from_state->low = MIN (cstate->dfsnum, from_state->low);
685 worklist_vec.pop ();
687 worklist_vec.release ();
690 DFS::~DFS ()
692 sccstack.release ();
693 obstack_free (&sccstate_obstack, NULL);
696 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
697 DFS recurse for all tree edges originating from it. */
699 void
700 DFS::DFS_write_tree_body (struct output_block *ob,
701 tree expr, sccs *expr_state, bool ref_p)
703 #define DFS_follow_tree_edge(DEST) \
704 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
706 enum tree_code code;
708 code = TREE_CODE (expr);
710 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
712 if (TREE_CODE (expr) != IDENTIFIER_NODE)
713 DFS_follow_tree_edge (TREE_TYPE (expr));
716 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
718 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
719 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
722 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
724 DFS_follow_tree_edge (TREE_REALPART (expr));
725 DFS_follow_tree_edge (TREE_IMAGPART (expr));
728 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
730 /* Drop names that were created for anonymous entities. */
731 if (DECL_NAME (expr)
732 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
733 && anon_aggrname_p (DECL_NAME (expr)))
735 else
736 DFS_follow_tree_edge (DECL_NAME (expr));
737 DFS_follow_tree_edge (DECL_CONTEXT (expr));
740 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
742 DFS_follow_tree_edge (DECL_SIZE (expr));
743 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
745 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
746 special handling in LTO, it must be handled by streamer hooks. */
748 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
750 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
751 for early inlining so drop it on the floor instead of ICEing in
752 dwarf2out.c. */
754 if ((TREE_CODE (expr) == VAR_DECL
755 || TREE_CODE (expr) == PARM_DECL)
756 && DECL_HAS_VALUE_EXPR_P (expr))
757 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
758 if (TREE_CODE (expr) == VAR_DECL)
759 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
762 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
764 if (TREE_CODE (expr) == TYPE_DECL)
765 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
768 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
770 /* Make sure we don't inadvertently set the assembler name. */
771 if (DECL_ASSEMBLER_NAME_SET_P (expr))
772 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
775 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
777 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
778 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
779 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
780 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
781 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
784 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
786 DFS_follow_tree_edge (DECL_VINDEX (expr));
787 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
788 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
789 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
792 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
794 DFS_follow_tree_edge (TYPE_SIZE (expr));
795 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
796 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
797 DFS_follow_tree_edge (TYPE_NAME (expr));
798 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
799 reconstructed during fixup. */
800 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
801 during fixup. */
802 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
803 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
804 /* TYPE_CANONICAL is re-computed during type merging, so no need
805 to follow it here. */
806 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
809 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
811 if (TREE_CODE (expr) == ENUMERAL_TYPE)
812 DFS_follow_tree_edge (TYPE_VALUES (expr));
813 else if (TREE_CODE (expr) == ARRAY_TYPE)
814 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
815 else if (RECORD_OR_UNION_TYPE_P (expr))
816 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
817 DFS_follow_tree_edge (t);
818 else if (TREE_CODE (expr) == FUNCTION_TYPE
819 || TREE_CODE (expr) == METHOD_TYPE)
820 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
822 if (!POINTER_TYPE_P (expr))
823 DFS_follow_tree_edge (TYPE_MINVAL (expr));
824 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
825 if (RECORD_OR_UNION_TYPE_P (expr))
826 DFS_follow_tree_edge (TYPE_BINFO (expr));
829 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
831 DFS_follow_tree_edge (TREE_PURPOSE (expr));
832 DFS_follow_tree_edge (TREE_VALUE (expr));
833 DFS_follow_tree_edge (TREE_CHAIN (expr));
836 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
838 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
839 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
842 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
844 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
845 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
846 DFS_follow_tree_edge (TREE_BLOCK (expr));
849 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
851 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
852 if (VAR_OR_FUNCTION_DECL_P (t)
853 && DECL_EXTERNAL (t))
854 /* We have to stream externals in the block chain as
855 non-references. See also
856 tree-streamer-out.c:streamer_write_chain. */
857 DFS_write_tree (ob, expr_state, t, ref_p, false);
858 else
859 DFS_follow_tree_edge (t);
861 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
863 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
864 handle - those that represent inlined function scopes.
865 For the drop rest them on the floor instead of ICEing
866 in dwarf2out.c. */
867 if (inlined_function_outer_scope_p (expr))
869 tree ultimate_origin = block_ultimate_origin (expr);
870 DFS_follow_tree_edge (ultimate_origin);
872 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
873 information for early inlined BLOCKs so drop it on the floor instead
874 of ICEing in dwarf2out.c. */
876 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
877 streaming time. */
879 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
880 list is re-constructed from BLOCK_SUPERCONTEXT. */
883 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
885 unsigned i;
886 tree t;
888 /* Note that the number of BINFO slots has already been emitted in
889 EXPR's header (see streamer_write_tree_header) because this length
890 is needed to build the empty BINFO node on the reader side. */
891 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
892 DFS_follow_tree_edge (t);
893 DFS_follow_tree_edge (BINFO_OFFSET (expr));
894 DFS_follow_tree_edge (BINFO_VTABLE (expr));
895 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
897 /* The number of BINFO_BASE_ACCESSES has already been emitted in
898 EXPR's bitfield section. */
899 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
900 DFS_follow_tree_edge (t);
902 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
903 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
906 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
908 unsigned i;
909 tree index, value;
911 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
913 DFS_follow_tree_edge (index);
914 DFS_follow_tree_edge (value);
918 if (code == OMP_CLAUSE)
920 int i;
921 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
922 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
923 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
926 #undef DFS_follow_tree_edge
929 /* Return a hash value for the tree T.
930 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
931 may hold hash values if trees inside current SCC. */
933 static hashval_t
934 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
936 inchash::hash hstate;
938 #define visit(SIBLING) \
939 do { \
940 unsigned ix; \
941 if (!SIBLING) \
942 hstate.add_int (0); \
943 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
944 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
945 else if (map) \
946 hstate.add_int (*map->get (SIBLING)); \
947 else \
948 hstate.add_int (1); \
949 } while (0)
951 /* Hash TS_BASE. */
952 enum tree_code code = TREE_CODE (t);
953 hstate.add_int (code);
954 if (!TYPE_P (t))
956 hstate.add_flag (TREE_SIDE_EFFECTS (t));
957 hstate.add_flag (TREE_CONSTANT (t));
958 hstate.add_flag (TREE_READONLY (t));
959 hstate.add_flag (TREE_PUBLIC (t));
961 hstate.add_flag (TREE_ADDRESSABLE (t));
962 hstate.add_flag (TREE_THIS_VOLATILE (t));
963 if (DECL_P (t))
964 hstate.add_flag (DECL_UNSIGNED (t));
965 else if (TYPE_P (t))
966 hstate.add_flag (TYPE_UNSIGNED (t));
967 if (TYPE_P (t))
968 hstate.add_flag (TYPE_ARTIFICIAL (t));
969 else
970 hstate.add_flag (TREE_NO_WARNING (t));
971 hstate.add_flag (TREE_NOTHROW (t));
972 hstate.add_flag (TREE_STATIC (t));
973 hstate.add_flag (TREE_PROTECTED (t));
974 hstate.add_flag (TREE_DEPRECATED (t));
975 if (code != TREE_BINFO)
976 hstate.add_flag (TREE_PRIVATE (t));
977 if (TYPE_P (t))
979 hstate.add_flag (TYPE_SATURATING (t));
980 hstate.add_flag (TYPE_ADDR_SPACE (t));
982 else if (code == SSA_NAME)
983 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
984 hstate.commit_flag ();
986 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
988 int i;
989 hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
990 hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
991 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
992 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
995 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
997 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
998 hstate.add_flag (r.cl);
999 hstate.add_flag (r.sign);
1000 hstate.add_flag (r.signalling);
1001 hstate.add_flag (r.canonical);
1002 hstate.commit_flag ();
1003 hstate.add_int (r.uexp);
1004 hstate.add (r.sig, sizeof (r.sig));
1007 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1009 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1010 hstate.add_int (f.mode);
1011 hstate.add_int (f.data.low);
1012 hstate.add_int (f.data.high);
1015 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1017 hstate.add_wide_int (DECL_MODE (t));
1018 hstate.add_flag (DECL_NONLOCAL (t));
1019 hstate.add_flag (DECL_VIRTUAL_P (t));
1020 hstate.add_flag (DECL_IGNORED_P (t));
1021 hstate.add_flag (DECL_ABSTRACT_P (t));
1022 hstate.add_flag (DECL_ARTIFICIAL (t));
1023 hstate.add_flag (DECL_USER_ALIGN (t));
1024 hstate.add_flag (DECL_PRESERVE_P (t));
1025 hstate.add_flag (DECL_EXTERNAL (t));
1026 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1027 hstate.commit_flag ();
1028 hstate.add_int (DECL_ALIGN (t));
1029 if (code == LABEL_DECL)
1031 hstate.add_int (EH_LANDING_PAD_NR (t));
1032 hstate.add_int (LABEL_DECL_UID (t));
1034 else if (code == FIELD_DECL)
1036 hstate.add_flag (DECL_PACKED (t));
1037 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1038 hstate.add_int (DECL_OFFSET_ALIGN (t));
1040 else if (code == VAR_DECL)
1042 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1043 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1045 if (code == RESULT_DECL
1046 || code == PARM_DECL
1047 || code == VAR_DECL)
1049 hstate.add_flag (DECL_BY_REFERENCE (t));
1050 if (code == VAR_DECL
1051 || code == PARM_DECL)
1052 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1054 hstate.commit_flag ();
1057 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1058 hstate.add_int (DECL_REGISTER (t));
1060 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1062 hstate.add_flag (DECL_COMMON (t));
1063 hstate.add_flag (DECL_DLLIMPORT_P (t));
1064 hstate.add_flag (DECL_WEAK (t));
1065 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1066 hstate.add_flag (DECL_COMDAT (t));
1067 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1068 hstate.add_int (DECL_VISIBILITY (t));
1069 if (code == VAR_DECL)
1071 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1072 hstate.add_flag (DECL_HARD_REGISTER (t));
1073 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1075 if (TREE_CODE (t) == FUNCTION_DECL)
1077 hstate.add_flag (DECL_FINAL_P (t));
1078 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1079 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1081 hstate.commit_flag ();
1084 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1086 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1087 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1088 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1089 hstate.add_flag (DECL_UNINLINABLE (t));
1090 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1091 hstate.add_flag (DECL_IS_NOVOPS (t));
1092 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1093 hstate.add_flag (DECL_IS_MALLOC (t));
1094 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1095 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1096 hstate.add_flag (DECL_STATIC_CHAIN (t));
1097 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1098 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1099 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1100 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1101 hstate.add_flag (DECL_PURE_P (t));
1102 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1103 hstate.commit_flag ();
1104 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1105 hstate.add_int (DECL_FUNCTION_CODE (t));
1108 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1110 hstate.add_wide_int (TYPE_MODE (t));
1111 hstate.add_flag (TYPE_STRING_FLAG (t));
1112 /* TYPE_NO_FORCE_BLK is private to stor-layout and need
1113 no streaming. */
1114 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1115 hstate.add_flag (TYPE_PACKED (t));
1116 hstate.add_flag (TYPE_RESTRICT (t));
1117 hstate.add_flag (TYPE_USER_ALIGN (t));
1118 hstate.add_flag (TYPE_READONLY (t));
1119 if (RECORD_OR_UNION_TYPE_P (t))
1121 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1122 hstate.add_flag (TYPE_FINAL_P (t));
1124 else if (code == ARRAY_TYPE)
1125 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1126 hstate.commit_flag ();
1127 hstate.add_int (TYPE_PRECISION (t));
1128 hstate.add_int (TYPE_ALIGN (t));
1129 hstate.add_int ((TYPE_ALIAS_SET (t) == 0
1130 || (!in_lto_p
1131 && get_alias_set (t) == 0))
1132 ? 0 : -1);
1135 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1136 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1137 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1139 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1140 /* We don't stream these when passing things to a different target. */
1141 && !lto_stream_offload_p)
1142 hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1144 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1145 hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1147 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1148 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1150 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1151 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1153 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1155 if (code != IDENTIFIER_NODE)
1156 visit (TREE_TYPE (t));
1159 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1160 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1161 visit (VECTOR_CST_ELT (t, i));
1163 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1165 visit (TREE_REALPART (t));
1166 visit (TREE_IMAGPART (t));
1169 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1171 /* Drop names that were created for anonymous entities. */
1172 if (DECL_NAME (t)
1173 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1174 && anon_aggrname_p (DECL_NAME (t)))
1176 else
1177 visit (DECL_NAME (t));
1178 if (DECL_FILE_SCOPE_P (t))
1180 else
1181 visit (DECL_CONTEXT (t));
1184 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1186 visit (DECL_SIZE (t));
1187 visit (DECL_SIZE_UNIT (t));
1188 visit (DECL_ATTRIBUTES (t));
1189 if ((code == VAR_DECL
1190 || code == PARM_DECL)
1191 && DECL_HAS_VALUE_EXPR_P (t))
1192 visit (DECL_VALUE_EXPR (t));
1193 if (code == VAR_DECL
1194 && DECL_HAS_DEBUG_EXPR_P (t))
1195 visit (DECL_DEBUG_EXPR (t));
1196 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1197 be able to call get_symbol_initial_value. */
1200 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1202 if (code == TYPE_DECL)
1203 visit (DECL_ORIGINAL_TYPE (t));
1206 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1208 if (DECL_ASSEMBLER_NAME_SET_P (t))
1209 visit (DECL_ASSEMBLER_NAME (t));
1212 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1214 visit (DECL_FIELD_OFFSET (t));
1215 visit (DECL_BIT_FIELD_TYPE (t));
1216 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1217 visit (DECL_FIELD_BIT_OFFSET (t));
1218 visit (DECL_FCONTEXT (t));
1221 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1223 visit (DECL_VINDEX (t));
1224 visit (DECL_FUNCTION_PERSONALITY (t));
1225 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1226 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1229 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1231 visit (TYPE_SIZE (t));
1232 visit (TYPE_SIZE_UNIT (t));
1233 visit (TYPE_ATTRIBUTES (t));
1234 visit (TYPE_NAME (t));
1235 visit (TYPE_MAIN_VARIANT (t));
1236 if (TYPE_FILE_SCOPE_P (t))
1238 else
1239 visit (TYPE_CONTEXT (t));
1240 visit (TYPE_STUB_DECL (t));
1243 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1245 if (code == ENUMERAL_TYPE)
1246 visit (TYPE_VALUES (t));
1247 else if (code == ARRAY_TYPE)
1248 visit (TYPE_DOMAIN (t));
1249 else if (RECORD_OR_UNION_TYPE_P (t))
1250 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1251 visit (f);
1252 else if (code == FUNCTION_TYPE
1253 || code == METHOD_TYPE)
1254 visit (TYPE_ARG_TYPES (t));
1255 if (!POINTER_TYPE_P (t))
1256 visit (TYPE_MINVAL (t));
1257 visit (TYPE_MAXVAL (t));
1258 if (RECORD_OR_UNION_TYPE_P (t))
1259 visit (TYPE_BINFO (t));
1262 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1264 visit (TREE_PURPOSE (t));
1265 visit (TREE_VALUE (t));
1266 visit (TREE_CHAIN (t));
1269 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1270 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1271 visit (TREE_VEC_ELT (t, i));
1273 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1275 hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1276 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1277 visit (TREE_OPERAND (t, i));
1280 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1282 unsigned i;
1283 tree b;
1284 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1285 visit (b);
1286 visit (BINFO_OFFSET (t));
1287 visit (BINFO_VTABLE (t));
1288 visit (BINFO_VPTR_FIELD (t));
1289 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1290 visit (b);
1291 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1292 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1295 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1297 unsigned i;
1298 tree index, value;
1299 hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1300 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1302 visit (index);
1303 visit (value);
1307 if (code == OMP_CLAUSE)
1309 int i;
1310 HOST_WIDE_INT val;
1312 hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1313 switch (OMP_CLAUSE_CODE (t))
1315 case OMP_CLAUSE_DEFAULT:
1316 val = OMP_CLAUSE_DEFAULT_KIND (t);
1317 break;
1318 case OMP_CLAUSE_SCHEDULE:
1319 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1320 break;
1321 case OMP_CLAUSE_DEPEND:
1322 val = OMP_CLAUSE_DEPEND_KIND (t);
1323 break;
1324 case OMP_CLAUSE_MAP:
1325 val = OMP_CLAUSE_MAP_KIND (t);
1326 break;
1327 case OMP_CLAUSE_PROC_BIND:
1328 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1329 break;
1330 case OMP_CLAUSE_REDUCTION:
1331 val = OMP_CLAUSE_REDUCTION_CODE (t);
1332 break;
1333 default:
1334 val = 0;
1335 break;
1337 hstate.add_wide_int (val);
1338 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1339 visit (OMP_CLAUSE_OPERAND (t, i));
1340 visit (OMP_CLAUSE_CHAIN (t));
1343 return hstate.end ();
1345 #undef visit
1348 /* Compare two SCC entries by their hash value for qsorting them. */
1351 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1353 const scc_entry *p1 = (const scc_entry *) p1_;
1354 const scc_entry *p2 = (const scc_entry *) p2_;
1355 if (p1->hash < p2->hash)
1356 return -1;
1357 else if (p1->hash > p2->hash)
1358 return 1;
1359 return 0;
1362 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1363 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1365 hashval_t
1366 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1367 bool ref_p, bool this_ref_p)
1369 unsigned int last_classes = 0, iterations = 0;
1371 /* Compute hash values for the SCC members. */
1372 for (unsigned i = 0; i < size; ++i)
1373 sccstack[first+i].hash
1374 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1376 if (size == 1)
1377 return sccstack[first].hash;
1379 /* We aim to get unique hash for every tree within SCC and compute hash value
1380 of the whole SCC by combining all values together in a stable (entry-point
1381 independent) order. This guarantees that the same SCC regions within
1382 different translation units will get the same hash values and therefore
1383 will be merged at WPA time.
1385 Often the hashes are already unique. In that case we compute the SCC hash
1386 by combining individual hash values in an increasing order.
1388 If there are duplicates, we seek at least one tree with unique hash (and
1389 pick one with minimal hash and this property). Then we obtain a stable
1390 order by DFS walk starting from this unique tree and then use the index
1391 within this order to make individual hash values unique.
1393 If there is no tree with unique hash, we iteratively propagate the hash
1394 values across the internal edges of SCC. This usually quickly leads
1395 to unique hashes. Consider, for example, an SCC containing two pointers
1396 that are identical except for the types they point to and assume that
1397 these types are also part of the SCC. The propagation will add the
1398 points-to type information into their hash values. */
1401 /* Sort the SCC so we can easily check for uniqueness. */
1402 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1404 unsigned int classes = 1;
1405 int firstunique = -1;
1407 /* Find the tree with lowest unique hash (if it exists) and compute
1408 the number of equivalence classes. */
1409 if (sccstack[first].hash != sccstack[first+1].hash)
1410 firstunique = 0;
1411 for (unsigned i = 1; i < size; ++i)
1412 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1414 classes++;
1415 if (firstunique == -1
1416 && (i == size - 1
1417 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1418 firstunique = i;
1421 /* If we found a tree with unique hash, stop the iteration. */
1422 if (firstunique != -1
1423 /* Also terminate if we run out of iterations or if the number of
1424 equivalence classes is no longer increasing.
1425 For example a cyclic list of trees that are all equivalent will
1426 never have unique entry point; we however do not build such SCCs
1427 in our IL. */
1428 || classes <= last_classes || iterations > 16)
1430 hashval_t scc_hash;
1432 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1433 starting from FIRSTUNIQUE to obtain a stable order. */
1434 if (classes != size && firstunique != -1)
1436 hash_map <tree, hashval_t> map(size*2);
1438 /* Store hash values into a map, so we can associate them with
1439 the reordered SCC. */
1440 for (unsigned i = 0; i < size; ++i)
1441 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1443 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1444 true);
1445 gcc_assert (again.sccstack.length () == size);
1447 memcpy (sccstack.address () + first,
1448 again.sccstack.address (),
1449 sizeof (scc_entry) * size);
1451 /* Update hash values of individual members by hashing in the
1452 index within the stable order. This ensures uniqueness.
1453 Also compute the SCC hash by mixing in all hash values in
1454 the stable order we obtained. */
1455 sccstack[first].hash = *map.get (sccstack[first].t);
1456 scc_hash = sccstack[first].hash;
1457 for (unsigned i = 1; i < size; ++i)
1459 sccstack[first+i].hash
1460 = iterative_hash_hashval_t (i,
1461 *map.get (sccstack[first+i].t));
1462 scc_hash
1463 = iterative_hash_hashval_t (scc_hash,
1464 sccstack[first+i].hash);
1467 /* If we got a unique hash value for each tree, then sort already
1468 ensured entry-point independent order. Only compute the final
1469 SCC hash.
1471 If we failed to find the unique entry point, we go by the same
1472 route. We will eventually introduce unwanted hash conflicts. */
1473 else
1475 scc_hash = sccstack[first].hash;
1476 for (unsigned i = 1; i < size; ++i)
1477 scc_hash
1478 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1480 /* We cannot 100% guarantee that the hash won't conflict so as
1481 to make it impossible to find a unique hash. This however
1482 should be an extremely rare case. ICE for now so possible
1483 issues are found and evaluated. */
1484 gcc_checking_assert (classes == size);
1487 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1488 hash into the hash of each element. */
1489 for (unsigned i = 0; i < size; ++i)
1490 sccstack[first+i].hash
1491 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1492 return scc_hash;
1495 last_classes = classes;
1496 iterations++;
1498 /* We failed to identify the entry point; propagate hash values across
1499 the edges. */
1500 hash_map <tree, hashval_t> map(size*2);
1502 for (unsigned i = 0; i < size; ++i)
1503 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1505 for (unsigned i = 0; i < size; i++)
1506 sccstack[first+i].hash
1507 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1509 while (true);
1512 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1513 already in the streamer cache. Main routine called for
1514 each visit of EXPR. */
1516 void
1517 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1518 tree expr, bool ref_p, bool this_ref_p)
1520 /* Handle special cases. */
1521 if (expr == NULL_TREE)
1522 return;
1524 /* Do not DFS walk into indexable trees. */
1525 if (this_ref_p && tree_is_indexable (expr))
1526 return;
1528 /* Check if we already streamed EXPR. */
1529 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1530 return;
1532 worklist w;
1533 w.expr = expr;
1534 w.from_state = from_state;
1535 w.cstate = NULL;
1536 w.ref_p = ref_p;
1537 w.this_ref_p = this_ref_p;
1538 worklist_vec.safe_push (w);
1542 /* Emit the physical representation of tree node EXPR to output block OB.
1543 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1544 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1546 void
1547 lto_output_tree (struct output_block *ob, tree expr,
1548 bool ref_p, bool this_ref_p)
1550 unsigned ix;
1551 bool existed_p;
1553 if (expr == NULL_TREE)
1555 streamer_write_record_start (ob, LTO_null);
1556 return;
1559 if (this_ref_p && tree_is_indexable (expr))
1561 lto_output_tree_ref (ob, expr);
1562 return;
1565 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1566 if (existed_p)
1568 /* If a node has already been streamed out, make sure that
1569 we don't write it more than once. Otherwise, the reader
1570 will instantiate two different nodes for the same object. */
1571 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1572 streamer_write_uhwi (ob, ix);
1573 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1574 lto_tree_code_to_tag (TREE_CODE (expr)));
1575 lto_stats.num_pickle_refs_output++;
1577 else
1579 /* This is the first time we see EXPR, write all reachable
1580 trees to OB. */
1581 static bool in_dfs_walk;
1583 /* Protect against recursion which means disconnect between
1584 what tree edges we walk in the DFS walk and what edges
1585 we stream out. */
1586 gcc_assert (!in_dfs_walk);
1588 /* Start the DFS walk. */
1589 /* Save ob state ... */
1590 /* let's see ... */
1591 in_dfs_walk = true;
1592 DFS (ob, expr, ref_p, this_ref_p, false);
1593 in_dfs_walk = false;
1595 /* Finally append a reference to the tree we were writing.
1596 ??? If expr ended up as a singleton we could have
1597 inlined it here and avoid outputting a reference. */
1598 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1599 gcc_assert (existed_p);
1600 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1601 streamer_write_uhwi (ob, ix);
1602 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1603 lto_tree_code_to_tag (TREE_CODE (expr)));
1604 lto_stats.num_pickle_refs_output++;
1609 /* Output to OB a list of try/catch handlers starting with FIRST. */
1611 static void
1612 output_eh_try_list (struct output_block *ob, eh_catch first)
1614 eh_catch n;
1616 for (n = first; n; n = n->next_catch)
1618 streamer_write_record_start (ob, LTO_eh_catch);
1619 stream_write_tree (ob, n->type_list, true);
1620 stream_write_tree (ob, n->filter_list, true);
1621 stream_write_tree (ob, n->label, true);
1624 streamer_write_record_start (ob, LTO_null);
1628 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1629 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1630 detect EH region sharing. */
1632 static void
1633 output_eh_region (struct output_block *ob, eh_region r)
1635 enum LTO_tags tag;
1637 if (r == NULL)
1639 streamer_write_record_start (ob, LTO_null);
1640 return;
1643 if (r->type == ERT_CLEANUP)
1644 tag = LTO_ert_cleanup;
1645 else if (r->type == ERT_TRY)
1646 tag = LTO_ert_try;
1647 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1648 tag = LTO_ert_allowed_exceptions;
1649 else if (r->type == ERT_MUST_NOT_THROW)
1650 tag = LTO_ert_must_not_throw;
1651 else
1652 gcc_unreachable ();
1654 streamer_write_record_start (ob, tag);
1655 streamer_write_hwi (ob, r->index);
1657 if (r->outer)
1658 streamer_write_hwi (ob, r->outer->index);
1659 else
1660 streamer_write_zero (ob);
1662 if (r->inner)
1663 streamer_write_hwi (ob, r->inner->index);
1664 else
1665 streamer_write_zero (ob);
1667 if (r->next_peer)
1668 streamer_write_hwi (ob, r->next_peer->index);
1669 else
1670 streamer_write_zero (ob);
1672 if (r->type == ERT_TRY)
1674 output_eh_try_list (ob, r->u.eh_try.first_catch);
1676 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1678 stream_write_tree (ob, r->u.allowed.type_list, true);
1679 stream_write_tree (ob, r->u.allowed.label, true);
1680 streamer_write_uhwi (ob, r->u.allowed.filter);
1682 else if (r->type == ERT_MUST_NOT_THROW)
1684 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1685 bitpack_d bp = bitpack_create (ob->main_stream);
1686 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1687 streamer_write_bitpack (&bp);
1690 if (r->landing_pads)
1691 streamer_write_hwi (ob, r->landing_pads->index);
1692 else
1693 streamer_write_zero (ob);
1697 /* Output landing pad LP to OB. */
1699 static void
1700 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1702 if (lp == NULL)
1704 streamer_write_record_start (ob, LTO_null);
1705 return;
1708 streamer_write_record_start (ob, LTO_eh_landing_pad);
1709 streamer_write_hwi (ob, lp->index);
1710 if (lp->next_lp)
1711 streamer_write_hwi (ob, lp->next_lp->index);
1712 else
1713 streamer_write_zero (ob);
1715 if (lp->region)
1716 streamer_write_hwi (ob, lp->region->index);
1717 else
1718 streamer_write_zero (ob);
1720 stream_write_tree (ob, lp->post_landing_pad, true);
1724 /* Output the existing eh_table to OB. */
1726 static void
1727 output_eh_regions (struct output_block *ob, struct function *fn)
1729 if (fn->eh && fn->eh->region_tree)
1731 unsigned i;
1732 eh_region eh;
1733 eh_landing_pad lp;
1734 tree ttype;
1736 streamer_write_record_start (ob, LTO_eh_table);
1738 /* Emit the index of the root of the EH region tree. */
1739 streamer_write_hwi (ob, fn->eh->region_tree->index);
1741 /* Emit all the EH regions in the region array. */
1742 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1743 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1744 output_eh_region (ob, eh);
1746 /* Emit all landing pads. */
1747 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1748 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1749 output_eh_lp (ob, lp);
1751 /* Emit all the runtime type data. */
1752 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1753 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1754 stream_write_tree (ob, ttype, true);
1756 /* Emit the table of action chains. */
1757 if (targetm.arm_eabi_unwinder)
1759 tree t;
1760 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1761 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1762 stream_write_tree (ob, t, true);
1764 else
1766 uchar c;
1767 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1768 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1769 streamer_write_char_stream (ob->main_stream, c);
1773 /* The LTO_null either terminates the record or indicates that there
1774 are no eh_records at all. */
1775 streamer_write_record_start (ob, LTO_null);
1779 /* Output all of the active ssa names to the ssa_names stream. */
1781 static void
1782 output_ssa_names (struct output_block *ob, struct function *fn)
1784 unsigned int i, len;
1786 len = vec_safe_length (SSANAMES (fn));
1787 streamer_write_uhwi (ob, len);
1789 for (i = 1; i < len; i++)
1791 tree ptr = (*SSANAMES (fn))[i];
1793 if (ptr == NULL_TREE
1794 || SSA_NAME_IN_FREE_LIST (ptr)
1795 || virtual_operand_p (ptr))
1796 continue;
1798 streamer_write_uhwi (ob, i);
1799 streamer_write_char_stream (ob->main_stream,
1800 SSA_NAME_IS_DEFAULT_DEF (ptr));
1801 if (SSA_NAME_VAR (ptr))
1802 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1803 else
1804 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1805 stream_write_tree (ob, TREE_TYPE (ptr), true);
1808 streamer_write_zero (ob);
1812 /* Output a wide-int. */
1814 static void
1815 streamer_write_wi (struct output_block *ob,
1816 const widest_int &w)
1818 int len = w.get_len ();
1820 streamer_write_uhwi (ob, w.get_precision ());
1821 streamer_write_uhwi (ob, len);
1822 for (int i = 0; i < len; i++)
1823 streamer_write_hwi (ob, w.elt (i));
1827 /* Output the cfg. */
1829 static void
1830 output_cfg (struct output_block *ob, struct function *fn)
1832 struct lto_output_stream *tmp_stream = ob->main_stream;
1833 basic_block bb;
1835 ob->main_stream = ob->cfg_stream;
1837 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1838 profile_status_for_fn (fn));
1840 /* Output the number of the highest basic block. */
1841 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1843 FOR_ALL_BB_FN (bb, fn)
1845 edge_iterator ei;
1846 edge e;
1848 streamer_write_hwi (ob, bb->index);
1850 /* Output the successors and the edge flags. */
1851 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1852 FOR_EACH_EDGE (e, ei, bb->succs)
1854 streamer_write_uhwi (ob, e->dest->index);
1855 streamer_write_hwi (ob, e->probability);
1856 streamer_write_gcov_count (ob, e->count);
1857 streamer_write_uhwi (ob, e->flags);
1861 streamer_write_hwi (ob, -1);
1863 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1864 while (bb->next_bb)
1866 streamer_write_hwi (ob, bb->next_bb->index);
1867 bb = bb->next_bb;
1870 streamer_write_hwi (ob, -1);
1872 /* ??? The cfgloop interface is tied to cfun. */
1873 gcc_assert (cfun == fn);
1875 /* Output the number of loops. */
1876 streamer_write_uhwi (ob, number_of_loops (fn));
1878 /* Output each loop, skipping the tree root which has number zero. */
1879 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1881 struct loop *loop = get_loop (fn, i);
1883 /* Write the index of the loop header. That's enough to rebuild
1884 the loop tree on the reader side. Stream -1 for an unused
1885 loop entry. */
1886 if (!loop)
1888 streamer_write_hwi (ob, -1);
1889 continue;
1891 else
1892 streamer_write_hwi (ob, loop->header->index);
1894 /* Write everything copy_loop_info copies. */
1895 streamer_write_enum (ob->main_stream,
1896 loop_estimation, EST_LAST, loop->estimate_state);
1897 streamer_write_hwi (ob, loop->any_upper_bound);
1898 if (loop->any_upper_bound)
1899 streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1900 streamer_write_hwi (ob, loop->any_estimate);
1901 if (loop->any_estimate)
1902 streamer_write_wi (ob, loop->nb_iterations_estimate);
1904 /* Write OMP SIMD related info. */
1905 streamer_write_hwi (ob, loop->safelen);
1906 streamer_write_hwi (ob, loop->dont_vectorize);
1907 streamer_write_hwi (ob, loop->force_vectorize);
1908 stream_write_tree (ob, loop->simduid, true);
1911 ob->main_stream = tmp_stream;
1915 /* Create the header in the file using OB. If the section type is for
1916 a function, set FN to the decl for that function. */
1918 void
1919 produce_asm (struct output_block *ob, tree fn)
1921 enum lto_section_type section_type = ob->section_type;
1922 struct lto_function_header header;
1923 char *section_name;
1925 if (section_type == LTO_section_function_body)
1927 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1928 section_name = lto_get_section_name (section_type, name, NULL);
1930 else
1931 section_name = lto_get_section_name (section_type, NULL, NULL);
1933 lto_begin_section (section_name, !flag_wpa);
1934 free (section_name);
1936 /* The entire header stream is computed here. */
1937 memset (&header, 0, sizeof (struct lto_function_header));
1939 /* Write the header. */
1940 header.major_version = LTO_major_version;
1941 header.minor_version = LTO_minor_version;
1943 if (section_type == LTO_section_function_body)
1944 header.cfg_size = ob->cfg_stream->total_size;
1945 header.main_size = ob->main_stream->total_size;
1946 header.string_size = ob->string_stream->total_size;
1947 lto_write_data (&header, sizeof header);
1949 /* Put all of the gimple and the string table out to the asm file as a
1950 block of text. */
1951 if (section_type == LTO_section_function_body)
1952 lto_write_stream (ob->cfg_stream);
1953 lto_write_stream (ob->main_stream);
1954 lto_write_stream (ob->string_stream);
1956 lto_end_section ();
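/* Illustrative sketch, not part of this file: the section produce_asm
   creates is a fixed header carrying the stream sizes, followed by the
   cfg, main and string streams in that order.  The real header is
   struct lto_function_header from lto-streamer.h; the field widths and
   the sketch_* names below are assumptions made only for illustration.  */

#include <stdint.h>
#include <string.h>
#include <stdio.h>

struct sketch_function_header
{
  int16_t major_version;
  int16_t minor_version;
  uint32_t cfg_size;      /* Size of the CFG stream, if present.  */
  uint32_t main_size;     /* Size of the main gimple stream.  */
  uint32_t string_size;   /* Size of the string table.  */
};

static void
sketch_produce_section (FILE *f,
                        const void *cfg, uint32_t cfg_size,
                        const void *main_data, uint32_t main_size,
                        const void *strings, uint32_t string_size)
{
  struct sketch_function_header h;
  memset (&h, 0, sizeof h);
  h.major_version = 1;
  h.minor_version = 0;
  h.cfg_size = cfg_size;
  h.main_size = main_size;
  h.string_size = string_size;
  fwrite (&h, sizeof h, 1, f);            /* Header first ...  */
  fwrite (cfg, 1, cfg_size, f);           /* ... then the streams in the  */
  fwrite (main_data, 1, main_size, f);    /* same order the reader will  */
  fwrite (strings, 1, string_size, f);    /* consume them.  */
}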
1960 /* Output the base body of struct function FN using output block OB. */
1962 static void
1963 output_struct_function_base (struct output_block *ob, struct function *fn)
1965 struct bitpack_d bp;
1966 unsigned i;
1967 tree t;
1969 /* Output the static chain and non-local goto save area. */
1970 stream_write_tree (ob, fn->static_chain_decl, true);
1971 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1973 /* Output all the local variables in the function. */
1974 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1975 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1976 stream_write_tree (ob, t, true);
1978 /* Output current IL state of the function. */
1979 streamer_write_uhwi (ob, fn->curr_properties);
1981 /* Write all the attributes for FN. */
1982 bp = bitpack_create (ob->main_stream);
1983 bp_pack_value (&bp, fn->is_thunk, 1);
1984 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1985 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1986 bp_pack_value (&bp, fn->returns_struct, 1);
1987 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1988 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1989 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1990 bp_pack_value (&bp, fn->after_inlining, 1);
1991 bp_pack_value (&bp, fn->stdarg, 1);
1992 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1993 bp_pack_value (&bp, fn->calls_alloca, 1);
1994 bp_pack_value (&bp, fn->calls_setjmp, 1);
1995 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
1996 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1997 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1998 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1999 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2001 /* Output the function start and end loci. */
2002 stream_output_location (ob, &bp, fn->function_start_locus);
2003 stream_output_location (ob, &bp, fn->function_end_locus);
2005 streamer_write_bitpack (&bp);
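/* Illustrative sketch, not part of this file: the flags above are packed
   into a bitpack rather than written as separate words.  This is a
   minimal single-word bit-packer; the real bitpack_d spills into multiple
   words and its exact bit order may differ, so treat this only as a model
   of the idea.  All sketch_* names are hypothetical.  */

#include <stdint.h>
#include <assert.h>

struct sketch_bitpack
{
  uint64_t word;   /* Bits accumulated so far.  */
  unsigned pos;    /* Number of bits already used.  */
};

static void
sketch_bp_pack (struct sketch_bitpack *bp, uint64_t val, unsigned nbits)
{
  assert (nbits >= 1 && bp->pos + nbits <= 64);
  assert (nbits == 64 || val < ((uint64_t) 1 << nbits));
  bp->word |= val << bp->pos;
  bp->pos += nbits;
}

/* Usage mirroring the fields above, e.g.:
     sketch_bp_pack (&bp, is_thunk, 1);
     sketch_bp_pack (&bp, va_list_fpr_size, 8);
     sketch_bp_pack (&bp, last_clique, 16);  */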
2009 /* Output the body of function NODE->DECL. */
2011 static void
2012 output_function (struct cgraph_node *node)
2014 tree function;
2015 struct function *fn;
2016 basic_block bb;
2017 struct output_block *ob;
2019 function = node->decl;
2020 fn = DECL_STRUCT_FUNCTION (function);
2021 ob = create_output_block (LTO_section_function_body);
2023 clear_line_info (ob);
2024 ob->symbol = node;
2026 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2028 /* Set current_function_decl and cfun. */
2029 push_cfun (fn);
2031 /* Make string 0 be a NULL string. */
2032 streamer_write_char_stream (ob->string_stream, 0);
2034 streamer_write_record_start (ob, LTO_function);
2036 /* Output decls for the function result and arguments. */
2037 stream_write_tree (ob, DECL_RESULT (function), true);
2038 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2040 /* Output DECL_INITIAL for the function, which contains the tree of
2041 lexical scopes. */
2042 stream_write_tree (ob, DECL_INITIAL (function), true);
2044 /* We also stream abstract functions, for which we stream only the
2045 information needed for debug info. */
2046 if (gimple_has_body_p (function))
2048 streamer_write_uhwi (ob, 1);
2049 output_struct_function_base (ob, fn);
2051 /* Output all the SSA names used in the function. */
2052 output_ssa_names (ob, fn);
2054 /* Output any exception handling regions. */
2055 output_eh_regions (ob, fn);
2058 /* We will renumber the statements. The code that does this uses
2059 the same ordering that we use for serializing them so we can use
2060 the same code on the other end and not have to write out the
2061 statement numbers. We do not assign UIDs to virtual PHIs here
2062 because they get re-computed on-the-fly, which would make the
2063 numbers inconsistent. */
2064 set_gimple_stmt_max_uid (cfun, 0);
2065 FOR_ALL_BB_FN (bb, cfun)
2067 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2068 gsi_next (&gsi))
2070 gphi *stmt = gsi.phi ();
2072 /* Virtual PHIs are not going to be streamed. */
2073 if (!virtual_operand_p (gimple_phi_result (stmt)))
2074 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2076 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2077 gsi_next (&gsi))
2079 gimple stmt = gsi_stmt (gsi);
2080 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2083 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2084 virtual phis now. */
2085 FOR_ALL_BB_FN (bb, cfun)
2087 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2088 gsi_next (&gsi))
2090 gphi *stmt = gsi.phi ();
2091 if (virtual_operand_p (gimple_phi_result (stmt)))
2092 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2096 /* Output the code for the function. */
2097 FOR_ALL_BB_FN (bb, fn)
2098 output_bb (ob, bb, fn);
2100 /* The terminator for this function. */
2101 streamer_write_record_start (ob, LTO_null);
2103 output_cfg (ob, fn);
2105 pop_cfun ();
2107 else
2108 streamer_write_uhwi (ob, 0);
2110 /* Create a section to hold the pickled output of this function. */
2111 produce_asm (ob, function);
2113 destroy_output_block (ob);
2116 /* Output the initializer (constructor) of variable NODE->DECL. */
2118 static void
2119 output_constructor (struct varpool_node *node)
2121 tree var = node->decl;
2122 struct output_block *ob;
2124 ob = create_output_block (LTO_section_function_body);
2126 clear_line_info (ob);
2127 ob->symbol = node;
2129 /* Make string 0 be a NULL string. */
2130 streamer_write_char_stream (ob->string_stream, 0);
2132 /* Output DECL_INITIAL for the variable, which holds the
2133 initializer. */
2134 stream_write_tree (ob, DECL_INITIAL (var), true);
2136 /* Create a section to hold the pickled output of this variable. */
2137 produce_asm (ob, var);
2139 destroy_output_block (ob);
2143 /* Emit toplevel asms. */
2145 void
2146 lto_output_toplevel_asms (void)
2148 struct output_block *ob;
2149 struct asm_node *can;
2150 char *section_name;
2151 struct lto_simple_header_with_strings header;
2153 if (!symtab->first_asm_symbol ())
2154 return;
2156 ob = create_output_block (LTO_section_asm);
2158 /* Make string 0 be a NULL string. */
2159 streamer_write_char_stream (ob->string_stream, 0);
2161 for (can = symtab->first_asm_symbol (); can; can = can->next)
2163 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2164 streamer_write_hwi (ob, can->order);
2167 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2169 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2170 lto_begin_section (section_name, !flag_wpa);
2171 free (section_name);
2173 /* The entire header stream is computed here. */
2174 memset (&header, 0, sizeof (header));
2176 /* Write the header. */
2177 header.major_version = LTO_major_version;
2178 header.minor_version = LTO_minor_version;
2180 header.main_size = ob->main_stream->total_size;
2181 header.string_size = ob->string_stream->total_size;
2182 lto_write_data (&header, sizeof header);
2184 /* Put the asm statements and the string table out to the asm file as a
2185 block of text. */
2186 lto_write_stream (ob->main_stream);
2187 lto_write_stream (ob->string_stream);
2189 lto_end_section ();
2191 destroy_output_block (ob);
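/* Illustrative sketch, not part of this file: the body of the asm section
   written above is a list of (asm string, order) records closed by a null
   string.  The real writer keeps the strings in the string stream and
   refers to them by offset; writing them inline here is a simplification,
   and the sketch_* names are hypothetical.  */

#include <stdio.h>
#include <string.h>

struct sketch_asm { const char *text; int order; };

static void
sketch_write_toplevel_asms (FILE *f, const struct sketch_asm *asms, int n)
{
  for (int i = 0; i < n; i++)
    {
      fwrite (asms[i].text, 1, strlen (asms[i].text) + 1, f);
      fwrite (&asms[i].order, sizeof asms[i].order, 1, f);
    }
  fputc ('\0', f);   /* An empty string terminates the list.  */
}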
2195 /* Copy the function body or variable constructor of NODE without deserializing. */
2197 static void
2198 copy_function_or_variable (struct symtab_node *node)
2200 tree function = node->decl;
2201 struct lto_file_decl_data *file_data = node->lto_file_data;
2202 const char *data;
2203 size_t len;
2204 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2205 char *section_name =
2206 lto_get_section_name (LTO_section_function_body, name, NULL);
2207 size_t i, j;
2208 struct lto_in_decl_state *in_state;
2209 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2211 lto_begin_section (section_name, !flag_wpa);
2212 free (section_name);
2214 /* We may have renamed the declaration, e.g., a static function. */
2215 name = lto_get_decl_name_mapping (file_data, name);
2217 data = lto_get_section_data (file_data, LTO_section_function_body,
2218 name, &len);
2219 gcc_assert (data);
2221 /* Do a bit copy of the function body. */
2222 lto_write_data (data, len);
2224 /* Copy decls. */
2225 in_state =
2226 lto_get_function_in_decl_state (node->lto_file_data, function);
2227 gcc_assert (in_state);
2229 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2231 size_t n = vec_safe_length (in_state->streams[i]);
2232 vec<tree, va_gc> *trees = in_state->streams[i];
2233 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2235 /* The out state must have the same indices as the in state.
2236 So just copy the vector. All the encoders in the in state
2237 must be empty when we reach here. */
2238 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2239 encoder->trees.reserve_exact (n);
2240 for (j = 0; j < n; j++)
2241 encoder->trees.safe_push ((*trees)[j]);
2244 lto_free_section_data (file_data, LTO_section_function_body, name,
2245 data, len);
2246 lto_end_section ();
2249 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2251 static tree
2252 wrap_refs (tree *tp, int *ws, void *)
2254 tree t = *tp;
2255 if (handled_component_p (t)
2256 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL)
2258 tree decl = TREE_OPERAND (t, 0);
2259 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2260 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2261 build1 (ADDR_EXPR, ptrtype, decl),
2262 build_int_cst (ptrtype, 0));
2263 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2264 *ws = 0;
2266 else if (TREE_CODE (t) == CONSTRUCTOR)
2268 else if (!EXPR_P (t))
2269 *ws = 0;
2270 return NULL_TREE;
2273 /* Main entry point from the pass manager. */
2275 void
2276 lto_output (void)
2278 struct lto_out_decl_state *decl_state;
2279 #ifdef ENABLE_CHECKING
2280 bitmap output = lto_bitmap_alloc ();
2281 #endif
2282 int i, n_nodes;
2283 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2285 /* Initialize the streamer. */
2286 lto_streamer_init ();
2288 n_nodes = lto_symtab_encoder_size (encoder);
2289 /* Process only the functions with bodies. */
2290 for (i = 0; i < n_nodes; i++)
2292 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2293 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2295 if (lto_symtab_encoder_encode_body_p (encoder, node)
2296 && !node->alias)
2298 #ifdef ENABLE_CHECKING
2299 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2300 bitmap_set_bit (output, DECL_UID (node->decl));
2301 #endif
2302 decl_state = lto_new_out_decl_state ();
2303 lto_push_out_decl_state (decl_state);
2304 if (gimple_has_body_p (node->decl) || !flag_wpa
2305 /* Thunks have no body but they may be synthesized
2306 at WPA time. */
2307 || DECL_ARGUMENTS (node->decl))
2308 output_function (node);
2309 else
2310 copy_function_or_variable (node);
2311 gcc_assert (lto_get_out_decl_state () == decl_state);
2312 lto_pop_out_decl_state ();
2313 lto_record_function_out_decl_state (node->decl, decl_state);
2316 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2318 /* Wrap symbol references inside the ctor in a type
2319 preserving MEM_REF. */
2320 tree ctor = DECL_INITIAL (node->decl);
2321 if (ctor && !in_lto_p)
2322 walk_tree (&ctor, wrap_refs, NULL, NULL);
2323 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2324 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2325 && !node->alias)
2327 timevar_push (TV_IPA_LTO_CTORS_OUT);
2328 #ifdef ENABLE_CHECKING
2329 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2330 bitmap_set_bit (output, DECL_UID (node->decl));
2331 #endif
2332 decl_state = lto_new_out_decl_state ();
2333 lto_push_out_decl_state (decl_state);
2334 if (DECL_INITIAL (node->decl) != error_mark_node
2335 || !flag_wpa)
2336 output_constructor (node);
2337 else
2338 copy_function_or_variable (node);
2339 gcc_assert (lto_get_out_decl_state () == decl_state);
2340 lto_pop_out_decl_state ();
2341 lto_record_function_out_decl_state (node->decl, decl_state);
2342 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2347 /* Emit the callgraph after emitting function bodies. This needs to
2348 be done now to make sure that all the statements in every function
2349 have been renumbered so that edges can be associated with call
2350 statements using the statement UIDs. */
2351 output_symtab ();
2353 output_offload_tables ();
2355 #ifdef ENABLE_CHECKING
2356 lto_bitmap_free (output);
2357 #endif
2360 /* Write each node encoded in ENCODER to OB, as well as those reachable
2361 from it and required for correct representation of its semantics.
2362 Each node in ENCODER must be a global declaration or a type. A node
2363 is written only once, even if it appears multiple times in the
2364 vector. Certain transitively-reachable nodes, such as those
2365 representing expressions, may be duplicated, but such nodes
2366 must not appear in ENCODER itself. */
2368 static void
2369 write_global_stream (struct output_block *ob,
2370 struct lto_tree_ref_encoder *encoder)
2372 tree t;
2373 size_t index;
2374 const size_t size = lto_tree_ref_encoder_size (encoder);
2376 for (index = 0; index < size; index++)
2378 t = lto_tree_ref_encoder_get_tree (encoder, index);
2379 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2380 stream_write_tree (ob, t, false);
2385 /* Write a sequence of indices into the globals vector corresponding
2386 to the trees in ENCODER. These are used by the reader to map the
2387 indices used to refer to global entities within function bodies to
2388 their referents. */
2390 static void
2391 write_global_references (struct output_block *ob,
2392 struct lto_tree_ref_encoder *encoder)
2394 tree t;
2395 uint32_t index;
2396 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2398 /* Write size and slot indexes as 32-bit unsigned numbers. */
2399 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2400 data[0] = size;
2402 for (index = 0; index < size; index++)
2404 uint32_t slot_num;
2406 t = lto_tree_ref_encoder_get_tree (encoder, index);
2407 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2408 gcc_assert (slot_num != (unsigned)-1);
2409 data[index + 1] = slot_num;
2412 lto_write_data (data, sizeof (int32_t) * (size + 1));
2413 free (data);
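/* Illustrative sketch, not part of this file: the block built by
   write_global_references above is simply a 32-bit count followed by one
   32-bit cache slot per tree.  sketch_build_ref_block is a hypothetical
   standalone equivalent of the DATA array construction.  */

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

static uint32_t *
sketch_build_ref_block (const uint32_t *slots, uint32_t size)
{
  uint32_t *data = (uint32_t *) malloc ((size + 1) * sizeof (uint32_t));
  if (!data)
    return NULL;
  data[0] = size;                                     /* Element 0 is the count.  */
  memcpy (&data[1], slots, size * sizeof (uint32_t)); /* Then one slot per tree.  */
  return data;                                        /* Caller frees.  */
}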
2417 /* Write all the streams in an lto_out_decl_state STATE using
2418 output block OB and output stream OUT_STREAM. */
2420 void
2421 lto_output_decl_state_streams (struct output_block *ob,
2422 struct lto_out_decl_state *state)
2424 int i;
2426 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2427 write_global_stream (ob, &state->streams[i]);
2431 /* Write all the references in an lto_out_decl_state STATE using
2432 output block OB and output stream OUT_STREAM. */
2434 void
2435 lto_output_decl_state_refs (struct output_block *ob,
2436 struct lto_out_decl_state *state)
2438 unsigned i;
2439 uint32_t ref;
2440 tree decl;
2442 /* Write reference to FUNCTION_DECL. If there is no function,
2443 write reference to void_type_node. */
2444 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2445 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2446 gcc_assert (ref != (unsigned)-1);
2447 lto_write_data (&ref, sizeof (uint32_t));
2449 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2450 write_global_references (ob, &state->streams[i]);
2454 /* Return the written size of STATE. */
2456 static size_t
2457 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2459 int i;
2460 size_t size;
2462 size = sizeof (int32_t); /* fn_ref. */
2463 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2465 size += sizeof (int32_t); /* vector size. */
2466 size += (lto_tree_ref_encoder_size (&state->streams[i])
2467 * sizeof (int32_t));
2469 return size;
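/* Illustrative sketch, not part of this file: the size computed above is
   one 32-bit function reference plus, per decl stream, a 32-bit length
   and one 32-bit slot per tree.  For example, two streams holding 3 and
   5 trees take 4 + (4 + 12) + (4 + 20) = 44 bytes.  The sketch_* name is
   hypothetical.  */

#include <stdint.h>
#include <stddef.h>

static size_t
sketch_state_written_size (const uint32_t *stream_lengths, int n_streams)
{
  size_t size = sizeof (uint32_t);                   /* fn_ref.  */
  for (int i = 0; i < n_streams; i++)
    size += sizeof (uint32_t)                        /* vector size ...  */
            + stream_lengths[i] * sizeof (uint32_t); /* ... plus its slots.  */
  return size;
}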
2473 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2474 so far. */
2476 static void
2477 write_symbol (struct streamer_tree_cache_d *cache,
2478 tree t, hash_set<const char *> *seen, bool alias)
2480 const char *name;
2481 enum gcc_plugin_symbol_kind kind;
2482 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2483 unsigned slot_num;
2484 uint64_t size;
2485 const char *comdat;
2486 unsigned char c;
2488 /* None of the following kinds of symbols are needed in the
2489 symbol table. */
2490 if (!TREE_PUBLIC (t)
2491 || is_builtin_fn (t)
2492 || DECL_ABSTRACT_P (t)
2493 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2494 return;
2495 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2497 gcc_assert (TREE_CODE (t) == VAR_DECL
2498 || TREE_CODE (t) == FUNCTION_DECL);
2500 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2502 /* This behaves like assemble_name_raw in varasm.c, performing the
2503 same name manipulations that ASM_OUTPUT_LABELREF does. */
2504 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2506 if (seen->add (name))
2507 return;
2509 streamer_tree_cache_lookup (cache, t, &slot_num);
2510 gcc_assert (slot_num != (unsigned)-1);
2512 if (DECL_EXTERNAL (t))
2514 if (DECL_WEAK (t))
2515 kind = GCCPK_WEAKUNDEF;
2516 else
2517 kind = GCCPK_UNDEF;
2519 else
2521 if (DECL_WEAK (t))
2522 kind = GCCPK_WEAKDEF;
2523 else if (DECL_COMMON (t))
2524 kind = GCCPK_COMMON;
2525 else
2526 kind = GCCPK_DEF;
2528 /* When something is defined, it should have a node attached. */
2529 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2530 || varpool_node::get (t)->definition);
2531 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2532 || (cgraph_node::get (t)
2533 && cgraph_node::get (t)->definition));
2536 /* Imitate what default_elf_asm_output_external does.
2537 When a symbol is external, we need to output it with DEFAULT visibility
2538 when compiling with -fvisibility=default, but with HIDDEN visibility
2539 when the symbol has the visibility("hidden") attribute specified.
2540 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2541 right. */
2543 if (DECL_EXTERNAL (t)
2544 && !targetm.binds_local_p (t))
2545 visibility = GCCPV_DEFAULT;
2546 else
2547 switch (DECL_VISIBILITY (t))
2549 case VISIBILITY_DEFAULT:
2550 visibility = GCCPV_DEFAULT;
2551 break;
2552 case VISIBILITY_PROTECTED:
2553 visibility = GCCPV_PROTECTED;
2554 break;
2555 case VISIBILITY_HIDDEN:
2556 visibility = GCCPV_HIDDEN;
2557 break;
2558 case VISIBILITY_INTERNAL:
2559 visibility = GCCPV_INTERNAL;
2560 break;
2563 if (kind == GCCPK_COMMON
2564 && DECL_SIZE_UNIT (t)
2565 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2566 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2567 else
2568 size = 0;
2570 if (DECL_ONE_ONLY (t))
2571 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2572 else
2573 comdat = "";
2575 lto_write_data (name, strlen (name) + 1);
2576 lto_write_data (comdat, strlen (comdat) + 1);
2577 c = (unsigned char) kind;
2578 lto_write_data (&c, 1);
2579 c = (unsigned char) visibility;
2580 lto_write_data (&c, 1);
2581 lto_write_data (&size, 8);
2582 lto_write_data (&slot_num, 4);
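/* Illustrative sketch, not part of this file: one record of the plugin
   symbol table as emitted by write_symbol above -- a NUL-terminated
   name, a NUL-terminated comdat group, one byte each for kind and
   visibility, an 8-byte size and a 4-byte cache slot, all in host byte
   order.  The sketch_* name is hypothetical.  */

#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void
sketch_write_symbol_record (FILE *f, const char *name, const char *comdat,
                            unsigned char kind, unsigned char visibility,
                            uint64_t size, uint32_t slot)
{
  fwrite (name, 1, strlen (name) + 1, f);
  fwrite (comdat, 1, strlen (comdat) + 1, f);
  fwrite (&kind, 1, 1, f);
  fwrite (&visibility, 1, 1, f);
  fwrite (&size, 8, 1, f);   /* Always 8 bytes, matching write_symbol.  */
  fwrite (&slot, 4, 1, f);   /* Cache slot number of the decl.  */
}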
2585 /* Return true if NODE should appear in the plugin symbol table. */
2587 bool
2588 output_symbol_p (symtab_node *node)
2590 struct cgraph_node *cnode;
2591 if (!node->real_symbol_p ())
2592 return false;
2593 /* We keep external functions in the symtab for the sake of inlining
2594 and devirtualization. We do not want to see them in the symbol table
2595 as references unless they are really used. */
2596 cnode = dyn_cast <cgraph_node *> (node);
2597 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2598 && cnode->callers)
2599 return true;
2601 /* Ignore all references from external variables' initializers - they are
2602 not really part of the compilation unit until they are used by folding.
2603 Some symbols, like references to external construction vtables, cannot be
2604 referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2605 if (!node->definition || DECL_EXTERNAL (node->decl))
2607 int i;
2608 struct ipa_ref *ref;
2609 for (i = 0; node->iterate_referring (i, ref); i++)
2611 if (ref->use == IPA_REF_ALIAS)
2612 continue;
2613 if (is_a <cgraph_node *> (ref->referring))
2614 return true;
2615 if (!DECL_EXTERNAL (ref->referring->decl))
2616 return true;
2618 return false;
2620 return true;
2624 /* Write an IL symbol table to OB, covering the symbols in OB's
2625 symtab node encoder. */
2627 static void
2628 produce_symtab (struct output_block *ob)
2630 struct streamer_tree_cache_d *cache = ob->writer_cache;
2631 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2632 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2633 lto_symtab_encoder_iterator lsei;
2635 lto_begin_section (section_name, false);
2636 free (section_name);
2638 hash_set<const char *> seen;
2640 /* Write the symbol table.
2641 First write everything defined and then all declarations.
2642 This is necessary to handle cases where we have duplicated symbols. */
2643 for (lsei = lsei_start (encoder);
2644 !lsei_end_p (lsei); lsei_next (&lsei))
2646 symtab_node *node = lsei_node (lsei);
2648 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2649 continue;
2650 write_symbol (cache, node->decl, &seen, false);
2652 for (lsei = lsei_start (encoder);
2653 !lsei_end_p (lsei); lsei_next (&lsei))
2655 symtab_node *node = lsei_node (lsei);
2657 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2658 continue;
2659 write_symbol (cache, node->decl, &seen, false);
2662 lto_end_section ();
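/* Illustrative sketch, not part of this file: produce_symtab above makes
   two passes -- defined symbols first, externals second -- so that when a
   name appears both as a definition and as a declaration, the definition
   is written and the later duplicate is suppressed by the seen set.  The
   sketch_* names and the fixed-size seen array are simplifications.  */

#include <stdio.h>
#include <string.h>
#include <assert.h>

struct sketch_sym { const char *name; int external; };

static int
sketch_seen_p (const char **seen, int n_seen, const char *name)
{
  for (int i = 0; i < n_seen; i++)
    if (strcmp (seen[i], name) == 0)
      return 1;
  return 0;
}

static void
sketch_produce_symtab (FILE *f, const struct sketch_sym *syms, int n)
{
  const char *seen[64];
  int n_seen = 0;

  for (int pass = 0; pass < 2; pass++)   /* Pass 0: defined, pass 1: external.  */
    for (int i = 0; i < n; i++)
      {
        if (syms[i].external != pass)
          continue;
        if (sketch_seen_p (seen, n_seen, syms[i].name))
          continue;                      /* Definition already written.  */
        assert (n_seen < 64);
        seen[n_seen++] = syms[i].name;
        fprintf (f, "%s\n", syms[i].name);
      }
}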
2666 /* Init the streamer_mode_table for output, where we collect info on what
2667 machine_mode values have been streamed. */
2668 void
2669 lto_output_init_mode_table (void)
2671 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2675 /* Write the mode table. */
2676 static void
2677 lto_write_mode_table (void)
2679 struct output_block *ob;
2680 ob = create_output_block (LTO_section_mode_table);
2681 bitpack_d bp = bitpack_create (ob->main_stream);
2683 /* Ensure that for GET_MODE_INNER (m) != m we also have the
2684 inner mode marked. */
2685 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2686 if (streamer_mode_table[i])
2688 machine_mode m = (machine_mode) i;
2689 if (GET_MODE_INNER (m) != m)
2690 streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
2692 /* First stream modes that have GET_MODE_INNER (m) == m,
2693 so that we can refer to them afterwards. */
2694 for (int pass = 0; pass < 2; pass++)
2695 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2696 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2698 machine_mode m = (machine_mode) i;
2699 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2700 continue;
2701 bp_pack_value (&bp, m, 8);
2702 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2703 bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2704 bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2705 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2706 bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2707 switch (GET_MODE_CLASS (m))
2709 case MODE_FRACT:
2710 case MODE_UFRACT:
2711 case MODE_ACCUM:
2712 case MODE_UACCUM:
2713 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2714 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2715 break;
2716 case MODE_FLOAT:
2717 case MODE_DECIMAL_FLOAT:
2718 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2719 break;
2720 default:
2721 break;
2723 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2725 bp_pack_value (&bp, VOIDmode, 8);
2727 streamer_write_bitpack (&bp);
2729 char *section_name
2730 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2731 lto_begin_section (section_name, !flag_wpa);
2732 free (section_name);
2734 /* The entire header stream is computed here. */
2735 struct lto_simple_header_with_strings header;
2736 memset (&header, 0, sizeof (header));
2738 /* Write the header. */
2739 header.major_version = LTO_major_version;
2740 header.minor_version = LTO_minor_version;
2742 header.main_size = ob->main_stream->total_size;
2743 header.string_size = ob->string_stream->total_size;
2744 lto_write_data (&header, sizeof header);
2746 /* Put the mode table and the string table out to the asm file as a
2747 block of text. */
2748 lto_write_stream (ob->main_stream);
2749 lto_write_stream (ob->string_stream);
2751 lto_end_section ();
2752 destroy_output_block (ob);
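/* Illustrative sketch, not part of this file: lto_write_mode_table above
   streams the table in two passes so that a mode whose inner mode differs
   from itself is emitted only after that inner mode, letting the reader
   resolve the reference immediately.  The sketch_* types and the textual
   output are simplifications.  */

#include <stdio.h>

struct sketch_mode { const char *name; int inner; /* Index of the inner mode.  */ };

static void
sketch_write_modes (FILE *f, const struct sketch_mode *modes, int n)
{
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < n; i++)
      {
        int self_inner = (modes[i].inner == i);
        /* Pass 0 emits self-inner modes, pass 1 the rest, mirroring the
           XOR test in lto_write_mode_table.  */
        if (self_inner ^ (pass == 0))
          continue;
        fprintf (f, "%d %s %d\n", i, modes[i].name, modes[i].inner);
      }
}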
2756 /* This pass is run after all of the functions are serialized and all
2757 of the IPA passes have written their serialized forms. This pass
2758 causes the vector of all of the global decls and types used from
2759 this file to be written into a section that can then be read back
2760 to recover them on the other side. */
2762 void
2763 produce_asm_for_decls (void)
2765 struct lto_out_decl_state *out_state;
2766 struct lto_out_decl_state *fn_out_state;
2767 struct lto_decl_header header;
2768 char *section_name;
2769 struct output_block *ob;
2770 unsigned idx, num_fns;
2771 size_t decl_state_size;
2772 int32_t num_decl_states;
2774 ob = create_output_block (LTO_section_decls);
2776 memset (&header, 0, sizeof (struct lto_decl_header));
2778 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2779 lto_begin_section (section_name, !flag_wpa);
2780 free (section_name);
2782 /* Make string 0 be a NULL string. */
2783 streamer_write_char_stream (ob->string_stream, 0);
2785 gcc_assert (!alias_pairs);
2787 /* Get rid of the global decl state hash tables to save some memory. */
2788 out_state = lto_get_out_decl_state ();
2789 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2790 if (out_state->streams[i].tree_hash_table)
2792 delete out_state->streams[i].tree_hash_table;
2793 out_state->streams[i].tree_hash_table = NULL;
2796 /* Write the global symbols. */
2797 lto_output_decl_state_streams (ob, out_state);
2798 num_fns = lto_function_decl_states.length ();
2799 for (idx = 0; idx < num_fns; idx++)
2801 fn_out_state =
2802 lto_function_decl_states[idx];
2803 lto_output_decl_state_streams (ob, fn_out_state);
2806 header.major_version = LTO_major_version;
2807 header.minor_version = LTO_minor_version;
2809 /* Currently not used. This field would allow us to preallocate
2810 the globals vector, so that it need not be resized as it is extended. */
2811 header.num_nodes = -1;
2813 /* Compute the total size of all decl out states. */
2814 decl_state_size = sizeof (int32_t);
2815 decl_state_size += lto_out_decl_state_written_size (out_state);
2816 for (idx = 0; idx < num_fns; idx++)
2818 fn_out_state =
2819 lto_function_decl_states[idx];
2820 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2822 header.decl_state_size = decl_state_size;
2824 header.main_size = ob->main_stream->total_size;
2825 header.string_size = ob->string_stream->total_size;
2827 lto_write_data (&header, sizeof header);
2829 /* Write the main out-decl state, followed by out-decl states of
2830 functions. */
2831 num_decl_states = num_fns + 1;
2832 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2833 lto_output_decl_state_refs (ob, out_state);
2834 for (idx = 0; idx < num_fns; idx++)
2836 fn_out_state = lto_function_decl_states[idx];
2837 lto_output_decl_state_refs (ob, fn_out_state);
2840 lto_write_stream (ob->main_stream);
2841 lto_write_stream (ob->string_stream);
2843 lto_end_section ();
2845 /* Write the symbol table. It is used by the linker to determine
2846 dependencies, and thus we can skip it for WPA. */
2847 if (!flag_wpa)
2848 produce_symtab (ob);
2850 /* Write command line opts. */
2851 lto_write_options ();
2853 /* Deallocate memory and clean up. */
2854 for (idx = 0; idx < num_fns; idx++)
2856 fn_out_state =
2857 lto_function_decl_states[idx];
2858 lto_delete_out_decl_state (fn_out_state);
2860 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2861 lto_function_decl_states.release ();
2862 destroy_output_block (ob);
2863 if (lto_stream_offload_p)
2864 lto_write_mode_table ();
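/* Illustrative sketch, not part of this file: the tail of the decls
   section written above is a 32-bit count of decl states followed, per
   state, by the 32-bit cache slot of its function decl and one
   <count, slots...> block per decl stream (see lto_output_decl_state_refs).
   SKETCH_N_DECL_STREAMS and the sketch_* names are hypothetical stand-ins
   for LTO_N_DECL_STREAMS and the real data structures.  */

#include <stdint.h>
#include <stdio.h>

#define SKETCH_N_DECL_STREAMS 2

struct sketch_decl_state
{
  uint32_t fn_ref;                              /* Cache slot of the fn decl.  */
  uint32_t n_refs[SKETCH_N_DECL_STREAMS];       /* Trees per stream.  */
  const uint32_t *refs[SKETCH_N_DECL_STREAMS];  /* Their cache slots.  */
};

static void
sketch_write_decl_state_refs (FILE *f, const struct sketch_decl_state *states,
                              int32_t num_states)
{
  fwrite (&num_states, sizeof num_states, 1, f);
  for (int32_t i = 0; i < num_states; i++)
    {
      fwrite (&states[i].fn_ref, sizeof (uint32_t), 1, f);
      for (int j = 0; j < SKETCH_N_DECL_STREAMS; j++)
        {
          fwrite (&states[i].n_refs[j], sizeof (uint32_t), 1, f);
          fwrite (states[i].refs[j], sizeof (uint32_t),
                  states[i].n_refs[j], f);
        }
    }
}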