/* Write the GIMPLE representation to a file stream.

   Copyright (C) 2009-2018 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
   Re-implemented by Diego Novillo <dnovillo@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
47 static void lto_write_tree (struct output_block*, tree, bool);
/* Clear the line info stored in OB.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
  ob->current_sysp = false;
}

/* Create the output block and return it.  SECTION_TYPE is
   LTO_section_function_body or LTO_static_initializer.  */

struct output_block *
create_output_block (enum lto_section_type section_type)
{
  struct output_block *ob = XCNEW (struct output_block);

  ob->section_type = section_type;
  ob->decl_state = lto_get_out_decl_state ();
  ob->main_stream = XCNEW (struct lto_output_stream);
  ob->string_stream = XCNEW (struct lto_output_stream);
  ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);

  if (section_type == LTO_section_function_body)
    ob->cfg_stream = XCNEW (struct lto_output_stream);

  clear_line_info (ob);

  ob->string_hash_table = new hash_table<string_slot_hasher> (37);
  gcc_obstack_init (&ob->obstack);

  return ob;
}

/* Destroy the output block OB.  */

void
destroy_output_block (struct output_block *ob)
{
  enum lto_section_type section_type = ob->section_type;

  delete ob->string_hash_table;
  ob->string_hash_table = NULL;

  free (ob->main_stream);
  free (ob->string_stream);
  if (section_type == LTO_section_function_body)
    free (ob->cfg_stream);

  streamer_tree_cache_delete (ob->writer_cache);
  obstack_free (&ob->obstack, NULL);

  free (ob);
}

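/* Illustrative sketch (added commentary, not part of the upstream source):
   the typical lifecycle of an output block as used by the writers in this
   file.  FNDECL stands for whatever symbol the caller is streaming.

     struct output_block *ob
       = create_output_block (LTO_section_function_body);
     ... fill ob->main_stream (and ob->cfg_stream) via the streamer ...
     produce_asm (ob, fndecl);
     destroy_output_block (ob);

   create_output_block allocates the streams and the writer cache;
   destroy_output_block releases them again.  */
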
/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}

/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to the global stream, because they may be used in the type
     definition.  */
  if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_CONTEXT (t))
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    return false;
  else if (((VAR_P (t) && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}

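/* Added commentary (not part of the upstream source), summarizing the
   predicate above on a few common cases:
     - a TREE_STATIC VAR_DECL or a FUNCTION_DECL: indexable, emitted as a
       reference into the global decl tables;
     - a non-static local VAR_DECL, a function-local TYPE_DECL/CONST_DECL,
       or a DEBUG_EXPR_DECL: not indexable, streamed with the function body;
     - an SSA_NAME: indexable, only its version number is emitted (see
       lto_output_tree_ref).  */
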
/* Output info about the new location into bitpack BP.
   After outputting the bitpack, lto_output_location_data has
   to be done to output the actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
			loc < RESERVED_LOCATION_COUNT
			? loc : RESERVED_LOCATION_COUNT);
  if (loc < RESERVED_LOCATION_COUNT)
    return;

  xloc = expand_location (loc);

  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    {
      bp_pack_string (ob, bp, xloc.file, true);
      bp_pack_value (bp, xloc.sysp, 1);
    }
  ob->current_file = xloc.file;
  ob->current_sysp = xloc.sysp;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}

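/* Illustrative sketch (added commentary, not part of the upstream source):
   locations are delta-encoded against the state cached in OB, so a caller
   packs them into a bitpack and flushes that bitpack afterwards, e.g.

     bitpack_d bp = bitpack_create (ob->main_stream);
     stream_output_location (ob, &bp, loc);
     streamer_write_bitpack (&bp);

   This mirrors the ERT_MUST_NOT_THROW handling in output_eh_region below;
   LOC stands for whatever location_t the caller has at hand, and
   stream_output_location is expected to reach this function through the
   streamer hooks.  */
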
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* FALLTHRU */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}

/* Return true if EXPR is a tree node that can be written to disk.  */

static inline bool
lto_is_streamable (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  /* Notice that we reject SSA_NAMEs as well.  We only emit the SSA
     name version in lto_output_tree_ref (see output_ssa_names).  */
  return !is_lang_specific (expr)
	 && code != SSA_NAME
	 && code != CALL_EXPR
	 && code != LANG_TYPE
	 && code != MODIFY_EXPR
	 && code != INIT_EXPR
	 && code != TARGET_EXPR
	 && code != BIND_EXPR
	 && code != WITH_CLEANUP_EXPR
	 && code != STATEMENT_LIST
	 && (code == CASE_LABEL_EXPR
	     || code == DECL_EXPR
	     || TREE_CODE_CLASS (code) != tcc_statement);
}

/* Very rough estimate of streaming size of the initializer.  If we ignored
   the presence of strings, we could simply just count the number of
   non-indexable tree nodes and the number of references to indexable nodes.
   Strings however may be very large and we do not want to dump them into
   the global stream.

   Count the size of the initializer until the size in DATA is positive.  */

static tree
subtract_estimated_size (tree *tp, int *ws, void *data)
{
  long *sum = (long *)data;
  if (tree_is_indexable (*tp))
    {
      /* An indexable tree is one reference to the global stream.
	 Guess it may be about 4 bytes.  */
      *sum -= 4;
      *ws = 0;
    }
  /* String table entry + base of tree node needs to be streamed.  */
  if (TREE_CODE (*tp) == STRING_CST)
    *sum -= TREE_STRING_LENGTH (*tp) + 8;
  else
    {
      /* Identifiers are also variable length but should not appear
	 naked in a constructor.  */
      gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
      /* We do not really make an attempt to work out the size of the
	 pickled tree, as it is very variable.  Make it bigger than the
	 reference.  */
      *sum -= 16;
    }
  if (*sum < 0)
    return *tp;
  return NULL_TREE;
}

/* For EXPR, look up and return what we want to stream out as its
   DECL_INITIAL.  */

static tree
get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
{
  gcc_checking_assert (DECL_P (expr)
		       && TREE_CODE (expr) != FUNCTION_DECL
		       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);

  /* Handle DECL_INITIAL for symbols.  */
  tree initial = DECL_INITIAL (expr);
  if (VAR_P (expr)
      && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
      && !DECL_IN_CONSTANT_POOL (expr)
      && initial)
    {
      varpool_node *vnode;
      /* Extra section needs about 30 bytes; do not produce it for simple
	 scalar values.  */
      if (!(vnode = varpool_node::get (expr))
	  || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
	initial = error_mark_node;
      if (initial != error_mark_node)
	{
	  long max_size = 30;
	  if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
			 NULL))
	    initial = error_mark_node;
	}
    }

  return initial;
}

/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_die_ref_for_decl.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *sym;
      unsigned HOST_WIDE_INT off;
      if (debug_info_level > DINFO_LEVEL_NONE
	  && debug_hooks->die_ref_for_decl (expr, &sym, &off))
	{
	  streamer_write_string (ob, ob->main_stream, sym, true);
	  streamer_write_uhwi (ob, off);
	}
      else
	streamer_write_string (ob, ob->main_stream, NULL, true);
    }
}

/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}

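/* Added commentary (not part of the upstream source): per the two functions
   above, a non-indexable tree is laid out in the stream roughly as

     tree header          (streamer_write_tree_header)
     bitpacked flags      (streamer_write_tree_bitfields)
     pointer fields       (streamer_write_tree_body, refs or inline trees)
     DECL_INITIAL         (for decls other than functions/translation units)
     DIE reference        (symbol plus offset, or an empty string)
     zero                 (end-of-tree marker, streamer_write_zero)

   The reader is expected to consume the record in the same order.  */
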
/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (TREE_CODE (expr) == INTEGER_CST
      && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  vec<scc_entry> sccstack;

private:
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size,
	    bool ref_p, bool this_ref_p);

  hash_map<tree, sccs *> sccstate;
  vec<worklist> worklist_vec;
  struct obstack sccstate_obstack;
};

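/* Added commentary (not part of the upstream source): the DFS class is a
   worklist-based variant of Tarjan's SCC algorithm.  Each visited tree gets
   an sccs state carrying a DFS number and a "low" link; a node whose low
   link equals its own DFS number closes an SCC, which is then popped from
   sccstack, hashed via hash_scc and written out as an LTO_tree_scc record
   (see the constructor below).  */
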
/* Emit the physical representation of tree node EXPR to output block OB,
   using depth-first search on the subgraph.  If THIS_REF_P is true, the
   leaves of EXPR are emitted as references via lto_output_tree_ref.
   REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
   this is for a rewalk of a single leaf SCC.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  if (TREE_CODE (expr) == INTEGER_CST
	      && !TREE_OVERFLOW (expr))
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  continue;
	}

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf SCC just pop it,
	     let the earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		std::swap (sccstack[first + i],
			   sccstack[first + entry_start + i]);

	      /* We already sorted SCC deterministically in hash_scc.  */

	      /* Check that we have only one SCC.
		 Naturally we may have conflicts if the hash function is not
		 strong enough.  Let's see how far this gets.  */
	      gcc_checking_assert (scc_entry_len == 1);
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}

/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      unsigned int count = vector_cst_encoded_nelts (expr);
      for (unsigned int i = 0; i < count; ++i)
	DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
    for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
      DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
	  && ! DECL_CONTEXT (expr))
	DFS_follow_tree_edge ((*all_translation_units)[0]);
      else
	DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
	 information for early inlining so drop it on the floor instead of
	 ICEing in dwarf2out.c.
	 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
	 declarations which should be eliminated by decl merging.  Be sure
	 none leaks to this point.  */
      gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
      DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));

      if ((VAR_P (expr)
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (VAR_P (expr)
	  && DECL_HAS_DEBUG_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
      DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c, but keep the notion of whether the block
	 is an inlined block by referring to itself for the sake of
	 tree_nonartificial_location.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      else if (BLOCK_ABSTRACT_ORIGIN (expr))
	DFS_follow_tree_edge (expr);
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}

/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside the current SCC.  MAP, if
   non-NULL, may hold hash values of trees inside the current SCC.  */

static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
{
  inchash::hash hstate;

#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (!SIBLING) \
      hstate.add_int (0); \
    else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
    else if (map) \
      hstate.add_int (*map->get (SIBLING)); \
    else \
      hstate.add_int (1); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hstate.add_int (code);
  if (!TYPE_P (t))
    {
      hstate.add_flag (TREE_SIDE_EFFECTS (t));
      hstate.add_flag (TREE_CONSTANT (t));
      hstate.add_flag (TREE_READONLY (t));
      hstate.add_flag (TREE_PUBLIC (t));
    }
  hstate.add_flag (TREE_ADDRESSABLE (t));
  hstate.add_flag (TREE_THIS_VOLATILE (t));
  if (DECL_P (t))
    hstate.add_flag (DECL_UNSIGNED (t));
  else if (TYPE_P (t))
    hstate.add_flag (TYPE_UNSIGNED (t));
  if (TYPE_P (t))
    hstate.add_flag (TYPE_ARTIFICIAL (t));
  else
    hstate.add_flag (TREE_NO_WARNING (t));
  hstate.add_flag (TREE_NOTHROW (t));
  hstate.add_flag (TREE_STATIC (t));
  hstate.add_flag (TREE_PROTECTED (t));
  hstate.add_flag (TREE_DEPRECATED (t));
  if (code != TREE_BINFO)
    hstate.add_flag (TREE_PRIVATE (t));
  if (TYPE_P (t))
    {
      hstate.add_flag (AGGREGATE_TYPE_P (t)
		       ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
      hstate.add_flag (TYPE_ADDR_SPACE (t));
    }
  else if (code == SSA_NAME)
    hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
  hstate.commit_flag ();

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    hstate.add_wide_int (wi::to_widest (t));

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      hstate.add_flag (r.cl);
      hstate.add_flag (r.sign);
      hstate.add_flag (r.signalling);
      hstate.add_flag (r.canonical);
      hstate.commit_flag ();
      hstate.add_int (r.uexp);
      hstate.add (r.sig, sizeof (r.sig));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      hstate.add_int (f.mode);
      hstate.add_int (f.data.low);
      hstate.add_int (f.data.high);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      hstate.add_hwi (DECL_MODE (t));
      hstate.add_flag (DECL_NONLOCAL (t));
      hstate.add_flag (DECL_VIRTUAL_P (t));
      hstate.add_flag (DECL_IGNORED_P (t));
      hstate.add_flag (DECL_ABSTRACT_P (t));
      hstate.add_flag (DECL_ARTIFICIAL (t));
      hstate.add_flag (DECL_USER_ALIGN (t));
      hstate.add_flag (DECL_PRESERVE_P (t));
      hstate.add_flag (DECL_EXTERNAL (t));
      hstate.add_flag (DECL_GIMPLE_REG_P (t));
      hstate.commit_flag ();
      hstate.add_int (DECL_ALIGN (t));
      if (code == LABEL_DECL)
	{
	  hstate.add_int (EH_LANDING_PAD_NR (t));
	  hstate.add_int (LABEL_DECL_UID (t));
	}
      else if (code == FIELD_DECL)
	{
	  hstate.add_flag (DECL_PACKED (t));
	  hstate.add_flag (DECL_NONADDRESSABLE_P (t));
	  hstate.add_flag (DECL_PADDING_P (t));
	  hstate.add_int (DECL_OFFSET_ALIGN (t));
	}
      else if (code == VAR_DECL)
	{
	  hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
	  hstate.add_flag (DECL_NONLOCAL_FRAME (t));
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  hstate.add_flag (DECL_BY_REFERENCE (t));
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
	}
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    hstate.add_int (DECL_REGISTER (t));

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      hstate.add_flag (DECL_COMMON (t));
      hstate.add_flag (DECL_DLLIMPORT_P (t));
      hstate.add_flag (DECL_WEAK (t));
      hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
      hstate.add_flag (DECL_COMDAT (t));
      hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
      hstate.add_int (DECL_VISIBILITY (t));
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  hstate.add_flag (DECL_HARD_REGISTER (t));
	  hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  hstate.add_flag (DECL_FINAL_P (t));
	  hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
	  hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
	}
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      hstate.add_int (DECL_BUILT_IN_CLASS (t));
      hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
      hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
      hstate.add_flag (DECL_UNINLINABLE (t));
      hstate.add_flag (DECL_POSSIBLY_INLINED (t));
      hstate.add_flag (DECL_IS_NOVOPS (t));
      hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
      hstate.add_flag (DECL_IS_MALLOC (t));
      hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
      hstate.add_flag (DECL_DECLARED_INLINE_P (t));
      hstate.add_flag (DECL_STATIC_CHAIN (t));
      hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
      hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
      hstate.add_flag (DECL_NO_LIMIT_STACK (t));
      hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
      hstate.add_flag (DECL_PURE_P (t));
      hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
      hstate.commit_flag ();
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	hstate.add_int (DECL_FUNCTION_CODE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      hstate.add_hwi (TYPE_MODE (t));
      hstate.add_flag (TYPE_STRING_FLAG (t));
      /* TYPE_NO_FORCE_BLK is private to stor-layout and need
	 no streaming.  */
      hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
      hstate.add_flag (TYPE_PACKED (t));
      hstate.add_flag (TYPE_RESTRICT (t));
      hstate.add_flag (TYPE_USER_ALIGN (t));
      hstate.add_flag (TYPE_READONLY (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
	  hstate.add_flag (TYPE_FINAL_P (t));
	}
      else if (code == ARRAY_TYPE)
	hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
      if (AGGREGATE_TYPE_P (t))
	hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
      hstate.commit_flag ();
      hstate.add_int (TYPE_PRECISION (t));
      hstate.add_int (TYPE_ALIGN (t));
      hstate.add_int (TYPE_EMPTY_P (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
		strlen (TRANSLATION_UNIT_LANGUAGE (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
      /* We don't stream these when passing things to a different target.  */
      && !lto_stream_offload_p)
    hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      unsigned int count = vector_cst_encoded_nelts (t);
      for (unsigned int i = 0; i < count; ++i)
	visit (VECTOR_CST_ENCODED_ELT (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
    for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
      visit (POLY_INT_CST_COEFF (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
	 be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
	visit (DECL_ORIGINAL_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_VINDEX (t));
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MIN_VALUE_RAW (t));
      visit (TYPE_MAX_VALUE_RAW (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      hstate.add_hwi (TREE_OPERAND_LENGTH (t));
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
	visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      hstate.add_hwi (CONSTRUCTOR_NELTS (t));
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      HOST_WIDE_INT val;

      hstate.add_hwi (OMP_CLAUSE_CODE (t));
      switch (OMP_CLAUSE_CODE (t))
	{
	case OMP_CLAUSE_DEFAULT:
	  val = OMP_CLAUSE_DEFAULT_KIND (t);
	  break;
	case OMP_CLAUSE_SCHEDULE:
	  val = OMP_CLAUSE_SCHEDULE_KIND (t);
	  break;
	case OMP_CLAUSE_DEPEND:
	  val = OMP_CLAUSE_DEPEND_KIND (t);
	  break;
	case OMP_CLAUSE_MAP:
	  val = OMP_CLAUSE_MAP_KIND (t);
	  break;
	case OMP_CLAUSE_PROC_BIND:
	  val = OMP_CLAUSE_PROC_BIND_KIND (t);
	  break;
	case OMP_CLAUSE_REDUCTION:
	  val = OMP_CLAUSE_REDUCTION_CODE (t);
	  break;
	default:
	  val = 0;
	  break;
	}
      hstate.add_hwi (val);
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
	visit (OMP_CLAUSE_OPERAND (t, i));
      visit (OMP_CLAUSE_CHAIN (t));
    }

  return hstate.end ();

#undef visit
}

/* Compare two SCC entries by their hash value for qsorting them.  */

int
DFS::scc_entry_compare (const void *p1_, const void *p2_)
{
  const scc_entry *p1 = (const scc_entry *) p1_;
  const scc_entry *p2 = (const scc_entry *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}

/* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
   THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST.  */

hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
	       bool ref_p, bool this_ref_p)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get a unique hash for every tree within the SCC and compute
     the hash value of the whole SCC by combining all values together in a
     stable (entry-point independent) order.  This guarantees that the same
     SCC regions within different translation units will get the same hash
     values and therefore will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC
     hash by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of the SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have a unique entry point; we however do not build such
	     SCCs in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
			 true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the SCC hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
		}
	    }
	  /* If we got a unique hash value for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

	      /* We cannot 100% guarantee that the hash won't conflict so as
		 to make it impossible to find a unique hash.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each element.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);

      for (unsigned i = 0; i < size; i++)
	sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}

/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  */

void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p)
{
  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
    return;

  worklist w;
  w.expr = expr;
  w.from_state = from_state;
  w.cstate = NULL;
  w.ref_p = ref_p;
  w.this_ref_p = this_ref_p;
  worklist_vec.safe_push (w);
}

/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  */
      /* Save ob state ... */
      /* let's see ... */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}

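/* Added commentary (not part of the upstream source): lto_output_tree is the
   routine the stream_write_tree calls in this file are expected to funnel
   into via the streamer hooks, e.g.

     stream_write_tree (ob, expr, true);

   streams EXPR either as a reference (if it is indexable or already in the
   writer cache) or by DFS-walking it and emitting the SCCs it belongs to.  */
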
/* Output to OB a list of try/catch handlers starting with FIRST.  */

static void
output_eh_try_list (struct output_block *ob, eh_catch first)
{
  eh_catch n;

  for (n = first; n; n = n->next_catch)
    {
      streamer_write_record_start (ob, LTO_eh_catch);
      stream_write_tree (ob, n->type_list, true);
      stream_write_tree (ob, n->filter_list, true);
      stream_write_tree (ob, n->label, true);
    }

  streamer_write_record_start (ob, LTO_null);
}

/* Output EH region R to OB.  */

static void
output_eh_region (struct output_block *ob, eh_region r)
{
  enum LTO_tags tag;

  if (r == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  if (r->type == ERT_CLEANUP)
    tag = LTO_ert_cleanup;
  else if (r->type == ERT_TRY)
    tag = LTO_ert_try;
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    tag = LTO_ert_allowed_exceptions;
  else if (r->type == ERT_MUST_NOT_THROW)
    tag = LTO_ert_must_not_throw;
  else
    gcc_unreachable ();

  streamer_write_record_start (ob, tag);
  streamer_write_hwi (ob, r->index);

  if (r->outer)
    streamer_write_hwi (ob, r->outer->index);
  else
    streamer_write_zero (ob);

  if (r->inner)
    streamer_write_hwi (ob, r->inner->index);
  else
    streamer_write_zero (ob);

  if (r->next_peer)
    streamer_write_hwi (ob, r->next_peer->index);
  else
    streamer_write_zero (ob);

  if (r->type == ERT_TRY)
    {
      output_eh_try_list (ob, r->u.eh_try.first_catch);
    }
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    {
      stream_write_tree (ob, r->u.allowed.type_list, true);
      stream_write_tree (ob, r->u.allowed.label, true);
      streamer_write_uhwi (ob, r->u.allowed.filter);
    }
  else if (r->type == ERT_MUST_NOT_THROW)
    {
      stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
      bitpack_d bp = bitpack_create (ob->main_stream);
      stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
      streamer_write_bitpack (&bp);
    }

  if (r->landing_pads)
    streamer_write_hwi (ob, r->landing_pads->index);
  else
    streamer_write_zero (ob);
}

/* Output landing pad LP to OB.  */

static void
output_eh_lp (struct output_block *ob, eh_landing_pad lp)
{
  if (lp == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  streamer_write_record_start (ob, LTO_eh_landing_pad);
  streamer_write_hwi (ob, lp->index);
  if (lp->next_lp)
    streamer_write_hwi (ob, lp->next_lp->index);
  else
    streamer_write_zero (ob);

  if (lp->region)
    streamer_write_hwi (ob, lp->region->index);
  else
    streamer_write_zero (ob);

  stream_write_tree (ob, lp->post_landing_pad, true);
}

/* Output the existing eh_table to OB.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}

/* Output all of the active ssa names to the ssa_names stream.  */

static void
output_ssa_names (struct output_block *ob, struct function *fn)
{
  unsigned int i, len;

  len = vec_safe_length (SSANAMES (fn));
  streamer_write_uhwi (ob, len);

  for (i = 1; i < len; i++)
    {
      tree ptr = (*SSANAMES (fn))[i];

      if (ptr == NULL_TREE
	  || SSA_NAME_IN_FREE_LIST (ptr)
	  || virtual_operand_p (ptr)
	  /* Simply skip unreleased SSA names.  */
	  || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (! SSA_NAME_DEF_STMT (ptr)
		  || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
	continue;

      streamer_write_uhwi (ob, i);
      streamer_write_char_stream (ob->main_stream,
				  SSA_NAME_IS_DEFAULT_DEF (ptr));
      if (SSA_NAME_VAR (ptr))
	stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
      else
	/* ??? This drops SSA_NAME_IDENTIFIER on the floor.  */
	stream_write_tree (ob, TREE_TYPE (ptr), true);
    }

  streamer_write_zero (ob);
}

1867 /* Output the cfg. */
1869 static void
1870 output_cfg (struct output_block *ob, struct function *fn)
1872 struct lto_output_stream *tmp_stream = ob->main_stream;
1873 basic_block bb;
1875 ob->main_stream = ob->cfg_stream;
1877 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1878 profile_status_for_fn (fn));
1880 /* Output the number of the highest basic block. */
1881 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1883 FOR_ALL_BB_FN (bb, fn)
1885 edge_iterator ei;
1886 edge e;
1888 streamer_write_hwi (ob, bb->index);
1890 /* Output the successors and the edge flags. */
1891 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1892 FOR_EACH_EDGE (e, ei, bb->succs)
1894 streamer_write_uhwi (ob, e->dest->index);
1895 e->probability.stream_out (ob);
1896 streamer_write_uhwi (ob, e->flags);
1900 streamer_write_hwi (ob, -1);
1902 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1903 while (bb->next_bb)
1905 streamer_write_hwi (ob, bb->next_bb->index);
1906 bb = bb->next_bb;
1909 streamer_write_hwi (ob, -1);
1911 /* ??? The cfgloop interface is tied to cfun. */
1912 gcc_assert (cfun == fn);
1914 /* Output the number of loops. */
1915 streamer_write_uhwi (ob, number_of_loops (fn));
1917 /* Output each loop, skipping the tree root which has number zero. */
1918 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1920 struct loop *loop = get_loop (fn, i);
1922 /* Write the index of the loop header. That's enough to rebuild
1923 the loop tree on the reader side. Stream -1 for an unused
1924 loop entry. */
1925 if (!loop)
1927 streamer_write_hwi (ob, -1);
1928 continue;
1930 else
1931 streamer_write_hwi (ob, loop->header->index);
1933 /* Write everything copy_loop_info copies. */
1934 streamer_write_enum (ob->main_stream,
1935 loop_estimation, EST_LAST, loop->estimate_state);
1936 streamer_write_hwi (ob, loop->any_upper_bound);
1937 if (loop->any_upper_bound)
1938 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1939 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1940 if (loop->any_likely_upper_bound)
1941 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1942 streamer_write_hwi (ob, loop->any_estimate);
1943 if (loop->any_estimate)
1944 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1946 /* Write OMP SIMD related info. */
1947 streamer_write_hwi (ob, loop->safelen);
1948 streamer_write_hwi (ob, loop->unroll);
1949 streamer_write_hwi (ob, loop->dont_vectorize);
1950 streamer_write_hwi (ob, loop->force_vectorize);
1951 stream_write_tree (ob, loop->simduid, true);
1954 ob->main_stream = tmp_stream;
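/* Editor's sketch (not part of the original source) of the cfg stream
   filled in above:

	profile status			(enum)
	highest basic block number	(uhwi)
	for every basic block:
	  block index			(hwi)
	  number of successor edges	(uhwi)
	  per edge: destination index, probability, flags
	-1				end of the block list
	block indexes of the next_bb chain from the entry block, -1 ended
	number of loops			(uhwi)
	per loop (the root is skipped): header block index, or -1 for an
	  unused entry, then the bound, estimate and OMP SIMD fields that
	  copy_loop_info copies.  */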
1958 /* Create the header in the file using OB. If the section type is for
1959 a function, set FN to the decl for that function. */
1961 void
1962 produce_asm (struct output_block *ob, tree fn)
1964 enum lto_section_type section_type = ob->section_type;
1965 struct lto_function_header header;
1966 char *section_name;
1968 if (section_type == LTO_section_function_body)
1970 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1971 section_name = lto_get_section_name (section_type, name, NULL);
1973 else
1974 section_name = lto_get_section_name (section_type, NULL, NULL);
1976 lto_begin_section (section_name, !flag_wpa);
1977 free (section_name);
1979 /* The entire header stream is computed here. */
1980 memset (&header, 0, sizeof (struct lto_function_header));
1982 /* Write the header. */
1983 header.major_version = LTO_major_version;
1984 header.minor_version = LTO_minor_version;
1986 if (section_type == LTO_section_function_body)
1987 header.cfg_size = ob->cfg_stream->total_size;
1988 header.main_size = ob->main_stream->total_size;
1989 header.string_size = ob->string_stream->total_size;
1990 lto_write_data (&header, sizeof header);
1992 /* Put all of the gimple and the string table out to the asm file as a
1993 block of text. */
1994 if (section_type == LTO_section_function_body)
1995 lto_write_stream (ob->cfg_stream);
1996 lto_write_stream (ob->main_stream);
1997 lto_write_stream (ob->string_stream);
1999 lto_end_section ();
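/* Editor's note (not part of the original source): the resulting
   section therefore consists of an lto_function_header recording the
   LTO version and the size of each stream, immediately followed by the
   raw bytes of the cfg stream (function bodies only), the main stream
   and the string stream, so a reader can locate every sub-stream from
   the header sizes alone.  */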
2003 /* Output the base body of struct function FN using output block OB. */
2005 static void
2006 output_struct_function_base (struct output_block *ob, struct function *fn)
2008 struct bitpack_d bp;
2009 unsigned i;
2010 tree t;
2012 /* Output the static chain and non-local goto save area. */
2013 stream_write_tree (ob, fn->static_chain_decl, true);
2014 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2016 /* Output all the local variables in the function. */
2017 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2018 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2019 stream_write_tree (ob, t, true);
2021 /* Output current IL state of the function. */
2022 streamer_write_uhwi (ob, fn->curr_properties);
2024 /* Write all the attributes for FN. */
2025 bp = bitpack_create (ob->main_stream);
2026 bp_pack_value (&bp, fn->is_thunk, 1);
2027 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2028 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2029 bp_pack_value (&bp, fn->returns_struct, 1);
2030 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2031 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2032 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2033 bp_pack_value (&bp, fn->after_inlining, 1);
2034 bp_pack_value (&bp, fn->stdarg, 1);
2035 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2036 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2037 bp_pack_value (&bp, fn->calls_alloca, 1);
2038 bp_pack_value (&bp, fn->calls_setjmp, 1);
2039 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2040 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2041 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2042 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2043 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2045 /* Output the function start and end loci. */
2046 stream_output_location (ob, &bp, fn->function_start_locus);
2047 stream_output_location (ob, &bp, fn->function_end_locus);
2049 streamer_write_bitpack (&bp);
2053 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2055 static void
2056 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2058 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2059 if (! BLOCK_SUBBLOCKS (root))
2060 leafs.safe_push (root);
2061 else
2062 collect_block_tree_leafs (root, leafs);
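/* Editor's example (not part of the original source): for a block tree

	B0
	+-- B1
	|   +-- B3
	+-- B2

   calling collect_block_tree_leafs (B0, leafs) pushes B3 and B2; B1 is
   not pushed because it has sub-blocks, and B0 itself is never
   considered.  Recursing with the sub-block itself (rather than with
   its BLOCK_SUBBLOCKS) is what keeps leafs more than two levels down
   from being skipped.  */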
2065 /* Output the body of function NODE->DECL. */
2067 static void
2068 output_function (struct cgraph_node *node)
2070 tree function;
2071 struct function *fn;
2072 basic_block bb;
2073 struct output_block *ob;
2075 function = node->decl;
2076 fn = DECL_STRUCT_FUNCTION (function);
2077 ob = create_output_block (LTO_section_function_body);
2079 clear_line_info (ob);
2080 ob->symbol = node;
2082 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2084 /* Set current_function_decl and cfun. */
2085 push_cfun (fn);
2087 /* Make string 0 be a NULL string. */
2088 streamer_write_char_stream (ob->string_stream, 0);
2090 streamer_write_record_start (ob, LTO_function);
2092 /* Output decls for parameters and args. */
2093 stream_write_tree (ob, DECL_RESULT (function), true);
2094 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2096 /* Output debug args if available. */
2097 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2098 if (! debugargs)
2099 streamer_write_uhwi (ob, 0);
2100 else
2102 streamer_write_uhwi (ob, (*debugargs)->length ());
2103 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2104 stream_write_tree (ob, (**debugargs)[i], true);
2107 /* Output DECL_INITIAL for the function, which contains the tree of
2108 lexical scopes. */
2109 stream_write_tree (ob, DECL_INITIAL (function), true);
2110 /* As we do not recurse into BLOCK_SUBBLOCKS but only into BLOCK_SUPERCONTEXT,
2111 collect the block tree leafs and stream those. */
2112 auto_vec<tree> block_tree_leafs;
2113 if (DECL_INITIAL (function))
2114 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2115 streamer_write_uhwi (ob, block_tree_leafs.length ());
2116 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2117 stream_write_tree (ob, block_tree_leafs[i], true);
2119 /* We also stream abstract functions; for those we stream only the
2120 parts needed for debug info. */
2121 if (gimple_has_body_p (function))
2123 /* Fixup loops if required to match discovery done in the reader. */
2124 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2126 streamer_write_uhwi (ob, 1);
2127 output_struct_function_base (ob, fn);
2129 /* Output all the SSA names used in the function. */
2130 output_ssa_names (ob, fn);
2132 /* Output any exception handling regions. */
2133 output_eh_regions (ob, fn);
2136 /* We will renumber the statements. The code that does this uses
2137 the same ordering that we use for serializing them so we can use
2138 the same code on the other end and not have to write out the
2139 statement numbers. We do not assign UIDs to PHIs here because
2140 virtual PHIs get re-computed on-the-fly which would make numbers
2141 inconsistent. */
2142 set_gimple_stmt_max_uid (cfun, 0);
2143 FOR_ALL_BB_FN (bb, cfun)
2145 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2146 gsi_next (&gsi))
2148 gphi *stmt = gsi.phi ();
2150 /* Virtual PHIs are not going to be streamed. */
2151 if (!virtual_operand_p (gimple_phi_result (stmt)))
2152 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2154 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2155 gsi_next (&gsi))
2157 gimple *stmt = gsi_stmt (gsi);
2158 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2161 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2162 virtual phis now. */
2163 FOR_ALL_BB_FN (bb, cfun)
2165 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2166 gsi_next (&gsi))
2168 gphi *stmt = gsi.phi ();
2169 if (virtual_operand_p (gimple_phi_result (stmt)))
2170 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2174 /* Output the code for the function. */
2175 FOR_ALL_BB_FN (bb, fn)
2176 output_bb (ob, bb, fn);
2178 /* The terminator for this function. */
2179 streamer_write_record_start (ob, LTO_null);
2181 output_cfg (ob, fn);
2183 loop_optimizer_finalize ();
2184 pop_cfun ();
2186 else
2187 streamer_write_uhwi (ob, 0);
2189 /* Create a section to hold the pickled output of this function. */
2190 produce_asm (ob, function);
2192 destroy_output_block (ob);
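/* Editor's summary (not part of the original source) of the record
   order produced for one function body by the calls above:

	LTO_function
	DECL_RESULT, the DECL_ARGUMENTS chain, the debug args
	DECL_INITIAL (the lexical block tree) and its leaf blocks
	a flag saying whether a body follows
	struct function base, SSA names, EH regions
	the statements of every basic block, terminated by LTO_null
	the CFG and loop structure (written to the separate cfg stream)

   produce_asm then packages the streams into the function section.  */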
2195 /* Output the initializer of variable NODE->DECL. */
2197 static void
2198 output_constructor (struct varpool_node *node)
2200 tree var = node->decl;
2201 struct output_block *ob;
2203 ob = create_output_block (LTO_section_function_body);
2205 clear_line_info (ob);
2206 ob->symbol = node;
2208 /* Make string 0 be a NULL string. */
2209 streamer_write_char_stream (ob->string_stream, 0);
2211 /* Output DECL_INITIAL for the variable, which contains the
2212 initializer. */
2213 stream_write_tree (ob, DECL_INITIAL (var), true);
2215 /* Create a section to hold the pickled output of this variable. */
2216 produce_asm (ob, var);
2218 destroy_output_block (ob);
2222 /* Emit toplevel asms. */
2224 void
2225 lto_output_toplevel_asms (void)
2227 struct output_block *ob;
2228 struct asm_node *can;
2229 char *section_name;
2230 struct lto_simple_header_with_strings header;
2232 if (!symtab->first_asm_symbol ())
2233 return;
2235 ob = create_output_block (LTO_section_asm);
2237 /* Make string 0 be a NULL string. */
2238 streamer_write_char_stream (ob->string_stream, 0);
2240 for (can = symtab->first_asm_symbol (); can; can = can->next)
2242 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2243 streamer_write_hwi (ob, can->order);
2246 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2248 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2249 lto_begin_section (section_name, !flag_wpa);
2250 free (section_name);
2252 /* The entire header stream is computed here. */
2253 memset (&header, 0, sizeof (header));
2255 /* Write the header. */
2256 header.major_version = LTO_major_version;
2257 header.minor_version = LTO_minor_version;
2259 header.main_size = ob->main_stream->total_size;
2260 header.string_size = ob->string_stream->total_size;
2261 lto_write_data (&header, sizeof header);
2263 /* Put all of the gimple and the string table out to the asm file as a
2264 block of text. */
2265 lto_write_stream (ob->main_stream);
2266 lto_write_stream (ob->string_stream);
2268 lto_end_section ();
2270 destroy_output_block (ob);
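/* Editor's sketch (not part of the original source) of the
   LTO_section_asm payload: one (string, order) pair per toplevel asm,
   a NULL string as terminator, preceded on disk by an
   lto_simple_header_with_strings giving the main and string stream
   sizes.  */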
2274 /* Copy the function body or variable constructor of NODE without deserializing. */
2276 static void
2277 copy_function_or_variable (struct symtab_node *node)
2279 tree function = node->decl;
2280 struct lto_file_decl_data *file_data = node->lto_file_data;
2281 const char *data;
2282 size_t len;
2283 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2284 char *section_name =
2285 lto_get_section_name (LTO_section_function_body, name, NULL);
2286 size_t i, j;
2287 struct lto_in_decl_state *in_state;
2288 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2290 lto_begin_section (section_name, false);
2291 free (section_name);
2293 /* We may have renamed the declaration, e.g., a static function. */
2294 name = lto_get_decl_name_mapping (file_data, name);
2296 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2297 name, &len);
2298 gcc_assert (data);
2300 /* Do a bit copy of the function body. */
2301 lto_write_raw_data (data, len);
2303 /* Copy decls. */
2304 in_state =
2305 lto_get_function_in_decl_state (node->lto_file_data, function);
2306 gcc_assert (in_state);
2307 out_state->compressed = in_state->compressed;
2309 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2311 size_t n = vec_safe_length (in_state->streams[i]);
2312 vec<tree, va_gc> *trees = in_state->streams[i];
2313 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2315 /* The out state must have the same indices as the in state.
2316 So just copy the vector. All the encoders in the in state
2317 must be empty when we reach here. */
2318 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2319 encoder->trees.reserve_exact (n);
2320 for (j = 0; j < n; j++)
2321 encoder->trees.safe_push ((*trees)[j]);
2324 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2325 data, len);
2326 lto_end_section ();
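/* Editor's note (not part of the original source) on why the plain
   copy of the decl streams is sufficient: the raw body copied above
   still refers to trees by their index in the per-function decl
   streams.  If, say, the incoming function-decl stream held
   [foo, bar, baz] at indices 0..2, the out state ends up with exactly
   the same vector, so index 1 inside the copied bytes still resolves
   to bar on the reader side.  */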
2329 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2331 static tree
2332 wrap_refs (tree *tp, int *ws, void *)
2334 tree t = *tp;
2335 if (handled_component_p (t)
2336 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2337 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2339 tree decl = TREE_OPERAND (t, 0);
2340 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2341 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2342 build1 (ADDR_EXPR, ptrtype, decl),
2343 build_int_cst (ptrtype, 0));
2344 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2345 *ws = 0;
2347 else if (TREE_CODE (t) == CONSTRUCTOR)
2349 else if (!EXPR_P (t))
2350 *ws = 0;
2351 return NULL_TREE;
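/* Editor's example (hypothetical, not from the original source): given
   a public variable

	struct S { int f; } v;

   a reference such as v.f inside a constructor is rewritten by the
   callback above into roughly

	MEM[(struct S *) &v].f

   i.e. the base becomes an ADDR_EXPR wrapped in a zero-offset MEM_REF,
   so the access carries its own type instead of depending on the
   (possibly later merged) declaration node.  */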
2354 /* Remove functions that are no longer used from offload_funcs, and mark the
2355 remaining ones with DECL_PRESERVE_P. */
2357 static void
2358 prune_offload_funcs (void)
2360 if (!offload_funcs)
2361 return;
2363 unsigned ix, ix2;
2364 tree *elem_ptr;
2365 VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2366 cgraph_node::get (*elem_ptr) == NULL);
2368 tree fn_decl;
2369 FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2370 DECL_PRESERVE_P (fn_decl) = 1;
2373 /* Main entry point from the pass manager. */
2375 void
2376 lto_output (void)
2378 struct lto_out_decl_state *decl_state;
2379 bitmap output = NULL;
2380 int i, n_nodes;
2381 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2383 prune_offload_funcs ();
2385 if (flag_checking)
2386 output = lto_bitmap_alloc ();
2388 /* Initialize the streamer. */
2389 lto_streamer_init ();
2391 n_nodes = lto_symtab_encoder_size (encoder);
2392 /* Process the functions with bodies and the variables with initializers. */
2393 for (i = 0; i < n_nodes; i++)
2395 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2396 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2398 if (lto_symtab_encoder_encode_body_p (encoder, node)
2399 && !node->alias
2400 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2402 if (flag_checking)
2404 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2405 bitmap_set_bit (output, DECL_UID (node->decl));
2407 decl_state = lto_new_out_decl_state ();
2408 lto_push_out_decl_state (decl_state);
2409 if (gimple_has_body_p (node->decl)
2410 || (!flag_wpa
2411 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2412 /* Thunks have no body but they may be synthesized
2413 at WPA time. */
2414 || DECL_ARGUMENTS (node->decl))
2415 output_function (node);
2416 else
2417 copy_function_or_variable (node);
2418 gcc_assert (lto_get_out_decl_state () == decl_state);
2419 lto_pop_out_decl_state ();
2420 lto_record_function_out_decl_state (node->decl, decl_state);
2423 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2425 /* Wrap symbol references inside the ctor in a type
2426 preserving MEM_REF. */
2427 tree ctor = DECL_INITIAL (node->decl);
2428 if (ctor && !in_lto_p)
2429 walk_tree (&ctor, wrap_refs, NULL, NULL);
2430 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2431 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2432 && !node->alias)
2434 timevar_push (TV_IPA_LTO_CTORS_OUT);
2435 if (flag_checking)
2437 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2438 bitmap_set_bit (output, DECL_UID (node->decl));
2440 decl_state = lto_new_out_decl_state ();
2441 lto_push_out_decl_state (decl_state);
2442 if (DECL_INITIAL (node->decl) != error_mark_node
2443 || (!flag_wpa
2444 && flag_incremental_link != INCREMENTAL_LINK_LTO))
2445 output_constructor (node);
2446 else
2447 copy_function_or_variable (node);
2448 gcc_assert (lto_get_out_decl_state () == decl_state);
2449 lto_pop_out_decl_state ();
2450 lto_record_function_out_decl_state (node->decl, decl_state);
2451 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2456 /* Emit the callgraph after emitting function bodies. This needs to
2457 be done now to make sure that all the statements in every function
2458 have been renumbered so that edges can be associated with call
2459 statements using the statement UIDs. */
2460 output_symtab ();
2462 output_offload_tables ();
2464 #if CHECKING_P
2465 lto_bitmap_free (output);
2466 #endif
2469 /* Write each node encoded by ENCODER to OB, as well as those reachable
2470 from it and required for correct representation of its semantics.
2471 Each node in ENCODER must be a global declaration or a type. A node
2472 is written only once, even if it appears multiple times in the
2473 vector. Certain transitively-reachable nodes, such as those
2474 representing expressions, may be duplicated, but such nodes
2475 must not appear in ENCODER itself. */
2477 static void
2478 write_global_stream (struct output_block *ob,
2479 struct lto_tree_ref_encoder *encoder)
2481 tree t;
2482 size_t index;
2483 const size_t size = lto_tree_ref_encoder_size (encoder);
2485 for (index = 0; index < size; index++)
2487 t = lto_tree_ref_encoder_get_tree (encoder, index);
2488 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2489 stream_write_tree (ob, t, false);
2494 /* Write a sequence of indices into the globals vector corresponding
2495 to the trees in ENCODER. These are used by the reader to map the
2496 indices used to refer to global entities within function bodies to
2497 their referents. */
2499 static void
2500 write_global_references (struct output_block *ob,
2501 struct lto_tree_ref_encoder *encoder)
2503 tree t;
2504 uint32_t index;
2505 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2507 /* Write size and slot indexes as 32-bit unsigned numbers. */
2508 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2509 data[0] = size;
2511 for (index = 0; index < size; index++)
2513 unsigned slot_num;
2515 t = lto_tree_ref_encoder_get_tree (encoder, index);
2516 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2517 gcc_assert (slot_num != (unsigned)-1);
2518 data[index + 1] = slot_num;
2521 lto_write_data (data, sizeof (int32_t) * (size + 1));
2522 free (data);
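/* Editor's sketch (not part of the original source): each reference
   block is an array of 32-bit words

	{ size, slot (t0), slot (t1), ..., slot (t[size - 1]) }

   e.g. three trees sitting in writer-cache slots 5, 9 and 12 come out
   as { 3, 5, 9, 12 }.  The reader uses these slot numbers to map the
   per-stream indices used in function bodies back to the globals it
   has already materialized.  */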
2526 /* Write all the streams in an lto_out_decl_state STATE using
2527 output block OB. */
2529 void
2530 lto_output_decl_state_streams (struct output_block *ob,
2531 struct lto_out_decl_state *state)
2533 int i;
2535 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2536 write_global_stream (ob, &state->streams[i]);
2540 /* Write all the references in an lto_out_decl_state STATE using
2541 output block OB. */
2543 void
2544 lto_output_decl_state_refs (struct output_block *ob,
2545 struct lto_out_decl_state *state)
2547 unsigned i;
2548 unsigned ref;
2549 tree decl;
2551 /* Write a reference to FUNCTION_DECL. If there is no function,
2552 write a reference to void_type_node. */
2553 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2554 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2555 gcc_assert (ref != (unsigned)-1);
2556 ref = ref * 2 + (state->compressed ? 1 : 0);
2557 lto_write_data (&ref, sizeof (uint32_t));
2559 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2560 write_global_references (ob, &state->streams[i]);
2564 /* Return the written size of STATE. */
2566 static size_t
2567 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2569 int i;
2570 size_t size;
2572 size = sizeof (int32_t); /* fn_ref. */
2573 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2575 size += sizeof (int32_t); /* vector size. */
2576 size += (lto_tree_ref_encoder_size (&state->streams[i])
2577 * sizeof (int32_t));
2579 return size;
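/* Editor's worked example (hypothetical numbers, not from the original
   source): if one stream holds 3 trees, another 7, and the remaining
   streams are empty, the refs block for this state occupies

	4				the fn reference
	+ LTO_N_DECL_STREAMS * 4	one vector size per stream
	+ (3 + 7) * 4			the slot indexes themselves

   bytes, which matches what lto_output_decl_state_refs emits.  */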
2583 /* Write symbol T to the current section, using the slot number recorded
2584 in CACHE. SEEN specifies the symbols written so far. */
2586 static void
2587 write_symbol (struct streamer_tree_cache_d *cache,
2588 tree t, hash_set<const char *> *seen, bool alias)
2590 const char *name;
2591 enum gcc_plugin_symbol_kind kind;
2592 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2593 unsigned slot_num;
2594 uint64_t size;
2595 const char *comdat;
2596 unsigned char c;
2598 gcc_checking_assert (TREE_PUBLIC (t)
2599 && !is_builtin_fn (t)
2600 && !DECL_ABSTRACT_P (t)
2601 && (!VAR_P (t) || !DECL_HARD_REGISTER (t)));
2603 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2605 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2607 /* This behaves like assemble_name_raw in varasm.c, performing the
2608 same name manipulations that ASM_OUTPUT_LABELREF does. */
2609 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2611 if (seen->add (name))
2612 return;
2614 streamer_tree_cache_lookup (cache, t, &slot_num);
2615 gcc_assert (slot_num != (unsigned)-1);
2617 if (DECL_EXTERNAL (t))
2619 if (DECL_WEAK (t))
2620 kind = GCCPK_WEAKUNDEF;
2621 else
2622 kind = GCCPK_UNDEF;
2624 else
2626 if (DECL_WEAK (t))
2627 kind = GCCPK_WEAKDEF;
2628 else if (DECL_COMMON (t))
2629 kind = GCCPK_COMMON;
2630 else
2631 kind = GCCPK_DEF;
2633 /* When something is defined, it should have a node attached. */
2634 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2635 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2636 || (cgraph_node::get (t)
2637 && cgraph_node::get (t)->definition));
2640 /* Imitate what default_elf_asm_output_external does.
2641 When a symbol is external, we need to output it with DEFAULT visibility
2642 when compiling with -fvisibility=default, but with HIDDEN visibility
2643 when the symbol has the visibility ("hidden") attribute specified.
2644 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2645 right. */
2647 if (DECL_EXTERNAL (t)
2648 && !targetm.binds_local_p (t))
2649 visibility = GCCPV_DEFAULT;
2650 else
2651 switch (DECL_VISIBILITY (t))
2653 case VISIBILITY_DEFAULT:
2654 visibility = GCCPV_DEFAULT;
2655 break;
2656 case VISIBILITY_PROTECTED:
2657 visibility = GCCPV_PROTECTED;
2658 break;
2659 case VISIBILITY_HIDDEN:
2660 visibility = GCCPV_HIDDEN;
2661 break;
2662 case VISIBILITY_INTERNAL:
2663 visibility = GCCPV_INTERNAL;
2664 break;
2667 if (kind == GCCPK_COMMON
2668 && DECL_SIZE_UNIT (t)
2669 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2670 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2671 else
2672 size = 0;
2674 if (DECL_ONE_ONLY (t))
2675 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2676 else
2677 comdat = "";
2679 lto_write_data (name, strlen (name) + 1);
2680 lto_write_data (comdat, strlen (comdat) + 1);
2681 c = (unsigned char) kind;
2682 lto_write_data (&c, 1);
2683 c = (unsigned char) visibility;
2684 lto_write_data (&c, 1);
2685 lto_write_data (&size, 8);
2686 lto_write_data (&slot_num, 4);
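/* Editor's sketch (not part of the original source) of one symbol
   table entry as written above:

	name, '\0'		after assembler-name mangling
	comdat group, '\0'	empty when not DECL_ONE_ONLY
	kind			1 byte (gcc_plugin_symbol_kind)
	visibility		1 byte (gcc_plugin_symbol_visibility)
	size			8 bytes, nonzero only for commons
	slot number		4 bytes, index into the writer cache

   This is the per-symbol format the linker consumes when it determines
   dependencies between LTO objects.  */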
2689 /* Write an IL symbol table to OB for the cgraph/varpool nodes recorded
2690 in its symtab node encoder. */
2692 static void
2693 produce_symtab (struct output_block *ob)
2695 struct streamer_tree_cache_d *cache = ob->writer_cache;
2696 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2697 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2698 lto_symtab_encoder_iterator lsei;
2700 lto_begin_section (section_name, false);
2701 free (section_name);
2703 hash_set<const char *> seen;
2705 /* Write the symbol table.
2706 First write everything defined and then all declarations.
2707 This is necessary to handle cases where we have duplicated symbols. */
2708 for (lsei = lsei_start (encoder);
2709 !lsei_end_p (lsei); lsei_next (&lsei))
2711 symtab_node *node = lsei_node (lsei);
2713 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2714 continue;
2715 write_symbol (cache, node->decl, &seen, false);
2717 for (lsei = lsei_start (encoder);
2718 !lsei_end_p (lsei); lsei_next (&lsei))
2720 symtab_node *node = lsei_node (lsei);
2722 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2723 continue;
2724 write_symbol (cache, node->decl, &seen, false);
2727 lto_end_section ();
2731 /* Init the streamer_mode_table for output, where we collect info on what
2732 machine_mode values have been streamed. */
2733 void
2734 lto_output_init_mode_table (void)
2736 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2740 /* Write the mode table. */
2741 static void
2742 lto_write_mode_table (void)
2744 struct output_block *ob;
2745 ob = create_output_block (LTO_section_mode_table);
2746 bitpack_d bp = bitpack_create (ob->main_stream);
2748 /* Ensure that for GET_MODE_INNER (m) != m the inner mode is
2749 also marked. */
2750 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2751 if (streamer_mode_table[i])
2753 machine_mode m = (machine_mode) i;
2754 machine_mode inner_m = GET_MODE_INNER (m);
2755 if (inner_m != m)
2756 streamer_mode_table[(int) inner_m] = 1;
2758 /* First stream modes that have GET_MODE_INNER (m) == m,
2759 so that we can refer to them afterwards. */
2760 for (int pass = 0; pass < 2; pass++)
2761 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2762 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2764 machine_mode m = (machine_mode) i;
2765 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2766 continue;
2767 bp_pack_value (&bp, m, 8);
2768 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2769 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
2770 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
2771 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2772 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
2773 switch (GET_MODE_CLASS (m))
2775 case MODE_FRACT:
2776 case MODE_UFRACT:
2777 case MODE_ACCUM:
2778 case MODE_UACCUM:
2779 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2780 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2781 break;
2782 case MODE_FLOAT:
2783 case MODE_DECIMAL_FLOAT:
2784 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2785 break;
2786 default:
2787 break;
2789 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2791 bp_pack_value (&bp, VOIDmode, 8);
2793 streamer_write_bitpack (&bp);
2795 char *section_name
2796 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2797 lto_begin_section (section_name, !flag_wpa);
2798 free (section_name);
2800 /* The entire header stream is computed here. */
2801 struct lto_simple_header_with_strings header;
2802 memset (&header, 0, sizeof (header));
2804 /* Write the header. */
2805 header.major_version = LTO_major_version;
2806 header.minor_version = LTO_minor_version;
2808 header.main_size = ob->main_stream->total_size;
2809 header.string_size = ob->string_stream->total_size;
2810 lto_write_data (&header, sizeof header);
2812 /* Put all of the gimple and the string table out to the asm file as a
2813 block of text. */
2814 lto_write_stream (ob->main_stream);
2815 lto_write_stream (ob->string_stream);
2817 lto_end_section ();
2818 destroy_output_block (ob);
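/* Editor's sketch (not part of the original source) of one mode record
   in the bitpack above: mode number (8 bits), mode class, size,
   precision, inner mode, number of units, then IBIT/FBIT for the
   fixed-point classes or the real format name for the float classes,
   and finally the mode name; a VOIDmode entry terminates the table.
   Modes whose GET_MODE_INNER is the mode itself are streamed in the
   first pass so later records can refer to them by number.  */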
2822 /* This pass is run after all of the functions are serialized and all
2823 of the IPA passes have written their serialized forms. This pass
2824 causes the vector of all of the global decls and types used from
2825 this file to be written into a section that can then be read in to
2826 recover these on the other side. */
2828 void
2829 produce_asm_for_decls (void)
2831 struct lto_out_decl_state *out_state;
2832 struct lto_out_decl_state *fn_out_state;
2833 struct lto_decl_header header;
2834 char *section_name;
2835 struct output_block *ob;
2836 unsigned idx, num_fns;
2837 size_t decl_state_size;
2838 int32_t num_decl_states;
2840 ob = create_output_block (LTO_section_decls);
2842 memset (&header, 0, sizeof (struct lto_decl_header));
2844 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2845 lto_begin_section (section_name, !flag_wpa);
2846 free (section_name);
2848 /* Make string 0 be a NULL string. */
2849 streamer_write_char_stream (ob->string_stream, 0);
2851 gcc_assert (!alias_pairs);
2853 /* Get rid of the global decl state hash tables to save some memory. */
2854 out_state = lto_get_out_decl_state ();
2855 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2856 if (out_state->streams[i].tree_hash_table)
2858 delete out_state->streams[i].tree_hash_table;
2859 out_state->streams[i].tree_hash_table = NULL;
2862 /* Write the global symbols. */
2863 lto_output_decl_state_streams (ob, out_state);
2864 num_fns = lto_function_decl_states.length ();
2865 for (idx = 0; idx < num_fns; idx++)
2867 fn_out_state =
2868 lto_function_decl_states[idx];
2869 lto_output_decl_state_streams (ob, fn_out_state);
2872 header.major_version = LTO_major_version;
2873 header.minor_version = LTO_minor_version;
2875 /* Currently not used. This field would allow us to preallocate
2876 the globals vector, so that it need not be resized as it is extended. */
2877 header.num_nodes = -1;
2879 /* Compute the total size of all decl out states. */
2880 decl_state_size = sizeof (int32_t);
2881 decl_state_size += lto_out_decl_state_written_size (out_state);
2882 for (idx = 0; idx < num_fns; idx++)
2884 fn_out_state =
2885 lto_function_decl_states[idx];
2886 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2888 header.decl_state_size = decl_state_size;
2890 header.main_size = ob->main_stream->total_size;
2891 header.string_size = ob->string_stream->total_size;
2893 lto_write_data (&header, sizeof header);
2895 /* Write the main out-decl state, followed by out-decl states of
2896 functions. */
2897 num_decl_states = num_fns + 1;
2898 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2899 lto_output_decl_state_refs (ob, out_state);
2900 for (idx = 0; idx < num_fns; idx++)
2902 fn_out_state = lto_function_decl_states[idx];
2903 lto_output_decl_state_refs (ob, fn_out_state);
2906 lto_write_stream (ob->main_stream);
2907 lto_write_stream (ob->string_stream);
2909 lto_end_section ();
2911 /* Write the symbol table. It is used by the linker to determine
2912 dependencies, and thus we can skip it for WPA. */
2913 if (!flag_wpa)
2914 produce_symtab (ob);
2916 /* Write command line opts. */
2917 lto_write_options ();
2919 /* Deallocate memory and clean up. */
2920 for (idx = 0; idx < num_fns; idx++)
2922 fn_out_state =
2923 lto_function_decl_states[idx];
2924 lto_delete_out_decl_state (fn_out_state);
2926 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2927 lto_function_decl_states.release ();
2928 destroy_output_block (ob);
2929 if (lto_stream_offload_p)
2930 lto_write_mode_table ();