gcc/lto-streamer-out.c
1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2018 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
47 static void lto_write_tree (struct output_block*, tree, bool);
49 /* Clear the line info stored in OB. */
51 static void
52 clear_line_info (struct output_block *ob)
54 ob->current_file = NULL;
55 ob->current_line = 0;
56 ob->current_col = 0;
57 ob->current_sysp = false;
61 /* Create the output block and return it. SECTION_TYPE is
62 LTO_section_function_body or LTO_section_static_initializer. */
64 struct output_block *
65 create_output_block (enum lto_section_type section_type)
67 struct output_block *ob = XCNEW (struct output_block);
69 ob->section_type = section_type;
70 ob->decl_state = lto_get_out_decl_state ();
71 ob->main_stream = XCNEW (struct lto_output_stream);
72 ob->string_stream = XCNEW (struct lto_output_stream);
73 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
75 if (section_type == LTO_section_function_body)
76 ob->cfg_stream = XCNEW (struct lto_output_stream);
78 clear_line_info (ob);
80 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
81 gcc_obstack_init (&ob->obstack);
83 return ob;
87 /* Destroy the output block OB. */
89 void
90 destroy_output_block (struct output_block *ob)
92 enum lto_section_type section_type = ob->section_type;
94 delete ob->string_hash_table;
95 ob->string_hash_table = NULL;
97 free (ob->main_stream);
98 free (ob->string_stream);
99 if (section_type == LTO_section_function_body)
100 free (ob->cfg_stream);
102 streamer_tree_cache_delete (ob->writer_cache);
103 obstack_free (&ob->obstack, NULL);
105 free (ob);
109 /* Look up NODE in the type table and write the index for it to OB. */
111 static void
112 output_type_ref (struct output_block *ob, tree node)
114 streamer_write_record_start (ob, LTO_type_ref);
115 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
119 /* Return true if tree node T is written to various tables. For these
120 nodes, we sometimes want to write their physical representation
121 (via lto_output_tree), and sometimes we need to emit an index
122 reference into a table (via lto_output_tree_ref). */
124 static bool
125 tree_is_indexable (tree t)
127 /* Parameters and return values of functions of variably modified types
128 must go to the global stream, because they may be used in the type
129 definition. */
130 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
131 && DECL_CONTEXT (t))
132 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
133 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
134 else if (TREE_CODE (t) == IMPORTED_DECL)
135 return false;
136 else if (((VAR_P (t) && !TREE_STATIC (t))
137 || TREE_CODE (t) == TYPE_DECL
138 || TREE_CODE (t) == CONST_DECL
139 || TREE_CODE (t) == NAMELIST_DECL)
140 && decl_function_context (t))
141 return false;
142 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
143 return false;
144 /* Variably modified types need to be streamed alongside function
145 bodies because they can refer to local entities. Together with
146 them we have to localize their members as well.
147 ??? In theory that includes non-FIELD_DECLs as well. */
148 else if (TYPE_P (t)
149 && variably_modified_type_p (t, NULL_TREE))
150 return false;
151 else if (TREE_CODE (t) == FIELD_DECL
152 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
153 return false;
154 else
155 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
159 /* Output info about the new location LOC into bitpack BP.
160 After outputting the bitpack, lto_output_location_data has
161 to be called to output the actual data. */
163 void
164 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
165 location_t loc)
167 expanded_location xloc;
169 loc = LOCATION_LOCUS (loc);
170 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
171 loc < RESERVED_LOCATION_COUNT
172 ? loc : RESERVED_LOCATION_COUNT);
173 if (loc < RESERVED_LOCATION_COUNT)
174 return;
176 xloc = expand_location (loc);
178 bp_pack_value (bp, ob->current_file != xloc.file, 1);
179 bp_pack_value (bp, ob->current_line != xloc.line, 1);
180 bp_pack_value (bp, ob->current_col != xloc.column, 1);
182 if (ob->current_file != xloc.file)
184 bp_pack_string (ob, bp, xloc.file, true);
185 bp_pack_value (bp, xloc.sysp, 1);
187 ob->current_file = xloc.file;
188 ob->current_sysp = xloc.sysp;
190 if (ob->current_line != xloc.line)
191 bp_pack_var_len_unsigned (bp, xloc.line);
192 ob->current_line = xloc.line;
194 if (ob->current_col != xloc.column)
195 bp_pack_var_len_unsigned (bp, xloc.column);
196 ob->current_col = xloc.column;
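
/* lto_output_location above is a delta encoder: a location only costs a
   file name, line or column in the stream when that field differs from the
   previous location written to the same block, so long runs of statements
   from one source line cost a couple of bits each.  Below is a minimal,
   self-contained sketch of that "changed-bit first, value only if changed"
   scheme; ExpandedLoc and LocEncoder are illustrative names, not GCC types.  */

#include <string>
#include <vector>

struct ExpandedLoc { std::string file; unsigned line, col; };

struct LocEncoder
{
  ExpandedLoc cur;                      /* Last location streamed.  */
  std::vector<bool> bits;               /* Stands in for the bitpack.  */
  std::vector<unsigned> values;         /* Stands in for the value stream.  */
  std::vector<std::string> strings;     /* Stands in for the string table.  */

  void encode (const ExpandedLoc &loc)
  {
    /* One flag per field tells the reader what follows.  */
    bits.push_back (cur.file != loc.file);
    bits.push_back (cur.line != loc.line);
    bits.push_back (cur.col != loc.col);
    if (cur.file != loc.file)
      strings.push_back (loc.file);
    if (cur.line != loc.line)
      values.push_back (loc.line);
    if (cur.col != loc.col)
      values.push_back (loc.col);
    /* The next location is encoded relative to this one.  */
    cur = loc;
  }
};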
200 /* If EXPR is an indexable tree node, output a reference to it to
201 output block OB. Otherwise, output the physical representation of
202 EXPR to OB. */
204 static void
205 lto_output_tree_ref (struct output_block *ob, tree expr)
207 enum tree_code code;
209 if (TYPE_P (expr))
211 output_type_ref (ob, expr);
212 return;
215 code = TREE_CODE (expr);
216 switch (code)
218 case SSA_NAME:
219 streamer_write_record_start (ob, LTO_ssa_name_ref);
220 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
221 break;
223 case FIELD_DECL:
224 streamer_write_record_start (ob, LTO_field_decl_ref);
225 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
226 break;
228 case FUNCTION_DECL:
229 streamer_write_record_start (ob, LTO_function_decl_ref);
230 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
231 break;
233 case VAR_DECL:
234 case DEBUG_EXPR_DECL:
235 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
236 /* FALLTHRU */
237 case PARM_DECL:
238 streamer_write_record_start (ob, LTO_global_decl_ref);
239 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
240 break;
242 case CONST_DECL:
243 streamer_write_record_start (ob, LTO_const_decl_ref);
244 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
245 break;
247 case IMPORTED_DECL:
248 gcc_assert (decl_function_context (expr) == NULL);
249 streamer_write_record_start (ob, LTO_imported_decl_ref);
250 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
253 case TYPE_DECL:
254 streamer_write_record_start (ob, LTO_type_decl_ref);
255 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
256 break;
258 case NAMELIST_DECL:
259 streamer_write_record_start (ob, LTO_namelist_decl_ref);
260 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
261 break;
263 case NAMESPACE_DECL:
264 streamer_write_record_start (ob, LTO_namespace_decl_ref);
265 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
266 break;
268 case LABEL_DECL:
269 streamer_write_record_start (ob, LTO_label_decl_ref);
270 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
271 break;
273 case RESULT_DECL:
274 streamer_write_record_start (ob, LTO_result_decl_ref);
275 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
276 break;
278 case TRANSLATION_UNIT_DECL:
279 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
280 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
281 break;
283 default:
284 /* No other node is indexable, so it should have been handled by
285 lto_output_tree. */
286 gcc_unreachable ();
291 /* Return true if EXPR is a tree node that can be written to disk. */
293 static inline bool
294 lto_is_streamable (tree expr)
296 enum tree_code code = TREE_CODE (expr);
298 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
299 name version in lto_output_tree_ref (see output_ssa_names). */
300 return !is_lang_specific (expr)
301 && code != SSA_NAME
302 && code != CALL_EXPR
303 && code != LANG_TYPE
304 && code != MODIFY_EXPR
305 && code != INIT_EXPR
306 && code != TARGET_EXPR
307 && code != BIND_EXPR
308 && code != WITH_CLEANUP_EXPR
309 && code != STATEMENT_LIST
310 && (code == CASE_LABEL_EXPR
311 || code == DECL_EXPR
312 || TREE_CODE_CLASS (code) != tcc_statement);
315 /* Very rough estimate of the streaming size of the initializer. If we ignored
316 the presence of strings, we could simply count the number of non-indexable
317 tree nodes and the number of references to indexable nodes. Strings however
318 may be very large and we do not want to dump them into the global stream.
320 Count the size of the initializer as long as the size in DATA stays non-negative. */
322 static tree
323 subtract_estimated_size (tree *tp, int *ws, void *data)
325 long *sum = (long *)data;
326 if (tree_is_indexable (*tp))
328 /* An indexable tree is one reference into the global stream.
329 Guess it may be about 4 bytes. */
330 *sum -= 4;
331 *ws = 0;
333 /* A string table entry plus the base of the tree node need to be streamed. */
334 if (TREE_CODE (*tp) == STRING_CST)
335 *sum -= TREE_STRING_LENGTH (*tp) + 8;
336 else
338 /* Identifiers are also variable length but should not appear
339 naked in a constructor. */
340 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
341 /* We do not really attempt to work out the size of the pickled tree, as
342 it is very variable. Just make it bigger than a reference. */
343 *sum -= 16;
345 if (*sum < 0)
346 return *tp;
347 return NULL_TREE;
351 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
353 static tree
354 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
356 gcc_checking_assert (DECL_P (expr)
357 && TREE_CODE (expr) != FUNCTION_DECL
358 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
360 /* Handle DECL_INITIAL for symbols. */
361 tree initial = DECL_INITIAL (expr);
362 if (VAR_P (expr)
363 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
364 && !DECL_IN_CONSTANT_POOL (expr)
365 && initial)
367 varpool_node *vnode;
368 /* Extra section needs about 30 bytes; do not produce it for simple
369 scalar values. */
370 if (!(vnode = varpool_node::get (expr))
371 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
372 initial = error_mark_node;
373 if (initial != error_mark_node)
375 long max_size = 30;
376 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
377 NULL))
378 initial = error_mark_node;
382 return initial;
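
/* Worked example of the 30-byte budget above, using the per-node estimates
   from subtract_estimated_size (rough guesses, not exact on-disk sizes):
   an initializer that is a single INTEGER_CST costs 16 and stays inline;
   a reference to an indexable decl costs only 4; a CONSTRUCTOR wrapping
   even one non-indexable element already costs 16 + 16 and exhausts the
   budget, as does a STRING_CST of more than 22 characters (length + 8).
   Whenever the walk exhausts the budget, INITIAL is replaced by
   error_mark_node and the value is streamed separately rather than being
   inlined into the global stream.  */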
386 /* Write a physical representation of tree node EXPR to output block
387 OB. If REF_P is true, the leaves of EXPR are emitted as references
388 via lto_output_tree_ref. */
391 static void
392 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
394 /* Pack all the non-pointer fields in EXPR into a bitpack and write
395 the resulting bitpack. */
396 streamer_write_tree_bitfields (ob, expr);
398 /* Write all the pointer fields in EXPR. */
399 streamer_write_tree_body (ob, expr, ref_p);
401 /* Write any LTO-specific data to OB. */
402 if (DECL_P (expr)
403 && TREE_CODE (expr) != FUNCTION_DECL
404 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
406 /* Handle DECL_INITIAL for symbols. */
407 tree initial = get_symbol_initial_value
408 (ob->decl_state->symtab_node_encoder, expr);
409 stream_write_tree (ob, initial, ref_p);
412 /* Stream references to early generated DIEs. Keep in sync with the
413 trees handled in dwarf2out_die_ref_for_decl. */
414 if ((DECL_P (expr)
415 && TREE_CODE (expr) != FIELD_DECL
416 && TREE_CODE (expr) != DEBUG_EXPR_DECL
417 && TREE_CODE (expr) != TYPE_DECL)
418 || TREE_CODE (expr) == BLOCK)
420 const char *sym;
421 unsigned HOST_WIDE_INT off;
422 if (debug_info_level > DINFO_LEVEL_NONE
423 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
425 streamer_write_string (ob, ob->main_stream, sym, true);
426 streamer_write_uhwi (ob, off);
428 else
429 streamer_write_string (ob, ob->main_stream, NULL, true);
433 /* Write a physical representation of tree node EXPR to output block
434 OB. If REF_P is true, the leaves of EXPR are emitted as references
435 via lto_output_tree_ref. */
438 static void
439 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
441 if (!lto_is_streamable (expr))
442 internal_error ("tree code %qs is not supported in LTO streams",
443 get_tree_code_name (TREE_CODE (expr)));
445 /* Write the header, containing everything needed to materialize
446 EXPR on the reading side. */
447 streamer_write_tree_header (ob, expr);
449 lto_write_tree_1 (ob, expr, ref_p);
451 /* Mark the end of EXPR. */
452 streamer_write_zero (ob);
455 /* Emit the physical representation of tree node EXPR to output block OB.
456 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
457 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
459 static void
460 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
461 bool ref_p, bool this_ref_p)
463 unsigned ix;
465 gcc_checking_assert (expr != NULL_TREE
466 && !(this_ref_p && tree_is_indexable (expr)));
468 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
469 expr, hash, &ix);
470 gcc_assert (!exists_p);
471 if (TREE_CODE (expr) == INTEGER_CST
472 && !TREE_OVERFLOW (expr))
474 /* Shared INTEGER_CST nodes are special because they need their
475 original type to be materialized by the reader (to implement
476 TYPE_CACHED_VALUES). */
477 streamer_write_integer_cst (ob, expr, ref_p);
479 else
481 /* This is the first time we see EXPR, write its fields
482 to OB. */
483 lto_write_tree (ob, expr, ref_p);
487 class DFS
489 public:
490 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
491 bool single_p);
492 ~DFS ();
494 struct scc_entry
496 tree t;
497 hashval_t hash;
499 vec<scc_entry> sccstack;
501 private:
502 struct sccs
504 unsigned int dfsnum;
505 unsigned int low;
507 struct worklist
509 tree expr;
510 sccs *from_state;
511 sccs *cstate;
512 bool ref_p;
513 bool this_ref_p;
516 static int scc_entry_compare (const void *, const void *);
518 void DFS_write_tree_body (struct output_block *ob,
519 tree expr, sccs *expr_state, bool ref_p);
521 void DFS_write_tree (struct output_block *ob, sccs *from_state,
522 tree expr, bool ref_p, bool this_ref_p);
524 hashval_t
525 hash_scc (struct output_block *ob, unsigned first, unsigned size,
526 bool ref_p, bool this_ref_p);
528 hash_map<tree, sccs *> sccstate;
529 vec<worklist> worklist_vec;
530 struct obstack sccstate_obstack;
533 /* Emit the physical representation of tree node EXPR to output block OB,
534 using depth-first search on the subgraph. If THIS_REF_P is true, the
535 leaves of EXPR are emitted as references via lto_output_tree_ref.
536 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
537 this is for a rewalk of a single leaf SCC. */
539 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
540 bool single_p)
542 unsigned int next_dfs_num = 1;
543 sccstack.create (0);
544 gcc_obstack_init (&sccstate_obstack);
545 worklist_vec = vNULL;
546 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
547 while (!worklist_vec.is_empty ())
549 worklist &w = worklist_vec.last ();
550 expr = w.expr;
551 sccs *from_state = w.from_state;
552 sccs *cstate = w.cstate;
553 ref_p = w.ref_p;
554 this_ref_p = w.this_ref_p;
555 if (cstate == NULL)
557 sccs **slot = &sccstate.get_or_insert (expr);
558 cstate = *slot;
559 if (cstate)
561 gcc_checking_assert (from_state);
562 if (cstate->dfsnum < from_state->dfsnum)
563 from_state->low = MIN (cstate->dfsnum, from_state->low);
564 worklist_vec.pop ();
565 continue;
568 scc_entry e = { expr, 0 };
569 /* Not yet visited. DFS recurse and push it onto the stack. */
570 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
571 sccstack.safe_push (e);
572 cstate->dfsnum = next_dfs_num++;
573 cstate->low = cstate->dfsnum;
574 w.cstate = cstate;
576 if (TREE_CODE (expr) == INTEGER_CST
577 && !TREE_OVERFLOW (expr))
578 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
579 else
581 DFS_write_tree_body (ob, expr, cstate, ref_p);
583 /* Walk any LTO-specific edges. */
584 if (DECL_P (expr)
585 && TREE_CODE (expr) != FUNCTION_DECL
586 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
588 /* Handle DECL_INITIAL for symbols. */
589 tree initial
590 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
591 expr);
592 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
595 continue;
598 /* See if we found an SCC. */
599 if (cstate->low == cstate->dfsnum)
601 unsigned first, size;
602 tree x;
604 /* If we are re-walking a single leaf SCC just pop it and
605 let the earlier worklist item access the sccstack. */
606 if (single_p)
608 worklist_vec.pop ();
609 continue;
612 /* Pop the SCC and compute its size. */
613 first = sccstack.length ();
616 x = sccstack[--first].t;
618 while (x != expr);
619 size = sccstack.length () - first;
621 /* No need to compute hashes for LTRANS units, we don't perform
622 any merging there. */
623 hashval_t scc_hash = 0;
624 unsigned scc_entry_len = 0;
625 if (!flag_wpa)
627 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
629 /* Put the entries with the least number of collisions first. */
630 unsigned entry_start = 0;
631 scc_entry_len = size + 1;
632 for (unsigned i = 0; i < size;)
634 unsigned from = i;
635 for (i = i + 1; i < size
636 && (sccstack[first + i].hash
637 == sccstack[first + from].hash); ++i)
639 if (i - from < scc_entry_len)
641 scc_entry_len = i - from;
642 entry_start = from;
645 for (unsigned i = 0; i < scc_entry_len; ++i)
646 std::swap (sccstack[first + i],
647 sccstack[first + entry_start + i]);
649 /* We already sorted the SCC deterministically in hash_scc. */
651 /* Check that we have only one SCC.
652 Naturally we may have conflicts if the hash function is not
653 strong enough. Let's see how far this gets. */
654 gcc_checking_assert (scc_entry_len == 1);
657 /* Write LTO_tree_scc. */
658 streamer_write_record_start (ob, LTO_tree_scc);
659 streamer_write_uhwi (ob, size);
660 streamer_write_uhwi (ob, scc_hash);
662 /* Write size-1 SCCs without wrapping them inside SCC bundles.
663 All INTEGER_CSTs need to be handled this way as we need
664 their type to materialize them. Also builtins are handled
665 this way.
666 ??? We still wrap these in LTO_tree_scc so at the
667 input side we can properly identify the tree we want
668 to ultimately return. */
669 if (size == 1)
670 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
671 else
673 /* Write the size of the SCC entry candidates. */
674 streamer_write_uhwi (ob, scc_entry_len);
676 /* Write all headers and populate the streamer cache. */
677 for (unsigned i = 0; i < size; ++i)
679 hashval_t hash = sccstack[first+i].hash;
680 tree t = sccstack[first+i].t;
681 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
682 t, hash, NULL);
683 gcc_assert (!exists_p);
685 if (!lto_is_streamable (t))
686 internal_error ("tree code %qs is not supported "
687 "in LTO streams",
688 get_tree_code_name (TREE_CODE (t)));
690 /* Write the header, containing everything needed to
691 materialize EXPR on the reading side. */
692 streamer_write_tree_header (ob, t);
695 /* Write the bitpacks and tree references. */
696 for (unsigned i = 0; i < size; ++i)
698 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
700 /* Mark the end of the tree. */
701 streamer_write_zero (ob);
705 /* Finally truncate the vector. */
706 sccstack.truncate (first);
708 if (from_state)
709 from_state->low = MIN (from_state->low, cstate->low);
710 worklist_vec.pop ();
711 continue;
714 gcc_checking_assert (from_state);
715 from_state->low = MIN (from_state->low, cstate->low);
716 if (cstate->dfsnum < from_state->dfsnum)
717 from_state->low = MIN (cstate->dfsnum, from_state->low);
718 worklist_vec.pop ();
720 worklist_vec.release ();
723 DFS::~DFS ()
725 sccstack.release ();
726 obstack_free (&sccstate_obstack, NULL);
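
/* The DFS class above is essentially Tarjan's SCC algorithm driven by an
   explicit worklist, so arbitrarily deep tree graphs cannot overflow the
   call stack.  Below is a self-contained sketch of that shape for a plain
   integer graph; tarjan_sccs, frame and node_state are illustrative names
   only, and the sketch uses the classic "still on the SCC stack" test where
   the code above relies on its own invariants instead.  */

#include <algorithm>
#include <vector>

struct node_state { unsigned dfsnum = 0, low = 0; bool in_scc = false; };
struct frame { int node; size_t next_succ; int pending_child; };

static void
tarjan_sccs (const std::vector<std::vector<int> > &succs,
	     std::vector<std::vector<int> > &sccs)
{
  unsigned next_dfs_num = 1;
  std::vector<node_state> st (succs.size ());
  std::vector<int> sccstack;
  std::vector<frame> worklist;

  for (int root = 0; root < (int) succs.size (); ++root)
    {
      if (st[root].dfsnum)
	continue;
      worklist.push_back (frame { root, 0, -1 });
      while (!worklist.empty ())
	{
	  frame &w = worklist.back ();
	  int n = w.node;
	  if (st[n].dfsnum == 0)
	    {
	      /* First visit: number the node and push it on the SCC stack.  */
	      st[n].dfsnum = st[n].low = next_dfs_num++;
	      sccstack.push_back (n);
	    }
	  if (w.pending_child != -1)
	    {
	      /* Returning from a DFS into PENDING_CHILD: merge its lowlink.  */
	      st[n].low = std::min (st[n].low, st[w.pending_child].low);
	      w.pending_child = -1;
	    }
	  if (w.next_succ < succs[n].size ())
	    {
	      int dest = succs[n][w.next_succ++];
	      if (st[dest].dfsnum == 0)
		{
		  /* Tree edge: descend by pushing a new worklist entry.  */
		  w.pending_child = dest;
		  worklist.push_back (frame { dest, 0, -1 });
		}
	      else if (!st[dest].in_scc)
		/* Back or cross edge into the current stack.  */
		st[n].low = std::min (st[n].low, st[dest].dfsnum);
	      continue;
	    }
	  /* All successors processed; pop an SCC if N is its root.  */
	  if (st[n].low == st[n].dfsnum)
	    {
	      sccs.push_back (std::vector<int> ());
	      int x;
	      do
		{
		  x = sccstack.back ();
		  sccstack.pop_back ();
		  st[x].in_scc = true;
		  sccs.back ().push_back (x);
		}
	      while (x != n);
	    }
	  worklist.pop_back ();
	}
    }
}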
729 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
730 DFS recurse for all tree edges originating from it. */
732 void
733 DFS::DFS_write_tree_body (struct output_block *ob,
734 tree expr, sccs *expr_state, bool ref_p)
736 #define DFS_follow_tree_edge(DEST) \
737 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
739 enum tree_code code;
741 code = TREE_CODE (expr);
743 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
745 if (TREE_CODE (expr) != IDENTIFIER_NODE)
746 DFS_follow_tree_edge (TREE_TYPE (expr));
749 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
751 unsigned int count = vector_cst_encoded_nelts (expr);
752 for (unsigned int i = 0; i < count; ++i)
753 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
756 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
757 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
758 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
760 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
762 DFS_follow_tree_edge (TREE_REALPART (expr));
763 DFS_follow_tree_edge (TREE_IMAGPART (expr));
766 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
768 /* Drop names that were created for anonymous entities. */
769 if (DECL_NAME (expr)
770 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
771 && anon_aggrname_p (DECL_NAME (expr)))
773 else
774 DFS_follow_tree_edge (DECL_NAME (expr));
775 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
776 && ! DECL_CONTEXT (expr))
777 DFS_follow_tree_edge ((*all_translation_units)[0]);
778 else
779 DFS_follow_tree_edge (DECL_CONTEXT (expr));
782 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
784 DFS_follow_tree_edge (DECL_SIZE (expr));
785 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
787 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
788 special handling in LTO, it must be handled by streamer hooks. */
790 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
792 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
793 for early inlining so drop it on the floor instead of ICEing in
794 dwarf2out.c.
795 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
796 declarations which should be eliminated by decl merging. Be sure none
797 leaks to this point. */
798 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
799 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
801 if ((VAR_P (expr)
802 || TREE_CODE (expr) == PARM_DECL)
803 && DECL_HAS_VALUE_EXPR_P (expr))
804 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
805 if (VAR_P (expr))
806 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
809 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
811 if (TREE_CODE (expr) == TYPE_DECL)
812 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
815 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
817 /* Make sure we don't inadvertently set the assembler name. */
818 if (DECL_ASSEMBLER_NAME_SET_P (expr))
819 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
822 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
824 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
825 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
826 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
827 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
828 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
831 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
833 DFS_follow_tree_edge (DECL_VINDEX (expr));
834 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
835 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
836 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
839 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
841 DFS_follow_tree_edge (TYPE_SIZE (expr));
842 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
843 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
844 DFS_follow_tree_edge (TYPE_NAME (expr));
845 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
846 reconstructed during fixup. */
847 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
848 during fixup. */
849 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
850 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
851 /* TYPE_CANONICAL is re-computed during type merging, so no need
852 to follow it here. */
853 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
856 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
858 if (TREE_CODE (expr) == ENUMERAL_TYPE)
859 DFS_follow_tree_edge (TYPE_VALUES (expr));
860 else if (TREE_CODE (expr) == ARRAY_TYPE)
861 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
862 else if (RECORD_OR_UNION_TYPE_P (expr))
863 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
864 DFS_follow_tree_edge (t);
865 else if (TREE_CODE (expr) == FUNCTION_TYPE
866 || TREE_CODE (expr) == METHOD_TYPE)
867 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
869 if (!POINTER_TYPE_P (expr))
870 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
871 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
874 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
876 DFS_follow_tree_edge (TREE_PURPOSE (expr));
877 DFS_follow_tree_edge (TREE_VALUE (expr));
878 DFS_follow_tree_edge (TREE_CHAIN (expr));
881 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
883 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
884 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
887 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
889 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
890 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
891 DFS_follow_tree_edge (TREE_BLOCK (expr));
894 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
896 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
897 if (VAR_OR_FUNCTION_DECL_P (t)
898 && DECL_EXTERNAL (t))
899 /* We have to stream externals in the block chain as
900 non-references. See also
901 tree-streamer-out.c:streamer_write_chain. */
902 DFS_write_tree (ob, expr_state, t, ref_p, false);
903 else
904 DFS_follow_tree_edge (t);
906 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
908 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
909 handle - those that represent inlined function scopes.
910 For the rest, drop them on the floor instead of ICEing
911 in dwarf2out.c, but keep the notion of whether the block
912 is an inlined block by referring to itself for the sake of
913 tree_nonartificial_location. */
914 if (inlined_function_outer_scope_p (expr))
916 tree ultimate_origin = block_ultimate_origin (expr);
917 DFS_follow_tree_edge (ultimate_origin);
919 else if (BLOCK_ABSTRACT_ORIGIN (expr))
920 DFS_follow_tree_edge (expr);
921 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
922 information for early inlined BLOCKs so drop it on the floor instead
923 of ICEing in dwarf2out.c. */
925 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
926 streaming time. */
928 /* Do not output BLOCK_SUBBLOCKS. Instead, on stream-in, this
929 list is reconstructed from BLOCK_SUPERCONTEXT. */
932 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
934 unsigned i;
935 tree t;
937 /* Note that the number of BINFO slots has already been emitted in
938 EXPR's header (see streamer_write_tree_header) because this length
939 is needed to build the empty BINFO node on the reader side. */
940 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
941 DFS_follow_tree_edge (t);
942 DFS_follow_tree_edge (BINFO_OFFSET (expr));
943 DFS_follow_tree_edge (BINFO_VTABLE (expr));
944 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
946 /* The number of BINFO_BASE_ACCESSES has already been emitted in
947 EXPR's bitfield section. */
948 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
949 DFS_follow_tree_edge (t);
951 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
952 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
955 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
957 unsigned i;
958 tree index, value;
960 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
962 DFS_follow_tree_edge (index);
963 DFS_follow_tree_edge (value);
967 if (code == OMP_CLAUSE)
969 int i;
970 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
971 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
972 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
975 #undef DFS_follow_tree_edge
978 /* Return a hash value for the tree T.
979 CACHE holds hash values of trees outside the current SCC. MAP, if non-NULL,
980 may hold hash values of trees inside the current SCC. */
982 static hashval_t
983 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
985 inchash::hash hstate;
987 #define visit(SIBLING) \
988 do { \
989 unsigned ix; \
990 if (!SIBLING) \
991 hstate.add_int (0); \
992 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
993 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
994 else if (map) \
995 hstate.add_int (*map->get (SIBLING)); \
996 else \
997 hstate.add_int (1); \
998 } while (0)
1000 /* Hash TS_BASE. */
1001 enum tree_code code = TREE_CODE (t);
1002 hstate.add_int (code);
1003 if (!TYPE_P (t))
1005 hstate.add_flag (TREE_SIDE_EFFECTS (t));
1006 hstate.add_flag (TREE_CONSTANT (t));
1007 hstate.add_flag (TREE_READONLY (t));
1008 hstate.add_flag (TREE_PUBLIC (t));
1010 hstate.add_flag (TREE_ADDRESSABLE (t));
1011 hstate.add_flag (TREE_THIS_VOLATILE (t));
1012 if (DECL_P (t))
1013 hstate.add_flag (DECL_UNSIGNED (t));
1014 else if (TYPE_P (t))
1015 hstate.add_flag (TYPE_UNSIGNED (t));
1016 if (TYPE_P (t))
1017 hstate.add_flag (TYPE_ARTIFICIAL (t));
1018 else
1019 hstate.add_flag (TREE_NO_WARNING (t));
1020 hstate.add_flag (TREE_NOTHROW (t));
1021 hstate.add_flag (TREE_STATIC (t));
1022 hstate.add_flag (TREE_PROTECTED (t));
1023 hstate.add_flag (TREE_DEPRECATED (t));
1024 if (code != TREE_BINFO)
1025 hstate.add_flag (TREE_PRIVATE (t));
1026 if (TYPE_P (t))
1028 hstate.add_flag (AGGREGATE_TYPE_P (t)
1029 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1030 hstate.add_flag (TYPE_ADDR_SPACE (t));
1032 else if (code == SSA_NAME)
1033 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1034 hstate.commit_flag ();
1036 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1037 hstate.add_wide_int (wi::to_widest (t));
1039 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1041 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1042 hstate.add_flag (r.cl);
1043 hstate.add_flag (r.sign);
1044 hstate.add_flag (r.signalling);
1045 hstate.add_flag (r.canonical);
1046 hstate.commit_flag ();
1047 hstate.add_int (r.uexp);
1048 hstate.add (r.sig, sizeof (r.sig));
1051 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1053 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1054 hstate.add_int (f.mode);
1055 hstate.add_int (f.data.low);
1056 hstate.add_int (f.data.high);
1059 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1061 hstate.add_hwi (DECL_MODE (t));
1062 hstate.add_flag (DECL_NONLOCAL (t));
1063 hstate.add_flag (DECL_VIRTUAL_P (t));
1064 hstate.add_flag (DECL_IGNORED_P (t));
1065 hstate.add_flag (DECL_ABSTRACT_P (t));
1066 hstate.add_flag (DECL_ARTIFICIAL (t));
1067 hstate.add_flag (DECL_USER_ALIGN (t));
1068 hstate.add_flag (DECL_PRESERVE_P (t));
1069 hstate.add_flag (DECL_EXTERNAL (t));
1070 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1071 hstate.commit_flag ();
1072 hstate.add_int (DECL_ALIGN (t));
1073 if (code == LABEL_DECL)
1075 hstate.add_int (EH_LANDING_PAD_NR (t));
1076 hstate.add_int (LABEL_DECL_UID (t));
1078 else if (code == FIELD_DECL)
1080 hstate.add_flag (DECL_PACKED (t));
1081 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1082 hstate.add_flag (DECL_PADDING_P (t));
1083 hstate.add_int (DECL_OFFSET_ALIGN (t));
1085 else if (code == VAR_DECL)
1087 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1088 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1090 if (code == RESULT_DECL
1091 || code == PARM_DECL
1092 || code == VAR_DECL)
1094 hstate.add_flag (DECL_BY_REFERENCE (t));
1095 if (code == VAR_DECL
1096 || code == PARM_DECL)
1097 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1099 hstate.commit_flag ();
1102 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1103 hstate.add_int (DECL_REGISTER (t));
1105 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1107 hstate.add_flag (DECL_COMMON (t));
1108 hstate.add_flag (DECL_DLLIMPORT_P (t));
1109 hstate.add_flag (DECL_WEAK (t));
1110 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1111 hstate.add_flag (DECL_COMDAT (t));
1112 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1113 hstate.add_int (DECL_VISIBILITY (t));
1114 if (code == VAR_DECL)
1116 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1117 hstate.add_flag (DECL_HARD_REGISTER (t));
1118 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1120 if (TREE_CODE (t) == FUNCTION_DECL)
1122 hstate.add_flag (DECL_FINAL_P (t));
1123 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1124 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1126 hstate.commit_flag ();
1129 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1131 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1132 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1133 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1134 hstate.add_flag (DECL_UNINLINABLE (t));
1135 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1136 hstate.add_flag (DECL_IS_NOVOPS (t));
1137 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1138 hstate.add_flag (DECL_IS_MALLOC (t));
1139 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1140 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1141 hstate.add_flag (DECL_STATIC_CHAIN (t));
1142 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1143 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1144 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1145 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1146 hstate.add_flag (DECL_PURE_P (t));
1147 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1148 hstate.commit_flag ();
1149 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1150 hstate.add_int (DECL_FUNCTION_CODE (t));
1153 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1155 hstate.add_hwi (TYPE_MODE (t));
1156 hstate.add_flag (TYPE_STRING_FLAG (t));
1157 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1158 no streaming. */
1159 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1160 hstate.add_flag (TYPE_PACKED (t));
1161 hstate.add_flag (TYPE_RESTRICT (t));
1162 hstate.add_flag (TYPE_USER_ALIGN (t));
1163 hstate.add_flag (TYPE_READONLY (t));
1164 if (RECORD_OR_UNION_TYPE_P (t))
1166 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1167 hstate.add_flag (TYPE_FINAL_P (t));
1169 else if (code == ARRAY_TYPE)
1170 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1171 if (AGGREGATE_TYPE_P (t))
1172 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1173 hstate.commit_flag ();
1174 hstate.add_int (TYPE_PRECISION (t));
1175 hstate.add_int (TYPE_ALIGN (t));
1176 hstate.add_int (TYPE_EMPTY_P (t));
1179 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1180 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1181 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1183 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1184 /* We don't stream these when passing things to a different target. */
1185 && !lto_stream_offload_p)
1186 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1188 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1189 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1191 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1192 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1194 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1195 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1197 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1199 if (code != IDENTIFIER_NODE)
1200 visit (TREE_TYPE (t));
1203 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1205 unsigned int count = vector_cst_encoded_nelts (t);
1206 for (unsigned int i = 0; i < count; ++i)
1207 visit (VECTOR_CST_ENCODED_ELT (t, i));
1210 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1211 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1212 visit (POLY_INT_CST_COEFF (t, i));
1214 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1216 visit (TREE_REALPART (t));
1217 visit (TREE_IMAGPART (t));
1220 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1222 /* Drop names that were created for anonymous entities. */
1223 if (DECL_NAME (t)
1224 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1225 && anon_aggrname_p (DECL_NAME (t)))
1227 else
1228 visit (DECL_NAME (t));
1229 if (DECL_FILE_SCOPE_P (t))
1231 else
1232 visit (DECL_CONTEXT (t));
1235 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1237 visit (DECL_SIZE (t));
1238 visit (DECL_SIZE_UNIT (t));
1239 visit (DECL_ATTRIBUTES (t));
1240 if ((code == VAR_DECL
1241 || code == PARM_DECL)
1242 && DECL_HAS_VALUE_EXPR_P (t))
1243 visit (DECL_VALUE_EXPR (t));
1244 if (code == VAR_DECL
1245 && DECL_HAS_DEBUG_EXPR_P (t))
1246 visit (DECL_DEBUG_EXPR (t));
1247 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1248 be able to call get_symbol_initial_value. */
1251 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1253 if (code == TYPE_DECL)
1254 visit (DECL_ORIGINAL_TYPE (t));
1257 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1259 if (DECL_ASSEMBLER_NAME_SET_P (t))
1260 visit (DECL_ASSEMBLER_NAME (t));
1263 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1265 visit (DECL_FIELD_OFFSET (t));
1266 visit (DECL_BIT_FIELD_TYPE (t));
1267 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1268 visit (DECL_FIELD_BIT_OFFSET (t));
1269 visit (DECL_FCONTEXT (t));
1272 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1274 visit (DECL_VINDEX (t));
1275 visit (DECL_FUNCTION_PERSONALITY (t));
1276 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1277 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1280 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1282 visit (TYPE_SIZE (t));
1283 visit (TYPE_SIZE_UNIT (t));
1284 visit (TYPE_ATTRIBUTES (t));
1285 visit (TYPE_NAME (t));
1286 visit (TYPE_MAIN_VARIANT (t));
1287 if (TYPE_FILE_SCOPE_P (t))
1289 else
1290 visit (TYPE_CONTEXT (t));
1291 visit (TYPE_STUB_DECL (t));
1294 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1296 if (code == ENUMERAL_TYPE)
1297 visit (TYPE_VALUES (t));
1298 else if (code == ARRAY_TYPE)
1299 visit (TYPE_DOMAIN (t));
1300 else if (RECORD_OR_UNION_TYPE_P (t))
1301 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1302 visit (f);
1303 else if (code == FUNCTION_TYPE
1304 || code == METHOD_TYPE)
1305 visit (TYPE_ARG_TYPES (t));
1306 if (!POINTER_TYPE_P (t))
1307 visit (TYPE_MIN_VALUE_RAW (t));
1308 visit (TYPE_MAX_VALUE_RAW (t));
1311 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1313 visit (TREE_PURPOSE (t));
1314 visit (TREE_VALUE (t));
1315 visit (TREE_CHAIN (t));
1318 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1319 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1320 visit (TREE_VEC_ELT (t, i));
1322 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1324 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1325 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1326 visit (TREE_OPERAND (t, i));
1329 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1331 unsigned i;
1332 tree b;
1333 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1334 visit (b);
1335 visit (BINFO_OFFSET (t));
1336 visit (BINFO_VTABLE (t));
1337 visit (BINFO_VPTR_FIELD (t));
1338 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1339 visit (b);
1340 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1341 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1344 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1346 unsigned i;
1347 tree index, value;
1348 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1349 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1351 visit (index);
1352 visit (value);
1356 if (code == OMP_CLAUSE)
1358 int i;
1359 HOST_WIDE_INT val;
1361 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1362 switch (OMP_CLAUSE_CODE (t))
1364 case OMP_CLAUSE_DEFAULT:
1365 val = OMP_CLAUSE_DEFAULT_KIND (t);
1366 break;
1367 case OMP_CLAUSE_SCHEDULE:
1368 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1369 break;
1370 case OMP_CLAUSE_DEPEND:
1371 val = OMP_CLAUSE_DEPEND_KIND (t);
1372 break;
1373 case OMP_CLAUSE_MAP:
1374 val = OMP_CLAUSE_MAP_KIND (t);
1375 break;
1376 case OMP_CLAUSE_PROC_BIND:
1377 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1378 break;
1379 case OMP_CLAUSE_REDUCTION:
1380 val = OMP_CLAUSE_REDUCTION_CODE (t);
1381 break;
1382 default:
1383 val = 0;
1384 break;
1386 hstate.add_hwi (val);
1387 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1388 visit (OMP_CLAUSE_OPERAND (t, i));
1389 visit (OMP_CLAUSE_CHAIN (t));
1392 return hstate.end ();
1394 #undef visit
1397 /* Compare two SCC entries by their hash value for qsorting them. */
1400 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1402 const scc_entry *p1 = (const scc_entry *) p1_;
1403 const scc_entry *p2 = (const scc_entry *) p2_;
1404 if (p1->hash < p2->hash)
1405 return -1;
1406 else if (p1->hash > p2->hash)
1407 return 1;
1408 return 0;
1411 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1412 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1414 hashval_t
1415 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1416 bool ref_p, bool this_ref_p)
1418 unsigned int last_classes = 0, iterations = 0;
1420 /* Compute hash values for the SCC members. */
1421 for (unsigned i = 0; i < size; ++i)
1422 sccstack[first+i].hash
1423 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1425 if (size == 1)
1426 return sccstack[first].hash;
1428 /* We aim to get unique hash for every tree within SCC and compute hash value
1429 of the whole SCC by combining all values together in a stable (entry-point
1430 independent) order. This guarantees that the same SCC regions within
1431 different translation units will get the same hash values and therefore
1432 will be merged at WPA time.
1434 Often the hashes are already unique. In that case we compute the SCC hash
1435 by combining individual hash values in an increasing order.
1437 If there are duplicates, we seek at least one tree with unique hash (and
1438 pick one with minimal hash and this property). Then we obtain a stable
1439 order by DFS walk starting from this unique tree and then use the index
1440 within this order to make individual hash values unique.
1442 If there is no tree with unique hash, we iteratively propagate the hash
1443 values across the internal edges of SCC. This usually quickly leads
1444 to unique hashes. Consider, for example, an SCC containing two pointers
1445 that are identical except for the types they point to and assume that
1446 these types are also part of the SCC. The propagation will add the
1447 points-to type information into their hash values. */
1450 /* Sort the SCC so we can easily check for uniqueness. */
1451 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1453 unsigned int classes = 1;
1454 int firstunique = -1;
1456 /* Find the tree with lowest unique hash (if it exists) and compute
1457 the number of equivalence classes. */
1458 if (sccstack[first].hash != sccstack[first+1].hash)
1459 firstunique = 0;
1460 for (unsigned i = 1; i < size; ++i)
1461 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1463 classes++;
1464 if (firstunique == -1
1465 && (i == size - 1
1466 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1467 firstunique = i;
1470 /* If we found a tree with unique hash, stop the iteration. */
1471 if (firstunique != -1
1472 /* Also terminate if we run out of iterations or if the number of
1473 equivalence classes is no longer increasing.
1474 For example, a cyclic list of trees that are all equivalent will
1475 never have a unique entry point; we however do not build such SCCs
1476 in our IL. */
1477 || classes <= last_classes || iterations > 16)
1479 hashval_t scc_hash;
1481 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1482 starting from FIRSTUNIQUE to obtain a stable order. */
1483 if (classes != size && firstunique != -1)
1485 hash_map <tree, hashval_t> map(size*2);
1487 /* Store hash values into a map, so we can associate them with
1488 the reordered SCC. */
1489 for (unsigned i = 0; i < size; ++i)
1490 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1492 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1493 true);
1494 gcc_assert (again.sccstack.length () == size);
1496 memcpy (sccstack.address () + first,
1497 again.sccstack.address (),
1498 sizeof (scc_entry) * size);
1500 /* Update hash values of individual members by hashing in the
1501 index within the stable order. This ensures uniqueness.
1502 Also compute the SCC hash by mixing in all hash values in
1503 the stable order we obtained. */
1504 sccstack[first].hash = *map.get (sccstack[first].t);
1505 scc_hash = sccstack[first].hash;
1506 for (unsigned i = 1; i < size; ++i)
1508 sccstack[first+i].hash
1509 = iterative_hash_hashval_t (i,
1510 *map.get (sccstack[first+i].t));
1511 scc_hash
1512 = iterative_hash_hashval_t (scc_hash,
1513 sccstack[first+i].hash);
1516 /* If we got a unique hash value for each tree, then the sort already
1517 ensured an entry-point-independent order. Only compute the final
1518 SCC hash.
1520 If we failed to find a unique entry point, we go by the same
1521 route. We will eventually introduce unwanted hash conflicts. */
1522 else
1524 scc_hash = sccstack[first].hash;
1525 for (unsigned i = 1; i < size; ++i)
1526 scc_hash
1527 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1529 /* We cannot 100% guarantee that hashes will never conflict in a way
1530 that makes it impossible to find a unique hash. This however
1531 should be an extremely rare case. ICE for now so that possible
1532 issues are found and evaluated. */
1533 gcc_checking_assert (classes == size);
1536 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1537 hash into the hash of each element. */
1538 for (unsigned i = 0; i < size; ++i)
1539 sccstack[first+i].hash
1540 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1541 return scc_hash;
1544 last_classes = classes;
1545 iterations++;
1547 /* We failed to identify the entry point; propagate hash values across
1548 the edges. */
1549 hash_map <tree, hashval_t> map(size*2);
1551 for (unsigned i = 0; i < size; ++i)
1552 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1554 for (unsigned i = 0; i < size; i++)
1555 sccstack[first+i].hash
1556 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1558 while (true);
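
/* hash_scc's fallback loop above is an iterative refinement: when several
   members of an SCC end up with equal hashes, every member re-hashes itself
   together with the current hashes of the trees it points to, and after a
   few rounds structurally different members become distinguishable.  A
   self-contained sketch of that idea on a toy graph, where each node has
   one label and one outgoing edge, follows; combine_hash and
   propagate_hashes are illustrative names, not GCC functions.  */

#include <cstddef>
#include <functional>
#include <vector>

static size_t
combine_hash (size_t a, size_t b)
{
  /* Any reasonable mixing step will do for the illustration.  */
  return (a * 0x9e3779b9u) ^ (b + (a << 6) + (a >> 2));
}

static std::vector<size_t>
propagate_hashes (const std::vector<int> &label,
		  const std::vector<int> &succ, unsigned rounds)
{
  /* Initial hashes depend only on the node's own label, so equally
     labelled nodes start out indistinguishable.  */
  std::vector<size_t> hash (label.size ());
  for (size_t i = 0; i < label.size (); ++i)
    hash[i] = std::hash<int> () (label[i]);

  for (unsigned it = 0; it < rounds; ++it)
    {
      /* Mix each successor's previous-round hash into its predecessor,
	 much as hash_tree consults MAP for in-SCC references.  */
      std::vector<size_t> next (hash.size ());
      for (size_t i = 0; i < hash.size (); ++i)
	next[i] = combine_hash (hash[i], hash[succ[i]]);
      hash = next;
    }
  return hash;
}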
1561 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1562 already in the streamer cache. Main routine called for
1563 each visit of EXPR. */
1565 void
1566 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1567 tree expr, bool ref_p, bool this_ref_p)
1569 /* Handle special cases. */
1570 if (expr == NULL_TREE)
1571 return;
1573 /* Do not DFS walk into indexable trees. */
1574 if (this_ref_p && tree_is_indexable (expr))
1575 return;
1577 /* Check if we already streamed EXPR. */
1578 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1579 return;
1581 worklist w;
1582 w.expr = expr;
1583 w.from_state = from_state;
1584 w.cstate = NULL;
1585 w.ref_p = ref_p;
1586 w.this_ref_p = this_ref_p;
1587 worklist_vec.safe_push (w);
1591 /* Emit the physical representation of tree node EXPR to output block OB.
1592 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1593 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1595 void
1596 lto_output_tree (struct output_block *ob, tree expr,
1597 bool ref_p, bool this_ref_p)
1599 unsigned ix;
1600 bool existed_p;
1602 if (expr == NULL_TREE)
1604 streamer_write_record_start (ob, LTO_null);
1605 return;
1608 if (this_ref_p && tree_is_indexable (expr))
1610 lto_output_tree_ref (ob, expr);
1611 return;
1614 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1615 if (existed_p)
1617 /* If a node has already been streamed out, make sure that
1618 we don't write it more than once. Otherwise, the reader
1619 will instantiate two different nodes for the same object. */
1620 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1621 streamer_write_uhwi (ob, ix);
1622 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1623 lto_tree_code_to_tag (TREE_CODE (expr)));
1624 lto_stats.num_pickle_refs_output++;
1626 else
1628 /* This is the first time we see EXPR, write all reachable
1629 trees to OB. */
1630 static bool in_dfs_walk;
1632 /* Protect against recursion, which would mean a disconnect between
1633 the tree edges we walk in the DFS walk and the edges
1634 we stream out. */
1635 gcc_assert (!in_dfs_walk);
1637 /* Start the DFS walk. */
1638 /* Save ob state ... */
1639 /* let's see ... */
1640 in_dfs_walk = true;
1641 DFS (ob, expr, ref_p, this_ref_p, false);
1642 in_dfs_walk = false;
1644 /* Finally append a reference to the tree we were writing.
1645 ??? If expr ended up as a singleton we could have
1646 inlined it here and avoided outputting a reference. */
1647 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1648 gcc_assert (existed_p);
1649 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1650 streamer_write_uhwi (ob, ix);
1651 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1652 lto_tree_code_to_tag (TREE_CODE (expr)));
1653 lto_stats.num_pickle_refs_output++;
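
/* The pickle-reference handling above is the usual memoised-serialisation
   pattern: the first occurrence of a node writes its body and claims a
   cache slot, every later occurrence writes only that slot's index, so the
   reader reconstructs one shared node rather than several copies.  A
   self-contained sketch of the pattern (Node and Writer are illustrative
   names; it assumes an acyclic graph for brevity, whereas the code above
   copes with cycles through the SCC machinery):  */

#include <unordered_map>
#include <vector>

struct Node { int payload; std::vector<const Node *> kids; };

struct Writer
{
  std::vector<int> stream;                        /* Stands in for the LTO stream.  */
  std::unordered_map<const Node *, unsigned> cache;

  void write (const Node *n)
  {
    std::unordered_map<const Node *, unsigned>::iterator it = cache.find (n);
    if (it != cache.end ())
      {
	/* Already pickled: emit a back-reference, never a second body.  */
	stream.push_back (-1);
	stream.push_back ((int) it->second);
	return;
      }
    /* First time: claim the next cache slot, then write the body.  */
    unsigned slot = (unsigned) cache.size ();
    cache[n] = slot;
    stream.push_back (n->payload);
    stream.push_back ((int) n->kids.size ());
    for (size_t i = 0; i < n->kids.size (); ++i)
      write (n->kids[i]);
  }
};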
1658 /* Output to OB a list of try/catch handlers starting with FIRST. */
1660 static void
1661 output_eh_try_list (struct output_block *ob, eh_catch first)
1663 eh_catch n;
1665 for (n = first; n; n = n->next_catch)
1667 streamer_write_record_start (ob, LTO_eh_catch);
1668 stream_write_tree (ob, n->type_list, true);
1669 stream_write_tree (ob, n->filter_list, true);
1670 stream_write_tree (ob, n->label, true);
1673 streamer_write_record_start (ob, LTO_null);
1677 /* Output EH region R to OB. The region's index within its function's
1678 EH region array is emitted so that EH region sharing can be detected. */
1681 static void
1682 output_eh_region (struct output_block *ob, eh_region r)
1684 enum LTO_tags tag;
1686 if (r == NULL)
1688 streamer_write_record_start (ob, LTO_null);
1689 return;
1692 if (r->type == ERT_CLEANUP)
1693 tag = LTO_ert_cleanup;
1694 else if (r->type == ERT_TRY)
1695 tag = LTO_ert_try;
1696 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1697 tag = LTO_ert_allowed_exceptions;
1698 else if (r->type == ERT_MUST_NOT_THROW)
1699 tag = LTO_ert_must_not_throw;
1700 else
1701 gcc_unreachable ();
1703 streamer_write_record_start (ob, tag);
1704 streamer_write_hwi (ob, r->index);
1706 if (r->outer)
1707 streamer_write_hwi (ob, r->outer->index);
1708 else
1709 streamer_write_zero (ob);
1711 if (r->inner)
1712 streamer_write_hwi (ob, r->inner->index);
1713 else
1714 streamer_write_zero (ob);
1716 if (r->next_peer)
1717 streamer_write_hwi (ob, r->next_peer->index);
1718 else
1719 streamer_write_zero (ob);
1721 if (r->type == ERT_TRY)
1723 output_eh_try_list (ob, r->u.eh_try.first_catch);
1725 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1727 stream_write_tree (ob, r->u.allowed.type_list, true);
1728 stream_write_tree (ob, r->u.allowed.label, true);
1729 streamer_write_uhwi (ob, r->u.allowed.filter);
1731 else if (r->type == ERT_MUST_NOT_THROW)
1733 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1734 bitpack_d bp = bitpack_create (ob->main_stream);
1735 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1736 streamer_write_bitpack (&bp);
1739 if (r->landing_pads)
1740 streamer_write_hwi (ob, r->landing_pads->index);
1741 else
1742 streamer_write_zero (ob);
1746 /* Output landing pad LP to OB. */
1748 static void
1749 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1751 if (lp == NULL)
1753 streamer_write_record_start (ob, LTO_null);
1754 return;
1757 streamer_write_record_start (ob, LTO_eh_landing_pad);
1758 streamer_write_hwi (ob, lp->index);
1759 if (lp->next_lp)
1760 streamer_write_hwi (ob, lp->next_lp->index);
1761 else
1762 streamer_write_zero (ob);
1764 if (lp->region)
1765 streamer_write_hwi (ob, lp->region->index);
1766 else
1767 streamer_write_zero (ob);
1769 stream_write_tree (ob, lp->post_landing_pad, true);
1773 /* Output the existing eh_table to OB. */
1775 static void
1776 output_eh_regions (struct output_block *ob, struct function *fn)
1778 if (fn->eh && fn->eh->region_tree)
1780 unsigned i;
1781 eh_region eh;
1782 eh_landing_pad lp;
1783 tree ttype;
1785 streamer_write_record_start (ob, LTO_eh_table);
1787 /* Emit the index of the root of the EH region tree. */
1788 streamer_write_hwi (ob, fn->eh->region_tree->index);
1790 /* Emit all the EH regions in the region array. */
1791 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1792 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1793 output_eh_region (ob, eh);
1795 /* Emit all landing pads. */
1796 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1797 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1798 output_eh_lp (ob, lp);
1800 /* Emit all the runtime type data. */
1801 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1802 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1803 stream_write_tree (ob, ttype, true);
1805 /* Emit the table of action chains. */
1806 if (targetm.arm_eabi_unwinder)
1808 tree t;
1809 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1810 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1811 stream_write_tree (ob, t, true);
1813 else
1815 uchar c;
1816 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1817 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1818 streamer_write_char_stream (ob->main_stream, c);
1822 /* The LTO_null either terminates the record or indicates that there
1823 are no eh_records at all. */
1824 streamer_write_record_start (ob, LTO_null);
1828 /* Output all of the active ssa names to the ssa_names stream. */
1830 static void
1831 output_ssa_names (struct output_block *ob, struct function *fn)
1833 unsigned int i, len;
1835 len = vec_safe_length (SSANAMES (fn));
1836 streamer_write_uhwi (ob, len);
1838 for (i = 1; i < len; i++)
1840 tree ptr = (*SSANAMES (fn))[i];
1842 if (ptr == NULL_TREE
1843 || SSA_NAME_IN_FREE_LIST (ptr)
1844 || virtual_operand_p (ptr)
1845 /* Simply skip unreleased SSA names. */
1846 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1847 && (! SSA_NAME_DEF_STMT (ptr)
1848 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1849 continue;
1851 streamer_write_uhwi (ob, i);
1852 streamer_write_char_stream (ob->main_stream,
1853 SSA_NAME_IS_DEFAULT_DEF (ptr));
1854 if (SSA_NAME_VAR (ptr))
1855 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1856 else
1857 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1858 stream_write_tree (ob, TREE_TYPE (ptr), true);
1861 streamer_write_zero (ob);
1866 /* Output the cfg. */
1868 static void
1869 output_cfg (struct output_block *ob, struct function *fn)
1871 struct lto_output_stream *tmp_stream = ob->main_stream;
1872 basic_block bb;
1874 ob->main_stream = ob->cfg_stream;
1876 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1877 profile_status_for_fn (fn));
1879 /* Output the number of the highest basic block. */
1880 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1882 FOR_ALL_BB_FN (bb, fn)
1884 edge_iterator ei;
1885 edge e;
1887 streamer_write_hwi (ob, bb->index);
1889 /* Output the successors and the edge flags. */
1890 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1891 FOR_EACH_EDGE (e, ei, bb->succs)
1893 streamer_write_uhwi (ob, e->dest->index);
1894 e->probability.stream_out (ob);
1895 streamer_write_uhwi (ob, e->flags);
1899 streamer_write_hwi (ob, -1);
1901 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1902 while (bb->next_bb)
1904 streamer_write_hwi (ob, bb->next_bb->index);
1905 bb = bb->next_bb;
1908 streamer_write_hwi (ob, -1);
1910 /* ??? The cfgloop interface is tied to cfun. */
1911 gcc_assert (cfun == fn);
1913 /* Output the number of loops. */
1914 streamer_write_uhwi (ob, number_of_loops (fn));
1916 /* Output each loop, skipping the tree root which has number zero. */
1917 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1919 struct loop *loop = get_loop (fn, i);
1921 /* Write the index of the loop header. That's enough to rebuild
1922 the loop tree on the reader side. Stream -1 for an unused
1923 loop entry. */
1924 if (!loop)
1926 streamer_write_hwi (ob, -1);
1927 continue;
1929 else
1930 streamer_write_hwi (ob, loop->header->index);
1932 /* Write everything copy_loop_info copies. */
1933 streamer_write_enum (ob->main_stream,
1934 loop_estimation, EST_LAST, loop->estimate_state);
1935 streamer_write_hwi (ob, loop->any_upper_bound);
1936 if (loop->any_upper_bound)
1937 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1938 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1939 if (loop->any_likely_upper_bound)
1940 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1941 streamer_write_hwi (ob, loop->any_estimate);
1942 if (loop->any_estimate)
1943 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1945 /* Write OMP SIMD related info. */
1946 streamer_write_hwi (ob, loop->safelen);
1947 streamer_write_hwi (ob, loop->unroll);
1948 streamer_write_hwi (ob, loop->dont_vectorize);
1949 streamer_write_hwi (ob, loop->force_vectorize);
1950 stream_write_tree (ob, loop->simduid, true);
1953 ob->main_stream = tmp_stream;
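/* For reference, the cfg stream written above contains the profile
   status, the highest basic block number, and for every block its index,
   its successor count and, per edge, the destination index, probability
   and flags; a -1 index ends the block list.  The chain of block indices
   following the entry block comes next, again ended by -1, and finally
   the loop tree: the number of loops and, for each loop other than the
   root, either -1 for an unused entry or the header block index followed
   by the fields that copy_loop_info copies and the OMP SIMD related
   fields.  */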
1957 /* Create the header in the file using OB. If the section type is for
1958 a function, set FN to the decl for that function. */
1960 void
1961 produce_asm (struct output_block *ob, tree fn)
1963 enum lto_section_type section_type = ob->section_type;
1964 struct lto_function_header header;
1965 char *section_name;
1967 if (section_type == LTO_section_function_body)
1969 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1970 section_name = lto_get_section_name (section_type, name, NULL);
1972 else
1973 section_name = lto_get_section_name (section_type, NULL, NULL);
1975 lto_begin_section (section_name, !flag_wpa);
1976 free (section_name);
1978 /* The entire header stream is computed here. */
1979 memset (&header, 0, sizeof (struct lto_function_header));
1981 /* Write the header. */
1982 header.major_version = LTO_major_version;
1983 header.minor_version = LTO_minor_version;
1985 if (section_type == LTO_section_function_body)
1986 header.cfg_size = ob->cfg_stream->total_size;
1987 header.main_size = ob->main_stream->total_size;
1988 header.string_size = ob->string_stream->total_size;
1989 lto_write_data (&header, sizeof header);
1991 /* Put all of the gimple and the string table out to the asm file as a
1992 block of text. */
1993 if (section_type == LTO_section_function_body)
1994 lto_write_stream (ob->cfg_stream);
1995 lto_write_stream (ob->main_stream);
1996 lto_write_stream (ob->string_stream);
1998 lto_end_section ();
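/* The section emitted above is thus the lto_function_header followed
   immediately by the cfg stream (function bodies only), the main stream
   and the string stream.  As an illustrative sketch only (DATA is an
   assumed const char * holding the raw section bytes; the real readers
   live in lto-streamer-in.c), a consumer could locate the streams as:

     const struct lto_function_header *h
       = (const struct lto_function_header *) data;
     const char *cfg = data + sizeof (*h);
     const char *main_part = cfg + h->cfg_size;
     const char *strings = main_part + h->main_size;

   using the cfg_size, main_size and string_size fields filled in above
   (cfg_size stays zero for non-function sections, so the arithmetic
   still holds).  */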
2002 /* Output the base body of struct function FN using output block OB. */
2004 static void
2005 output_struct_function_base (struct output_block *ob, struct function *fn)
2007 struct bitpack_d bp;
2008 unsigned i;
2009 tree t;
2011 /* Output the static chain and non-local goto save area. */
2012 stream_write_tree (ob, fn->static_chain_decl, true);
2013 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2015 /* Output all the local variables in the function. */
2016 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2017 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2018 stream_write_tree (ob, t, true);
2020 /* Output current IL state of the function. */
2021 streamer_write_uhwi (ob, fn->curr_properties);
2023 /* Write all the attributes for FN. */
2024 bp = bitpack_create (ob->main_stream);
2025 bp_pack_value (&bp, fn->is_thunk, 1);
2026 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2027 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2028 bp_pack_value (&bp, fn->returns_struct, 1);
2029 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2030 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2031 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2032 bp_pack_value (&bp, fn->after_inlining, 1);
2033 bp_pack_value (&bp, fn->stdarg, 1);
2034 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2035 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2036 bp_pack_value (&bp, fn->calls_alloca, 1);
2037 bp_pack_value (&bp, fn->calls_setjmp, 1);
2038 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2039 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2040 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2041 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2042 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2044 /* Output the function start and end loci. */
2045 stream_output_location (ob, &bp, fn->function_start_locus);
2046 stream_output_location (ob, &bp, fn->function_end_locus);
2048 streamer_write_bitpack (&bp);
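/* For reference, the bitpack created above packs, in order, the fifteen
   single-bit function flags listed above, the 8-bit va_list_fpr_size and
   va_list_gpr_size values, last_clique in sizeof (short) * 8 bits, and
   whatever stream_output_location adds for the function start and end
   loci, before the whole pack is flushed with streamer_write_bitpack.  */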
2052 /* Collect all leaf BLOCKs below ROOT into LEAFS. */
2054 static void
2055 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2057 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2058 if (! BLOCK_SUBBLOCKS (root))
2059 leafs.safe_push (root);
2060 else
2061 collect_block_tree_leafs (root, leafs);
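/* For example, for a block tree ROOT -> { A -> { C -> { E }, D }, B }
   where E, D and B have no subblocks, the recursion above pushes E, D
   and B (in that order), i.e. exactly the leaf BLOCKs below ROOT.  */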
2064 /* Output the body of function NODE->DECL. */
2066 static void
2067 output_function (struct cgraph_node *node)
2069 tree function;
2070 struct function *fn;
2071 basic_block bb;
2072 struct output_block *ob;
2074 function = node->decl;
2075 fn = DECL_STRUCT_FUNCTION (function);
2076 ob = create_output_block (LTO_section_function_body);
2078 clear_line_info (ob);
2079 ob->symbol = node;
2081 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2083 /* Set current_function_decl and cfun. */
2084 push_cfun (fn);
2086 /* Make string 0 be a NULL string. */
2087 streamer_write_char_stream (ob->string_stream, 0);
2089 streamer_write_record_start (ob, LTO_function);
2091 /* Output decls for parameters and args. */
2092 stream_write_tree (ob, DECL_RESULT (function), true);
2093 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2095 /* Output debug args if available. */
2096 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2097 if (! debugargs)
2098 streamer_write_uhwi (ob, 0);
2099 else
2101 streamer_write_uhwi (ob, (*debugargs)->length ());
2102 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2103 stream_write_tree (ob, (**debugargs)[i], true);
2106 /* Output DECL_INITIAL for the function, which contains the tree of
2107 lexical scopes. */
2108 stream_write_tree (ob, DECL_INITIAL (function), true);
2109 /* As we do not recurse into BLOCK_SUBBLOCKS but only follow
2110 BLOCK_SUPERCONTEXT, collect the block tree leafs and stream those. */
2111 auto_vec<tree> block_tree_leafs;
2112 if (DECL_INITIAL (function))
2113 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2114 streamer_write_uhwi (ob, block_tree_leafs.length ());
2115 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2116 stream_write_tree (ob, block_tree_leafs[i], true);
2118 /* We also stream abstract functions where we stream only stuff needed for
2119 debug info. */
2120 if (gimple_has_body_p (function))
2122 streamer_write_uhwi (ob, 1);
2123 output_struct_function_base (ob, fn);
2125 /* Output all the SSA names used in the function. */
2126 output_ssa_names (ob, fn);
2128 /* Output any exception handling regions. */
2129 output_eh_regions (ob, fn);
2132 /* We will renumber the statements. The code that does this uses
2133 the same ordering that we use for serializing them so we can use
2134 the same code on the other end and not have to write out the
2135 statement numbers. Non-virtual PHIs get UIDs here; virtual PHIs are
2136 numbered afterwards because they get re-computed on-the-fly, which
2137 would otherwise make the numbers inconsistent. */
2138 set_gimple_stmt_max_uid (cfun, 0);
2139 FOR_ALL_BB_FN (bb, cfun)
2141 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2142 gsi_next (&gsi))
2144 gphi *stmt = gsi.phi ();
2146 /* Virtual PHIs are not going to be streamed. */
2147 if (!virtual_operand_p (gimple_phi_result (stmt)))
2148 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2150 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2151 gsi_next (&gsi))
2153 gimple *stmt = gsi_stmt (gsi);
2154 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2157 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2158 virtual phis now. */
2159 FOR_ALL_BB_FN (bb, cfun)
2161 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2162 gsi_next (&gsi))
2164 gphi *stmt = gsi.phi ();
2165 if (virtual_operand_p (gimple_phi_result (stmt)))
2166 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2170 /* Output the code for the function. */
2171 FOR_ALL_BB_FN (bb, fn)
2172 output_bb (ob, bb, fn);
2174 /* The terminator for this function. */
2175 streamer_write_record_start (ob, LTO_null);
2177 output_cfg (ob, fn);
2179 pop_cfun ();
2181 else
2182 streamer_write_uhwi (ob, 0);
2184 /* Create a section to hold the pickled output of this function. */
2185 produce_asm (ob, function);
2187 destroy_output_block (ob);
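/* To summarize, a function body section written above contains: the
   LTO_function tag, DECL_RESULT, the DECL_ARGUMENTS chain, the debug
   args (a count followed by the trees), DECL_INITIAL plus the collected
   block tree leafs, and then either a 1 followed by the struct function
   base, the SSA names, the EH regions, every basic block and a
   terminating LTO_null (with the CFG going to the separate cfg stream),
   or a 0 for abstract functions streamed only for debug info.  */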
2190 /* Output the initializer (constructor) of variable NODE->DECL. */
2192 static void
2193 output_constructor (struct varpool_node *node)
2195 tree var = node->decl;
2196 struct output_block *ob;
2198 ob = create_output_block (LTO_section_function_body);
2200 clear_line_info (ob);
2201 ob->symbol = node;
2203 /* Make string 0 be a NULL string. */
2204 streamer_write_char_stream (ob->string_stream, 0);
2206 /* Output DECL_INITIAL for the variable, which holds the constructor
2207 to be streamed. */
2208 stream_write_tree (ob, DECL_INITIAL (var), true);
2210 /* Create a section to hold the pickled output of this variable. */
2211 produce_asm (ob, var);
2213 destroy_output_block (ob);
2217 /* Emit toplevel asms. */
2219 void
2220 lto_output_toplevel_asms (void)
2222 struct output_block *ob;
2223 struct asm_node *can;
2224 char *section_name;
2225 struct lto_simple_header_with_strings header;
2227 if (!symtab->first_asm_symbol ())
2228 return;
2230 ob = create_output_block (LTO_section_asm);
2232 /* Make string 0 be a NULL string. */
2233 streamer_write_char_stream (ob->string_stream, 0);
2235 for (can = symtab->first_asm_symbol (); can; can = can->next)
2237 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2238 streamer_write_hwi (ob, can->order);
2241 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2243 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2244 lto_begin_section (section_name, !flag_wpa);
2245 free (section_name);
2247 /* The entire header stream is computed here. */
2248 memset (&header, 0, sizeof (header));
2250 /* Write the header. */
2251 header.major_version = LTO_major_version;
2252 header.minor_version = LTO_minor_version;
2254 header.main_size = ob->main_stream->total_size;
2255 header.string_size = ob->string_stream->total_size;
2256 lto_write_data (&header, sizeof header);
2258 /* Put all of the gimple and the string table out to the asm file as a
2259 block of text. */
2260 lto_write_stream (ob->main_stream);
2261 lto_write_stream (ob->string_stream);
2263 lto_end_section ();
2265 destroy_output_block (ob);
2269 /* Copy the function body or variable constructor of NODE without deserializing. */
2271 static void
2272 copy_function_or_variable (struct symtab_node *node)
2274 tree function = node->decl;
2275 struct lto_file_decl_data *file_data = node->lto_file_data;
2276 const char *data;
2277 size_t len;
2278 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2279 char *section_name =
2280 lto_get_section_name (LTO_section_function_body, name, NULL);
2281 size_t i, j;
2282 struct lto_in_decl_state *in_state;
2283 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2285 lto_begin_section (section_name, false);
2286 free (section_name);
2288 /* We may have renamed the declaration, e.g., a static function. */
2289 name = lto_get_decl_name_mapping (file_data, name);
2291 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2292 name, &len);
2293 gcc_assert (data);
2295 /* Do a bit copy of the function body. */
2296 lto_write_raw_data (data, len);
2298 /* Copy decls. */
2299 in_state =
2300 lto_get_function_in_decl_state (node->lto_file_data, function);
2301 gcc_assert (in_state);
2302 out_state->compressed = in_state->compressed;
2304 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2306 size_t n = vec_safe_length (in_state->streams[i]);
2307 vec<tree, va_gc> *trees = in_state->streams[i];
2308 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2310 /* The out state must have the same indices as the in state,
2311 so just copy the vector. All the encoders in the in state
2312 must be empty when we reach here. */
2313 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2314 encoder->trees.reserve_exact (n);
2315 for (j = 0; j < n; j++)
2316 encoder->trees.safe_push ((*trees)[j]);
2319 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2320 data, len);
2321 lto_end_section ();
2324 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2326 static tree
2327 wrap_refs (tree *tp, int *ws, void *)
2329 tree t = *tp;
2330 if (handled_component_p (t)
2331 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2332 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2334 tree decl = TREE_OPERAND (t, 0);
2335 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2336 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2337 build1 (ADDR_EXPR, ptrtype, decl),
2338 build_int_cst (ptrtype, 0));
2339 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2340 *ws = 0;
2342 else if (TREE_CODE (t) == CONSTRUCTOR)
2344 else if (!EXPR_P (t))
2345 *ws = 0;
2346 return NULL_TREE;
2349 /* Remove functions that are no longer used from offload_funcs, and mark the
2350 remaining ones with DECL_PRESERVE_P. */
2352 static void
2353 prune_offload_funcs (void)
2355 if (!offload_funcs)
2356 return;
2358 unsigned int write_index = 0;
2359 for (unsigned read_index = 0; read_index < vec_safe_length (offload_funcs);
2360 read_index++)
2362 tree fn_decl = (*offload_funcs)[read_index];
2363 bool remove_p = cgraph_node::get (fn_decl) == NULL;
2364 if (remove_p)
2365 continue;
2367 DECL_PRESERVE_P (fn_decl) = 1;
2369 if (write_index != read_index)
2370 (*offload_funcs)[write_index] = (*offload_funcs)[read_index];
2372 write_index++;
2375 offload_funcs->truncate (write_index);
2378 /* Main entry point from the pass manager. */
2380 void
2381 lto_output (void)
2383 struct lto_out_decl_state *decl_state;
2384 bitmap output = NULL;
2385 int i, n_nodes;
2386 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2388 prune_offload_funcs ();
2390 if (flag_checking)
2391 output = lto_bitmap_alloc ();
2393 /* Initialize the streamer. */
2394 lto_streamer_init ();
2396 n_nodes = lto_symtab_encoder_size (encoder);
2397 /* Process the functions with bodies and the variables with initializers. */
2398 for (i = 0; i < n_nodes; i++)
2400 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2401 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2403 if (lto_symtab_encoder_encode_body_p (encoder, node)
2404 && !node->alias
2405 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2407 if (flag_checking)
2409 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2410 bitmap_set_bit (output, DECL_UID (node->decl));
2412 decl_state = lto_new_out_decl_state ();
2413 lto_push_out_decl_state (decl_state);
2414 if (gimple_has_body_p (node->decl) || !flag_wpa
2415 /* Thunks have no body but they may be synthesized
2416 at WPA time. */
2417 || DECL_ARGUMENTS (node->decl))
2418 output_function (node);
2419 else
2420 copy_function_or_variable (node);
2421 gcc_assert (lto_get_out_decl_state () == decl_state);
2422 lto_pop_out_decl_state ();
2423 lto_record_function_out_decl_state (node->decl, decl_state);
2426 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2428 /* Wrap symbol references inside the ctor in a type
2429 preserving MEM_REF. */
2430 tree ctor = DECL_INITIAL (node->decl);
2431 if (ctor && !in_lto_p)
2432 walk_tree (&ctor, wrap_refs, NULL, NULL);
2433 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2434 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2435 && !node->alias)
2437 timevar_push (TV_IPA_LTO_CTORS_OUT);
2438 if (flag_checking)
2440 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2441 bitmap_set_bit (output, DECL_UID (node->decl));
2443 decl_state = lto_new_out_decl_state ();
2444 lto_push_out_decl_state (decl_state);
2445 if (DECL_INITIAL (node->decl) != error_mark_node
2446 || !flag_wpa)
2447 output_constructor (node);
2448 else
2449 copy_function_or_variable (node);
2450 gcc_assert (lto_get_out_decl_state () == decl_state);
2451 lto_pop_out_decl_state ();
2452 lto_record_function_out_decl_state (node->decl, decl_state);
2453 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2458 /* Emit the callgraph after emitting function bodies. This needs to
2459 be done now to make sure that all the statements in every function
2460 have been renumbered so that edges can be associated with call
2461 statements using the statement UIDs. */
2462 output_symtab ();
2464 output_offload_tables ();
2466 #if CHECKING_P
2467 lto_bitmap_free (output);
2468 #endif
2471 /* Write each node encoded by ENCODER to OB, as well as those reachable
2472 from it and required for correct representation of its semantics.
2473 Each node in ENCODER must be a global declaration or a type. A node
2474 is written only once, even if it appears multiple times in the
2475 vector. Certain transitively-reachable nodes, such as those
2476 representing expressions, may be duplicated, but such nodes
2477 must not appear in ENCODER itself. */
2479 static void
2480 write_global_stream (struct output_block *ob,
2481 struct lto_tree_ref_encoder *encoder)
2483 tree t;
2484 size_t index;
2485 const size_t size = lto_tree_ref_encoder_size (encoder);
2487 for (index = 0; index < size; index++)
2489 t = lto_tree_ref_encoder_get_tree (encoder, index);
2490 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2491 stream_write_tree (ob, t, false);
2496 /* Write a sequence of indices into the globals vector corresponding
2497 to the trees in ENCODER. These are used by the reader to map the
2498 indices used to refer to global entities within function bodies to
2499 their referents. */
2501 static void
2502 write_global_references (struct output_block *ob,
2503 struct lto_tree_ref_encoder *encoder)
2505 tree t;
2506 uint32_t index;
2507 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2509 /* Write size and slot indexes as 32-bit unsigned numbers. */
2510 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2511 data[0] = size;
2513 for (index = 0; index < size; index++)
2515 unsigned slot_num;
2517 t = lto_tree_ref_encoder_get_tree (encoder, index);
2518 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2519 gcc_assert (slot_num != (unsigned)-1);
2520 data[index + 1] = slot_num;
2523 lto_write_data (data, sizeof (int32_t) * (size + 1));
2524 free (data);
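/* For reference, each reference block written above is a 32-bit element
   count followed by that many 32-bit slot indexes into the globals
   vector; lto_out_decl_state_written_size below accounts for exactly
   these size + 1 32-bit words per stream.  */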
2528 /* Write all the streams in an lto_out_decl_state STATE using
2529 output block OB. */
2531 void
2532 lto_output_decl_state_streams (struct output_block *ob,
2533 struct lto_out_decl_state *state)
2535 int i;
2537 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2538 write_global_stream (ob, &state->streams[i]);
2542 /* Write all the references in an lto_out_decl_state STATE using
2543 output block OB. */
2545 void
2546 lto_output_decl_state_refs (struct output_block *ob,
2547 struct lto_out_decl_state *state)
2549 unsigned i;
2550 unsigned ref;
2551 tree decl;
2553 /* Write a reference to FUNCTION_DECL. If there is no function,
2554 write reference to void_type_node. */
2555 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2556 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2557 gcc_assert (ref != (unsigned)-1);
2558 ref = ref * 2 + (state->compressed ? 1 : 0);
2559 lto_write_data (&ref, sizeof (uint32_t));
2561 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2562 write_global_references (ob, &state->streams[i]);
2566 /* Return the written size of STATE. */
2568 static size_t
2569 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2571 int i;
2572 size_t size;
2574 size = sizeof (int32_t); /* fn_ref. */
2575 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2577 size += sizeof (int32_t); /* vector size. */
2578 size += (lto_tree_ref_encoder_size (&state->streams[i])
2579 * sizeof (int32_t));
2581 return size;
2585 /* Write the symbol table entry for symbol T, using CACHE to look up its
2586 slot number. SEEN specifies the symbols written so far. */
2588 static void
2589 write_symbol (struct streamer_tree_cache_d *cache,
2590 tree t, hash_set<const char *> *seen, bool alias)
2592 const char *name;
2593 enum gcc_plugin_symbol_kind kind;
2594 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2595 unsigned slot_num;
2596 uint64_t size;
2597 const char *comdat;
2598 unsigned char c;
2600 /* None of the following kinds of symbols are needed in the
2601 symbol table. */
2602 if (!TREE_PUBLIC (t)
2603 || is_builtin_fn (t)
2604 || DECL_ABSTRACT_P (t)
2605 || (VAR_P (t) && DECL_HARD_REGISTER (t)))
2606 return;
2608 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2610 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2612 /* This behaves like assemble_name_raw in varasm.c, performing the
2613 same name manipulations that ASM_OUTPUT_LABELREF does. */
2614 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2616 if (seen->add (name))
2617 return;
2619 streamer_tree_cache_lookup (cache, t, &slot_num);
2620 gcc_assert (slot_num != (unsigned)-1);
2622 if (DECL_EXTERNAL (t))
2624 if (DECL_WEAK (t))
2625 kind = GCCPK_WEAKUNDEF;
2626 else
2627 kind = GCCPK_UNDEF;
2629 else
2631 if (DECL_WEAK (t))
2632 kind = GCCPK_WEAKDEF;
2633 else if (DECL_COMMON (t))
2634 kind = GCCPK_COMMON;
2635 else
2636 kind = GCCPK_DEF;
2638 /* When something is defined, it should have a node attached. */
2639 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2640 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2641 || (cgraph_node::get (t)
2642 && cgraph_node::get (t)->definition));
2645 /* Imitate what default_elf_asm_output_external does.
2646 When a symbol is external, we need to output it with DEFAULT visibility
2647 when compiling with -fvisibility=default, but with HIDDEN visibility
2648 when the symbol has attribute (visibility("hidden")) specified.
2649 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2650 right. */
2652 if (DECL_EXTERNAL (t)
2653 && !targetm.binds_local_p (t))
2654 visibility = GCCPV_DEFAULT;
2655 else
2656 switch (DECL_VISIBILITY (t))
2658 case VISIBILITY_DEFAULT:
2659 visibility = GCCPV_DEFAULT;
2660 break;
2661 case VISIBILITY_PROTECTED:
2662 visibility = GCCPV_PROTECTED;
2663 break;
2664 case VISIBILITY_HIDDEN:
2665 visibility = GCCPV_HIDDEN;
2666 break;
2667 case VISIBILITY_INTERNAL:
2668 visibility = GCCPV_INTERNAL;
2669 break;
2672 if (kind == GCCPK_COMMON
2673 && DECL_SIZE_UNIT (t)
2674 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2675 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2676 else
2677 size = 0;
2679 if (DECL_ONE_ONLY (t))
2680 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2681 else
2682 comdat = "";
2684 lto_write_data (name, strlen (name) + 1);
2685 lto_write_data (comdat, strlen (comdat) + 1);
2686 c = (unsigned char) kind;
2687 lto_write_data (&c, 1);
2688 c = (unsigned char) visibility;
2689 lto_write_data (&c, 1);
2690 lto_write_data (&size, 8);
2691 lto_write_data (&slot_num, 4);
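/* For reference, each entry written above is the NUL-terminated
   assembler name, the NUL-terminated comdat group name (empty if there
   is none), one byte each for the symbol kind and visibility, the 8-byte
   size (nonzero only for commons with a constant size) and the 4-byte
   slot number of the decl in the streamer cache.  This matches what the
   LTO linker plugin parses from the symbol table section.  */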
2694 /* Return true if NODE should appear in the plugin symbol table. */
2696 bool
2697 output_symbol_p (symtab_node *node)
2699 struct cgraph_node *cnode;
2700 if (!node->real_symbol_p ())
2701 return false;
2702 /* We keep external functions in symtab for the sake of inlining
2703 and devirtualization. We do not want to see them in the symbol table
2704 as references unless they are really used. */
2705 cnode = dyn_cast <cgraph_node *> (node);
2706 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2707 && cnode->callers)
2708 return true;
2710 /* Ignore all references from initializers of external variables - they are
2711 not really part of the compilation unit until they are used by folding.
2712 Some symbols, like references to external construction vtables, cannot
2713 be referred to at all. We decide this in can_refer_decl_in_current_unit_p. */
2714 if (!node->definition || DECL_EXTERNAL (node->decl))
2716 int i;
2717 struct ipa_ref *ref;
2718 for (i = 0; node->iterate_referring (i, ref); i++)
2720 if (ref->use == IPA_REF_ALIAS)
2721 continue;
2722 if (is_a <cgraph_node *> (ref->referring))
2723 return true;
2724 if (!DECL_EXTERNAL (ref->referring->decl))
2725 return true;
2727 return false;
2729 return true;
2733 /* Write an IL symbol table to OB, covering the symbols recorded in
2734 the symtab node encoder of OB's decl state. */
2736 static void
2737 produce_symtab (struct output_block *ob)
2739 struct streamer_tree_cache_d *cache = ob->writer_cache;
2740 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2741 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2742 lto_symtab_encoder_iterator lsei;
2744 lto_begin_section (section_name, false);
2745 free (section_name);
2747 hash_set<const char *> seen;
2749 /* Write the symbol table.
2750 First write everything defined and then all declarations.
2751 This is necessary to handle cases where we have duplicated symbols. */
2752 for (lsei = lsei_start (encoder);
2753 !lsei_end_p (lsei); lsei_next (&lsei))
2755 symtab_node *node = lsei_node (lsei);
2757 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2758 continue;
2759 write_symbol (cache, node->decl, &seen, false);
2761 for (lsei = lsei_start (encoder);
2762 !lsei_end_p (lsei); lsei_next (&lsei))
2764 symtab_node *node = lsei_node (lsei);
2766 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2767 continue;
2768 write_symbol (cache, node->decl, &seen, false);
2771 lto_end_section ();
2775 /* Init the streamer_mode_table for output, where we collect info on what
2776 machine_mode values have been streamed. */
2777 void
2778 lto_output_init_mode_table (void)
2780 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2784 /* Write the mode table. */
2785 static void
2786 lto_write_mode_table (void)
2788 struct output_block *ob;
2789 ob = create_output_block (LTO_section_mode_table);
2790 bitpack_d bp = bitpack_create (ob->main_stream);
2792 /* Ensure that for GET_MODE_INNER (m) != m we have
2793 also the inner mode marked. */
2794 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2795 if (streamer_mode_table[i])
2797 machine_mode m = (machine_mode) i;
2798 machine_mode inner_m = GET_MODE_INNER (m);
2799 if (inner_m != m)
2800 streamer_mode_table[(int) inner_m] = 1;
2802 /* First stream modes that have GET_MODE_INNER (m) == m,
2803 so that we can refer to them afterwards. */
2804 for (int pass = 0; pass < 2; pass++)
2805 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2806 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2808 machine_mode m = (machine_mode) i;
2809 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2810 continue;
2811 bp_pack_value (&bp, m, 8);
2812 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2813 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
2814 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
2815 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2816 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
2817 switch (GET_MODE_CLASS (m))
2819 case MODE_FRACT:
2820 case MODE_UFRACT:
2821 case MODE_ACCUM:
2822 case MODE_UACCUM:
2823 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2824 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2825 break;
2826 case MODE_FLOAT:
2827 case MODE_DECIMAL_FLOAT:
2828 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2829 break;
2830 default:
2831 break;
2833 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2835 bp_pack_value (&bp, VOIDmode, 8);
2837 streamer_write_bitpack (&bp);
2839 char *section_name
2840 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2841 lto_begin_section (section_name, !flag_wpa);
2842 free (section_name);
2844 /* The entire header stream is computed here. */
2845 struct lto_simple_header_with_strings header;
2846 memset (&header, 0, sizeof (header));
2848 /* Write the header. */
2849 header.major_version = LTO_major_version;
2850 header.minor_version = LTO_minor_version;
2852 header.main_size = ob->main_stream->total_size;
2853 header.string_size = ob->string_stream->total_size;
2854 lto_write_data (&header, sizeof header);
2856 /* Put all of the gimple and the string table out to the asm file as a
2857 block of text. */
2858 lto_write_stream (ob->main_stream);
2859 lto_write_stream (ob->string_stream);
2861 lto_end_section ();
2862 destroy_output_block (ob);
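/* For reference, each mode entry packed above is the 8-bit mode number,
   its class, size, precision, inner mode and number of units, then the
   ibit/fbit values for fixed-point classes or the real format name for
   float classes, and finally the mode name; a VOIDmode entry terminates
   the table.  Modes whose inner mode differs from the mode itself are
   streamed in the second pass, after their inner mode.  */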
2866 /* This pass is run after all of the functions are serialized and all
2867 of the IPA passes have written their serialized forms. This pass
2868 causes the vector of all of the global decls and types used from
2869 this file to be written into a section that can then be read back in
2870 to recover them on the other side. */
2872 void
2873 produce_asm_for_decls (void)
2875 struct lto_out_decl_state *out_state;
2876 struct lto_out_decl_state *fn_out_state;
2877 struct lto_decl_header header;
2878 char *section_name;
2879 struct output_block *ob;
2880 unsigned idx, num_fns;
2881 size_t decl_state_size;
2882 int32_t num_decl_states;
2884 ob = create_output_block (LTO_section_decls);
2886 memset (&header, 0, sizeof (struct lto_decl_header));
2888 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2889 lto_begin_section (section_name, !flag_wpa);
2890 free (section_name);
2892 /* Make string 0 be a NULL string. */
2893 streamer_write_char_stream (ob->string_stream, 0);
2895 gcc_assert (!alias_pairs);
2897 /* Get rid of the global decl state hash tables to save some memory. */
2898 out_state = lto_get_out_decl_state ();
2899 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2900 if (out_state->streams[i].tree_hash_table)
2902 delete out_state->streams[i].tree_hash_table;
2903 out_state->streams[i].tree_hash_table = NULL;
2906 /* Write the global symbols. */
2907 lto_output_decl_state_streams (ob, out_state);
2908 num_fns = lto_function_decl_states.length ();
2909 for (idx = 0; idx < num_fns; idx++)
2911 fn_out_state =
2912 lto_function_decl_states[idx];
2913 lto_output_decl_state_streams (ob, fn_out_state);
2916 header.major_version = LTO_major_version;
2917 header.minor_version = LTO_minor_version;
2919 /* Currently not used. This field would allow us to preallocate
2920 the globals vector, so that it need not be resized as it is extended. */
2921 header.num_nodes = -1;
2923 /* Compute the total size of all decl out states. */
2924 decl_state_size = sizeof (int32_t);
2925 decl_state_size += lto_out_decl_state_written_size (out_state);
2926 for (idx = 0; idx < num_fns; idx++)
2928 fn_out_state =
2929 lto_function_decl_states[idx];
2930 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2932 header.decl_state_size = decl_state_size;
2934 header.main_size = ob->main_stream->total_size;
2935 header.string_size = ob->string_stream->total_size;
2937 lto_write_data (&header, sizeof header);
2939 /* Write the main out-decl state, followed by out-decl states of
2940 functions. */
2941 num_decl_states = num_fns + 1;
2942 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2943 lto_output_decl_state_refs (ob, out_state);
2944 for (idx = 0; idx < num_fns; idx++)
2946 fn_out_state = lto_function_decl_states[idx];
2947 lto_output_decl_state_refs (ob, fn_out_state);
2950 lto_write_stream (ob->main_stream);
2951 lto_write_stream (ob->string_stream);
2953 lto_end_section ();
2955 /* Write the symbol table. It is used by the linker to determine dependencies
2956 and thus we can skip it for WPA. */
2957 if (!flag_wpa)
2958 produce_symtab (ob);
2960 /* Write command line opts. */
2961 lto_write_options ();
2963 /* Deallocate memory and clean up. */
2964 for (idx = 0; idx < num_fns; idx++)
2966 fn_out_state =
2967 lto_function_decl_states[idx];
2968 lto_delete_out_decl_state (fn_out_state);
2970 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2971 lto_function_decl_states.release ();
2972 destroy_output_block (ob);
2973 if (lto_stream_offload_p)
2974 lto_write_mode_table ();
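/* For reference, the LTO_section_decls section produced above consists of
   the lto_decl_header, a 32-bit count of decl states, the references of
   the global decl state followed by those of each per-function state, and
   then the main and string streams.  The symbol table (skipped for WPA),
   the streamed options and, when streaming for offload, the mode table
   each go into their own sections.  */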