1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2018 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
47 static void lto_write_tree (struct output_block*, tree, bool);
49 /* Clear the line info stored in output block OB. */
51 static void
52 clear_line_info (struct output_block *ob)
54 ob->current_file = NULL;
55 ob->current_line = 0;
56 ob->current_col = 0;
57 ob->current_sysp = false;
61 /* Create the output block and return it. SECTION_TYPE is
62 LTO_section_function_body or LTO_section_static_initializer. */
64 struct output_block *
65 create_output_block (enum lto_section_type section_type)
67 struct output_block *ob = XCNEW (struct output_block);
69 ob->section_type = section_type;
70 ob->decl_state = lto_get_out_decl_state ();
71 ob->main_stream = XCNEW (struct lto_output_stream);
72 ob->string_stream = XCNEW (struct lto_output_stream);
73 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
75 if (section_type == LTO_section_function_body)
76 ob->cfg_stream = XCNEW (struct lto_output_stream);
78 clear_line_info (ob);
80 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
81 gcc_obstack_init (&ob->obstack);
83 return ob;
87 /* Destroy the output block OB. */
89 void
90 destroy_output_block (struct output_block *ob)
92 enum lto_section_type section_type = ob->section_type;
94 delete ob->string_hash_table;
95 ob->string_hash_table = NULL;
97 free (ob->main_stream);
98 free (ob->string_stream);
99 if (section_type == LTO_section_function_body)
100 free (ob->cfg_stream);
102 streamer_tree_cache_delete (ob->writer_cache);
103 obstack_free (&ob->obstack, NULL);
105 free (ob);
109 /* Look up NODE in the type table and write the index for it to OB. */
111 static void
112 output_type_ref (struct output_block *ob, tree node)
114 streamer_write_record_start (ob, LTO_type_ref);
115 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
119 /* Return true if tree node T is written to various tables. For these
120 nodes, we sometimes want to write their physical representation
121 (via lto_output_tree), and sometimes we need to emit an index
122 reference into a table (via lto_output_tree_ref). */
124 static bool
125 tree_is_indexable (tree t)
127 /* Parameters and return values of functions of variably modified types
128 must go to global stream, because they may be used in the type
129 definition. */
130 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
131 && DECL_CONTEXT (t))
132 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
133 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
134 else if (TREE_CODE (t) == IMPORTED_DECL)
135 return false;
136 else if (((VAR_P (t) && !TREE_STATIC (t))
137 || TREE_CODE (t) == TYPE_DECL
138 || TREE_CODE (t) == CONST_DECL
139 || TREE_CODE (t) == NAMELIST_DECL)
140 && decl_function_context (t))
141 return false;
142 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
143 return false;
144 /* Variably modified types need to be streamed alongside function
145 bodies because they can refer to local entities. Together with
146 them we have to localize their members as well.
147 ??? In theory that includes non-FIELD_DECLs as well. */
148 else if (TYPE_P (t)
149 && variably_modified_type_p (t, NULL_TREE))
150 return false;
151 else if (TREE_CODE (t) == FIELD_DECL
152 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
153 return false;
154 else
155 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
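/* A few concrete cases of the above, added for orientation (summary only,
   derived from the checks above): a TREE_STATIC VAR_DECL, a FIELD_DECL of a
   fixed-size type, or an SSA_NAME is indexable and gets emitted as a
   reference into the per-file tables, while an automatic VAR_DECL, a
   DEBUG_EXPR_DECL, an IMPORTED_DECL, or a variably modified type is not
   indexable and is streamed together with the body that uses it.  */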
159 /* Output info about new location into bitpack BP.
160 After outputting bitpack, lto_output_location_data has
161 to be done to output actual data. */
163 void
164 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
165 location_t loc)
167 expanded_location xloc;
169 loc = LOCATION_LOCUS (loc);
170 bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
171 loc < RESERVED_LOCATION_COUNT
172 ? loc : RESERVED_LOCATION_COUNT);
173 if (loc < RESERVED_LOCATION_COUNT)
174 return;
176 xloc = expand_location (loc);
178 bp_pack_value (bp, ob->current_file != xloc.file, 1);
179 bp_pack_value (bp, ob->current_line != xloc.line, 1);
180 bp_pack_value (bp, ob->current_col != xloc.column, 1);
182 if (ob->current_file != xloc.file)
184 bp_pack_string (ob, bp, xloc.file, true);
185 bp_pack_value (bp, xloc.sysp, 1);
187 ob->current_file = xloc.file;
188 ob->current_sysp = xloc.sysp;
190 if (ob->current_line != xloc.line)
191 bp_pack_var_len_unsigned (bp, xloc.line);
192 ob->current_line = xloc.line;
194 if (ob->current_col != xloc.column)
195 bp_pack_var_len_unsigned (bp, xloc.column);
196 ob->current_col = xloc.column;
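/* Illustrative sketch, added for clarity (not part of the original code):
   since only fields that differ from ob->current_* are streamed, a run of
   locations (a.c,10,2) (a.c,11,2) (a.c,11,8) pays for the file name and
   sysp bit once and afterwards only for the changed line or column,
   assuming none of them is a reserved location.  A hypothetical caller
   wraps each location in a bitpack, mirroring the EH streaming further
   down in this file:

     bitpack_d bp = bitpack_create (ob->main_stream);
     lto_output_location (ob, &bp, loc);
     streamer_write_bitpack (&bp);
*/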
200 /* If EXPR is an indexable tree node, output a reference to it to
201 output block OB. Otherwise, output the physical representation of
202 EXPR to OB. */
204 static void
205 lto_output_tree_ref (struct output_block *ob, tree expr)
207 enum tree_code code;
209 if (TYPE_P (expr))
211 output_type_ref (ob, expr);
212 return;
215 code = TREE_CODE (expr);
216 switch (code)
218 case SSA_NAME:
219 streamer_write_record_start (ob, LTO_ssa_name_ref);
220 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
221 break;
223 case FIELD_DECL:
224 streamer_write_record_start (ob, LTO_field_decl_ref);
225 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
226 break;
228 case FUNCTION_DECL:
229 streamer_write_record_start (ob, LTO_function_decl_ref);
230 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
231 break;
233 case VAR_DECL:
234 case DEBUG_EXPR_DECL:
235 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
236 /* FALLTHRU */
237 case PARM_DECL:
238 streamer_write_record_start (ob, LTO_global_decl_ref);
239 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
240 break;
242 case CONST_DECL:
243 streamer_write_record_start (ob, LTO_const_decl_ref);
244 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
245 break;
247 case IMPORTED_DECL:
248 gcc_assert (decl_function_context (expr) == NULL);
249 streamer_write_record_start (ob, LTO_imported_decl_ref);
250 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
251 break;
253 case TYPE_DECL:
254 streamer_write_record_start (ob, LTO_type_decl_ref);
255 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
256 break;
258 case NAMELIST_DECL:
259 streamer_write_record_start (ob, LTO_namelist_decl_ref);
260 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
261 break;
263 case NAMESPACE_DECL:
264 streamer_write_record_start (ob, LTO_namespace_decl_ref);
265 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
266 break;
268 case LABEL_DECL:
269 streamer_write_record_start (ob, LTO_label_decl_ref);
270 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
271 break;
273 case RESULT_DECL:
274 streamer_write_record_start (ob, LTO_result_decl_ref);
275 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
276 break;
278 case TRANSLATION_UNIT_DECL:
279 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
280 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
281 break;
283 default:
284 /* No other node is indexable, so it should have been handled by
285 lto_output_tree. */
286 gcc_unreachable ();
291 /* Return true if EXPR is a tree node that can be written to disk. */
293 static inline bool
294 lto_is_streamable (tree expr)
296 enum tree_code code = TREE_CODE (expr);
298 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
299 name version in lto_output_tree_ref (see output_ssa_names). */
300 return !is_lang_specific (expr)
301 && code != SSA_NAME
302 && code != CALL_EXPR
303 && code != LANG_TYPE
304 && code != MODIFY_EXPR
305 && code != INIT_EXPR
306 && code != TARGET_EXPR
307 && code != BIND_EXPR
308 && code != WITH_CLEANUP_EXPR
309 && code != STATEMENT_LIST
310 && (code == CASE_LABEL_EXPR
311 || code == DECL_EXPR
312 || TREE_CODE_CLASS (code) != tcc_statement);
315 /* Very rough estimate of streaming size of the initializer. If we ignored
316 presence of strings, we could simply just count number of non-indexable
317 tree nodes and number of references to indexable nodes. Strings however
318 may be very large and we do not want to dump them into the global stream.
320 Count the size of initializer until the size in DATA is positive. */
322 static tree
323 subtract_estimated_size (tree *tp, int *ws, void *data)
325 long *sum = (long *)data;
326 if (tree_is_indexable (*tp))
328 /* Indexable tree is one reference to global stream.
329 Guess it may be about 4 bytes. */
330 *sum -= 4;
331 *ws = 0;
333 /* String table entry + base of tree node needs to be streamed. */
334 if (TREE_CODE (*tp) == STRING_CST)
335 *sum -= TREE_STRING_LENGTH (*tp) + 8;
336 else
338 /* Identifiers are also variable length but should not appear
339 naked in constructor. */
340 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
341 /* We do not really make an attempt to work out the size of the pickled tree, as
342 it is very variable. Make it bigger than the reference. */
343 *sum -= 16;
345 if (*sum < 0)
346 return *tp;
347 return NULL_TREE;
351 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
353 static tree
354 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
356 gcc_checking_assert (DECL_P (expr)
357 && TREE_CODE (expr) != FUNCTION_DECL
358 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
360 /* Handle DECL_INITIAL for symbols. */
361 tree initial = DECL_INITIAL (expr);
362 if (VAR_P (expr)
363 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
364 && !DECL_IN_CONSTANT_POOL (expr)
365 && initial)
367 varpool_node *vnode;
368 /* Extra section needs about 30 bytes; do not produce it for simple
369 scalar values. */
370 if (!(vnode = varpool_node::get (expr))
371 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
372 initial = error_mark_node;
373 if (initial != error_mark_node)
375 long max_size = 30;
376 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
377 NULL))
378 initial = error_mark_node;
382 return initial;
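/* Worked example of the budget check above, added for clarity: with
   max_size = 30, a lone INTEGER_CST initializer is charged 16 bytes by
   subtract_estimated_size and is kept, whereas a 100-character STRING_CST
   is charged 100 + 8 = 108 bytes, drives the budget negative, makes
   walk_tree return non-NULL, and therefore error_mark_node is streamed as
   the DECL_INITIAL instead.  */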
386 /* Write a physical representation of tree node EXPR to output block
387 OB. If REF_P is true, the leaves of EXPR are emitted as references
388 via lto_output_tree_ref. IX is the index into the streamer cache
389 where EXPR is stored. */
391 static void
392 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
394 /* Pack all the non-pointer fields in EXPR into a bitpack and write
395 the resulting bitpack. */
396 streamer_write_tree_bitfields (ob, expr);
398 /* Write all the pointer fields in EXPR. */
399 streamer_write_tree_body (ob, expr, ref_p);
401 /* Write any LTO-specific data to OB. */
402 if (DECL_P (expr)
403 && TREE_CODE (expr) != FUNCTION_DECL
404 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
406 /* Handle DECL_INITIAL for symbols. */
407 tree initial = get_symbol_initial_value
408 (ob->decl_state->symtab_node_encoder, expr);
409 stream_write_tree (ob, initial, ref_p);
412 /* Stream references to early generated DIEs. Keep in sync with the
413 trees handled in dwarf2out_die_ref_for_decl. */
414 if ((DECL_P (expr)
415 && TREE_CODE (expr) != FIELD_DECL
416 && TREE_CODE (expr) != DEBUG_EXPR_DECL
417 && TREE_CODE (expr) != TYPE_DECL)
418 || TREE_CODE (expr) == BLOCK)
420 const char *sym;
421 unsigned HOST_WIDE_INT off;
422 if (debug_info_level > DINFO_LEVEL_NONE
423 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
425 streamer_write_string (ob, ob->main_stream, sym, true);
426 streamer_write_uhwi (ob, off);
428 else
429 streamer_write_string (ob, ob->main_stream, NULL, true);
433 /* Write a physical representation of tree node EXPR to output block
434 OB. If REF_P is true, the leaves of EXPR are emitted as references
435 via lto_output_tree_ref. IX is the index into the streamer cache
436 where EXPR is stored. */
438 static void
439 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
441 if (!lto_is_streamable (expr))
442 internal_error ("tree code %qs is not supported in LTO streams",
443 get_tree_code_name (TREE_CODE (expr)));
445 /* Write the header, containing everything needed to materialize
446 EXPR on the reading side. */
447 streamer_write_tree_header (ob, expr);
449 lto_write_tree_1 (ob, expr, ref_p);
451 /* Mark the end of EXPR. */
452 streamer_write_zero (ob);
455 /* Emit the physical representation of tree node EXPR to output block OB,
456 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
457 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
459 static void
460 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
461 bool ref_p, bool this_ref_p)
463 unsigned ix;
465 gcc_checking_assert (expr != NULL_TREE
466 && !(this_ref_p && tree_is_indexable (expr)));
468 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
469 expr, hash, &ix);
470 gcc_assert (!exists_p);
471 if (TREE_CODE (expr) == INTEGER_CST
472 && !TREE_OVERFLOW (expr))
474 /* Shared INTEGER_CST nodes are special because they need their
475 original type to be materialized by the reader (to implement
476 TYPE_CACHED_VALUES). */
477 streamer_write_integer_cst (ob, expr, ref_p);
479 else
481 /* This is the first time we see EXPR, write its fields
482 to OB. */
483 lto_write_tree (ob, expr, ref_p);
487 class DFS
489 public:
490 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
491 bool single_p);
492 ~DFS ();
494 struct scc_entry
496 tree t;
497 hashval_t hash;
499 vec<scc_entry> sccstack;
501 private:
502 struct sccs
504 unsigned int dfsnum;
505 unsigned int low;
507 struct worklist
509 tree expr;
510 sccs *from_state;
511 sccs *cstate;
512 bool ref_p;
513 bool this_ref_p;
516 static int scc_entry_compare (const void *, const void *);
518 void DFS_write_tree_body (struct output_block *ob,
519 tree expr, sccs *expr_state, bool ref_p);
521 void DFS_write_tree (struct output_block *ob, sccs *from_state,
522 tree expr, bool ref_p, bool this_ref_p);
524 hashval_t
525 hash_scc (struct output_block *ob, unsigned first, unsigned size,
526 bool ref_p, bool this_ref_p);
528 hash_map<tree, sccs *> sccstate;
529 vec<worklist> worklist_vec;
530 struct obstack sccstate_obstack;
533 /* Emit the physical representation of tree node EXPR to output block OB,
534 using depth-first search on the subgraph. If THIS_REF_P is true, the
535 leaves of EXPR are emitted as references via lto_output_tree_ref.
536 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
537 this is for a rewalk of a single leaf SCC. */
539 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
540 bool single_p)
542 unsigned int next_dfs_num = 1;
543 sccstack.create (0);
544 gcc_obstack_init (&sccstate_obstack);
545 worklist_vec = vNULL;
546 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
547 while (!worklist_vec.is_empty ())
549 worklist &w = worklist_vec.last ();
550 expr = w.expr;
551 sccs *from_state = w.from_state;
552 sccs *cstate = w.cstate;
553 ref_p = w.ref_p;
554 this_ref_p = w.this_ref_p;
555 if (cstate == NULL)
557 sccs **slot = &sccstate.get_or_insert (expr);
558 cstate = *slot;
559 if (cstate)
561 gcc_checking_assert (from_state);
562 if (cstate->dfsnum < from_state->dfsnum)
563 from_state->low = MIN (cstate->dfsnum, from_state->low);
564 worklist_vec.pop ();
565 continue;
568 scc_entry e = { expr, 0 };
569 /* Not yet visited. DFS recurse and push it onto the stack. */
570 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
571 sccstack.safe_push (e);
572 cstate->dfsnum = next_dfs_num++;
573 cstate->low = cstate->dfsnum;
574 w.cstate = cstate;
576 if (TREE_CODE (expr) == INTEGER_CST
577 && !TREE_OVERFLOW (expr))
578 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
579 else
581 DFS_write_tree_body (ob, expr, cstate, ref_p);
583 /* Walk any LTO-specific edges. */
584 if (DECL_P (expr)
585 && TREE_CODE (expr) != FUNCTION_DECL
586 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
588 /* Handle DECL_INITIAL for symbols. */
589 tree initial
590 = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
591 expr);
592 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
595 continue;
598 /* See if we found an SCC. */
599 if (cstate->low == cstate->dfsnum)
601 unsigned first, size;
602 tree x;
604 /* If we are re-walking a single leaf SCC just pop it,
605 let earlier worklist item access the sccstack. */
606 if (single_p)
608 worklist_vec.pop ();
609 continue;
612 /* Pop the SCC and compute its size. */
613 first = sccstack.length ();
614 do
616 x = sccstack[--first].t;
618 while (x != expr);
619 size = sccstack.length () - first;
621 /* No need to compute hashes for LTRANS units, we don't perform
622 any merging there. */
623 hashval_t scc_hash = 0;
624 unsigned scc_entry_len = 0;
625 if (!flag_wpa)
627 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
629 /* Put the entries with the least number of collisions first. */
630 unsigned entry_start = 0;
631 scc_entry_len = size + 1;
632 for (unsigned i = 0; i < size;)
634 unsigned from = i;
635 for (i = i + 1; i < size
636 && (sccstack[first + i].hash
637 == sccstack[first + from].hash); ++i)
639 if (i - from < scc_entry_len)
641 scc_entry_len = i - from;
642 entry_start = from;
645 for (unsigned i = 0; i < scc_entry_len; ++i)
646 std::swap (sccstack[first + i],
647 sccstack[first + entry_start + i]);
649 /* We already sorted SCC deterministically in hash_scc. */
651 /* Check that we have only one SCC.
652 Naturally we may have conflicts if hash function is not
653 strong enough. Let's see how far this gets. */
654 gcc_checking_assert (scc_entry_len == 1);
657 /* Write LTO_tree_scc. */
658 streamer_write_record_start (ob, LTO_tree_scc);
659 streamer_write_uhwi (ob, size);
660 streamer_write_uhwi (ob, scc_hash);
662 /* Write size-1 SCCs without wrapping them inside SCC bundles.
663 All INTEGER_CSTs need to be handled this way as we need
664 their type to materialize them. Also builtins are handled
665 this way.
666 ??? We still wrap these in LTO_tree_scc so at the
667 input side we can properly identify the tree we want
668 to ultimately return. */
669 if (size == 1)
670 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
671 else
673 /* Write the size of the SCC entry candidates. */
674 streamer_write_uhwi (ob, scc_entry_len);
676 /* Write all headers and populate the streamer cache. */
677 for (unsigned i = 0; i < size; ++i)
679 hashval_t hash = sccstack[first+i].hash;
680 tree t = sccstack[first+i].t;
681 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
682 t, hash, NULL);
683 gcc_assert (!exists_p);
685 if (!lto_is_streamable (t))
686 internal_error ("tree code %qs is not supported "
687 "in LTO streams",
688 get_tree_code_name (TREE_CODE (t)));
690 /* Write the header, containing everything needed to
691 materialize EXPR on the reading side. */
692 streamer_write_tree_header (ob, t);
695 /* Write the bitpacks and tree references. */
696 for (unsigned i = 0; i < size; ++i)
698 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
700 /* Mark the end of the tree. */
701 streamer_write_zero (ob);
705 /* Finally truncate the vector. */
706 sccstack.truncate (first);
708 if (from_state)
709 from_state->low = MIN (from_state->low, cstate->low);
710 worklist_vec.pop ();
711 continue;
714 gcc_checking_assert (from_state);
715 from_state->low = MIN (from_state->low, cstate->low);
716 if (cstate->dfsnum < from_state->dfsnum)
717 from_state->low = MIN (cstate->dfsnum, from_state->low);
718 worklist_vec.pop ();
720 worklist_vec.release ();
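/* Worked example of the dfsnum/low bookkeeping above, added for clarity
   (hypothetical nodes): suppose the walk visits A -> B -> C, C refers back
   to A, and A additionally refers to an unrelated node D.  A, B and C get
   dfsnum/low of 1/1, 2/2 and 3/3; C's back edge to A lowers C's low to 1,
   which propagates to B and then A as the worklist unwinds.  D gets
   dfsnum 4, has no edges, so low == dfsnum and it is popped and streamed
   as a singleton SCC; finally A reaches low == dfsnum == 1 and {A, B, C}
   is popped and written as a single LTO_tree_scc record of size 3.  */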
723 DFS::~DFS ()
725 sccstack.release ();
726 obstack_free (&sccstate_obstack, NULL);
729 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
730 DFS recurse for all tree edges originating from it. */
732 void
733 DFS::DFS_write_tree_body (struct output_block *ob,
734 tree expr, sccs *expr_state, bool ref_p)
736 #define DFS_follow_tree_edge(DEST) \
737 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
739 enum tree_code code;
741 code = TREE_CODE (expr);
743 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
745 if (TREE_CODE (expr) != IDENTIFIER_NODE)
746 DFS_follow_tree_edge (TREE_TYPE (expr));
749 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
751 unsigned int count = vector_cst_encoded_nelts (expr);
752 for (unsigned int i = 0; i < count; ++i)
753 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
756 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
757 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
758 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
760 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
762 DFS_follow_tree_edge (TREE_REALPART (expr));
763 DFS_follow_tree_edge (TREE_IMAGPART (expr));
766 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
768 /* Drop names that were created for anonymous entities. */
769 if (DECL_NAME (expr)
770 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
771 && anon_aggrname_p (DECL_NAME (expr)))
772 ;
773 else
774 DFS_follow_tree_edge (DECL_NAME (expr));
775 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
776 && ! DECL_CONTEXT (expr))
777 DFS_follow_tree_edge ((*all_translation_units)[0]);
778 else
779 DFS_follow_tree_edge (DECL_CONTEXT (expr));
782 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
784 DFS_follow_tree_edge (DECL_SIZE (expr));
785 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
787 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
788 special handling in LTO, it must be handled by streamer hooks. */
790 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
792 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
793 for early inlining so drop it on the floor instead of ICEing in
794 dwarf2out.c.
795 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
796 declarations which should be eliminated by decl merging. Be sure none
797 leaks to this point. */
798 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
799 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
801 if ((VAR_P (expr)
802 || TREE_CODE (expr) == PARM_DECL)
803 && DECL_HAS_VALUE_EXPR_P (expr))
804 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
805 if (VAR_P (expr)
806 && DECL_HAS_DEBUG_EXPR_P (expr))
807 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
810 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
812 if (TREE_CODE (expr) == TYPE_DECL)
813 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
816 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
818 /* Make sure we don't inadvertently set the assembler name. */
819 if (DECL_ASSEMBLER_NAME_SET_P (expr))
820 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
823 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
825 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
826 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
827 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
828 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
829 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
832 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
834 DFS_follow_tree_edge (DECL_VINDEX (expr));
835 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
836 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
837 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
840 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
842 DFS_follow_tree_edge (TYPE_SIZE (expr));
843 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
844 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
845 DFS_follow_tree_edge (TYPE_NAME (expr));
846 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
847 reconstructed during fixup. */
848 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
849 during fixup. */
850 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
851 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
852 /* TYPE_CANONICAL is re-computed during type merging, so no need
853 to follow it here. */
854 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
857 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
859 if (TREE_CODE (expr) == ENUMERAL_TYPE)
860 DFS_follow_tree_edge (TYPE_VALUES (expr));
861 else if (TREE_CODE (expr) == ARRAY_TYPE)
862 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
863 else if (RECORD_OR_UNION_TYPE_P (expr))
864 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
865 DFS_follow_tree_edge (t);
866 else if (TREE_CODE (expr) == FUNCTION_TYPE
867 || TREE_CODE (expr) == METHOD_TYPE)
868 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
870 if (!POINTER_TYPE_P (expr))
871 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
872 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
875 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
877 DFS_follow_tree_edge (TREE_PURPOSE (expr));
878 DFS_follow_tree_edge (TREE_VALUE (expr));
879 DFS_follow_tree_edge (TREE_CHAIN (expr));
882 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
884 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
885 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
888 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
890 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
891 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
892 DFS_follow_tree_edge (TREE_BLOCK (expr));
895 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
897 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
898 if (VAR_OR_FUNCTION_DECL_P (t)
899 && DECL_EXTERNAL (t))
900 /* We have to stream externals in the block chain as
901 non-references. See also
902 tree-streamer-out.c:streamer_write_chain. */
903 DFS_write_tree (ob, expr_state, t, ref_p, false);
904 else
905 DFS_follow_tree_edge (t);
907 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
909 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
910 handle - those that represent inlined function scopes.
911 For the rest, drop them on the floor instead of ICEing
912 in dwarf2out.c, but keep the notion of whether the block
913 is an inlined block by referring to itself for the sake of
914 tree_nonartificial_location. */
915 if (inlined_function_outer_scope_p (expr))
917 tree ultimate_origin = block_ultimate_origin (expr);
918 DFS_follow_tree_edge (ultimate_origin);
920 else if (BLOCK_ABSTRACT_ORIGIN (expr))
921 DFS_follow_tree_edge (expr);
922 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
923 information for early inlined BLOCKs so drop it on the floor instead
924 of ICEing in dwarf2out.c. */
926 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
927 streaming time. */
929 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
930 list is re-constructed from BLOCK_SUPERCONTEXT. */
933 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
935 unsigned i;
936 tree t;
938 /* Note that the number of BINFO slots has already been emitted in
939 EXPR's header (see streamer_write_tree_header) because this length
940 is needed to build the empty BINFO node on the reader side. */
941 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
942 DFS_follow_tree_edge (t);
943 DFS_follow_tree_edge (BINFO_OFFSET (expr));
944 DFS_follow_tree_edge (BINFO_VTABLE (expr));
945 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
947 /* The number of BINFO_BASE_ACCESSES has already been emitted in
948 EXPR's bitfield section. */
949 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
950 DFS_follow_tree_edge (t);
952 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
953 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
956 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
958 unsigned i;
959 tree index, value;
961 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
963 DFS_follow_tree_edge (index);
964 DFS_follow_tree_edge (value);
968 if (code == OMP_CLAUSE)
970 int i;
971 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
972 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
973 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
976 #undef DFS_follow_tree_edge
979 /* Return a hash value for the tree T.
980 CACHE holds hash values of trees outside current SCC. MAP, if non-NULL,
981 may hold hash values of trees inside the current SCC. */
983 static hashval_t
984 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
986 inchash::hash hstate;
988 #define visit(SIBLING) \
989 do { \
990 unsigned ix; \
991 if (!SIBLING) \
992 hstate.add_int (0); \
993 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
994 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
995 else if (map) \
996 hstate.add_int (*map->get (SIBLING)); \
997 else \
998 hstate.add_int (1); \
999 } while (0)
1001 /* Hash TS_BASE. */
1002 enum tree_code code = TREE_CODE (t);
1003 hstate.add_int (code);
1004 if (!TYPE_P (t))
1006 hstate.add_flag (TREE_SIDE_EFFECTS (t));
1007 hstate.add_flag (TREE_CONSTANT (t));
1008 hstate.add_flag (TREE_READONLY (t));
1009 hstate.add_flag (TREE_PUBLIC (t));
1011 hstate.add_flag (TREE_ADDRESSABLE (t));
1012 hstate.add_flag (TREE_THIS_VOLATILE (t));
1013 if (DECL_P (t))
1014 hstate.add_flag (DECL_UNSIGNED (t));
1015 else if (TYPE_P (t))
1016 hstate.add_flag (TYPE_UNSIGNED (t));
1017 if (TYPE_P (t))
1018 hstate.add_flag (TYPE_ARTIFICIAL (t));
1019 else
1020 hstate.add_flag (TREE_NO_WARNING (t));
1021 hstate.add_flag (TREE_NOTHROW (t));
1022 hstate.add_flag (TREE_STATIC (t));
1023 hstate.add_flag (TREE_PROTECTED (t));
1024 hstate.add_flag (TREE_DEPRECATED (t));
1025 if (code != TREE_BINFO)
1026 hstate.add_flag (TREE_PRIVATE (t));
1027 if (TYPE_P (t))
1029 hstate.add_flag (AGGREGATE_TYPE_P (t)
1030 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1031 hstate.add_flag (TYPE_ADDR_SPACE (t));
1033 else if (code == SSA_NAME)
1034 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1035 hstate.commit_flag ();
1037 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1038 hstate.add_wide_int (wi::to_widest (t));
1040 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1042 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1043 hstate.add_flag (r.cl);
1044 hstate.add_flag (r.sign);
1045 hstate.add_flag (r.signalling);
1046 hstate.add_flag (r.canonical);
1047 hstate.commit_flag ();
1048 hstate.add_int (r.uexp);
1049 hstate.add (r.sig, sizeof (r.sig));
1052 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1054 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1055 hstate.add_int (f.mode);
1056 hstate.add_int (f.data.low);
1057 hstate.add_int (f.data.high);
1060 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1062 hstate.add_hwi (DECL_MODE (t));
1063 hstate.add_flag (DECL_NONLOCAL (t));
1064 hstate.add_flag (DECL_VIRTUAL_P (t));
1065 hstate.add_flag (DECL_IGNORED_P (t));
1066 hstate.add_flag (DECL_ABSTRACT_P (t));
1067 hstate.add_flag (DECL_ARTIFICIAL (t));
1068 hstate.add_flag (DECL_USER_ALIGN (t));
1069 hstate.add_flag (DECL_PRESERVE_P (t));
1070 hstate.add_flag (DECL_EXTERNAL (t));
1071 hstate.add_flag (DECL_GIMPLE_REG_P (t));
1072 hstate.commit_flag ();
1073 hstate.add_int (DECL_ALIGN (t));
1074 if (code == LABEL_DECL)
1076 hstate.add_int (EH_LANDING_PAD_NR (t));
1077 hstate.add_int (LABEL_DECL_UID (t));
1079 else if (code == FIELD_DECL)
1081 hstate.add_flag (DECL_PACKED (t));
1082 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1083 hstate.add_flag (DECL_PADDING_P (t));
1084 hstate.add_int (DECL_OFFSET_ALIGN (t));
1086 else if (code == VAR_DECL)
1088 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1089 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1091 if (code == RESULT_DECL
1092 || code == PARM_DECL
1093 || code == VAR_DECL)
1095 hstate.add_flag (DECL_BY_REFERENCE (t));
1096 if (code == VAR_DECL
1097 || code == PARM_DECL)
1098 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1100 hstate.commit_flag ();
1103 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1104 hstate.add_int (DECL_REGISTER (t));
1106 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1108 hstate.add_flag (DECL_COMMON (t));
1109 hstate.add_flag (DECL_DLLIMPORT_P (t));
1110 hstate.add_flag (DECL_WEAK (t));
1111 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1112 hstate.add_flag (DECL_COMDAT (t));
1113 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1114 hstate.add_int (DECL_VISIBILITY (t));
1115 if (code == VAR_DECL)
1117 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1118 hstate.add_flag (DECL_HARD_REGISTER (t));
1119 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1121 if (TREE_CODE (t) == FUNCTION_DECL)
1123 hstate.add_flag (DECL_FINAL_P (t));
1124 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1125 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1127 hstate.commit_flag ();
1130 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1132 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1133 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1134 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1135 hstate.add_flag (DECL_UNINLINABLE (t));
1136 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1137 hstate.add_flag (DECL_IS_NOVOPS (t));
1138 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1139 hstate.add_flag (DECL_IS_MALLOC (t));
1140 hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1141 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1142 hstate.add_flag (DECL_STATIC_CHAIN (t));
1143 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1144 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1145 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1146 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1147 hstate.add_flag (DECL_PURE_P (t));
1148 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1149 hstate.commit_flag ();
1150 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1151 hstate.add_int (DECL_FUNCTION_CODE (t));
1154 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1156 hstate.add_hwi (TYPE_MODE (t));
1157 hstate.add_flag (TYPE_STRING_FLAG (t));
1158 /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1159 no streaming. */
1160 hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1161 hstate.add_flag (TYPE_PACKED (t));
1162 hstate.add_flag (TYPE_RESTRICT (t));
1163 hstate.add_flag (TYPE_USER_ALIGN (t));
1164 hstate.add_flag (TYPE_READONLY (t));
1165 if (RECORD_OR_UNION_TYPE_P (t))
1167 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1168 hstate.add_flag (TYPE_FINAL_P (t));
1170 else if (code == ARRAY_TYPE)
1171 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1172 if (AGGREGATE_TYPE_P (t))
1173 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1174 hstate.commit_flag ();
1175 hstate.add_int (TYPE_PRECISION (t));
1176 hstate.add_int (TYPE_ALIGN (t));
1177 hstate.add_int (TYPE_EMPTY_P (t));
1180 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1181 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1182 strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1184 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1185 /* We don't stream these when passing things to a different target. */
1186 && !lto_stream_offload_p)
1187 hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1189 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1190 hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1192 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1193 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1195 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1196 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1198 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1200 if (code != IDENTIFIER_NODE)
1201 visit (TREE_TYPE (t));
1204 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1206 unsigned int count = vector_cst_encoded_nelts (t);
1207 for (unsigned int i = 0; i < count; ++i)
1208 visit (VECTOR_CST_ENCODED_ELT (t, i));
1211 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1212 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1213 visit (POLY_INT_CST_COEFF (t, i));
1215 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1217 visit (TREE_REALPART (t));
1218 visit (TREE_IMAGPART (t));
1221 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1223 /* Drop names that were created for anonymous entities. */
1224 if (DECL_NAME (t)
1225 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1226 && anon_aggrname_p (DECL_NAME (t)))
1227 ;
1228 else
1229 visit (DECL_NAME (t));
1230 if (DECL_FILE_SCOPE_P (t))
1231 ;
1232 else
1233 visit (DECL_CONTEXT (t));
1236 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1238 visit (DECL_SIZE (t));
1239 visit (DECL_SIZE_UNIT (t));
1240 visit (DECL_ATTRIBUTES (t));
1241 if ((code == VAR_DECL
1242 || code == PARM_DECL)
1243 && DECL_HAS_VALUE_EXPR_P (t))
1244 visit (DECL_VALUE_EXPR (t));
1245 if (code == VAR_DECL
1246 && DECL_HAS_DEBUG_EXPR_P (t))
1247 visit (DECL_DEBUG_EXPR (t));
1248 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1249 be able to call get_symbol_initial_value. */
1252 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1254 if (code == TYPE_DECL)
1255 visit (DECL_ORIGINAL_TYPE (t));
1258 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1260 if (DECL_ASSEMBLER_NAME_SET_P (t))
1261 visit (DECL_ASSEMBLER_NAME (t));
1264 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1266 visit (DECL_FIELD_OFFSET (t));
1267 visit (DECL_BIT_FIELD_TYPE (t));
1268 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1269 visit (DECL_FIELD_BIT_OFFSET (t));
1270 visit (DECL_FCONTEXT (t));
1273 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1275 visit (DECL_VINDEX (t));
1276 visit (DECL_FUNCTION_PERSONALITY (t));
1277 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1278 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1281 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1283 visit (TYPE_SIZE (t));
1284 visit (TYPE_SIZE_UNIT (t));
1285 visit (TYPE_ATTRIBUTES (t));
1286 visit (TYPE_NAME (t));
1287 visit (TYPE_MAIN_VARIANT (t));
1288 if (TYPE_FILE_SCOPE_P (t))
1289 ;
1290 else
1291 visit (TYPE_CONTEXT (t));
1292 visit (TYPE_STUB_DECL (t));
1295 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1297 if (code == ENUMERAL_TYPE)
1298 visit (TYPE_VALUES (t));
1299 else if (code == ARRAY_TYPE)
1300 visit (TYPE_DOMAIN (t));
1301 else if (RECORD_OR_UNION_TYPE_P (t))
1302 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1303 visit (f);
1304 else if (code == FUNCTION_TYPE
1305 || code == METHOD_TYPE)
1306 visit (TYPE_ARG_TYPES (t));
1307 if (!POINTER_TYPE_P (t))
1308 visit (TYPE_MIN_VALUE_RAW (t));
1309 visit (TYPE_MAX_VALUE_RAW (t));
1312 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1314 visit (TREE_PURPOSE (t));
1315 visit (TREE_VALUE (t));
1316 visit (TREE_CHAIN (t));
1319 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1320 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1321 visit (TREE_VEC_ELT (t, i));
1323 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1325 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1326 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1327 visit (TREE_OPERAND (t, i));
1330 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1332 unsigned i;
1333 tree b;
1334 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1335 visit (b);
1336 visit (BINFO_OFFSET (t));
1337 visit (BINFO_VTABLE (t));
1338 visit (BINFO_VPTR_FIELD (t));
1339 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1340 visit (b);
1341 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1342 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1345 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1347 unsigned i;
1348 tree index, value;
1349 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1350 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1352 visit (index);
1353 visit (value);
1357 if (code == OMP_CLAUSE)
1359 int i;
1360 HOST_WIDE_INT val;
1362 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1363 switch (OMP_CLAUSE_CODE (t))
1365 case OMP_CLAUSE_DEFAULT:
1366 val = OMP_CLAUSE_DEFAULT_KIND (t);
1367 break;
1368 case OMP_CLAUSE_SCHEDULE:
1369 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1370 break;
1371 case OMP_CLAUSE_DEPEND:
1372 val = OMP_CLAUSE_DEPEND_KIND (t);
1373 break;
1374 case OMP_CLAUSE_MAP:
1375 val = OMP_CLAUSE_MAP_KIND (t);
1376 break;
1377 case OMP_CLAUSE_PROC_BIND:
1378 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1379 break;
1380 case OMP_CLAUSE_REDUCTION:
1381 val = OMP_CLAUSE_REDUCTION_CODE (t);
1382 break;
1383 default:
1384 val = 0;
1385 break;
1387 hstate.add_hwi (val);
1388 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1389 visit (OMP_CLAUSE_OPERAND (t, i));
1390 visit (OMP_CLAUSE_CHAIN (t));
1393 return hstate.end ();
1395 #undef visit
1398 /* Compare two SCC entries by their hash value for qsorting them. */
1400 int
1401 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1403 const scc_entry *p1 = (const scc_entry *) p1_;
1404 const scc_entry *p2 = (const scc_entry *) p2_;
1405 if (p1->hash < p2->hash)
1406 return -1;
1407 else if (p1->hash > p2->hash)
1408 return 1;
1409 return 0;
1412 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1413 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1415 hashval_t
1416 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1417 bool ref_p, bool this_ref_p)
1419 unsigned int last_classes = 0, iterations = 0;
1421 /* Compute hash values for the SCC members. */
1422 for (unsigned i = 0; i < size; ++i)
1423 sccstack[first+i].hash
1424 = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1426 if (size == 1)
1427 return sccstack[first].hash;
1429 /* We aim to get unique hash for every tree within SCC and compute hash value
1430 of the whole SCC by combining all values together in a stable (entry-point
1431 independent) order. This guarantees that the same SCC regions within
1432 different translation units will get the same hash values and therefore
1433 will be merged at WPA time.
1435 Often the hashes are already unique. In that case we compute the SCC hash
1436 by combining individual hash values in an increasing order.
1438 If there are duplicates, we seek at least one tree with unique hash (and
1439 pick one with minimal hash and this property). Then we obtain a stable
1440 order by DFS walk starting from this unique tree and then use the index
1441 within this order to make individual hash values unique.
1443 If there is no tree with unique hash, we iteratively propagate the hash
1444 values across the internal edges of SCC. This usually quickly leads
1445 to unique hashes. Consider, for example, an SCC containing two pointers
1446 that are identical except for the types they point to and assume that
1447 these types are also part of the SCC. The propagation will add the
1448 points-to type information into their hash values. */
1449 do
1451 /* Sort the SCC so we can easily check for uniqueness. */
1452 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1454 unsigned int classes = 1;
1455 int firstunique = -1;
1457 /* Find the tree with lowest unique hash (if it exists) and compute
1458 the number of equivalence classes. */
1459 if (sccstack[first].hash != sccstack[first+1].hash)
1460 firstunique = 0;
1461 for (unsigned i = 1; i < size; ++i)
1462 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1464 classes++;
1465 if (firstunique == -1
1466 && (i == size - 1
1467 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1468 firstunique = i;
1471 /* If we found a tree with unique hash, stop the iteration. */
1472 if (firstunique != -1
1473 /* Also terminate if we run out of iterations or if the number of
1474 equivalence classes is no longer increasing.
1475 For example a cyclic list of trees that are all equivalent will
1476 never have unique entry point; we however do not build such SCCs
1477 in our IL. */
1478 || classes <= last_classes || iterations > 16)
1480 hashval_t scc_hash;
1482 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1483 starting from FIRSTUNIQUE to obtain a stable order. */
1484 if (classes != size && firstunique != -1)
1486 hash_map <tree, hashval_t> map(size*2);
1488 /* Store hash values into a map, so we can associate them with
1489 the reordered SCC. */
1490 for (unsigned i = 0; i < size; ++i)
1491 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1493 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1494 true);
1495 gcc_assert (again.sccstack.length () == size);
1497 memcpy (sccstack.address () + first,
1498 again.sccstack.address (),
1499 sizeof (scc_entry) * size);
1501 /* Update hash values of individual members by hashing in the
1502 index within the stable order. This ensures uniqueness.
1503 Also compute the SCC hash by mixing in all hash values in
1504 the stable order we obtained. */
1505 sccstack[first].hash = *map.get (sccstack[first].t);
1506 scc_hash = sccstack[first].hash;
1507 for (unsigned i = 1; i < size; ++i)
1509 sccstack[first+i].hash
1510 = iterative_hash_hashval_t (i,
1511 *map.get (sccstack[first+i].t));
1512 scc_hash
1513 = iterative_hash_hashval_t (scc_hash,
1514 sccstack[first+i].hash);
1517 /* If we got a unique hash value for each tree, then sort already
1518 ensured entry-point independent order. Only compute the final
1519 SCC hash.
1521 If we failed to find the unique entry point, we go by the same
1522 route. We will eventually introduce unwanted hash conflicts. */
1523 else
1525 scc_hash = sccstack[first].hash;
1526 for (unsigned i = 1; i < size; ++i)
1527 scc_hash
1528 = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1530 /* We cannot 100% guarantee that the hash won't conflict so as
1531 to make it impossible to find a unique hash. This however
1532 should be an extremely rare case. ICE for now so possible
1533 issues are found and evaluated. */
1534 gcc_checking_assert (classes == size);
1537 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1538 hash into the hash of each element. */
1539 for (unsigned i = 0; i < size; ++i)
1540 sccstack[first+i].hash
1541 = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1542 return scc_hash;
1545 last_classes = classes;
1546 iterations++;
1548 /* We failed to identify the entry point; propagate hash values across
1549 the edges. */
1550 hash_map <tree, hashval_t> map(size*2);
1552 for (unsigned i = 0; i < size; ++i)
1553 map.put (sccstack[first+i].t, sccstack[first+i].hash);
1555 for (unsigned i = 0; i < size; i++)
1556 sccstack[first+i].hash
1557 = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1559 while (true);
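/* Worked example for the propagation above, added for clarity
   (hypothetical trees): an SCC contains pointer types P1 and P2 that hash
   identically at first, plus their distinct pointed-to types T1 and T2
   which are also SCC members.  One propagation round mixes hash(T1) into
   P1 and hash(T2) into P2 through the TREE_TYPE visit in hash_tree, the
   number of equivalence classes grows, a tree with a unique hash appears,
   and the DFS re-walk from it produces the entry-point independent order
   used for the final SCC hash.  */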
1562 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1563 already in the streamer cache. Main routine called for
1564 each visit of EXPR. */
1566 void
1567 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1568 tree expr, bool ref_p, bool this_ref_p)
1570 /* Handle special cases. */
1571 if (expr == NULL_TREE)
1572 return;
1574 /* Do not DFS walk into indexable trees. */
1575 if (this_ref_p && tree_is_indexable (expr))
1576 return;
1578 /* Check if we already streamed EXPR. */
1579 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1580 return;
1582 worklist w;
1583 w.expr = expr;
1584 w.from_state = from_state;
1585 w.cstate = NULL;
1586 w.ref_p = ref_p;
1587 w.this_ref_p = this_ref_p;
1588 worklist_vec.safe_push (w);
1592 /* Emit the physical representation of tree node EXPR to output block OB.
1593 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1594 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1596 void
1597 lto_output_tree (struct output_block *ob, tree expr,
1598 bool ref_p, bool this_ref_p)
1600 unsigned ix;
1601 bool existed_p;
1603 if (expr == NULL_TREE)
1605 streamer_write_record_start (ob, LTO_null);
1606 return;
1609 if (this_ref_p && tree_is_indexable (expr))
1611 lto_output_tree_ref (ob, expr);
1612 return;
1615 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1616 if (existed_p)
1618 /* If a node has already been streamed out, make sure that
1619 we don't write it more than once. Otherwise, the reader
1620 will instantiate two different nodes for the same object. */
1621 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1622 streamer_write_uhwi (ob, ix);
1623 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1624 lto_tree_code_to_tag (TREE_CODE (expr)));
1625 lto_stats.num_pickle_refs_output++;
1627 else
1629 /* This is the first time we see EXPR, write all reachable
1630 trees to OB. */
1631 static bool in_dfs_walk;
1633 /* Protect against recursion which means disconnect between
1634 what tree edges we walk in the DFS walk and what edges
1635 we stream out. */
1636 gcc_assert (!in_dfs_walk);
1638 /* Start the DFS walk. */
1639 /* Save ob state ... */
1640 /* let's see ... */
1641 in_dfs_walk = true;
1642 DFS (ob, expr, ref_p, this_ref_p, false);
1643 in_dfs_walk = false;
1645 /* Finally append a reference to the tree we were writing.
1646 ??? If expr ended up as a singleton we could have
1647 inlined it here and avoid outputting a reference. */
1648 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1649 gcc_assert (existed_p);
1650 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1651 streamer_write_uhwi (ob, ix);
1652 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1653 lto_tree_code_to_tag (TREE_CODE (expr)));
1654 lto_stats.num_pickle_refs_output++;
1659 /* Output to OB a list of try/catch handlers starting with FIRST. */
1661 static void
1662 output_eh_try_list (struct output_block *ob, eh_catch first)
1664 eh_catch n;
1666 for (n = first; n; n = n->next_catch)
1668 streamer_write_record_start (ob, LTO_eh_catch);
1669 stream_write_tree (ob, n->type_list, true);
1670 stream_write_tree (ob, n->filter_list, true);
1671 stream_write_tree (ob, n->label, true);
1674 streamer_write_record_start (ob, LTO_null);
1678 /* Output EH region R to OB. R is a slot of the enclosing function's
1679 EH region array; a NULL slot is emitted as an LTO_null record. */
1682 static void
1683 output_eh_region (struct output_block *ob, eh_region r)
1685 enum LTO_tags tag;
1687 if (r == NULL)
1689 streamer_write_record_start (ob, LTO_null);
1690 return;
1693 if (r->type == ERT_CLEANUP)
1694 tag = LTO_ert_cleanup;
1695 else if (r->type == ERT_TRY)
1696 tag = LTO_ert_try;
1697 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1698 tag = LTO_ert_allowed_exceptions;
1699 else if (r->type == ERT_MUST_NOT_THROW)
1700 tag = LTO_ert_must_not_throw;
1701 else
1702 gcc_unreachable ();
1704 streamer_write_record_start (ob, tag);
1705 streamer_write_hwi (ob, r->index);
1707 if (r->outer)
1708 streamer_write_hwi (ob, r->outer->index);
1709 else
1710 streamer_write_zero (ob);
1712 if (r->inner)
1713 streamer_write_hwi (ob, r->inner->index);
1714 else
1715 streamer_write_zero (ob);
1717 if (r->next_peer)
1718 streamer_write_hwi (ob, r->next_peer->index);
1719 else
1720 streamer_write_zero (ob);
1722 if (r->type == ERT_TRY)
1724 output_eh_try_list (ob, r->u.eh_try.first_catch);
1726 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1728 stream_write_tree (ob, r->u.allowed.type_list, true);
1729 stream_write_tree (ob, r->u.allowed.label, true);
1730 streamer_write_uhwi (ob, r->u.allowed.filter);
1732 else if (r->type == ERT_MUST_NOT_THROW)
1734 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1735 bitpack_d bp = bitpack_create (ob->main_stream);
1736 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1737 streamer_write_bitpack (&bp);
1740 if (r->landing_pads)
1741 streamer_write_hwi (ob, r->landing_pads->index);
1742 else
1743 streamer_write_zero (ob);
1747 /* Output landing pad LP to OB. */
1749 static void
1750 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1752 if (lp == NULL)
1754 streamer_write_record_start (ob, LTO_null);
1755 return;
1758 streamer_write_record_start (ob, LTO_eh_landing_pad);
1759 streamer_write_hwi (ob, lp->index);
1760 if (lp->next_lp)
1761 streamer_write_hwi (ob, lp->next_lp->index);
1762 else
1763 streamer_write_zero (ob);
1765 if (lp->region)
1766 streamer_write_hwi (ob, lp->region->index);
1767 else
1768 streamer_write_zero (ob);
1770 stream_write_tree (ob, lp->post_landing_pad, true);
1774 /* Output the existing eh_table to OB. */
1776 static void
1777 output_eh_regions (struct output_block *ob, struct function *fn)
1779 if (fn->eh && fn->eh->region_tree)
1781 unsigned i;
1782 eh_region eh;
1783 eh_landing_pad lp;
1784 tree ttype;
1786 streamer_write_record_start (ob, LTO_eh_table);
1788 /* Emit the index of the root of the EH region tree. */
1789 streamer_write_hwi (ob, fn->eh->region_tree->index);
1791 /* Emit all the EH regions in the region array. */
1792 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1793 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1794 output_eh_region (ob, eh);
1796 /* Emit all landing pads. */
1797 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1798 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1799 output_eh_lp (ob, lp);
1801 /* Emit all the runtime type data. */
1802 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1803 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1804 stream_write_tree (ob, ttype, true);
1806 /* Emit the table of action chains. */
1807 if (targetm.arm_eabi_unwinder)
1809 tree t;
1810 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1811 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1812 stream_write_tree (ob, t, true);
1814 else
1816 uchar c;
1817 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1818 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1819 streamer_write_char_stream (ob->main_stream, c);
1823 /* The LTO_null either terminates the record or indicates that there
1824 are no eh_records at all. */
1825 streamer_write_record_start (ob, LTO_null);
1829 /* Output all of the active ssa names to the ssa_names stream. */
1831 static void
1832 output_ssa_names (struct output_block *ob, struct function *fn)
1834 unsigned int i, len;
1836 len = vec_safe_length (SSANAMES (fn));
1837 streamer_write_uhwi (ob, len);
1839 for (i = 1; i < len; i++)
1841 tree ptr = (*SSANAMES (fn))[i];
1843 if (ptr == NULL_TREE
1844 || SSA_NAME_IN_FREE_LIST (ptr)
1845 || virtual_operand_p (ptr)
1846 /* Simply skip unreleased SSA names. */
1847 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1848 && (! SSA_NAME_DEF_STMT (ptr)
1849 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1850 continue;
1852 streamer_write_uhwi (ob, i);
1853 streamer_write_char_stream (ob->main_stream,
1854 SSA_NAME_IS_DEFAULT_DEF (ptr));
1855 if (SSA_NAME_VAR (ptr))
1856 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1857 else
1858 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1859 stream_write_tree (ob, TREE_TYPE (ptr), true);
1862 streamer_write_zero (ob);
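/* Shape of the record emitted above, summarized for clarity: the length
   of the SSA name vector, then for every live non-virtual name a triple
   of version number, default-definition flag and either SSA_NAME_VAR or
   the bare type, terminated by a zero version.  */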
1867 /* Output the cfg. */
1869 static void
1870 output_cfg (struct output_block *ob, struct function *fn)
1872 struct lto_output_stream *tmp_stream = ob->main_stream;
1873 basic_block bb;
1875 ob->main_stream = ob->cfg_stream;
1877 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1878 profile_status_for_fn (fn));
1880 /* Output the number of the highest basic block. */
1881 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1883 FOR_ALL_BB_FN (bb, fn)
1885 edge_iterator ei;
1886 edge e;
1888 streamer_write_hwi (ob, bb->index);
1890 /* Output the successors and the edge flags. */
1891 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1892 FOR_EACH_EDGE (e, ei, bb->succs)
1894 streamer_write_uhwi (ob, e->dest->index);
1895 e->probability.stream_out (ob);
1896 streamer_write_uhwi (ob, e->flags);
1900 streamer_write_hwi (ob, -1);
1902 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1903 while (bb->next_bb)
1905 streamer_write_hwi (ob, bb->next_bb->index);
1906 bb = bb->next_bb;
1909 streamer_write_hwi (ob, -1);
1911 /* ??? The cfgloop interface is tied to cfun. */
1912 gcc_assert (cfun == fn);
1914 /* Output the number of loops. */
1915 streamer_write_uhwi (ob, number_of_loops (fn));
1917 /* Output each loop, skipping the tree root which has number zero. */
1918 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1920 struct loop *loop = get_loop (fn, i);
1922 /* Write the index of the loop header. That's enough to rebuild
1923 the loop tree on the reader side. Stream -1 for an unused
1924 loop entry. */
1925 if (!loop)
1927 streamer_write_hwi (ob, -1);
1928 continue;
1930 else
1931 streamer_write_hwi (ob, loop->header->index);
1933 /* Write everything copy_loop_info copies. */
1934 streamer_write_enum (ob->main_stream,
1935 loop_estimation, EST_LAST, loop->estimate_state);
1936 streamer_write_hwi (ob, loop->any_upper_bound);
1937 if (loop->any_upper_bound)
1938 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1939 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1940 if (loop->any_likely_upper_bound)
1941 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1942 streamer_write_hwi (ob, loop->any_estimate);
1943 if (loop->any_estimate)
1944 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1946 /* Write OMP SIMD related info. */
1947 streamer_write_hwi (ob, loop->safelen);
1948 streamer_write_hwi (ob, loop->unroll);
1949 streamer_write_hwi (ob, loop->dont_vectorize);
1950 streamer_write_hwi (ob, loop->force_vectorize);
1951 stream_write_tree (ob, loop->simduid, true);
1954 ob->main_stream = tmp_stream;
1958 /* Create the header in the file using OB. If the section type is for
1959 a function, FN is the decl for that function. */
1961 void
1962 produce_asm (struct output_block *ob, tree fn)
1964 enum lto_section_type section_type = ob->section_type;
1965 struct lto_function_header header;
1966 char *section_name;
1968 if (section_type == LTO_section_function_body)
1970 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1971 section_name = lto_get_section_name (section_type, name, NULL);
1973 else
1974 section_name = lto_get_section_name (section_type, NULL, NULL);
1976 lto_begin_section (section_name, !flag_wpa);
1977 free (section_name);
1979 /* The entire header stream is computed here. */
1980 memset (&header, 0, sizeof (struct lto_function_header));
1982 /* Write the header. */
1983 header.major_version = LTO_major_version;
1984 header.minor_version = LTO_minor_version;
1986 if (section_type == LTO_section_function_body)
1987 header.cfg_size = ob->cfg_stream->total_size;
1988 header.main_size = ob->main_stream->total_size;
1989 header.string_size = ob->string_stream->total_size;
1990 lto_write_data (&header, sizeof header);
1992 /* Put all of the gimple and the string table out to the asm file as a
1993 block of text. */
1994 if (section_type == LTO_section_function_body)
1995 lto_write_stream (ob->cfg_stream);
1996 lto_write_stream (ob->main_stream);
1997 lto_write_stream (ob->string_stream);
1999 lto_end_section ();
2003 /* Output the base body of struct function FN using output block OB. */
2005 static void
2006 output_struct_function_base (struct output_block *ob, struct function *fn)
2008 struct bitpack_d bp;
2009 unsigned i;
2010 tree t;
2012 /* Output the static chain and non-local goto save area. */
2013 stream_write_tree (ob, fn->static_chain_decl, true);
2014 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2016 /* Output all the local variables in the function. */
2017 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2018 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2019 stream_write_tree (ob, t, true);
2021 /* Output current IL state of the function. */
2022 streamer_write_uhwi (ob, fn->curr_properties);
2024 /* Write all the attributes for FN. */
2025 bp = bitpack_create (ob->main_stream);
2026 bp_pack_value (&bp, fn->is_thunk, 1);
2027 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2028 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2029 bp_pack_value (&bp, fn->returns_struct, 1);
2030 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2031 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2032 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2033 bp_pack_value (&bp, fn->after_inlining, 1);
2034 bp_pack_value (&bp, fn->stdarg, 1);
2035 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2036 bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2037 bp_pack_value (&bp, fn->calls_alloca, 1);
2038 bp_pack_value (&bp, fn->calls_setjmp, 1);
2039 bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2040 bp_pack_value (&bp, fn->has_simduid_loops, 1);
2041 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2042 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2043 bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2045 /* Output the function start and end loci. */
2046 stream_output_location (ob, &bp, fn->function_start_locus);
2047 stream_output_location (ob, &bp, fn->function_end_locus);
2049 streamer_write_bitpack (&bp);
2053 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2055 static void
2056 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2058 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2059 if (! BLOCK_SUBBLOCKS (root))
2060 leafs.safe_push (root);
2061 else
2062 collect_block_tree_leafs (BLOCK_SUBBLOCKS (root), leafs);
2065 /* Output the body of function NODE->DECL. */
2067 static void
2068 output_function (struct cgraph_node *node)
2070 tree function;
2071 struct function *fn;
2072 basic_block bb;
2073 struct output_block *ob;
2075 function = node->decl;
2076 fn = DECL_STRUCT_FUNCTION (function);
2077 ob = create_output_block (LTO_section_function_body);
2079 clear_line_info (ob);
2080 ob->symbol = node;
2082 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2084 /* Set current_function_decl and cfun. */
2085 push_cfun (fn);
2087 /* Make string 0 be a NULL string. */
2088 streamer_write_char_stream (ob->string_stream, 0);
2090 streamer_write_record_start (ob, LTO_function);
2092 /* Output the decls for the function result and the arguments. */
2093 stream_write_tree (ob, DECL_RESULT (function), true);
2094 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2096 /* Output debug args if available. */
2097 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2098 if (! debugargs)
2099 streamer_write_uhwi (ob, 0);
2100 else
2102 streamer_write_uhwi (ob, (*debugargs)->length ());
2103 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2104 stream_write_tree (ob, (**debugargs)[i], true);
2107 /* Output DECL_INITIAL for the function, which contains the tree of
2108 lexical scopes. */
2109 stream_write_tree (ob, DECL_INITIAL (function), true);
2110 /* As we do not recurse into BLOCK_SUBBLOCKS but only follow BLOCK_SUPERCONTEXT,
2111 collect the block tree leaves and stream those. */
2112 auto_vec<tree> block_tree_leafs;
2113 if (DECL_INITIAL (function))
2114 collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2115 streamer_write_uhwi (ob, block_tree_leafs.length ());
2116 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2117 stream_write_tree (ob, block_tree_leafs[i], true);
2119 /* We also stream abstract functions, for which we stream only what is
2120 needed for debug info. */
2121 if (gimple_has_body_p (function))
2123 streamer_write_uhwi (ob, 1);
2124 output_struct_function_base (ob, fn);
2126 /* Output all the SSA names used in the function. */
2127 output_ssa_names (ob, fn);
2129 /* Output any exception handling regions. */
2130 output_eh_regions (ob, fn);
2133 /* We will renumber the statements. The code that does this uses
2134 the same ordering that we use for serializing them so we can use
2135 the same code on the other end and not have to write out the
2136 statement numbers. We do not assign UIDs to PHIs here because
2137 virtual PHIs get re-computed on-the-fly which would make numbers
2138 inconsistent. */
2139 set_gimple_stmt_max_uid (cfun, 0);
2140 FOR_ALL_BB_FN (bb, cfun)
2142 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2143 gsi_next (&gsi))
2145 gphi *stmt = gsi.phi ();
2147 /* Virtual PHIs are not going to be streamed. */
2148 if (!virtual_operand_p (gimple_phi_result (stmt)))
2149 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2151 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2152 gsi_next (&gsi))
2154 gimple *stmt = gsi_stmt (gsi);
2155 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2158 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2159 virtual phis now. */
2160 FOR_ALL_BB_FN (bb, cfun)
2162 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2163 gsi_next (&gsi))
2165 gphi *stmt = gsi.phi ();
2166 if (virtual_operand_p (gimple_phi_result (stmt)))
2167 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2171 /* Output the code for the function. */
2172 FOR_ALL_BB_FN (bb, fn)
2173 output_bb (ob, bb, fn);
2175 /* The terminator for this function. */
2176 streamer_write_record_start (ob, LTO_null);
2178 output_cfg (ob, fn);
2180 pop_cfun ();
2182 else
2183 streamer_write_uhwi (ob, 0);
2185 /* Create a section to hold the pickled output of this function. */
2186 produce_asm (ob, function);
2188 destroy_output_block (ob);
2191 /* Output the constructor (DECL_INITIAL) of variable NODE->DECL. */
2193 static void
2194 output_constructor (struct varpool_node *node)
2196 tree var = node->decl;
2197 struct output_block *ob;
2199 ob = create_output_block (LTO_section_function_body);
2201 clear_line_info (ob);
2202 ob->symbol = node;
2204 /* Make string 0 be a NULL string. */
2205 streamer_write_char_stream (ob->string_stream, 0);
2207 /* Output DECL_INITIAL for the variable, which holds
2208 its initializer. */
2209 stream_write_tree (ob, DECL_INITIAL (var), true);
2211 /* Create a section to hold the pickled output of this function. */
2212 produce_asm (ob, var);
2214 destroy_output_block (ob);
2218 /* Emit toplevel asms. */
2220 void
2221 lto_output_toplevel_asms (void)
2223 struct output_block *ob;
2224 struct asm_node *can;
2225 char *section_name;
2226 struct lto_simple_header_with_strings header;
2228 if (!symtab->first_asm_symbol ())
2229 return;
2231 ob = create_output_block (LTO_section_asm);
2233 /* Make string 0 be a NULL string. */
2234 streamer_write_char_stream (ob->string_stream, 0);
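/* Stream each toplevel asm string followed by its symbol order; a NULL
string terminates the list. */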
2236 for (can = symtab->first_asm_symbol (); can; can = can->next)
2238 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2239 streamer_write_hwi (ob, can->order);
2242 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2244 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2245 lto_begin_section (section_name, !flag_wpa);
2246 free (section_name);
2248 /* The entire header stream is computed here. */
2249 memset (&header, 0, sizeof (header));
2251 /* Write the header. */
2252 header.major_version = LTO_major_version;
2253 header.minor_version = LTO_minor_version;
2255 header.main_size = ob->main_stream->total_size;
2256 header.string_size = ob->string_stream->total_size;
2257 lto_write_data (&header, sizeof header);
2259 /* Put all of the gimple and the string table out to the asm file as a
2260 block of text. */
2261 lto_write_stream (ob->main_stream);
2262 lto_write_stream (ob->string_stream);
2264 lto_end_section ();
2266 destroy_output_block (ob);
2270 /* Copy the function body or variable constructor of NODE without deserializing. */
2272 static void
2273 copy_function_or_variable (struct symtab_node *node)
2275 tree function = node->decl;
2276 struct lto_file_decl_data *file_data = node->lto_file_data;
2277 const char *data;
2278 size_t len;
2279 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2280 char *section_name =
2281 lto_get_section_name (LTO_section_function_body, name, NULL);
2282 size_t i, j;
2283 struct lto_in_decl_state *in_state;
2284 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2286 lto_begin_section (section_name, false);
2287 free (section_name);
2289 /* We may have renamed the declaration, e.g., a static function. */
2290 name = lto_get_decl_name_mapping (file_data, name);
2292 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2293 name, &len);
2294 gcc_assert (data);
2296 /* Do a bit copy of the function body. */
2297 lto_write_raw_data (data, len);
2299 /* Copy decls. */
2300 in_state =
2301 lto_get_function_in_decl_state (node->lto_file_data, function);
2302 gcc_assert (in_state);
2303 out_state->compressed = in_state->compressed;
2305 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2307 size_t n = vec_safe_length (in_state->streams[i]);
2308 vec<tree, va_gc> *trees = in_state->streams[i];
2309 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2311 /* The out state must have the same indices as the in state,
2312 so just copy the vector. All the encoders in the in state
2313 must be empty when we reach here. */
2314 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2315 encoder->trees.reserve_exact (n);
2316 for (j = 0; j < n; j++)
2317 encoder->trees.safe_push ((*trees)[j]);
2320 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2321 data, len);
2322 lto_end_section ();
2325 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2327 static tree
2328 wrap_refs (tree *tp, int *ws, void *)
2330 tree t = *tp;
2331 if (handled_component_p (t)
2332 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2333 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2335 tree decl = TREE_OPERAND (t, 0);
2336 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2337 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2338 build1 (ADDR_EXPR, ptrtype, decl),
2339 build_int_cst (ptrtype, 0));
2340 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2341 *ws = 0;
2343 else if (TREE_CODE (t) == CONSTRUCTOR)
2345 else if (!EXPR_P (t))
2346 *ws = 0;
2347 return NULL_TREE;
2350 /* Remove functions that are no longer used from offload_funcs, and mark the
2351 remaining ones with DECL_PRESERVE_P. */
2353 static void
2354 prune_offload_funcs (void)
2356 if (!offload_funcs)
2357 return;
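/* Compact the vector in place, keeping only functions that still have a
cgraph node. */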
2359 unsigned int write_index = 0;
2360 for (unsigned read_index = 0; read_index < vec_safe_length (offload_funcs);
2361 read_index++)
2363 tree fn_decl = (*offload_funcs)[read_index];
2364 bool remove_p = cgraph_node::get (fn_decl) == NULL;
2365 if (remove_p)
2366 continue;
2368 DECL_PRESERVE_P (fn_decl) = 1;
2370 if (write_index != read_index)
2371 (*offload_funcs)[write_index] = (*offload_funcs)[read_index];
2373 write_index++;
2376 offload_funcs->truncate (write_index);
2379 /* Main entry point from the pass manager. */
2381 void
2382 lto_output (void)
2384 struct lto_out_decl_state *decl_state;
2385 bitmap output = NULL;
2386 int i, n_nodes;
2387 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2389 prune_offload_funcs ();
2391 if (flag_checking)
2392 output = lto_bitmap_alloc ();
2394 /* Initialize the streamer. */
2395 lto_streamer_init ();
2397 n_nodes = lto_symtab_encoder_size (encoder);
2398 /* Process the functions with bodies and the variables with initializers. */
2399 for (i = 0; i < n_nodes; i++)
2401 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2402 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2404 if (lto_symtab_encoder_encode_body_p (encoder, node)
2405 && !node->alias
2406 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2408 if (flag_checking)
2410 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2411 bitmap_set_bit (output, DECL_UID (node->decl));
2413 decl_state = lto_new_out_decl_state ();
2414 lto_push_out_decl_state (decl_state);
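/* Stream the body when we have GIMPLE for it (or when not in WPA mode);
otherwise copy the already-serialized body from the input file. */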
2415 if (gimple_has_body_p (node->decl) || !flag_wpa
2416 /* Thunks have no body but they may be synthesized
2417 at WPA time. */
2418 || DECL_ARGUMENTS (node->decl))
2419 output_function (node);
2420 else
2421 copy_function_or_variable (node);
2422 gcc_assert (lto_get_out_decl_state () == decl_state);
2423 lto_pop_out_decl_state ();
2424 lto_record_function_out_decl_state (node->decl, decl_state);
2427 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2429 /* Wrap symbol references inside the ctor in a type
2430 preserving MEM_REF. */
2431 tree ctor = DECL_INITIAL (node->decl);
2432 if (ctor && !in_lto_p)
2433 walk_tree (&ctor, wrap_refs, NULL, NULL);
2434 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2435 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2436 && !node->alias)
2438 timevar_push (TV_IPA_LTO_CTORS_OUT);
2439 if (flag_checking)
2441 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2442 bitmap_set_bit (output, DECL_UID (node->decl));
2444 decl_state = lto_new_out_decl_state ();
2445 lto_push_out_decl_state (decl_state);
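/* Stream the initializer when it is available (or when not in WPA mode);
otherwise copy the already-serialized constructor from the input file. */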
2446 if (DECL_INITIAL (node->decl) != error_mark_node
2447 || !flag_wpa)
2448 output_constructor (node);
2449 else
2450 copy_function_or_variable (node);
2451 gcc_assert (lto_get_out_decl_state () == decl_state);
2452 lto_pop_out_decl_state ();
2453 lto_record_function_out_decl_state (node->decl, decl_state);
2454 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2459 /* Emit the callgraph after emitting function bodies. This needs to
2460 be done now to make sure that all the statements in every function
2461 have been renumbered so that edges can be associated with call
2462 statements using the statement UIDs. */
2463 output_symtab ();
2465 output_offload_tables ();
2467 if (flag_checking)
2468 lto_bitmap_free (output);
2472 /* Write each node encoded by ENCODER to OB, as well as those reachable
2473 from it and required for correct representation of its semantics.
2474 Each node in ENCODER must be a global declaration or a type. A node
2475 is written only once, even if it appears multiple times in the
2476 vector. Certain transitively-reachable nodes, such as those
2477 representing expressions, may be duplicated, but such nodes
2478 must not appear in ENCODER itself. */
2480 static void
2481 write_global_stream (struct output_block *ob,
2482 struct lto_tree_ref_encoder *encoder)
2484 tree t;
2485 size_t index;
2486 const size_t size = lto_tree_ref_encoder_size (encoder);
2488 for (index = 0; index < size; index++)
2490 t = lto_tree_ref_encoder_get_tree (encoder, index);
2491 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2492 stream_write_tree (ob, t, false);
2497 /* Write a sequence of indices into the globals vector corresponding
2498 to the trees in ENCODER. These are used by the reader to map the
2499 indices used to refer to global entities within function bodies to
2500 their referents. */
2502 static void
2503 write_global_references (struct output_block *ob,
2504 struct lto_tree_ref_encoder *encoder)
2506 tree t;
2507 uint32_t index;
2508 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2510 /* Write size and slot indexes as 32-bit unsigned numbers. */
2511 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2512 data[0] = size;
2514 for (index = 0; index < size; index++)
2516 unsigned slot_num;
2518 t = lto_tree_ref_encoder_get_tree (encoder, index);
2519 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2520 gcc_assert (slot_num != (unsigned)-1);
2521 data[index + 1] = slot_num;
2524 lto_write_data (data, sizeof (int32_t) * (size + 1));
2525 free (data);
2529 /* Write all the streams in an lto_out_decl_state STATE using
2530 output block OB. */
2532 void
2533 lto_output_decl_state_streams (struct output_block *ob,
2534 struct lto_out_decl_state *state)
2536 int i;
2538 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2539 write_global_stream (ob, &state->streams[i]);
2543 /* Write all the references in an lto_out_decl_state STATE using
2544 output block OB. */
2546 void
2547 lto_output_decl_state_refs (struct output_block *ob,
2548 struct lto_out_decl_state *state)
2550 unsigned i;
2551 unsigned ref;
2552 tree decl;
2554 /* Write a reference to the FUNCTION_DECL. If there is no function,
2555 write a reference to void_type_node. */
2556 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2557 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2558 gcc_assert (ref != (unsigned)-1);
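/* The low bit of the reference encodes whether the section is compressed. */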
2559 ref = ref * 2 + (state->compressed ? 1 : 0);
2560 lto_write_data (&ref, sizeof (uint32_t));
2562 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2563 write_global_references (ob, &state->streams[i]);
2567 /* Return the written size of STATE. */
2569 static size_t
2570 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2572 int i;
2573 size_t size;
2575 size = sizeof (int32_t); /* fn_ref. */
2576 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2578 size += sizeof (int32_t); /* vector size. */
2579 size += (lto_tree_ref_encoder_size (&state->streams[i])
2580 * sizeof (int32_t));
2582 return size;
2586 /* Write symbol T to the current section, using CACHE to look up its slot.
2587 SEEN specifies the symbols written so far. */
2589 static void
2590 write_symbol (struct streamer_tree_cache_d *cache,
2591 tree t, hash_set<const char *> *seen, bool alias)
2593 const char *name;
2594 enum gcc_plugin_symbol_kind kind;
2595 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2596 unsigned slot_num;
2597 uint64_t size;
2598 const char *comdat;
2599 unsigned char c;
2601 gcc_checking_assert (TREE_PUBLIC (t)
2602 && !is_builtin_fn (t)
2603 && !DECL_ABSTRACT_P (t)
2604 && (!VAR_P (t) || !DECL_HARD_REGISTER (t)));
2606 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2608 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2610 /* This behaves like assemble_name_raw in varasm.c, performing the
2611 same name manipulations that ASM_OUTPUT_LABELREF does. */
2612 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2614 if (seen->add (name))
2615 return;
2617 streamer_tree_cache_lookup (cache, t, &slot_num);
2618 gcc_assert (slot_num != (unsigned)-1);
2620 if (DECL_EXTERNAL (t))
2622 if (DECL_WEAK (t))
2623 kind = GCCPK_WEAKUNDEF;
2624 else
2625 kind = GCCPK_UNDEF;
2627 else
2629 if (DECL_WEAK (t))
2630 kind = GCCPK_WEAKDEF;
2631 else if (DECL_COMMON (t))
2632 kind = GCCPK_COMMON;
2633 else
2634 kind = GCCPK_DEF;
2636 /* When something is defined, it should have a node attached. */
2637 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2638 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2639 || (cgraph_node::get (t)
2640 && cgraph_node::get (t)->definition));
2643 /* Imitate what default_elf_asm_output_external does.
2644 When a symbol is external, we need to output it with DEFAULT visibility
2645 when compiling with -fvisibility=default, but with HIDDEN visibility
2646 when the symbol has the visibility ("hidden") attribute specified.
2647 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2648 right. */
2650 if (DECL_EXTERNAL (t)
2651 && !targetm.binds_local_p (t))
2652 visibility = GCCPV_DEFAULT;
2653 else
2654 switch (DECL_VISIBILITY (t))
2656 case VISIBILITY_DEFAULT:
2657 visibility = GCCPV_DEFAULT;
2658 break;
2659 case VISIBILITY_PROTECTED:
2660 visibility = GCCPV_PROTECTED;
2661 break;
2662 case VISIBILITY_HIDDEN:
2663 visibility = GCCPV_HIDDEN;
2664 break;
2665 case VISIBILITY_INTERNAL:
2666 visibility = GCCPV_INTERNAL;
2667 break;
2670 if (kind == GCCPK_COMMON
2671 && DECL_SIZE_UNIT (t)
2672 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2673 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2674 else
2675 size = 0;
2677 if (DECL_ONE_ONLY (t))
2678 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2679 else
2680 comdat = "";
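/* Each symbol table entry is laid out as:
     NUL-terminated assembler name,
     NUL-terminated comdat group name (empty string if none),
     one byte of symbol kind, one byte of visibility,
     an 8-byte size and a 4-byte slot number. */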
2682 lto_write_data (name, strlen (name) + 1);
2683 lto_write_data (comdat, strlen (comdat) + 1);
2684 c = (unsigned char) kind;
2685 lto_write_data (&c, 1);
2686 c = (unsigned char) visibility;
2687 lto_write_data (&c, 1);
2688 lto_write_data (&size, 8);
2689 lto_write_data (&slot_num, 4);
2692 /* Write an IL symbol table to OB. The symbols written are those held in
2693 the symtab node encoder of OB's decl state. */
2695 static void
2696 produce_symtab (struct output_block *ob)
2698 struct streamer_tree_cache_d *cache = ob->writer_cache;
2699 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2700 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2701 lto_symtab_encoder_iterator lsei;
2703 lto_begin_section (section_name, false);
2704 free (section_name);
2706 hash_set<const char *> seen;
2708 /* Write the symbol table.
2709 First write everything defined and then all declarations.
2710 This is necessary to handle cases where we have duplicated symbols. */
2711 for (lsei = lsei_start (encoder);
2712 !lsei_end_p (lsei); lsei_next (&lsei))
2714 symtab_node *node = lsei_node (lsei);
2716 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2717 continue;
2718 write_symbol (cache, node->decl, &seen, false);
2720 for (lsei = lsei_start (encoder);
2721 !lsei_end_p (lsei); lsei_next (&lsei))
2723 symtab_node *node = lsei_node (lsei);
2725 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2726 continue;
2727 write_symbol (cache, node->decl, &seen, false);
2730 lto_end_section ();
2734 /* Init the streamer_mode_table for output, where we collect info on what
2735 machine_mode values have been streamed. */
2736 void
2737 lto_output_init_mode_table (void)
2739 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2743 /* Write the mode table. */
2744 static void
2745 lto_write_mode_table (void)
2747 struct output_block *ob;
2748 ob = create_output_block (LTO_section_mode_table);
2749 bitpack_d bp = bitpack_create (ob->main_stream);
2751 /* Ensure that for GET_MODE_INNER (m) != m we also have
2752 the inner mode marked. */
2753 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2754 if (streamer_mode_table[i])
2756 machine_mode m = (machine_mode) i;
2757 machine_mode inner_m = GET_MODE_INNER (m);
2758 if (inner_m != m)
2759 streamer_mode_table[(int) inner_m] = 1;
2761 /* First stream modes that have GET_MODE_INNER (m) == m,
2762 so that we can refer to them afterwards. */
2763 for (int pass = 0; pass < 2; pass++)
2764 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2765 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2767 machine_mode m = (machine_mode) i;
2768 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2769 continue;
2770 bp_pack_value (&bp, m, 8);
2771 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2772 bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
2773 bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
2774 bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2775 bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
2776 switch (GET_MODE_CLASS (m))
2778 case MODE_FRACT:
2779 case MODE_UFRACT:
2780 case MODE_ACCUM:
2781 case MODE_UACCUM:
2782 bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2783 bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2784 break;
2785 case MODE_FLOAT:
2786 case MODE_DECIMAL_FLOAT:
2787 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2788 break;
2789 default:
2790 break;
2792 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
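/* A VOIDmode entry terminates the mode table. */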
2794 bp_pack_value (&bp, VOIDmode, 8);
2796 streamer_write_bitpack (&bp);
2798 char *section_name
2799 = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2800 lto_begin_section (section_name, !flag_wpa);
2801 free (section_name);
2803 /* The entire header stream is computed here. */
2804 struct lto_simple_header_with_strings header;
2805 memset (&header, 0, sizeof (header));
2807 /* Write the header. */
2808 header.major_version = LTO_major_version;
2809 header.minor_version = LTO_minor_version;
2811 header.main_size = ob->main_stream->total_size;
2812 header.string_size = ob->string_stream->total_size;
2813 lto_write_data (&header, sizeof header);
2815 /* Put all of the gimple and the string table out to the asm file as a
2816 block of text. */
2817 lto_write_stream (ob->main_stream);
2818 lto_write_stream (ob->string_stream);
2820 lto_end_section ();
2821 destroy_output_block (ob);
2825 /* This pass is run after all of the functions are serialized and all
2826 of the IPA passes have written their serialized forms. This pass
2827 causes the vector of all of the global decls and types used from
2828 this file to be written into a section that can then be read back in
2829 to recover these on the other side. */
2831 void
2832 produce_asm_for_decls (void)
2834 struct lto_out_decl_state *out_state;
2835 struct lto_out_decl_state *fn_out_state;
2836 struct lto_decl_header header;
2837 char *section_name;
2838 struct output_block *ob;
2839 unsigned idx, num_fns;
2840 size_t decl_state_size;
2841 int32_t num_decl_states;
2843 ob = create_output_block (LTO_section_decls);
2845 memset (&header, 0, sizeof (struct lto_decl_header));
2847 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2848 lto_begin_section (section_name, !flag_wpa);
2849 free (section_name);
2851 /* Make string 0 be a NULL string. */
2852 streamer_write_char_stream (ob->string_stream, 0);
2854 gcc_assert (!alias_pairs);
2856 /* Get rid of the global decl state hash tables to save some memory. */
2857 out_state = lto_get_out_decl_state ();
2858 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2859 if (out_state->streams[i].tree_hash_table)
2861 delete out_state->streams[i].tree_hash_table;
2862 out_state->streams[i].tree_hash_table = NULL;
2865 /* Write the global symbols. */
2866 lto_output_decl_state_streams (ob, out_state);
2867 num_fns = lto_function_decl_states.length ();
2868 for (idx = 0; idx < num_fns; idx++)
2870 fn_out_state =
2871 lto_function_decl_states[idx];
2872 lto_output_decl_state_streams (ob, fn_out_state);
2875 header.major_version = LTO_major_version;
2876 header.minor_version = LTO_minor_version;
2878 /* Currently not used. This field would allow us to preallocate
2879 the globals vector, so that it need not be resized as it is extended. */
2880 header.num_nodes = -1;
2882 /* Compute the total size of all decl out states. */
2883 decl_state_size = sizeof (int32_t);
2884 decl_state_size += lto_out_decl_state_written_size (out_state);
2885 for (idx = 0; idx < num_fns; idx++)
2887 fn_out_state =
2888 lto_function_decl_states[idx];
2889 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2891 header.decl_state_size = decl_state_size;
2893 header.main_size = ob->main_stream->total_size;
2894 header.string_size = ob->string_stream->total_size;
2896 lto_write_data (&header, sizeof header);
2898 /* Write the main out-decl state, followed by out-decl states of
2899 functions. */
2900 num_decl_states = num_fns + 1;
2901 lto_write_data (&num_decl_states, sizeof (num_decl_states));
2902 lto_output_decl_state_refs (ob, out_state);
2903 for (idx = 0; idx < num_fns; idx++)
2905 fn_out_state = lto_function_decl_states[idx];
2906 lto_output_decl_state_refs (ob, fn_out_state);
2909 lto_write_stream (ob->main_stream);
2910 lto_write_stream (ob->string_stream);
2912 lto_end_section ();
2914 /* Write the symbol table. It is used by the linker to determine dependencies,
2915 and thus we can skip it for WPA. */
2916 if (!flag_wpa)
2917 produce_symtab (ob);
2919 /* Write command line opts. */
2920 lto_write_options ();
2922 /* Deallocate memory and clean up. */
2923 for (idx = 0; idx < num_fns; idx++)
2925 fn_out_state =
2926 lto_function_decl_states[idx];
2927 lto_delete_out_decl_state (fn_out_state);
2929 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2930 lto_function_decl_states.release ();
2931 destroy_output_block (ob);
2932 if (lto_stream_offload_p)
2933 lto_write_mode_table ();