/* Write the GIMPLE representation to a file stream.

   Copyright (C) 2009-2014 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
   Re-implemented by Diego Novillo <dnovillo@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "lto-symtab.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "gimple-streamer.h"
#include "tree-streamer.h"
#include "streamer-hooks.h"

static void lto_write_tree (struct output_block *, tree, bool);

/* Clear the line info stored in OB.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
}

/* Create the output block and return it.  SECTION_TYPE is
   LTO_section_function_body or LTO_static_initializer.  */

struct output_block *
create_output_block (enum lto_section_type section_type)
{
  struct output_block *ob = XCNEW (struct output_block);

  ob->section_type = section_type;
  ob->decl_state = lto_get_out_decl_state ();
  ob->main_stream = XCNEW (struct lto_output_stream);
  ob->string_stream = XCNEW (struct lto_output_stream);
  ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);

  if (section_type == LTO_section_function_body)
    ob->cfg_stream = XCNEW (struct lto_output_stream);

  clear_line_info (ob);

  ob->string_hash_table.create (37);
  gcc_obstack_init (&ob->obstack);

  return ob;
}

/* Destroy the output block OB.  */

void
destroy_output_block (struct output_block *ob)
{
  enum lto_section_type section_type = ob->section_type;

  ob->string_hash_table.dispose ();

  free (ob->main_stream);
  free (ob->string_stream);
  if (section_type == LTO_section_function_body)
    free (ob->cfg_stream);

  streamer_tree_cache_delete (ob->writer_cache);
  obstack_free (&ob->obstack, NULL);

  free (ob);
}

/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}

/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
    return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
  else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
            || TREE_CODE (t) == TYPE_DECL
            || TREE_CODE (t) == CONST_DECL
            || TREE_CODE (t) == NAMELIST_DECL)
           && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
           && variably_modified_type_p (t, NULL_TREE))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
           && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
    return false;
  else
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}

/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
                     location_t loc)
{
  expanded_location xloc;

  loc = LOCATION_LOCUS (loc);
  bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
  if (loc == UNKNOWN_LOCATION)
    return;

  xloc = expand_location (loc);

  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    bp_pack_var_len_unsigned (bp,
                              streamer_string_index (ob, xloc.file,
                                                     strlen (xloc.file) + 1,
                                                     true));
  ob->current_file = xloc.file;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}

/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
         lto_output_tree.  */
      gcc_unreachable ();
    }
}
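
/* Each of the cases above emits a small fixed record: an LTO_*_ref tag
   followed by the node's index in the appropriate decl or type table,
   instead of the node's full physical representation.  */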

/* Return true if EXPR is a tree node that can be written to disk.  */

static inline bool
lto_is_streamable (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  /* Notice that we reject SSA_NAMEs as well.  We only emit the SSA
     name version in lto_output_tree_ref (see output_ssa_names).  */
  return !is_lang_specific (expr)
         && code != SSA_NAME
         && code != CALL_EXPR
         && code != LANG_TYPE
         && code != MODIFY_EXPR
         && code != INIT_EXPR
         && code != TARGET_EXPR
         && code != BIND_EXPR
         && code != WITH_CLEANUP_EXPR
         && code != STATEMENT_LIST
         && (code == CASE_LABEL_EXPR
             || code == DECL_EXPR
             || TREE_CODE_CLASS (code) != tcc_statement);
}

/* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL.  */

static tree
get_symbol_initial_value (struct output_block *ob, tree expr)
{
  gcc_checking_assert (DECL_P (expr)
                       && TREE_CODE (expr) != FUNCTION_DECL
                       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);

  /* Handle DECL_INITIAL for symbols.  */
  tree initial = DECL_INITIAL (expr);
  if (TREE_CODE (expr) == VAR_DECL
      && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
      && !DECL_IN_CONSTANT_POOL (expr)
      && initial)
    {
      lto_symtab_encoder_t encoder;
      varpool_node *vnode;

      encoder = ob->decl_state->symtab_node_encoder;
      vnode = varpool_get_node (expr);
      if (!vnode
          || !lto_symtab_encoder_encode_initializer_p (encoder,
                                                       vnode))
        initial = error_mark_node;
    }

  return initial;
}

/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  bitpack_d bp = bitpack_create (ob->main_stream);
  streamer_pack_tree_bitfields (ob, &bp, expr);
  streamer_write_bitpack (&bp);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  */
      tree initial = get_symbol_initial_value (ob, expr);
      stream_write_tree (ob, initial, ref_p);
    }
}

/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
                    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}

/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
                   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  gcc_checking_assert (expr != NULL_TREE
                       && !(this_ref_p && tree_is_indexable (expr)));

  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
                                              expr, hash, &ix);
  gcc_assert (!exists_p);
  if (streamer_handle_as_builtin_p (expr))
    {
      /* MD and NORMAL builtins do not need to be written out
         completely as they are always instantiated by the
         compiler on startup.  The only builtins that need to
         be written out are BUILT_IN_FRONTEND.  For all other
         builtins, we simply write the class and code.  */
      streamer_write_builtin (ob, expr);
    }
  else if (TREE_CODE (expr) == INTEGER_CST
           && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
         original type to be materialized by the reader (to implement
         TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
         to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
};

struct scc_entry
{
  tree t;
  hashval_t hash;
};

static unsigned int next_dfs_num;
static vec<scc_entry> sccstack;
static struct pointer_map_t *sccstate;
static struct obstack sccstate_obstack;

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
                tree expr, bool ref_p, bool this_ref_p);
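
/* The walk implemented by DFS_write_tree/DFS_write_tree_body is a
   Tarjan-style SCC computation: every visited tree gets a DFS number and a
   low-link value in its sccs state and is pushed on SCCSTACK, and a
   completed SCC is popped and streamed as soon as a node's low-link equals
   its own DFS number.  */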

/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.  */

static void
DFS_write_tree_body (struct output_block *ob,
                     tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
        DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
        DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
          && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
          && ANON_AGGRNAME_P (DECL_NAME (expr)))
        ;
      else
        DFS_follow_tree_edge (DECL_NAME (expr));
      DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
         special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
         for early inlining so drop it on the floor instead of ICEing in
         dwarf2out.c.  */

      if ((TREE_CODE (expr) == VAR_DECL
           || TREE_CODE (expr) == PARM_DECL)
          && DECL_HAS_VALUE_EXPR_P (expr))
        DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (TREE_CODE (expr) == VAR_DECL)
        DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
        DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
      DFS_follow_tree_edge (DECL_VINDEX (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
        DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
      DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
      DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      /* Do not DECL_FUNCTION_SPECIFIC_TARGET.  They will be regenerated.  */
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
         reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
         during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
         to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
        DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
        for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
          DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
               || TREE_CODE (expr) == METHOD_TYPE)
        DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      if (!POINTER_TYPE_P (expr))
        DFS_follow_tree_edge (TYPE_MINVAL (expr));
      DFS_follow_tree_edge (TYPE_MAXVAL (expr));
      if (RECORD_OR_UNION_TYPE_P (expr))
        DFS_follow_tree_edge (TYPE_BINFO (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
        DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
        DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
        /* ???  FIXME.  See also streamer_write_chain.  */
        if (!(VAR_OR_FUNCTION_DECL_P (t)
              && DECL_EXTERNAL (t)))
          DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
         handle - those that represent inlined function scopes.
         For the rest, drop them on the floor instead of ICEing
         in dwarf2out.c.  */
      if (inlined_function_outer_scope_p (expr))
        {
          tree ultimate_origin = block_ultimate_origin (expr);
          DFS_follow_tree_edge (ultimate_origin);
        }
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
         information for early inlined BLOCKs so drop it on the floor instead
         of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
         streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
         list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
         EXPR's header (see streamer_write_tree_header) because this length
         is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
        DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
         EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
        DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
         and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
        {
          DFS_follow_tree_edge (index);
          DFS_follow_tree_edge (value);
        }
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
        DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
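
/* The edges followed above have to mirror the pointer fields written by
   streamer_write_tree_body; if the two disagree, trees end up being streamed
   outside of the DFS walk and the in_dfs_walk assertion in lto_output_tree
   triggers.  */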

/* Return a hash value for the tree T.  */

static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, tree t)
{
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hashval_t v = iterative_hash_host_wide_int (code, 0);
  if (!TYPE_P (t))
    {
      v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
                                        | (TREE_CONSTANT (t) << 1)
                                        | (TREE_READONLY (t) << 2)
                                        | (TREE_PUBLIC (t) << 3), v);
    }
  v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
                                    | (TREE_THIS_VOLATILE (t) << 1), v);
  if (DECL_P (t))
    v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
  else if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
  else
    v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
  v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
                                    | (TREE_STATIC (t) << 1)
                                    | (TREE_PROTECTED (t) << 2)
                                    | (TREE_DEPRECATED (t) << 3), v);
  if (code != TREE_BINFO)
    v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
  if (TYPE_P (t))
    v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
                                      | (TYPE_ADDR_SPACE (t) << 1), v);
  else if (code == SSA_NAME)
    v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
      v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      v = iterative_hash_host_wide_int (r.cl, v);
      v = iterative_hash_host_wide_int (r.decimal
                                        | (r.sign << 1)
                                        | (r.signalling << 2)
                                        | (r.canonical << 3), v);
      v = iterative_hash_host_wide_int (r.uexp, v);
      for (unsigned i = 0; i < SIGSZ; ++i)
        v = iterative_hash_host_wide_int (r.sig[i], v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      v = iterative_hash_host_wide_int (f.mode, v);
      v = iterative_hash_host_wide_int (f.data.low, v);
      v = iterative_hash_host_wide_int (f.data.high, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      v = iterative_hash_host_wide_int (DECL_MODE (t), v);
      v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
                                        | (DECL_VIRTUAL_P (t) << 1)
                                        | (DECL_IGNORED_P (t) << 2)
                                        | (DECL_ABSTRACT (t) << 3)
                                        | (DECL_ARTIFICIAL (t) << 4)
                                        | (DECL_USER_ALIGN (t) << 5)
                                        | (DECL_PRESERVE_P (t) << 6)
                                        | (DECL_EXTERNAL (t) << 7)
                                        | (DECL_GIMPLE_REG_P (t) << 8), v);
      v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
      if (code == LABEL_DECL)
        {
          v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
          v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
        }
      else if (code == FIELD_DECL)
        {
          v = iterative_hash_host_wide_int (DECL_PACKED (t)
                                            | (DECL_NONADDRESSABLE_P (t) << 1),
                                            v);
          v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
        }
      else if (code == VAR_DECL)
        {
          v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
                                            | (DECL_NONLOCAL_FRAME (t) << 1),
                                            v);
        }
      if (code == RESULT_DECL
          || code == PARM_DECL
          || code == VAR_DECL)
        {
          v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
          if (code == VAR_DECL
              || code == PARM_DECL)
            v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
        }
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      v = iterative_hash_host_wide_int ((DECL_COMMON (t))
                                        | (DECL_DLLIMPORT_P (t) << 1)
                                        | (DECL_WEAK (t) << 2)
                                        | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
                                        | (DECL_COMDAT (t) << 4)
                                        | (DECL_VISIBILITY_SPECIFIED (t) << 6),
                                        v);
      v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
      if (code == VAR_DECL)
        {
          /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
          v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
                                            | (DECL_IN_CONSTANT_POOL (t) << 1),
                                            v);
          v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
        }
      if (TREE_CODE (t) == FUNCTION_DECL)
        v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
                                          | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
                                          | (DECL_CXX_DESTRUCTOR_P (t) << 2),
                                          v);
      if (VAR_OR_FUNCTION_DECL_P (t))
        v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
      v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
                                        | (DECL_STATIC_DESTRUCTOR (t) << 1)
                                        | (DECL_UNINLINABLE (t) << 2)
                                        | (DECL_POSSIBLY_INLINED (t) << 3)
                                        | (DECL_IS_NOVOPS (t) << 4)
                                        | (DECL_IS_RETURNS_TWICE (t) << 5)
                                        | (DECL_IS_MALLOC (t) << 6)
                                        | (DECL_IS_OPERATOR_NEW (t) << 7)
                                        | (DECL_DECLARED_INLINE_P (t) << 8)
                                        | (DECL_STATIC_CHAIN (t) << 9)
                                        | (DECL_NO_INLINE_WARNING_P (t) << 10)
                                        | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
                                        | (DECL_NO_LIMIT_STACK (t) << 12)
                                        | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
                                        | (DECL_PURE_P (t) << 14)
                                        | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
        v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
      if (DECL_STATIC_DESTRUCTOR (t))
        v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
      v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
                                        | (TYPE_NO_FORCE_BLK (t) << 1)
                                        | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
                                        | (TYPE_PACKED (t) << 3)
                                        | (TYPE_RESTRICT (t) << 4)
                                        | (TYPE_USER_ALIGN (t) << 5)
                                        | (TYPE_READONLY (t) << 6), v);
      if (RECORD_OR_UNION_TYPE_P (t))
        {
          v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
                                            | (TYPE_FINAL_P (t) << 1), v);
        }
      else if (code == ARRAY_TYPE)
        v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
      v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
      v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
      v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
                                         || (!in_lto_p
                                             && get_alias_set (t) == 0))
                                        ? 0 : -1, v);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
                        strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    v = iterative_hash (t, sizeof (struct cl_target_option), v);

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    v = iterative_hash (t, sizeof (struct cl_optimization), v);

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (POINTER_TYPE_P (t))
        {
          /* For pointers factor in the pointed-to type recursively as
             we cannot recurse through only pointers.
             ???  We can generalize this by keeping track of the
             in-SCC edges for each tree (or arbitrarily the first
             such edge) and hashing that in in a second stage
             (instead of the quadratic mixing of the SCC we do now).  */
          hashval_t x;
          unsigned ix;
          if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
            x = streamer_tree_cache_get_hash (cache, ix);
          else
            x = hash_tree (cache, TREE_TYPE (t));
          v = iterative_hash_hashval_t (x, v);
        }
      else if (code != IDENTIFIER_NODE)
        visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
      visit (VECTOR_CST_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
          && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
          && ANON_AGGRNAME_P (DECL_NAME (t)))
        ;
      else
        visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
        ;
      else
        visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
           || code == PARM_DECL)
          && DECL_HAS_VALUE_EXPR_P (t))
        visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
          && DECL_HAS_DEBUG_EXPR_P (t))
        visit (DECL_DEBUG_EXPR (t));
      /* ???  Hash DECL_INITIAL as streamed.  Needs the output-block to
         be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
        visit (DECL_ORIGINAL_TYPE (t));
      visit (DECL_VINDEX (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
        visit (DECL_ASSEMBLER_NAME (t));
      visit (DECL_SECTION_NAME (t));
      visit (DECL_COMDAT_GROUP (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_FUNCTION_PERSONALITY (t));
      /* Do not follow DECL_FUNCTION_SPECIFIC_TARGET.  */
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
        ;
      else
        visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
        visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
        visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
        for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
          visit (f);
      else if (code == FUNCTION_TYPE
               || code == METHOD_TYPE)
        visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
        visit (TYPE_MINVAL (t));
      visit (TYPE_MAXVAL (t));
      if (RECORD_OR_UNION_TYPE_P (t))
        visit (TYPE_BINFO (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
        visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;

      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
        visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
        visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
         and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
        {
          visit (index);
          visit (value);
        }
    }

  if (code == OMP_CLAUSE)
    {
      int i;

      v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
      switch (OMP_CLAUSE_CODE (t))
        {
        case OMP_CLAUSE_DEFAULT:
          v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
          break;
        case OMP_CLAUSE_SCHEDULE:
          v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
          break;
        case OMP_CLAUSE_DEPEND:
          v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
          break;
        case OMP_CLAUSE_MAP:
          v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
          break;
        case OMP_CLAUSE_PROC_BIND:
          v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
          break;
        case OMP_CLAUSE_REDUCTION:
          v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
          break;
        default:
          break;
        }
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
        visit (OMP_CLAUSE_OPERAND (t, i));
      visit (OMP_CLAUSE_CHAIN (t));
    }

  return v;

#undef visit
}

/* Compare two SCC entries by their hash value for qsorting them.  */

static int
scc_entry_compare (const void *p1_, const void *p2_)
{
  const scc_entry *p1 = (const scc_entry *) p1_;
  const scc_entry *p2 = (const scc_entry *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}

/* Return a hash value for the SCC on the SCC stack from FIRST with
   size SIZE.  */

static hashval_t
hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
{
  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);

  if (size == 1)
    return sccstack[first].hash;

  /* Sort the SCC of type, hash pairs so that when we mix in
     all members of the SCC the hash value becomes independent of
     the order we visited the SCC.  Disregard hashes equal to
     the hash of the tree we mix into because we cannot guarantee
     a stable sort for those across different TUs.  */
  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
  hashval_t *tem = XALLOCAVEC (hashval_t, size);
  for (unsigned i = 0; i < size; ++i)
    {
      hashval_t hash = sccstack[first+i].hash;
      hashval_t orig_hash = hash;
      unsigned j;
      /* Skip same hashes.  */
      for (j = i + 1;
           j < size && sccstack[first+j].hash == orig_hash; ++j)
        ;
      for (; j < size; ++j)
        hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
        hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
      tem[i] = hash;
    }
  hashval_t scc_hash = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      sccstack[first+i].hash = tem[i];
      scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
    }
  return scc_hash;
}
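
/* This makes the SCC hash independent of the DFS visit order: the member
   hashes are sorted, each member then mixes in the other members' hashes
   (skipping those equal to its own, which cannot be ordered stably across
   different TUs), and the final SCC hash combines the per-member results in
   sorted order.  */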

/* DFS walk EXPR and stream SCCs of tree bodies if they are not
   already in the streamer cache.  Main routine called for
   each visit of EXPR.  */

static void
DFS_write_tree (struct output_block *ob, sccs *from_state,
                tree expr, bool ref_p, bool this_ref_p)
{
  unsigned ix;
  sccs **slot;

  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
    return;

  slot = (sccs **)pointer_map_insert (sccstate, expr);
  sccs *cstate = *slot;
  if (!cstate)
    {
      scc_entry e = { expr, 0 };
      /* Not yet visited.  DFS recurse and push it onto the stack.  */
      *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
      sccstack.safe_push (e);
      cstate->dfsnum = next_dfs_num++;
      cstate->low = cstate->dfsnum;

      if (streamer_handle_as_builtin_p (expr))
        ;
      else if (TREE_CODE (expr) == INTEGER_CST
               && !TREE_OVERFLOW (expr))
        DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
      else
        {
          DFS_write_tree_body (ob, expr, cstate, ref_p);

          /* Walk any LTO-specific edges.  */
          if (DECL_P (expr)
              && TREE_CODE (expr) != FUNCTION_DECL
              && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
            {
              /* Handle DECL_INITIAL for symbols.  */
              tree initial = get_symbol_initial_value (ob, expr);
              DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
            }
        }

      /* See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
        {
          unsigned first, size;
          tree x;

          /* Pop the SCC and compute its size.  */
          first = sccstack.length ();
          do
            {
              x = sccstack[--first].t;
            }
          while (x != expr);
          size = sccstack.length () - first;

          /* No need to compute hashes for LTRANS units, we don't perform
             any merging there.  */
          hashval_t scc_hash = 0;
          unsigned scc_entry_len = 0;
          if (!flag_wpa)
            {
              scc_hash = hash_scc (ob->writer_cache, first, size);

              /* Put the entries with the least number of collisions first.  */
              unsigned entry_start = 0;
              scc_entry_len = size + 1;
              for (unsigned i = 0; i < size;)
                {
                  unsigned from = i;
                  for (i = i + 1; i < size
                       && (sccstack[first + i].hash
                           == sccstack[first + from].hash); ++i)
                    ;
                  if (i - from < scc_entry_len)
                    {
                      scc_entry_len = i - from;
                      entry_start = from;
                    }
                }
              for (unsigned i = 0; i < scc_entry_len; ++i)
                {
                  scc_entry tem = sccstack[first + i];
                  sccstack[first + i] = sccstack[first + entry_start + i];
                  sccstack[first + entry_start + i] = tem;
                }
            }

          /* Write LTO_tree_scc.  */
          streamer_write_record_start (ob, LTO_tree_scc);
          streamer_write_uhwi (ob, size);
          streamer_write_uhwi (ob, scc_hash);

          /* Write size-1 SCCs without wrapping them inside SCC bundles.
             All INTEGER_CSTs need to be handled this way as we need
             their type to materialize them.  Also builtins are handled
             this way.
             ???  We still wrap these in LTO_tree_scc so at the
             input side we can properly identify the tree we want
             to ultimately return.  */
          size_t old_len = ob->writer_cache->nodes.length ();
          if (size == 1)
            lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
          else
            {
              /* Write the size of the SCC entry candidates.  */
              streamer_write_uhwi (ob, scc_entry_len);

              /* Write all headers and populate the streamer cache.  */
              for (unsigned i = 0; i < size; ++i)
                {
                  hashval_t hash = sccstack[first+i].hash;
                  tree t = sccstack[first+i].t;
                  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
                                                              t, hash, &ix);
                  gcc_assert (!exists_p);

                  if (!lto_is_streamable (t))
                    internal_error ("tree code %qs is not supported "
                                    "in LTO streams",
                                    get_tree_code_name (TREE_CODE (t)));

                  gcc_checking_assert (!streamer_handle_as_builtin_p (t));

                  /* Write the header, containing everything needed to
                     materialize EXPR on the reading side.  */
                  streamer_write_tree_header (ob, t);
                }

              /* Write the bitpacks and tree references.  */
              for (unsigned i = 0; i < size; ++i)
                {
                  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

                  /* Mark the end of the tree.  */
                  streamer_write_zero (ob);
                }
            }

          gcc_assert (old_len + size == ob->writer_cache->nodes.length ());

          /* Finally truncate the vector.  */
          sccstack.truncate (first);

          if (from_state)
            from_state->low = MIN (from_state->low, cstate->low);
          return;
        }

      if (from_state)
        from_state->low = MIN (from_state->low, cstate->low);
      return;
    }

  gcc_checking_assert (from_state);
  if (cstate->dfsnum < from_state->dfsnum)
    from_state->low = MIN (cstate->dfsnum, from_state->low);
}

/* Emit the physical representation of tree node EXPR to output block
   OB.  If THIS_REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */

void
lto_output_tree (struct output_block *ob, tree expr,
                 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
         we don't write it more than once.  Otherwise, the reader
         will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
                           lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
         trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
         what tree edges we walk in the DFS walk and what edges
         are streamed later in lto_write_tree.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk.  */
      /* Save ob state ... */
      in_dfs_walk = true;
      sccstate = pointer_map_create ();
      gcc_obstack_init (&sccstate_obstack);
      DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
      sccstack.release ();
      pointer_map_destroy (sccstate);
      obstack_free (&sccstate_obstack, NULL);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
         ???  If expr ended up as a singleton we could have
         inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
                           lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
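
/* A tree that is already in the writer cache is therefore never pickled
   twice; later occurrences are emitted only as an LTO_tree_pickle_reference
   record carrying the cache index and the tree's tag.  */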

/* Output to OB a list of try/catch handlers starting with FIRST.  */

static void
output_eh_try_list (struct output_block *ob, eh_catch first)
{
  eh_catch n;

  for (n = first; n; n = n->next_catch)
    {
      streamer_write_record_start (ob, LTO_eh_catch);
      stream_write_tree (ob, n->type_list, true);
      stream_write_tree (ob, n->filter_list, true);
      stream_write_tree (ob, n->label, true);
    }

  streamer_write_record_start (ob, LTO_null);
}

/* Output EH region R in function FN to OB.  CURR_RN is the slot index
   that is being emitted in FN->EH->REGION_ARRAY.  This is used to
   detect EH region sharing.  */

static void
output_eh_region (struct output_block *ob, eh_region r)
{
  enum LTO_tags tag;

  if (r == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  if (r->type == ERT_CLEANUP)
    tag = LTO_ert_cleanup;
  else if (r->type == ERT_TRY)
    tag = LTO_ert_try;
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    tag = LTO_ert_allowed_exceptions;
  else if (r->type == ERT_MUST_NOT_THROW)
    tag = LTO_ert_must_not_throw;
  else
    gcc_unreachable ();

  streamer_write_record_start (ob, tag);
  streamer_write_hwi (ob, r->index);

  if (r->outer)
    streamer_write_hwi (ob, r->outer->index);
  else
    streamer_write_zero (ob);

  if (r->inner)
    streamer_write_hwi (ob, r->inner->index);
  else
    streamer_write_zero (ob);

  if (r->next_peer)
    streamer_write_hwi (ob, r->next_peer->index);
  else
    streamer_write_zero (ob);

  if (r->type == ERT_TRY)
    {
      output_eh_try_list (ob, r->u.eh_try.first_catch);
    }
  else if (r->type == ERT_ALLOWED_EXCEPTIONS)
    {
      stream_write_tree (ob, r->u.allowed.type_list, true);
      stream_write_tree (ob, r->u.allowed.label, true);
      streamer_write_uhwi (ob, r->u.allowed.filter);
    }
  else if (r->type == ERT_MUST_NOT_THROW)
    {
      stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
      bitpack_d bp = bitpack_create (ob->main_stream);
      stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
      streamer_write_bitpack (&bp);
    }

  if (r->landing_pads)
    streamer_write_hwi (ob, r->landing_pads->index);
  else
    streamer_write_zero (ob);
}

/* Output landing pad LP to OB.  */

static void
output_eh_lp (struct output_block *ob, eh_landing_pad lp)
{
  if (lp == NULL)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  streamer_write_record_start (ob, LTO_eh_landing_pad);
  streamer_write_hwi (ob, lp->index);
  if (lp->next_lp)
    streamer_write_hwi (ob, lp->next_lp->index);
  else
    streamer_write_zero (ob);

  if (lp->region)
    streamer_write_hwi (ob, lp->region->index);
  else
    streamer_write_zero (ob);

  stream_write_tree (ob, lp->post_landing_pad, true);
}

/* Output the existing eh_table to OB.  */

static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
        output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
        output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
        stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  */
      if (targetm.arm_eabi_unwinder)
        {
          tree t;
          streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
          FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
            stream_write_tree (ob, t, true);
        }
      else
        {
          uchar c;
          streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
          FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
            streamer_write_char_stream (ob->main_stream, c);
        }
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}

/* Output all of the active ssa names to the ssa_names stream.  */

static void
output_ssa_names (struct output_block *ob, struct function *fn)
{
  unsigned int i, len;

  len = vec_safe_length (SSANAMES (fn));
  streamer_write_uhwi (ob, len);

  for (i = 1; i < len; i++)
    {
      tree ptr = (*SSANAMES (fn))[i];

      if (ptr == NULL_TREE
          || SSA_NAME_IN_FREE_LIST (ptr)
          || virtual_operand_p (ptr))
        continue;

      streamer_write_uhwi (ob, i);
      streamer_write_char_stream (ob->main_stream,
                                  SSA_NAME_IS_DEFAULT_DEF (ptr));
      if (SSA_NAME_VAR (ptr))
        stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
      else
        /* ???  This drops SSA_NAME_IDENTIFIER on the floor.  */
        stream_write_tree (ob, TREE_TYPE (ptr), true);
    }

  streamer_write_zero (ob);
}
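
/* Each name streamed above is a record of its SSA version number, a byte
   for SSA_NAME_IS_DEFAULT_DEF and either SSA_NAME_VAR or, when that is
   absent, the name's type; a zero version terminates the list.  */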

/* Output the cfg.  */

static void
output_cfg (struct output_block *ob, struct function *fn)
{
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
                       profile_status_for_fn (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_fn (fn));

  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          streamer_write_uhwi (ob, e->dest->index);
          streamer_write_hwi (ob, e->probability);
          streamer_write_gcov_count (ob, e->count);
          streamer_write_uhwi (ob, e->flags);
        }
    }

  streamer_write_hwi (ob, -1);

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  streamer_write_hwi (ob, -1);

  /* ???  The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      struct loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
         the loop tree on the reader side.  Stream -1 for an unused
         loop entry.  */
      if (!loop)
        {
          streamer_write_hwi (ob, -1);
          continue;
        }
      else
        streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies.  */
      streamer_write_enum (ob->main_stream,
                           loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
        {
          streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
          streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
        }
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
        {
          streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
          streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
        }

      /* Write OMP SIMD related info.  */
      streamer_write_hwi (ob, loop->safelen);
      streamer_write_hwi (ob, loop->force_vect);
      stream_write_tree (ob, loop->simduid, true);
    }

  ob->main_stream = tmp_stream;
}
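
/* The CFG thus goes to its own stream: the profile status, the highest
   basic block number, each block's successor edges with probability, count
   and flags, the basic block chain, and the loop tree together with the
   bounds and OMP SIMD data that copy_loop_info expects on the reader
   side.  */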

/* Create the header in the file using OB.  If the section type is for
   a function, set FN to the decl for that function.  */

void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;
  struct lto_output_stream *header_stream;

  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.compressed_size = 0;

  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
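
/* A section written this way is laid out as the fixed-size
   lto_function_header followed by the CFG stream (for function bodies),
   the main stream and the string table, in that order.  */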

/* Output the base body of struct function FN using output block OB.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vect_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}

/* Output the body of function NODE->DECL.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->cgraph_node = node;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);

      /* We will renumber the statements.  The code that does this uses
         the same ordering that we use for serializing them so we can use
         the same code on the other end and not have to write out the
         statement numbers.  We do not assign UIDs to PHIs here because
         virtual PHIs get re-computed on-the-fly which would make numbers
         inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
        {
          gimple_stmt_iterator gsi;
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);

              /* Virtual PHIs are not going to be streamed.  */
              if (!virtual_operand_p (gimple_phi_result (stmt)))
                gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
            }
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
            }
        }
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
         virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
        {
          gimple_stmt_iterator gsi;
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              if (virtual_operand_p (gimple_phi_result (stmt)))
                gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
            }
        }

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
        output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
    }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
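
/* Statement UIDs are assigned above, before the body is streamed, so that
   the callgraph emitted later can refer to call statements by UID (see the
   comment before the symbol table output in the main entry point below).  */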

/* Emit toplevel asms.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_output_stream *header_stream;
  struct lto_asm_header header;

  if (! asm_nodes)
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  for (can = asm_nodes; can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof (header));
  lto_write_stream (header_stream);
  free (header_stream);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}

/* Copy the function body of NODE without deserializing.  */

static void
copy_function (struct cgraph_node *node)
{
  tree function = node->decl;
  struct lto_file_decl_data *file_data = node->lto_file_data;
  struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
  const char *data;
  size_t len;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
  char *section_name =
    lto_get_section_name (LTO_section_function_body, name, NULL);
  size_t i, j;
  struct lto_in_decl_state *in_state;
  struct lto_out_decl_state *out_state = lto_get_out_decl_state ();

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* We may have renamed the declaration, e.g., a static function.  */
  name = lto_get_decl_name_mapping (file_data, name);

  data = lto_get_section_data (file_data, LTO_section_function_body,
			       name, &len);

  /* Do a bit copy of the function body.  */
  lto_output_data_stream (output_stream, data, len);
  lto_write_stream (output_stream);

  /* Copy decls.  */
  in_state =
    lto_get_function_in_decl_state (node->lto_file_data, function);
  gcc_assert (in_state);

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size_t n = in_state->streams[i].size;
      tree *trees = in_state->streams[i].trees;
      struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);

      /* The out state must have the same indices as the in state.
	 So just copy the vector.  All the encoders in the out state
	 must still be empty when we reach here.  */
      gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
      encoder->trees.reserve_exact (n);
      for (j = 0; j < n; j++)
	encoder->trees.safe_push (trees[j]);
    }

  lto_free_section_data (file_data, LTO_section_function_body, name,
			 data, len);
  free (output_stream);
  lto_end_section ();
}
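
/* Note (added): copy_function is the path taken at WPA time for bodies
   that were never read in (see lto_output below).  The body section is
   copied byte-for-byte, so only the decl stream indices have to be
   re-recorded in the current out-decl state for the copied section to
   remain readable.  */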

/* Main entry point from the pass manager.  */

void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
  bitmap output = lto_bitmap_alloc ();
#endif
  int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* Initialize the streamer.  */
  lto_streamer_init ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Process only the functions with bodies.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *node = dyn_cast <cgraph_node> (snode);
      if (node
	  && lto_symtab_encoder_encode_body_p (encoder, node)
	  && !node->alias)
	{
#ifdef ENABLE_CHECKING
	  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
	  bitmap_set_bit (output, DECL_UID (node->decl));
#endif
	  decl_state = lto_new_out_decl_state ();
	  lto_push_out_decl_state (decl_state);
	  if (gimple_has_body_p (node->decl) || !flag_wpa)
	    output_function (node);
	  else
	    copy_function (node);
	  gcc_assert (lto_get_out_decl_state () == decl_state);
	  lto_pop_out_decl_state ();
	  lto_record_function_out_decl_state (node->decl, decl_state);
	}
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

#ifdef ENABLE_CHECKING
  lto_bitmap_free (output);
#endif
}
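
/* Note (added): each body above is streamed under its own
   lto_out_decl_state, recorded via lto_record_function_out_decl_state and
   later emitted by produce_asm_for_decls so the reader can rebuild the
   per-function decl streams.  */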

/* Write each node encoded by ENCODER to OB, as well as those reachable
   from it and required for correct representation of its semantics.
   Each node in ENCODER must be a global declaration or a type.  A node
   is written only once, even if it appears multiple times in the
   vector.  Certain transitively-reachable nodes, such as those
   representing expressions, may be duplicated, but such nodes
   must not appear in ENCODER itself.  */

static void
write_global_stream (struct output_block *ob,
		     struct lto_tree_ref_encoder *encoder)
{
  tree t;
  size_t index;
  const size_t size = lto_tree_ref_encoder_size (encoder);

  for (index = 0; index < size; index++)
    {
      t = lto_tree_ref_encoder_get_tree (encoder, index);
      if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
	stream_write_tree (ob, t, false);
    }
}

/* Write a sequence of indices into the globals vector corresponding
   to the trees in ENCODER.  These are used by the reader to map the
   indices used to refer to global entities within function bodies to
   their referents.  */

static void
write_global_references (struct output_block *ob,
			 struct lto_output_stream *ref_stream,
			 struct lto_tree_ref_encoder *encoder)
{
  tree t;
  uint32_t index;
  const uint32_t size = lto_tree_ref_encoder_size (encoder);

  /* Write size as 32-bit unsigned.  */
  lto_output_data_stream (ref_stream, &size, sizeof (int32_t));

  for (index = 0; index < size; index++)
    {
      uint32_t slot_num;

      t = lto_tree_ref_encoder_get_tree (encoder, index);
      streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
      gcc_assert (slot_num != (unsigned)-1);
      lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
    }
}
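
/* Note (added): each reference stream written by write_global_references
   is therefore:

     uint32_t  n         number of trees in the encoder
     uint32_t  slot[n]   writer-cache slot of each tree

   i.e. sizeof (int32_t) * (n + 1) bytes, which is exactly what
   lto_out_decl_state_written_size accounts for below.  */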

/* Write all the streams in an lto_out_decl_state STATE using
   output block OB.  */

void
lto_output_decl_state_streams (struct output_block *ob,
			       struct lto_out_decl_state *state)
{
  int i;

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    write_global_stream (ob, &state->streams[i]);
}

/* Write all the references in an lto_out_decl_state STATE using
   output block OB and output stream OUT_STREAM.  */

void
lto_output_decl_state_refs (struct output_block *ob,
			    struct lto_output_stream *out_stream,
			    struct lto_out_decl_state *state)
{
  unsigned i;
  uint32_t ref;
  tree decl;

  /* Write a reference to the FUNCTION_DECL.  If there is no function,
     write a reference to void_type_node.  */
  decl = (state->fn_decl) ? state->fn_decl : void_type_node;
  streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
  gcc_assert (ref != (unsigned)-1);
  lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    write_global_references (ob, out_stream, &state->streams[i]);
}

/* Return the written size of STATE.  */

static size_t
lto_out_decl_state_written_size (struct lto_out_decl_state *state)
{
  int i;
  size_t size;

  size = sizeof (int32_t);	/* fn_ref.  */
  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      size += sizeof (int32_t);	/* vector size.  */
      size += (lto_tree_ref_encoder_size (&state->streams[i])
	       * sizeof (int32_t));
    }
  return size;
}
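
/* Worked example (added): a hypothetical state whose fn_ref is followed by
   three streams of sizes 2, 0 and 5 is written as
   4 + (4 + 2*4) + (4 + 0*4) + (4 + 5*4) = 44 bytes.  */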

/* Write symbol T into STREAM, using CACHE to find its slot number.
   SEEN specifies symbols we have already written.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      struct lto_output_stream *stream,
	      tree t, struct pointer_set_t *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility;
  unsigned slot_num;
  unsigned HOST_WIDEST_INT size;
  const char *comdat;
  unsigned char c;

  /* None of the following kinds of symbols are needed in the
     symbol table.  */
  if (!TREE_PUBLIC (t)
      || is_builtin_fn (t)
      || DECL_ABSTRACT (t)
      || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
    return;
  gcc_assert (TREE_CODE (t) != RESULT_DECL);

  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == FUNCTION_DECL);

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  if (pointer_set_contains (seen, name))
    return;
  pointer_set_insert (seen, name);

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have a node attached.  */
      gcc_assert (alias || TREE_CODE (t) != VAR_DECL
		  || varpool_get_node (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_get_node (t)
		      && cgraph_get_node (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external does.
     When a symbol is external, we need to output it with DEFAULT
     visibility when compiling with -fvisibility=default, but with HIDDEN
     visibility when the symbol has attribute (visibility("hidden"))
     specified.  targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED
     and gets this right.  */
  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
  else
    comdat = "";

  lto_output_data_stream (stream, name, strlen (name) + 1);
  lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_output_data_stream (stream, &c, 1);
  c = (unsigned char) visibility;
  lto_output_data_stream (stream, &c, 1);
  lto_output_data_stream (stream, &size, 8);
  lto_output_data_stream (stream, &slot_num, 4);
}
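
/* Note (added): each symbol record emitted above has the fixed layout

     name     NUL-terminated assembler name
     comdat   NUL-terminated comdat group ("" if none)
     kind     1 byte  (enum gcc_plugin_symbol_kind)
     visib    1 byte  (enum gcc_plugin_symbol_visibility)
     size     8 bytes (only meaningful for GCCPK_COMMON)
     slot     4 bytes (index into the streamer tree cache)

   and this is the record format of the symtab section written by
   produce_symtab below.  */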

/* Return true if NODE should appear in the plugin symbol table.  */

static bool
output_symbol_p (symtab_node *node)
{
  struct cgraph_node *cnode;
  if (!symtab_real_symbol_p (node))
    return false;
  /* We keep external functions in symtab for sake of inlining
     and devirtualization.  We do not want to see them in symbol table as
     references unless they are really used.  */
  cnode = dyn_cast <cgraph_node> (node);
  if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
      && cnode->callers)
    return true;

  /* Ignore all references from external vars initializers - they are not
     really part of the compilation unit until they are used by folding.
     Some symbols, like references to external construction vtables, can
     not be referred to at all.  We decide this at
     can_refer_decl_in_current_unit_p.  */
  if (!node->definition || DECL_EXTERNAL (node->decl))
    {
      int i;
      struct ipa_ref *ref;
      for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
						  i, ref); i++)
	{
	  if (ref->use == IPA_REF_ALIAS)
	    continue;
	  if (is_a <cgraph_node> (ref->referring))
	    return true;
	  if (!DECL_EXTERNAL (ref->referring->decl))
	    return true;
	}
      return false;
    }
  return true;
}
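
/* Note (added): the net effect of output_symbol_p is that only real
   symbols reach the plugin table, and external declarations are listed
   only when they are referenced from code or from a non-external
   variable, not merely from the initializers of other external
   variables.  */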

/* Write an IL symbol table to OB.  The symbols written are those
   recorded in OB's symtab node encoder.  */

static void
produce_symtab (struct output_block *ob)
{
  struct streamer_tree_cache_d *cache = ob->writer_cache;
  char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
  struct pointer_set_t *seen;
  struct lto_output_stream stream;
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  lto_symtab_encoder_iterator lsei;

  lto_begin_section (section_name, false);
  free (section_name);

  seen = pointer_set_create ();
  memset (&stream, 0, sizeof (stream));

  /* Write the symbol table.
     First write everything defined and then all declarations.
     This is necessary to handle cases where we have duplicated symbols.  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, &stream, node->decl, seen, false);
    }
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
	continue;
      write_symbol (cache, &stream, node->decl, seen, false);
    }

  lto_write_stream (&stream);
  pointer_set_destroy (seen);

  lto_end_section ();
}
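
/* Note (added): defined symbols are written before external declarations
   (two passes over the encoder above).  Because write_symbol skips any
   name already in SEEN, the defined entry wins whenever a name is both
   defined and declared.  */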

/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  It causes
   the vector of all of the global decls and types used from this file
   to be written to a section that can then be read back in to recover
   these on the other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  struct lto_output_stream *header_stream, *decl_state_stream;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  gcc_assert (!alias_pairs);

  /* Write the global symbols.  */
  out_state = lto_get_out_decl_state ();
  num_fns = lto_function_decl_states.length ();
  lto_output_decl_state_streams (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.lto_header.major_version = LTO_major_version;
  header.lto_header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  header_stream = XCNEW (struct lto_output_stream);
  lto_output_data_stream (header_stream, &header, sizeof header);
  lto_write_stream (header_stream);
  free (header_stream);

  /* Write the main out-decl state, followed by the out-decl states of
     functions.  */
  decl_state_stream = XCNEW (struct lto_output_stream);
  num_decl_states = num_fns + 1;
  lto_output_data_stream (decl_state_stream, &num_decl_states,
			  sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, decl_state_stream, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
    }
  lto_write_stream (decl_state_stream);
  free (decl_state_stream);

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by the linker to determine
     dependencies, and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
}
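
/* Note (added): roughly, the LTO_section_decls section written by
   produce_asm_for_decls is laid out as:

     struct lto_decl_header    sizes of the pieces below
     int32_t num_decl_states   1 + number of per-function states
     <decl state refs>         fn_ref plus global-stream indices for the
			       global state, then for each function state
     <main stream>             the pickled global decls and types
     <string stream>           string table

   matching the order of the lto_write_stream calls above.  */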