1 /* Write and read the cgraph to the memory mapped representation of a
4 Copyright (C) 2009-2014 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
28 #include "stringpool.h"
34 #include "hard-reg-set.h"
37 #include "basic-block.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
46 #include "langhooks.h"
48 #include "diagnostic-core.h"
52 #include "plugin-api.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
59 #include "tree-pass.h"
62 #include "pass_manager.h"
63 #include "ipa-utils.h"
66 /* True when asm nodes has been output. */
67 bool asm_nodes_output
= false;
69 static void output_cgraph_opt_summary (void);
70 static void input_cgraph_opt_summary (vec
<symtab_node
*> nodes
);
72 /* Number of LDPR values known to GCC. */
73 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
75 /* All node orders are ofsetted by ORDER_BASE. */
76 static int order_base
;
/* Cgraph streaming is organized as set of record whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  LTO_symtab_indirect_edge,
  LTO_symtab_variable,
  LTO_symtab_last_tag
};
95 /* Create a new symtab encoder.
96 if FOR_INPUT, the encoder allocate only datastructures needed
97 to read the symtab. */
100 lto_symtab_encoder_new (bool for_input
)
102 lto_symtab_encoder_t encoder
= XCNEW (struct lto_symtab_encoder_d
);
105 encoder
->map
= new hash_map
<symtab_node
*, size_t>;
106 encoder
->nodes
.create (0);
111 /* Delete ENCODER and its components. */
114 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder
)
116 encoder
->nodes
.release ();
123 /* Return the existing reference number of NODE in the symtab encoder in
124 output block OB. Assign a new reference if this is the first time
128 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder
,
135 lto_encoder_entry entry
= {node
, false, false, false};
137 ref
= encoder
->nodes
.length ();
138 encoder
->nodes
.safe_push (entry
);
142 size_t *slot
= encoder
->map
->get (node
);
145 lto_encoder_entry entry
= {node
, false, false, false};
146 ref
= encoder
->nodes
.length ();
148 encoder
->map
->put (node
, ref
+ 1);
149 encoder
->nodes
.safe_push (entry
);
157 /* Remove NODE from encoder. */
160 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder
,
164 lto_encoder_entry last_node
;
166 size_t *slot
= encoder
->map
->get (node
);
167 if (slot
== NULL
|| !*slot
)
171 gcc_checking_assert (encoder
->nodes
[index
].node
== node
);
173 /* Remove from vector. We do this by swapping node with the last element
175 last_node
= encoder
->nodes
.pop ();
176 if (last_node
.node
!= node
)
178 gcc_assert (encoder
->map
->put (last_node
.node
, index
+ 1));
180 /* Move the last element to the original spot of NODE. */
181 encoder
->nodes
[index
] = last_node
;
184 /* Remove element from hash table. */
185 encoder
->map
->remove (node
);
190 /* Return TRUE if we should encode initializer of NODE (if any). */
193 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder
,
194 struct cgraph_node
*node
)
196 int index
= lto_symtab_encoder_lookup (encoder
, node
);
197 return encoder
->nodes
[index
].body
;
200 /* Return TRUE if we should encode body of NODE (if any). */
203 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder
,
204 struct cgraph_node
*node
)
206 int index
= lto_symtab_encoder_encode (encoder
, node
);
207 gcc_checking_assert (encoder
->nodes
[index
].node
== node
);
208 encoder
->nodes
[index
].body
= true;
211 /* Return TRUE if we should encode initializer of NODE (if any). */
214 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder
,
217 int index
= lto_symtab_encoder_lookup (encoder
, node
);
218 if (index
== LCC_NOT_FOUND
)
220 return encoder
->nodes
[index
].initializer
;
223 /* Return TRUE if we should encode initializer of NODE (if any). */
226 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder
,
229 int index
= lto_symtab_encoder_lookup (encoder
, node
);
230 encoder
->nodes
[index
].initializer
= true;
233 /* Return TRUE if we should encode initializer of NODE (if any). */
236 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder
,
239 int index
= lto_symtab_encoder_lookup (encoder
, node
);
240 if (index
== LCC_NOT_FOUND
)
242 return encoder
->nodes
[index
].in_partition
;
245 /* Return TRUE if we should encode body of NODE (if any). */
248 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder
,
251 /* Ignore not needed nodes. */
252 if (!node
->need_dump
)
254 int index
= lto_symtab_encoder_encode (encoder
, node
);
255 encoder
->nodes
[index
].in_partition
= true;
258 /* Output the cgraph EDGE to OB using ENCODER. */
261 lto_output_edge (struct lto_simple_output_block
*ob
, struct cgraph_edge
*edge
,
262 lto_symtab_encoder_t encoder
)
268 if (edge
->indirect_unknown_callee
)
269 streamer_write_enum (ob
->main_stream
, LTO_symtab_tags
, LTO_symtab_last_tag
,
270 LTO_symtab_indirect_edge
);
272 streamer_write_enum (ob
->main_stream
, LTO_symtab_tags
, LTO_symtab_last_tag
,
275 ref
= lto_symtab_encoder_lookup (encoder
, edge
->caller
);
276 gcc_assert (ref
!= LCC_NOT_FOUND
);
277 streamer_write_hwi_stream (ob
->main_stream
, ref
);
279 if (!edge
->indirect_unknown_callee
)
281 ref
= lto_symtab_encoder_lookup (encoder
, edge
->callee
);
282 gcc_assert (ref
!= LCC_NOT_FOUND
);
283 streamer_write_hwi_stream (ob
->main_stream
, ref
);
286 streamer_write_gcov_count_stream (ob
->main_stream
, edge
->count
);
288 bp
= bitpack_create (ob
->main_stream
);
289 uid
= (!gimple_has_body_p (edge
->caller
->decl
)
290 ? edge
->lto_stmt_uid
: gimple_uid (edge
->call_stmt
) + 1);
291 bp_pack_enum (&bp
, cgraph_inline_failed_t
,
292 CIF_N_REASONS
, edge
->inline_failed
);
293 bp_pack_var_len_unsigned (&bp
, uid
);
294 bp_pack_var_len_unsigned (&bp
, edge
->frequency
);
295 bp_pack_value (&bp
, edge
->indirect_inlining_edge
, 1);
296 bp_pack_value (&bp
, edge
->speculative
, 1);
297 bp_pack_value (&bp
, edge
->call_stmt_cannot_inline_p
, 1);
298 bp_pack_value (&bp
, edge
->can_throw_external
, 1);
299 bp_pack_value (&bp
, edge
->in_polymorphic_cdtor
, 1);
300 if (edge
->indirect_unknown_callee
)
302 int flags
= edge
->indirect_info
->ecf_flags
;
303 bp_pack_value (&bp
, (flags
& ECF_CONST
) != 0, 1);
304 bp_pack_value (&bp
, (flags
& ECF_PURE
) != 0, 1);
305 bp_pack_value (&bp
, (flags
& ECF_NORETURN
) != 0, 1);
306 bp_pack_value (&bp
, (flags
& ECF_MALLOC
) != 0, 1);
307 bp_pack_value (&bp
, (flags
& ECF_NOTHROW
) != 0, 1);
308 bp_pack_value (&bp
, (flags
& ECF_RETURNS_TWICE
) != 0, 1);
309 /* Flags that should not appear on indirect calls. */
310 gcc_assert (!(flags
& (ECF_LOOPING_CONST_OR_PURE
316 streamer_write_bitpack (&bp
);
317 if (edge
->indirect_unknown_callee
)
319 streamer_write_hwi_stream (ob
->main_stream
,
320 edge
->indirect_info
->common_target_id
);
321 if (edge
->indirect_info
->common_target_id
)
322 streamer_write_hwi_stream
323 (ob
->main_stream
, edge
->indirect_info
->common_target_probability
);
327 /* Return if NODE contain references from other partitions. */
330 referenced_from_other_partition_p (symtab_node
*node
, lto_symtab_encoder_t encoder
)
333 struct ipa_ref
*ref
= NULL
;
335 for (i
= 0; node
->iterate_referring (i
, ref
); i
++)
337 if (ref
->referring
->in_other_partition
338 || !lto_symtab_encoder_in_partition_p (encoder
, ref
->referring
))
344 /* Return true when node is reachable from other partition. */
347 reachable_from_other_partition_p (struct cgraph_node
*node
, lto_symtab_encoder_t encoder
)
349 struct cgraph_edge
*e
;
350 if (!node
->definition
)
352 if (node
->global
.inlined_to
)
354 for (e
= node
->callers
; e
; e
= e
->next_caller
)
355 if (e
->caller
->in_other_partition
356 || !lto_symtab_encoder_in_partition_p (encoder
, e
->caller
))
361 /* Return if NODE contain references from other partitions. */
364 referenced_from_this_partition_p (symtab_node
*node
,
365 lto_symtab_encoder_t encoder
)
368 struct ipa_ref
*ref
= NULL
;
370 for (i
= 0; node
->iterate_referring (i
, ref
); i
++)
371 if (lto_symtab_encoder_in_partition_p (encoder
, ref
->referring
))
376 /* Return true when node is reachable from other partition. */
379 reachable_from_this_partition_p (struct cgraph_node
*node
, lto_symtab_encoder_t encoder
)
381 struct cgraph_edge
*e
;
382 for (e
= node
->callers
; e
; e
= e
->next_caller
)
383 if (lto_symtab_encoder_in_partition_p (encoder
, e
->caller
))
388 /* Output the cgraph NODE to OB. ENCODER is used to find the
389 reference number of NODE->inlined_to. SET is the set of nodes we
390 are writing to the current file. If NODE is not in SET, then NODE
391 is a boundary of a cgraph_node_set and we pretend NODE just has a
392 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
393 that have had their callgraph node written so far. This is used to
394 determine if NODE is a clone of a previously written node. */
397 lto_output_node (struct lto_simple_output_block
*ob
, struct cgraph_node
*node
,
398 lto_symtab_encoder_t encoder
)
404 bool in_other_partition
= false;
405 struct cgraph_node
*clone_of
, *ultimate_clone_of
;
406 ipa_opt_pass_d
*pass
;
413 boundary_p
= !lto_symtab_encoder_in_partition_p (encoder
, node
);
415 if (node
->analyzed
&& !boundary_p
)
416 tag
= LTO_symtab_analyzed_node
;
418 tag
= LTO_symtab_unavail_node
;
420 streamer_write_enum (ob
->main_stream
, LTO_symtab_tags
, LTO_symtab_last_tag
,
422 streamer_write_hwi_stream (ob
->main_stream
, node
->order
);
424 /* In WPA mode, we only output part of the call-graph. Also, we
425 fake cgraph node attributes. There are two cases that we care.
427 Boundary nodes: There are nodes that are not part of SET but are
428 called from within SET. We artificially make them look like
429 externally visible nodes with no function body.
431 Cherry-picked nodes: These are nodes we pulled from other
432 translation units into SET during IPA-inlining. We make them as
433 local static nodes to prevent clashes with other local statics. */
434 if (boundary_p
&& node
->analyzed
435 && node
->get_partitioning_class () == SYMBOL_PARTITION
)
437 /* Inline clones can not be part of boundary.
438 gcc_assert (!node->global.inlined_to);
440 FIXME: At the moment they can be, when partition contains an inline
441 clone that is clone of inline clone from outside partition. We can
442 reshape the clone tree and make other tree to be the root, but it
443 needs a bit extra work and will be promplty done by cgraph_remove_node
444 after reading back. */
445 in_other_partition
= 1;
448 clone_of
= node
->clone_of
;
450 && (ref
= lto_symtab_encoder_lookup (encoder
, clone_of
)) == LCC_NOT_FOUND
)
451 if (clone_of
->prev_sibling_clone
)
452 clone_of
= clone_of
->prev_sibling_clone
;
454 clone_of
= clone_of
->clone_of
;
456 /* See if body of the master function is output. If not, we are seeing only
457 an declaration and we do not need to pass down clone tree. */
458 ultimate_clone_of
= clone_of
;
459 while (ultimate_clone_of
&& ultimate_clone_of
->clone_of
)
460 ultimate_clone_of
= ultimate_clone_of
->clone_of
;
462 if (clone_of
&& !lto_symtab_encoder_encode_body_p (encoder
, ultimate_clone_of
))
465 if (tag
== LTO_symtab_analyzed_node
)
466 gcc_assert (clone_of
|| !node
->clone_of
);
468 streamer_write_hwi_stream (ob
->main_stream
, LCC_NOT_FOUND
);
470 streamer_write_hwi_stream (ob
->main_stream
, ref
);
473 lto_output_fn_decl_index (ob
->decl_state
, ob
->main_stream
, node
->decl
);
474 streamer_write_gcov_count_stream (ob
->main_stream
, node
->count
);
475 streamer_write_hwi_stream (ob
->main_stream
, node
->count_materialization_scale
);
477 streamer_write_hwi_stream (ob
->main_stream
,
478 node
->ipa_transforms_to_apply
.length ());
479 FOR_EACH_VEC_ELT (node
->ipa_transforms_to_apply
, i
, pass
)
480 streamer_write_hwi_stream (ob
->main_stream
, pass
->static_pass_number
);
482 if (tag
== LTO_symtab_analyzed_node
)
484 if (node
->global
.inlined_to
)
486 ref
= lto_symtab_encoder_lookup (encoder
, node
->global
.inlined_to
);
487 gcc_assert (ref
!= LCC_NOT_FOUND
);
492 streamer_write_hwi_stream (ob
->main_stream
, ref
);
495 group
= node
->get_comdat_group ();
497 comdat
= IDENTIFIER_POINTER (group
);
500 streamer_write_data_stream (ob
->main_stream
, comdat
, strlen (comdat
) + 1);
504 if (node
->same_comdat_group
&& !boundary_p
)
506 ref
= lto_symtab_encoder_lookup (encoder
,
507 node
->same_comdat_group
);
508 gcc_assert (ref
!= LCC_NOT_FOUND
);
512 streamer_write_hwi_stream (ob
->main_stream
, ref
);
515 section
= node
->get_section ();
519 streamer_write_hwi_stream (ob
->main_stream
, node
->tp_first_run
);
521 bp
= bitpack_create (ob
->main_stream
);
522 bp_pack_value (&bp
, node
->local
.local
, 1);
523 bp_pack_value (&bp
, node
->externally_visible
, 1);
524 bp_pack_value (&bp
, node
->no_reorder
, 1);
525 bp_pack_value (&bp
, node
->definition
, 1);
526 bp_pack_value (&bp
, node
->local
.versionable
, 1);
527 bp_pack_value (&bp
, node
->local
.can_change_signature
, 1);
528 bp_pack_value (&bp
, node
->local
.redefined_extern_inline
, 1);
529 bp_pack_value (&bp
, node
->force_output
, 1);
530 bp_pack_value (&bp
, node
->forced_by_abi
, 1);
531 bp_pack_value (&bp
, node
->unique_name
, 1);
532 bp_pack_value (&bp
, node
->body_removed
, 1);
533 bp_pack_value (&bp
, node
->implicit_section
, 1);
534 bp_pack_value (&bp
, node
->address_taken
, 1);
535 bp_pack_value (&bp
, tag
== LTO_symtab_analyzed_node
536 && node
->get_partitioning_class () == SYMBOL_PARTITION
537 && (reachable_from_other_partition_p (node
, encoder
)
538 || referenced_from_other_partition_p (node
, encoder
)), 1);
539 bp_pack_value (&bp
, node
->lowered
, 1);
540 bp_pack_value (&bp
, in_other_partition
, 1);
541 /* Real aliases in a boundary become non-aliases. However we still stream
542 alias info on weakrefs.
543 TODO: We lose a bit of information here - when we know that variable is
544 defined in other unit, we may use the info on aliases to resolve
545 symbol1 != symbol2 type tests that we can do only for locally defined objects
547 alias_p
= node
->alias
&& (!boundary_p
|| node
->weakref
);
548 bp_pack_value (&bp
, alias_p
, 1);
549 bp_pack_value (&bp
, node
->weakref
, 1);
550 bp_pack_value (&bp
, node
->frequency
, 2);
551 bp_pack_value (&bp
, node
->only_called_at_startup
, 1);
552 bp_pack_value (&bp
, node
->only_called_at_exit
, 1);
553 bp_pack_value (&bp
, node
->tm_clone
, 1);
554 bp_pack_value (&bp
, node
->calls_comdat_local
, 1);
555 bp_pack_value (&bp
, node
->icf_merged
, 1);
556 bp_pack_value (&bp
, node
->thunk
.thunk_p
&& !boundary_p
, 1);
557 bp_pack_enum (&bp
, ld_plugin_symbol_resolution
,
558 LDPR_NUM_KNOWN
, node
->resolution
);
559 bp_pack_value (&bp
, node
->instrumentation_clone
, 1);
560 streamer_write_bitpack (&bp
);
561 streamer_write_data_stream (ob
->main_stream
, section
, strlen (section
) + 1);
563 if (node
->thunk
.thunk_p
&& !boundary_p
)
565 streamer_write_uhwi_stream
567 1 + (node
->thunk
.this_adjusting
!= 0) * 2
568 + (node
->thunk
.virtual_offset_p
!= 0) * 4
569 + (node
->thunk
.add_pointer_bounds_args
!= 0) * 8);
570 streamer_write_uhwi_stream (ob
->main_stream
, node
->thunk
.fixed_offset
);
571 streamer_write_uhwi_stream (ob
->main_stream
, node
->thunk
.virtual_value
);
573 streamer_write_hwi_stream (ob
->main_stream
, node
->profile_id
);
574 if (DECL_STATIC_CONSTRUCTOR (node
->decl
))
575 streamer_write_hwi_stream (ob
->main_stream
, node
->get_init_priority ());
576 if (DECL_STATIC_DESTRUCTOR (node
->decl
))
577 streamer_write_hwi_stream (ob
->main_stream
, node
->get_fini_priority ());
579 if (node
->instrumentation_clone
)
580 lto_output_fn_decl_index (ob
->decl_state
, ob
->main_stream
, node
->orig_decl
);
583 /* Output the varpool NODE to OB.
584 If NODE is not in SET, then NODE is a boundary. */
587 lto_output_varpool_node (struct lto_simple_output_block
*ob
, varpool_node
*node
,
588 lto_symtab_encoder_t encoder
)
590 bool boundary_p
= !lto_symtab_encoder_in_partition_p (encoder
, node
);
598 streamer_write_enum (ob
->main_stream
, LTO_symtab_tags
, LTO_symtab_last_tag
,
599 LTO_symtab_variable
);
600 streamer_write_hwi_stream (ob
->main_stream
, node
->order
);
601 lto_output_var_decl_index (ob
->decl_state
, ob
->main_stream
, node
->decl
);
602 bp
= bitpack_create (ob
->main_stream
);
603 bp_pack_value (&bp
, node
->externally_visible
, 1);
604 bp_pack_value (&bp
, node
->no_reorder
, 1);
605 bp_pack_value (&bp
, node
->force_output
, 1);
606 bp_pack_value (&bp
, node
->forced_by_abi
, 1);
607 bp_pack_value (&bp
, node
->unique_name
, 1);
608 bp_pack_value (&bp
, node
->body_removed
, 1);
609 bp_pack_value (&bp
, node
->implicit_section
, 1);
610 bp_pack_value (&bp
, node
->writeonly
, 1);
611 bp_pack_value (&bp
, node
->definition
, 1);
612 alias_p
= node
->alias
&& (!boundary_p
|| node
->weakref
);
613 bp_pack_value (&bp
, alias_p
, 1);
614 bp_pack_value (&bp
, node
->weakref
, 1);
615 bp_pack_value (&bp
, node
->analyzed
&& !boundary_p
, 1);
616 gcc_assert (node
->definition
|| !node
->analyzed
);
617 /* Constant pool initializers can be de-unified into individual ltrans units.
618 FIXME: Alternatively at -Os we may want to avoid generating for them the local
619 labels and share them across LTRANS partitions. */
620 if (node
->get_partitioning_class () != SYMBOL_PARTITION
)
622 bp_pack_value (&bp
, 0, 1); /* used_from_other_parition. */
623 bp_pack_value (&bp
, 0, 1); /* in_other_partition. */
627 bp_pack_value (&bp
, node
->definition
628 && referenced_from_other_partition_p (node
, encoder
), 1);
629 bp_pack_value (&bp
, node
->analyzed
630 && boundary_p
&& !DECL_EXTERNAL (node
->decl
), 1);
631 /* in_other_partition. */
633 bp_pack_value (&bp
, node
->tls_model
, 3);
634 bp_pack_value (&bp
, node
->used_by_single_function
, 1);
635 bp_pack_value (&bp
, node
->need_bounds_init
, 1);
636 streamer_write_bitpack (&bp
);
638 group
= node
->get_comdat_group ();
640 comdat
= IDENTIFIER_POINTER (group
);
643 streamer_write_data_stream (ob
->main_stream
, comdat
, strlen (comdat
) + 1);
647 if (node
->same_comdat_group
&& !boundary_p
)
649 ref
= lto_symtab_encoder_lookup (encoder
,
650 node
->same_comdat_group
);
651 gcc_assert (ref
!= LCC_NOT_FOUND
);
655 streamer_write_hwi_stream (ob
->main_stream
, ref
);
658 section
= node
->get_section ();
661 streamer_write_data_stream (ob
->main_stream
, section
, strlen (section
) + 1);
663 streamer_write_enum (ob
->main_stream
, ld_plugin_symbol_resolution
,
664 LDPR_NUM_KNOWN
, node
->resolution
);
667 /* Output the varpool NODE to OB.
668 If NODE is not in SET, then NODE is a boundary. */
671 lto_output_ref (struct lto_simple_output_block
*ob
, struct ipa_ref
*ref
,
672 lto_symtab_encoder_t encoder
)
676 int uid
= ref
->lto_stmt_uid
;
677 struct cgraph_node
*node
;
679 bp
= bitpack_create (ob
->main_stream
);
680 bp_pack_value (&bp
, ref
->use
, 3);
681 bp_pack_value (&bp
, ref
->speculative
, 1);
682 streamer_write_bitpack (&bp
);
683 nref
= lto_symtab_encoder_lookup (encoder
, ref
->referred
);
684 gcc_assert (nref
!= LCC_NOT_FOUND
);
685 streamer_write_hwi_stream (ob
->main_stream
, nref
);
687 node
= dyn_cast
<cgraph_node
*> (ref
->referring
);
691 uid
= gimple_uid (ref
->stmt
) + 1;
692 streamer_write_hwi_stream (ob
->main_stream
, uid
);
696 /* Stream out profile_summary to OB. */
699 output_profile_summary (struct lto_simple_output_block
*ob
)
706 /* We do not output num and run_max, they are not used by
707 GCC profile feedback and they are difficult to merge from multiple
709 gcc_assert (profile_info
->runs
);
710 streamer_write_uhwi_stream (ob
->main_stream
, profile_info
->runs
);
711 streamer_write_gcov_count_stream (ob
->main_stream
, profile_info
->sum_max
);
713 /* sum_all is needed for computing the working set with the
715 streamer_write_gcov_count_stream (ob
->main_stream
, profile_info
->sum_all
);
717 /* Create and output a bitpack of non-zero histogram entries indices. */
718 bp
= bitpack_create (ob
->main_stream
);
719 for (h_ix
= 0; h_ix
< GCOV_HISTOGRAM_SIZE
; h_ix
++)
720 bp_pack_value (&bp
, profile_info
->histogram
[h_ix
].num_counters
> 0, 1);
721 streamer_write_bitpack (&bp
);
722 /* Now stream out only those non-zero entries. */
723 for (h_ix
= 0; h_ix
< GCOV_HISTOGRAM_SIZE
; h_ix
++)
725 if (!profile_info
->histogram
[h_ix
].num_counters
)
727 streamer_write_gcov_count_stream (ob
->main_stream
,
728 profile_info
->histogram
[h_ix
].num_counters
);
729 streamer_write_gcov_count_stream (ob
->main_stream
,
730 profile_info
->histogram
[h_ix
].min_value
);
731 streamer_write_gcov_count_stream (ob
->main_stream
,
732 profile_info
->histogram
[h_ix
].cum_value
);
734 /* IPA-profile computes hot bb threshold based on cumulated
735 whole program profile. We need to stream it down to ltrans. */
737 streamer_write_gcov_count_stream (ob
->main_stream
,
738 get_hot_bb_threshold ());
741 streamer_write_uhwi_stream (ob
->main_stream
, 0);
744 /* Output all callees or indirect outgoing edges. EDGE must be the first such
748 output_outgoing_cgraph_edges (struct cgraph_edge
*edge
,
749 struct lto_simple_output_block
*ob
,
750 lto_symtab_encoder_t encoder
)
755 /* Output edges in backward direction, so the reconstructed callgraph match
756 and it is easy to associate call sites in the IPA pass summaries. */
757 while (edge
->next_callee
)
758 edge
= edge
->next_callee
;
759 for (; edge
; edge
= edge
->prev_callee
)
760 lto_output_edge (ob
, edge
, encoder
);
763 /* Output the part of the cgraph in SET. */
766 output_refs (lto_symtab_encoder_t encoder
)
768 lto_symtab_encoder_iterator lsei
;
769 struct lto_simple_output_block
*ob
;
774 ob
= lto_create_simple_output_block (LTO_section_refs
);
776 for (lsei
= lsei_start_in_partition (encoder
); !lsei_end_p (lsei
);
777 lsei_next_in_partition (&lsei
))
779 symtab_node
*node
= lsei_node (lsei
);
781 count
= node
->ref_list
.nreferences ();
784 streamer_write_gcov_count_stream (ob
->main_stream
, count
);
785 streamer_write_uhwi_stream (ob
->main_stream
,
786 lto_symtab_encoder_lookup (encoder
, node
));
787 for (i
= 0; node
->iterate_reference (i
, ref
); i
++)
788 lto_output_ref (ob
, ref
, encoder
);
792 streamer_write_uhwi_stream (ob
->main_stream
, 0);
794 lto_destroy_simple_output_block (ob
);
797 /* Add NODE into encoder as well as nodes it is cloned from.
798 Do it in a way so clones appear first. */
801 add_node_to (lto_symtab_encoder_t encoder
, struct cgraph_node
*node
,
805 add_node_to (encoder
, node
->clone_of
, include_body
);
806 else if (include_body
)
807 lto_set_symtab_encoder_encode_body (encoder
, node
);
808 lto_symtab_encoder_encode (encoder
, node
);
811 /* Add all references in NODE to encoders. */
814 create_references (lto_symtab_encoder_t encoder
, symtab_node
*node
)
817 struct ipa_ref
*ref
= NULL
;
818 for (i
= 0; node
->iterate_reference (i
, ref
); i
++)
819 if (is_a
<cgraph_node
*> (ref
->referred
))
820 add_node_to (encoder
, dyn_cast
<cgraph_node
*> (ref
->referred
), false);
822 lto_symtab_encoder_encode (encoder
, ref
->referred
);
825 /* Select what needs to be dumped. In lto case dump everything.
826 In omp target case only dump stuff makrked with attribute. */
828 select_what_to_dump (bool is_omp
)
830 struct symtab_node
*snode
;
831 FOR_EACH_SYMBOL(snode
)
832 snode
->need_dump
= !is_omp
|| lookup_attribute ("omp declare target",
833 DECL_ATTRIBUTES (snode
->decl
));
836 /* Find all symbols we want to stream into given partition and insert them
839 The function actually replaces IN_ENCODER by new one. The reason is that
840 streaming code needs clone's origin to be streamed before clone. This
841 means that we need to insert the nodes in specific order. This order is
842 ignored by the partitioning logic earlier. */
845 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder
)
847 struct cgraph_edge
*edge
;
849 lto_symtab_encoder_t encoder
;
850 lto_symtab_encoder_iterator lsei
;
851 hash_set
<void *> reachable_call_targets
;
853 encoder
= lto_symtab_encoder_new (false);
855 /* Go over all entries in the IN_ENCODER and duplicate them to
856 ENCODER. At the same time insert masters of clones so
857 every master appears before clone. */
858 for (lsei
= lsei_start_function_in_partition (in_encoder
);
859 !lsei_end_p (lsei
); lsei_next_function_in_partition (&lsei
))
861 struct cgraph_node
*node
= lsei_cgraph_node (lsei
);
862 add_node_to (encoder
, node
, true);
863 lto_set_symtab_encoder_in_partition (encoder
, node
);
864 create_references (encoder
, node
);
865 /* For proper debug info, we need to ship the origins, too. */
866 if (DECL_ABSTRACT_ORIGIN (node
->decl
))
868 struct cgraph_node
*origin_node
869 = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node
->decl
));
870 add_node_to (encoder
, origin_node
, true);
873 for (lsei
= lsei_start_variable_in_partition (in_encoder
);
874 !lsei_end_p (lsei
); lsei_next_variable_in_partition (&lsei
))
876 varpool_node
*vnode
= lsei_varpool_node (lsei
);
878 lto_set_symtab_encoder_in_partition (encoder
, vnode
);
879 lto_set_symtab_encoder_encode_initializer (encoder
, vnode
);
880 create_references (encoder
, vnode
);
881 /* For proper debug info, we need to ship the origins, too. */
882 if (DECL_ABSTRACT_ORIGIN (vnode
->decl
))
884 varpool_node
*origin_node
885 = varpool_node::get (DECL_ABSTRACT_ORIGIN (vnode
->decl
));
886 lto_set_symtab_encoder_in_partition (encoder
, origin_node
);
889 /* Pickle in also the initializer of all referenced readonly variables
890 to help folding. Constant pool variables are not shared, so we must
892 for (i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
894 symtab_node
*node
= lto_symtab_encoder_deref (encoder
, i
);
895 if (varpool_node
*vnode
= dyn_cast
<varpool_node
*> (node
))
897 if (!lto_symtab_encoder_encode_initializer_p (encoder
,
899 && (vnode
->ctor_useable_for_folding_p ()
900 || POINTER_BOUNDS_P (vnode
->decl
)))
902 lto_set_symtab_encoder_encode_initializer (encoder
, vnode
);
903 create_references (encoder
, vnode
);
908 /* Go over all the nodes again to include callees that are not in
910 for (lsei
= lsei_start_function_in_partition (encoder
);
911 !lsei_end_p (lsei
); lsei_next_function_in_partition (&lsei
))
913 struct cgraph_node
*node
= lsei_cgraph_node (lsei
);
914 for (edge
= node
->callees
; edge
; edge
= edge
->next_callee
)
916 struct cgraph_node
*callee
= edge
->callee
;
917 if (!lto_symtab_encoder_in_partition_p (encoder
, callee
))
919 /* We should have moved all the inlines. */
920 gcc_assert (!callee
->global
.inlined_to
);
921 add_node_to (encoder
, callee
, false);
924 /* Add all possible targets for late devirtualization. */
925 if (flag_devirtualize
)
926 for (edge
= node
->indirect_calls
; edge
; edge
= edge
->next_callee
)
927 if (edge
->indirect_info
->polymorphic
)
932 vec
<cgraph_node
*>targets
933 = possible_polymorphic_call_targets
934 (edge
, &final
, &cache_token
);
935 if (!reachable_call_targets
.add (cache_token
))
937 for (i
= 0; i
< targets
.length (); i
++)
939 struct cgraph_node
*callee
= targets
[i
];
941 /* Adding an external declarations into the unit serves
942 no purpose and just increases its boundary. */
943 if (callee
->definition
944 && !lto_symtab_encoder_in_partition_p
947 gcc_assert (!callee
->global
.inlined_to
);
948 add_node_to (encoder
, callee
, false);
954 lto_symtab_encoder_delete (in_encoder
);
958 /* Output the part of the symtab in SET and VSET. */
963 struct cgraph_node
*node
;
964 struct lto_simple_output_block
*ob
;
965 lto_symtab_encoder_iterator lsei
;
967 lto_symtab_encoder_t encoder
;
970 output_cgraph_opt_summary ();
972 ob
= lto_create_simple_output_block (LTO_section_symtab_nodes
);
974 output_profile_summary (ob
);
976 /* An encoder for cgraph nodes should have been created by
977 ipa_write_summaries_1. */
978 gcc_assert (ob
->decl_state
->symtab_node_encoder
);
979 encoder
= ob
->decl_state
->symtab_node_encoder
;
981 /* Write out the nodes. We must first output a node and then its clones,
982 otherwise at a time reading back the node there would be nothing to clone
984 n_nodes
= lto_symtab_encoder_size (encoder
);
985 for (i
= 0; i
< n_nodes
; i
++)
987 symtab_node
*node
= lto_symtab_encoder_deref (encoder
, i
);
988 if (cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (node
))
989 lto_output_node (ob
, cnode
, encoder
);
991 lto_output_varpool_node (ob
, dyn_cast
<varpool_node
*> (node
), encoder
);
994 /* Go over the nodes in SET again to write edges. */
995 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
996 lsei_next_function_in_partition (&lsei
))
998 node
= lsei_cgraph_node (lsei
);
999 output_outgoing_cgraph_edges (node
->callees
, ob
, encoder
);
1000 output_outgoing_cgraph_edges (node
->indirect_calls
, ob
, encoder
);
1003 streamer_write_uhwi_stream (ob
->main_stream
, 0);
1005 lto_destroy_simple_output_block (ob
);
1007 /* Emit toplevel asms.
1008 When doing WPA we must output every asm just once. Since we do not partition asm
1009 nodes at all, output them to first output. This is kind of hack, but should work
1011 if (!asm_nodes_output
)
1013 asm_nodes_output
= true;
1014 lto_output_toplevel_asms ();
1017 output_refs (encoder
);
1020 /* Return identifier encoded in IB as a plain string. */
1023 read_identifier (struct lto_input_block
*ib
)
1025 unsigned int len
= strnlen (ib
->data
+ ib
->p
, ib
->len
- ib
->p
- 1);
1028 if (ib
->data
[ib
->p
+ len
])
1029 lto_section_overrun (ib
);
1035 id
= get_identifier (ib
->data
+ ib
->p
);
1040 /* Return string encoded in IB, NULL if string is empty. */
1043 read_string (struct lto_input_block
*ib
)
1045 unsigned int len
= strnlen (ib
->data
+ ib
->p
, ib
->len
- ib
->p
- 1);
1048 if (ib
->data
[ib
->p
+ len
])
1049 lto_section_overrun (ib
);
1055 str
= ib
->data
+ ib
->p
;
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include the early return for the empty case and surrounding
   braces.  Text kept byte-identical.  */
1060 /* Output function/variable tables that will allow libgomp to look up offload
1061 target code. OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is
1062 filled in ipa_passes. In WHOPR (partitioned) mode during the WPA stage both
1063 OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1066 output_offload_tables (void)
1068 if (vec_safe_is_empty (offload_funcs
) && vec_safe_is_empty (offload_vars
))
1071 struct lto_simple_output_block
*ob
1072 = lto_create_simple_output_block (LTO_section_offload_table
);
/* Each function entry: a tag followed by the decl index.  */
1074 for (unsigned i
= 0; i
< vec_safe_length (offload_funcs
); i
++)
1076 streamer_write_enum (ob
->main_stream
, LTO_symtab_tags
,
1077 LTO_symtab_last_tag
, LTO_symtab_unavail_node
);
1078 lto_output_fn_decl_index (ob
->decl_state
, ob
->main_stream
,
1079 (*offload_funcs
)[i
]);
/* Each variable entry: a tag followed by the decl index.  */
1082 for (unsigned i
= 0; i
< vec_safe_length (offload_vars
); i
++)
1084 streamer_write_enum (ob
->main_stream
, LTO_symtab_tags
,
1085 LTO_symtab_last_tag
, LTO_symtab_variable
);
1086 lto_output_var_decl_index (ob
->decl_state
, ob
->main_stream
,
1087 (*offload_vars
)[i
]);
/* Zero terminates the table, matching the reader's loop-until-0.  */
1090 streamer_write_uhwi_stream (ob
->main_stream
, 0);
1091 lto_destroy_simple_output_block (ob
);
1093 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1094 streamed to one partition only. That's why we free offload_funcs and
1095 offload_vars after the first call of output_offload_tables. */
1098 vec_free (offload_funcs
);
1099 vec_free (offload_vars
);
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include part of the header comment, braces, and at least one
   condition of the in_other_partition test (line 1144 is missing, so the
   clone_of guard before the ->clone_of->decl dereference is not visible
   here).  Text kept byte-identical.  The unpack order of the bit fields
   must mirror the writer exactly -- do not reorder.  */
1103 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
1104 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
1105 NODE or to replace the values in it, for instance because the first
1106 time we saw it, the function body was not available but now it
1107 is. BP is a bitpack with all the bitflags for NODE read from the
1111 input_overwrite_node (struct lto_file_decl_data
*file_data
,
1112 struct cgraph_node
*node
,
1113 enum LTO_symtab_tags tag
,
1114 struct bitpack_d
*bp
)
1116 node
->aux
= (void *) tag
;
1117 node
->lto_file_data
= file_data
;
1119 node
->local
.local
= bp_unpack_value (bp
, 1);
1120 node
->externally_visible
= bp_unpack_value (bp
, 1);
1121 node
->no_reorder
= bp_unpack_value (bp
, 1);
1122 node
->definition
= bp_unpack_value (bp
, 1);
1123 node
->local
.versionable
= bp_unpack_value (bp
, 1);
1124 node
->local
.can_change_signature
= bp_unpack_value (bp
, 1);
1125 node
->local
.redefined_extern_inline
= bp_unpack_value (bp
, 1);
1126 node
->force_output
= bp_unpack_value (bp
, 1);
1127 node
->forced_by_abi
= bp_unpack_value (bp
, 1);
1128 node
->unique_name
= bp_unpack_value (bp
, 1);
1129 node
->body_removed
= bp_unpack_value (bp
, 1);
1130 node
->implicit_section
= bp_unpack_value (bp
, 1);
1131 node
->address_taken
= bp_unpack_value (bp
, 1);
1132 node
->used_from_other_partition
= bp_unpack_value (bp
, 1);
1133 node
->lowered
= bp_unpack_value (bp
, 1);
1134 node
->analyzed
= tag
== LTO_symtab_analyzed_node
;
1135 node
->in_other_partition
= bp_unpack_value (bp
, 1);
1136 if (node
->in_other_partition
1137 /* Avoid updating decl when we are seeing just inline clone.
1138 When inlining function that has functions already inlined into it,
1139 we produce clones of inline clones.
1141 WPA partitioning might put each clone into different unit and
1142 we might end up streaming inline clone from other partition
1143 to support clone we are interested in. */
1145 || node
->clone_of
->decl
!= node
->decl
))
/* Bodies living in another partition are treated as external here.  */
1147 DECL_EXTERNAL (node
->decl
) = 1;
1148 TREE_STATIC (node
->decl
) = 0;
1150 node
->alias
= bp_unpack_value (bp
, 1);
1151 node
->weakref
= bp_unpack_value (bp
, 1);
1152 node
->frequency
= (enum node_frequency
)bp_unpack_value (bp
, 2);
1153 node
->only_called_at_startup
= bp_unpack_value (bp
, 1);
1154 node
->only_called_at_exit
= bp_unpack_value (bp
, 1);
1155 node
->tm_clone
= bp_unpack_value (bp
, 1);
1156 node
->calls_comdat_local
= bp_unpack_value (bp
, 1);
1157 node
->icf_merged
= bp_unpack_value (bp
, 1);
1158 node
->thunk
.thunk_p
= bp_unpack_value (bp
, 1);
1159 node
->resolution
= bp_unpack_enum (bp
, ld_plugin_symbol_resolution
,
1161 node
->instrumentation_clone
= bp_unpack_value (bp
, 1);
/* Cross-partition flags may only appear during LTRANS.  */
1162 gcc_assert (flag_ltrans
1163 || (!node
->in_other_partition
1164 && !node
->used_from_other_partition
));
/* NOTE(review): original line numbers below are non-contiguous; the
   return type and braces were lost in extraction.  Text kept
   byte-identical.  Extracts the target name from DECL's
   "alias" attribute; presumably callers guarantee the attribute is
   present -- no NULL check on ALIAS is visible here.  */
1167 /* Return string alias is alias of. */
1170 get_alias_symbol (tree decl
)
1172 tree alias
= lookup_attribute ("alias", DECL_ATTRIBUTES (decl
));
1173 return get_identifier (TREE_STRING_POINTER
1174 (TREE_VALUE (TREE_VALUE (alias
))));
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include declarations of order, clone_ref, fn_decl, count, i,
   pass and group, several braces/else arms, and the final return.  Text
   kept byte-identical.  The read order must mirror lto_output_node
   exactly.  */
1177 /* Read a node from input_block IB. TAG is the node's tag just read.
1178 Return the node read or overwriten. */
1180 static struct cgraph_node
*
1181 input_node (struct lto_file_decl_data
*file_data
,
1182 struct lto_input_block
*ib
,
1183 enum LTO_symtab_tags tag
,
1184 vec
<symtab_node
*> nodes
)
1186 gcc::pass_manager
*passes
= g
->get_passes ();
1188 struct cgraph_node
*node
;
1189 struct bitpack_d bp
;
1190 unsigned decl_index
;
1191 int ref
= LCC_NOT_FOUND
, ref2
= LCC_NOT_FOUND
;
1196 const char *section
;
1197 order
= streamer_read_hwi (ib
) + order_base
;
1198 clone_ref
= streamer_read_hwi (ib
);
1200 decl_index
= streamer_read_uhwi (ib
);
1201 fn_decl
= lto_file_decl_data_get_fn_decl (file_data
, decl_index
);
/* A valid clone_ref means this node is a clone of an earlier-read node.  */
1203 if (clone_ref
!= LCC_NOT_FOUND
)
1205 node
= dyn_cast
<cgraph_node
*> (nodes
[clone_ref
])->create_clone (fn_decl
,
1206 0, CGRAPH_FREQ_BASE
, false,
1207 vNULL
, false, NULL
, NULL
);
1211 /* Declaration of functions can be already merged with a declaration
1212 from other input file. We keep cgraph unmerged until after streaming
1213 of ipa passes is done. Alays forcingly create a fresh node. */
1214 node
= symtab
->create_empty ();
1215 node
->decl
= fn_decl
;
1216 node
->register_symbol ();
1219 node
->order
= order
;
1220 if (order
>= symtab
->order
)
1221 symtab
->order
= order
+ 1;
1223 node
->count
= streamer_read_gcov_count (ib
);
1224 node
->count_materialization_scale
= streamer_read_hwi (ib
);
1226 count
= streamer_read_hwi (ib
);
1227 node
->ipa_transforms_to_apply
= vNULL
;
/* Reconstruct the list of IPA transform passes still to be applied,
   identified by pass id.  */
1228 for (i
= 0; i
< count
; i
++)
1231 int pid
= streamer_read_hwi (ib
);
1233 gcc_assert (pid
< passes
->passes_by_id_size
);
1234 pass
= passes
->passes_by_id
[pid
];
1235 node
->ipa_transforms_to_apply
.safe_push ((ipa_opt_pass_d
*) pass
);
1238 if (tag
== LTO_symtab_analyzed_node
)
1239 ref
= streamer_read_hwi (ib
);
1241 group
= read_identifier (ib
);
1243 ref2
= streamer_read_hwi (ib
);
1245 /* Make sure that we have not read this node before. Nodes that
1246 have already been read will have their tag stored in the 'aux'
1247 field. Since built-in functions can be referenced in multiple
1248 functions, they are expected to be read more than once. */
1249 if (node
->aux
&& !DECL_BUILT_IN (node
->decl
))
1250 internal_error ("bytecode stream: found multiple instances of cgraph "
1251 "node with uid %d", node
->uid
);
1253 node
->tp_first_run
= streamer_read_uhwi (ib
);
1255 bp
= streamer_read_bitpack (ib
);
1257 input_overwrite_node (file_data
, node
, tag
, &bp
);
1259 /* Store a reference for now, and fix up later to be a pointer. */
1260 node
->global
.inlined_to
= (cgraph_node
*) (intptr_t) ref
;
1264 node
->set_comdat_group (group
);
1265 /* Store a reference for now, and fix up later to be a pointer. */
1266 node
->same_comdat_group
= (symtab_node
*) (intptr_t) ref2
;
1269 node
->same_comdat_group
= (symtab_node
*) (intptr_t) LCC_NOT_FOUND
;
1270 section
= read_string (ib
);
1272 node
->set_section_for_node (section
);
/* Thunk nodes carry extra data: a flag word plus two offsets.  */
1274 if (node
->thunk
.thunk_p
)
1276 int type
= streamer_read_uhwi (ib
);
1277 HOST_WIDE_INT fixed_offset
= streamer_read_uhwi (ib
);
1278 HOST_WIDE_INT virtual_value
= streamer_read_uhwi (ib
);
1280 node
->thunk
.fixed_offset
= fixed_offset
;
1281 node
->thunk
.this_adjusting
= (type
& 2);
1282 node
->thunk
.virtual_value
= virtual_value
;
1283 node
->thunk
.virtual_offset_p
= (type
& 4);
1284 node
->thunk
.add_pointer_bounds_args
= (type
& 8);
1286 if (node
->alias
&& !node
->analyzed
&& node
->weakref
)
1287 node
->alias_target
= get_alias_symbol (node
->decl
);
1288 node
->profile_id
= streamer_read_hwi (ib
);
1289 if (DECL_STATIC_CONSTRUCTOR (node
->decl
))
1290 node
->set_init_priority (streamer_read_hwi (ib
));
1291 if (DECL_STATIC_DESTRUCTOR (node
->decl
))
1292 node
->set_fini_priority (streamer_read_hwi (ib
));
1294 if (node
->instrumentation_clone
)
1296 decl_index
= streamer_read_uhwi (ib
);
1297 fn_decl
= lto_file_decl_data_get_fn_decl (file_data
, decl_index
);
1298 node
->orig_decl
= fn_decl
;
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include declarations of node, order, decl_index, var_decl and
   group, braces, and the final return.  Text kept byte-identical.  The
   unpack order must mirror lto_output_varpool_node.  */
1304 /* Read a node from input_block IB. TAG is the node's tag just read.
1305 Return the node read or overwriten. */
1307 static varpool_node
*
1308 input_varpool_node (struct lto_file_decl_data
*file_data
,
1309 struct lto_input_block
*ib
)
1314 struct bitpack_d bp
;
1315 int ref
= LCC_NOT_FOUND
;
1318 const char *section
;
1320 order
= streamer_read_hwi (ib
) + order_base
;
1321 decl_index
= streamer_read_uhwi (ib
);
1322 var_decl
= lto_file_decl_data_get_var_decl (file_data
, decl_index
);
1324 /* Declaration of functions can be already merged with a declaration
1325 from other input file. We keep cgraph unmerged until after streaming
1326 of ipa passes is done. Alays forcingly create a fresh node. */
1327 node
= varpool_node::create_empty ();
1328 node
->decl
= var_decl
;
1329 node
->register_symbol ();
1331 node
->order
= order
;
1332 if (order
>= symtab
->order
)
1333 symtab
->order
= order
+ 1;
1334 node
->lto_file_data
= file_data
;
1336 bp
= streamer_read_bitpack (ib
);
1337 node
->externally_visible
= bp_unpack_value (&bp
, 1);
1338 node
->no_reorder
= bp_unpack_value (&bp
, 1);
1339 node
->force_output
= bp_unpack_value (&bp
, 1);
1340 node
->forced_by_abi
= bp_unpack_value (&bp
, 1);
1341 node
->unique_name
= bp_unpack_value (&bp
, 1);
1342 node
->body_removed
= bp_unpack_value (&bp
, 1);
1343 node
->implicit_section
= bp_unpack_value (&bp
, 1);
1344 node
->writeonly
= bp_unpack_value (&bp
, 1);
1345 node
->definition
= bp_unpack_value (&bp
, 1);
1346 node
->alias
= bp_unpack_value (&bp
, 1);
1347 node
->weakref
= bp_unpack_value (&bp
, 1);
1348 node
->analyzed
= bp_unpack_value (&bp
, 1);
1349 node
->used_from_other_partition
= bp_unpack_value (&bp
, 1);
1350 node
->in_other_partition
= bp_unpack_value (&bp
, 1);
/* Variables defined in another partition are external here.  */
1351 if (node
->in_other_partition
)
1353 DECL_EXTERNAL (node
->decl
) = 1;
1354 TREE_STATIC (node
->decl
) = 0;
1356 if (node
->alias
&& !node
->analyzed
&& node
->weakref
)
1357 node
->alias_target
= get_alias_symbol (node
->decl
);
1358 node
->tls_model
= (enum tls_model
)bp_unpack_value (&bp
, 3);
/* NOTE(review): the (enum tls_model) cast on a 1-bit
   used_by_single_function value looks like a copy-paste from the line
   above -- confirm against upstream; text kept as-is.  */
1359 node
->used_by_single_function
= (enum tls_model
)bp_unpack_value (&bp
, 1);
1360 node
->need_bounds_init
= bp_unpack_value (&bp
, 1);
1361 group
= read_identifier (ib
);
1364 node
->set_comdat_group (group
);
1365 ref
= streamer_read_hwi (ib
);
1366 /* Store a reference for now, and fix up later to be a pointer. */
1367 node
->same_comdat_group
= (symtab_node
*) (intptr_t) ref
;
1370 node
->same_comdat_group
= (symtab_node
*) (intptr_t) LCC_NOT_FOUND
;
1371 section
= read_string (ib
);
1373 node
->set_section_for_node (section
);
1374 node
->resolution
= streamer_read_enum (ib
, ld_plugin_symbol_resolution
,
1376 gcc_assert (flag_ltrans
1377 || (!node
->in_other_partition
1378 && !node
->used_from_other_partition
));
/* NOTE(review): original line numbers below are non-contiguous; the
   return type, the declaration of `speculative', and braces were lost in
   extraction.  Text kept byte-identical.  Reads one ipa_ref record:
   a bitpack (use kind + speculative bit), the referred node index, and
   for cgraph referrers the statement uid.  */
1383 /* Read a node from input_block IB. TAG is the node's tag just read.
1384 Return the node read or overwriten. */
1387 input_ref (struct lto_input_block
*ib
,
1388 symtab_node
*referring_node
,
1389 vec
<symtab_node
*> nodes
)
1391 symtab_node
*node
= NULL
;
1392 struct bitpack_d bp
;
1393 enum ipa_ref_use use
;
1395 struct ipa_ref
*ref
;
1397 bp
= streamer_read_bitpack (ib
);
1398 use
= (enum ipa_ref_use
) bp_unpack_value (&bp
, 3);
1399 speculative
= (enum ipa_ref_use
) bp_unpack_value (&bp
, 1);
1400 node
= nodes
[streamer_read_hwi (ib
)];
1401 ref
= referring_node
->create_reference (node
, use
);
1402 ref
->speculative
= speculative
;
1403 if (is_a
<cgraph_node
*> (referring_node
))
1404 ref
->lto_stmt_uid
= streamer_read_hwi (ib
);
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include the `indirect' parameter in the signature, declarations
   of count/freq/ecf_flags, the indirect/direct branch structure, and
   braces.  Text kept byte-identical.  The read order mirrors
   lto_output_edge.  */
1407 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1408 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1409 edge being read is indirect (in the sense that it has
1410 indirect_unknown_callee set). */
1413 input_edge (struct lto_input_block
*ib
, vec
<symtab_node
*> nodes
,
1416 struct cgraph_node
*caller
, *callee
;
1417 struct cgraph_edge
*edge
;
1418 unsigned int stmt_id
;
1421 cgraph_inline_failed_t inline_failed
;
1422 struct bitpack_d bp
;
1425 caller
= dyn_cast
<cgraph_node
*> (nodes
[streamer_read_hwi (ib
)]);
1426 if (caller
== NULL
|| caller
->decl
== NULL_TREE
)
1427 internal_error ("bytecode stream: no caller found while reading edge");
1431 callee
= dyn_cast
<cgraph_node
*> (nodes
[streamer_read_hwi (ib
)]);
1432 if (callee
== NULL
|| callee
->decl
== NULL_TREE
)
1433 internal_error ("bytecode stream: no callee found while reading edge");
1438 count
= streamer_read_gcov_count (ib
);
1440 bp
= streamer_read_bitpack (ib
);
1441 inline_failed
= bp_unpack_enum (&bp
, cgraph_inline_failed_t
, CIF_N_REASONS
);
1442 stmt_id
= bp_unpack_var_len_unsigned (&bp
);
1443 freq
= (int) bp_unpack_var_len_unsigned (&bp
);
1446 edge
= caller
->create_indirect_edge (NULL
, 0, count
, freq
);
1448 edge
= caller
->create_edge (callee
, NULL
, count
, freq
);
1450 edge
->indirect_inlining_edge
= bp_unpack_value (&bp
, 1);
1451 edge
->speculative
= bp_unpack_value (&bp
, 1);
1452 edge
->lto_stmt_uid
= stmt_id
;
1453 edge
->inline_failed
= inline_failed
;
1454 edge
->call_stmt_cannot_inline_p
= bp_unpack_value (&bp
, 1);
1455 edge
->can_throw_external
= bp_unpack_value (&bp
, 1);
1456 edge
->in_polymorphic_cdtor
= bp_unpack_value (&bp
, 1);
/* Rebuild the ECF flag set for indirect calls, one bit per flag,
   in the same order the writer packed them.  */
1459 if (bp_unpack_value (&bp
, 1))
1460 ecf_flags
|= ECF_CONST
;
1461 if (bp_unpack_value (&bp
, 1))
1462 ecf_flags
|= ECF_PURE
;
1463 if (bp_unpack_value (&bp
, 1))
1464 ecf_flags
|= ECF_NORETURN
;
1465 if (bp_unpack_value (&bp
, 1))
1466 ecf_flags
|= ECF_MALLOC
;
1467 if (bp_unpack_value (&bp
, 1))
1468 ecf_flags
|= ECF_NOTHROW
;
1469 if (bp_unpack_value (&bp
, 1))
1470 ecf_flags
|= ECF_RETURNS_TWICE
;
1471 edge
->indirect_info
->ecf_flags
= ecf_flags
;
1472 edge
->indirect_info
->common_target_id
= streamer_read_hwi (ib
);
/* The probability is only streamed when a common target was recorded.  */
1473 if (edge
->indirect_info
->common_target_id
)
1474 edge
->indirect_info
->common_target_probability
= streamer_read_hwi (ib
);
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include declarations of node/i/ref, loop braces, the
   LTO_symtab_last_tag loop head, and the return of NODES.  Text kept
   byte-identical.  Reads node/edge records until the zero tag, then
   fixes up inlined_to and same_comdat_group indices into pointers.  */
1479 /* Read a cgraph from IB using the info in FILE_DATA. */
1481 static vec
<symtab_node
*>
1482 input_cgraph_1 (struct lto_file_decl_data
*file_data
,
1483 struct lto_input_block
*ib
)
1485 enum LTO_symtab_tags tag
;
1486 vec
<symtab_node
*> nodes
= vNULL
;
1490 tag
= streamer_read_enum (ib
, LTO_symtab_tags
, LTO_symtab_last_tag
)
;
/* Rebase streamed-in orders on top of what is already in the symtab.  */
1491 order_base
= symtab
->order
;
1494 if (tag
== LTO_symtab_edge
)
1495 input_edge (ib
, nodes
, false);
1496 else if (tag
== LTO_symtab_indirect_edge
)
1497 input_edge (ib
, nodes
, true);
1498 else if (tag
== LTO_symtab_variable
)
1500 node
= input_varpool_node (file_data
, ib
);
1501 nodes
.safe_push (node
);
1502 lto_symtab_encoder_encode (file_data
->symtab_node_encoder
, node
);
1506 node
= input_node (file_data
, ib
, tag
, nodes
);
1507 if (node
== NULL
|| node
->decl
== NULL_TREE
)
1508 internal_error ("bytecode stream: found empty cgraph node");
1509 nodes
.safe_push (node
);
1510 lto_symtab_encoder_encode (file_data
->symtab_node_encoder
, node
);
1513 tag
= streamer_read_enum (ib
, LTO_symtab_tags
, LTO_symtab_last_tag
);
1516 lto_input_toplevel_asms (file_data
, order_base
);
1518 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1519 #ifdef ENABLE_CHECKING
1520 FOR_EACH_VEC_ELT (nodes
, i
, node
)
1521 gcc_assert (node
->aux
|| !is_a
<cgraph_node
*> (node
));
1523 FOR_EACH_VEC_ELT (nodes
, i
, node
)
1526 if (cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (node
))
1528 ref
= (int) (intptr_t) cnode
->global
.inlined_to
;
1530 /* We share declaration of builtins, so we may read same node twice. */
1535 /* Fixup inlined_to from reference to pointer. */
1536 if (ref
!= LCC_NOT_FOUND
)
1537 dyn_cast
<cgraph_node
*> (node
)->global
.inlined_to
1538 = dyn_cast
<cgraph_node
*> (nodes
[ref
]);
1540 cnode
->global
.inlined_to
= NULL
;
1542 /* Compute instrumented_version. */
1543 if (cnode
->instrumentation_clone
)
1545 gcc_assert (cnode
->orig_decl
);
1547 cnode
->instrumented_version
= cgraph_node::get (cnode
->orig_decl
);
1548 if (cnode
->instrumented_version
)
1549 cnode
->instrumented_version
->instrumented_version
= cnode
;
1551 /* Restore decl names reference. */
1552 if (IDENTIFIER_TRANSPARENT_ALIAS (DECL_ASSEMBLER_NAME (cnode
->decl
))
1553 && !TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode
->decl
)))
1554 TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode
->decl
))
1555 = DECL_ASSEMBLER_NAME (cnode
->orig_decl
);
1559 ref
= (int) (intptr_t) node
->same_comdat_group
;
1561 /* Fixup same_comdat_group from reference to pointer. */
1562 if (ref
!= LCC_NOT_FOUND
)
1563 node
->same_comdat_group
= nodes
[ref
];
1565 node
->same_comdat_group
= NULL
;
1567 FOR_EACH_VEC_ELT (nodes
, i
, node
)
1568 node
->aux
= is_a
<cgraph_node
*> (node
) ? (void *)1 : NULL
;
/* NOTE(review): original line numbers below are non-contiguous; most of
   this function's body (loop heads, node lookup from IDX, braces) was
   lost in extraction.  Text kept byte-identical.  Reads per-node
   reference counts and forwards each record to input_ref.  */
1572 /* Input ipa_refs. */
1575 input_refs (struct lto_input_block
*ib
,
1576 vec
<symtab_node
*> nodes
)
1583 count
= streamer_read_uhwi (ib
);
1586 idx
= streamer_read_uhwi (ib
);
1590 input_ref (ib
, node
, nodes
);
1597 static struct gcov_ctr_summary lto_gcov_summary
;
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include the h_ix declaration, the `if (runs)' guard, braces and
   the function tail.  Text kept byte-identical.  Reads the per-file
   gcov summary: runs, sum_max, sum_all, then a sparse histogram encoded
   as a presence bitpack followed by values for non-zero buckets.  */
1599 /* Input profile_info from IB. */
1601 input_profile_summary (struct lto_input_block
*ib
,
1602 struct lto_file_decl_data
*file_data
)
1605 struct bitpack_d bp
;
1606 unsigned int runs
= streamer_read_uhwi (ib
);
1609 file_data
->profile_info
.runs
= runs
;
1610 file_data
->profile_info
.sum_max
= streamer_read_gcov_count (ib
);
1611 file_data
->profile_info
.sum_all
= streamer_read_gcov_count (ib
);
1613 memset (file_data
->profile_info
.histogram
, 0,
1614 sizeof (gcov_bucket_type
) * GCOV_HISTOGRAM_SIZE
);
1615 /* Input the bitpack of non-zero histogram indices. */
1616 bp
= streamer_read_bitpack (ib
);
1617 /* Read in and unpack the full bitpack, flagging non-zero
1618 histogram entries by setting the num_counters non-zero. */
1619 for (h_ix
= 0; h_ix
< GCOV_HISTOGRAM_SIZE
; h_ix
++)
1621 file_data
->profile_info
.histogram
[h_ix
].num_counters
1622 = bp_unpack_value (&bp
, 1);
1624 for (h_ix
= 0; h_ix
< GCOV_HISTOGRAM_SIZE
; h_ix
++)
/* Buckets flagged empty in the bitpack have no streamed values.  */
1626 if (!file_data
->profile_info
.histogram
[h_ix
].num_counters
)
1629 file_data
->profile_info
.histogram
[h_ix
].num_counters
1630 = streamer_read_gcov_count (ib
);
1631 file_data
->profile_info
.histogram
[h_ix
].min_value
1632 = streamer_read_gcov_count (ib
);
1633 file_data
->profile_info
.histogram
[h_ix
].cum_value
1634 = streamer_read_gcov_count (ib
);
1636 /* IPA-profile computes hot bb threshold based on cumulated
1637 whole program profile. We need to stream it down to ltrans. */
1639 set_hot_bb_threshold (streamer_read_gcov_count (ib
));
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include early returns, the `scale' declaration in the FOR_EACH
   loop, several apply_scale second arguments, histogram min_value
   assignment tail, and braces.  Text kept byte-identical.  */
1644 /* Rescale profile summaries to the same number of runs in the whole unit. */
1647 merge_profile_summaries (struct lto_file_decl_data
**file_data_vec
)
1649 struct lto_file_decl_data
*file_data
;
1650 unsigned int j
, h_ix
;
1651 gcov_unsigned_t max_runs
= 0;
1652 struct cgraph_node
*node
;
1653 struct cgraph_edge
*edge
;
1654 gcov_type saved_sum_all
= 0;
1655 gcov_ctr_summary
*saved_profile_info
= 0;
1656 int saved_scale
= 0;
1658 /* Find unit with maximal number of runs. If we ever get serious about
1659 roundoff errors, we might also consider computing smallest common
1661 for (j
= 0; (file_data
= file_data_vec
[j
]) != NULL
; j
++)
1662 if (max_runs
< file_data
->profile_info
.runs
)
1663 max_runs
= file_data
->profile_info
.runs
;
1668 /* Simple overflow check. We probably don't need to support that many train
1669 runs. Such a large value probably imply data corruption anyway. */
1670 if (max_runs
> INT_MAX
/ REG_BR_PROB_BASE
)
1672 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1673 INT_MAX
/ REG_BR_PROB_BASE
);
1677 profile_info
= &lto_gcov_summary
;
1678 lto_gcov_summary
.runs
= max_runs
;
1679 lto_gcov_summary
.sum_max
= 0;
1680 memset (lto_gcov_summary
.histogram
, 0,
1681 sizeof (gcov_bucket_type
) * GCOV_HISTOGRAM_SIZE
);
1683 /* Rescale all units to the maximal number of runs.
1684 sum_max can not be easily merged, as we have no idea what files come from
1685 the same run. We do not use the info anyway, so leave it 0. */
1686 for (j
= 0; (file_data
= file_data_vec
[j
]) != NULL
; j
++)
1687 if (file_data
->profile_info
.runs
)
1689 int scale
= GCOV_COMPUTE_SCALE (max_runs
,
1690 file_data
->profile_info
.runs
);
1691 lto_gcov_summary
.sum_max
1692 = MAX (lto_gcov_summary
.sum_max
,
1693 apply_scale (file_data
->profile_info
.sum_max
, scale
));
1694 lto_gcov_summary
.sum_all
1695 = MAX (lto_gcov_summary
.sum_all
,
1696 apply_scale (file_data
->profile_info
.sum_all
, scale
));
1697 /* Save a pointer to the profile_info with the largest
1698 scaled sum_all and the scale for use in merging the
1700 if (!saved_profile_info
1701 || lto_gcov_summary
.sum_all
> saved_sum_all
)
1703 saved_profile_info
= &file_data
->profile_info
;
1704 saved_sum_all
= lto_gcov_summary
.sum_all
;
1705 saved_scale
= scale
;
1709 gcc_assert (saved_profile_info
);
1711 /* Scale up the histogram from the profile that had the largest
1712 scaled sum_all above. */
1713 for (h_ix
= 0; h_ix
< GCOV_HISTOGRAM_SIZE
; h_ix
++)
1715 /* Scale up the min value as we did the corresponding sum_all
1716 above. Use that to find the new histogram index. */
1717 gcov_type scaled_min
1718 = apply_scale (saved_profile_info
->histogram
[h_ix
].min_value
,
1720 /* The new index may be shared with another scaled histogram entry,
1721 so we need to account for a non-zero histogram entry at new_ix. */
1722 unsigned new_ix
= gcov_histo_index (scaled_min
);
1723 lto_gcov_summary
.histogram
[new_ix
].min_value
1724 = (lto_gcov_summary
.histogram
[new_ix
].num_counters
1725 ? MIN (lto_gcov_summary
.histogram
[new_ix
].min_value
, scaled_min
)
1727 /* Some of the scaled counter values would ostensibly need to be placed
1728 into different (larger) histogram buckets, but we keep things simple
1729 here and place the scaled cumulative counter value in the bucket
1730 corresponding to the scaled minimum counter value. */
1731 lto_gcov_summary
.histogram
[new_ix
].cum_value
1732 += apply_scale (saved_profile_info
->histogram
[h_ix
].cum_value
,
1734 lto_gcov_summary
.histogram
[new_ix
].num_counters
1735 += saved_profile_info
->histogram
[h_ix
].num_counters
;
1738 /* Watch roundoff errors. */
1739 if (lto_gcov_summary
.sum_max
< max_runs
)
1740 lto_gcov_summary
.sum_max
= max_runs
;
1742 /* If merging already happent at WPA time, we are done. */
1746 /* Now compute count_materialization_scale of each node.
1747 During LTRANS we already have values of count_materialization_scale
1748 computed, so just update them. */
1749 FOR_EACH_FUNCTION (node
)
1750 if (node
->lto_file_data
1751 && node
->lto_file_data
->profile_info
.runs
)
1755 scale
= RDIV (node
->count_materialization_scale
* max_runs
,
1756 node
->lto_file_data
->profile_info
.runs
);
1757 node
->count_materialization_scale
= scale
;
1759 fatal_error ("Profile information in %s corrupted",
1760 file_data
->file_name
);
1762 if (scale
== REG_BR_PROB_BASE
)
1764 for (edge
= node
->callees
; edge
; edge
= edge
->next_callee
)
1765 edge
->count
= apply_scale (edge
->count
, scale
);
1766 node
->count
= apply_scale (node
->count
, scale
);
/* NOTE(review): the function signature line was lost in extraction
   (original lines 1771-1775 missing) -- presumably this is input_symtab;
   confirm against upstream.  Remaining original line numbers below are
   non-contiguous; braces, section-size locals and loop tails were also
   dropped.  Text kept byte-identical.  For each input file: read the
   profile summary, the symtab nodes section, then the refs section, and
   finally merge profile summaries and clear the aux markers.  */
1770 /* Input and merge the symtab from each of the .o files passed to
1776 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
1777 struct lto_file_decl_data
*file_data
;
1779 struct cgraph_node
*node
;
1781 while ((file_data
= file_data_vec
[j
++]))
1785 struct lto_input_block
*ib
;
1786 vec
<symtab_node
*> nodes
;
1788 ib
= lto_create_simple_input_block (file_data
, LTO_section_symtab_nodes
,
1791 fatal_error ("cannot find LTO cgraph in %s", file_data
->file_name
);
1792 input_profile_summary (ib
, file_data
);
1793 file_data
->symtab_node_encoder
= lto_symtab_encoder_new (true);
1794 nodes
= input_cgraph_1 (file_data
, ib
);
1795 lto_destroy_simple_input_block (file_data
, LTO_section_symtab_nodes
,
1798 ib
= lto_create_simple_input_block (file_data
, LTO_section_refs
,
1801 fatal_error ("cannot find LTO section refs in %s",
1802 file_data
->file_name
);
1803 input_refs (ib
, nodes
);
1804 lto_destroy_simple_input_block (file_data
, LTO_section_refs
,
1807 input_cgraph_opt_summary (nodes
);
1811 merge_profile_summaries (file_data_vec
);
1812 get_working_sets ();
1815 /* Clear out the aux field that was used to store enough state to
1816 tell which nodes should be overwritten. */
1817 FOR_EACH_FUNCTION (node
)
1819 /* Some nodes may have been created by cgraph_node. This
1820 happens when the callgraph contains nested functions. If the
1821 node for the parent function was never emitted to the gimple
1822 file, cgraph_node will create a node for it when setting the
1823 context of the nested function. */
1824 if (node
->lto_file_data
)
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include the `j' counter, the fn_decl/var_decl declarations, the
   NULL-section guard, and braces.  Text kept byte-identical.  Mirrors
   output_offload_tables: reads tag-prefixed decl indices until tag 0.  */
1829 /* Input function/variable tables that will allow libgomp to look up offload
1830 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1833 input_offload_tables (void)
1835 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
1836 struct lto_file_decl_data
*file_data
;
1839 while ((file_data
= file_data_vec
[j
++]))
1843 struct lto_input_block
*ib
1844 = lto_create_simple_input_block (file_data
, LTO_section_offload_table
,
1849 enum LTO_symtab_tags tag
1850 = streamer_read_enum (ib
, LTO_symtab_tags
, LTO_symtab_last_tag
);
1853 if (tag
== LTO_symtab_unavail_node
)
1855 int decl_index
= streamer_read_uhwi (ib
);
1857 = lto_file_decl_data_get_fn_decl (file_data
, decl_index
);
1858 vec_safe_push (offload_funcs
, fn_decl
);
1860 else if (tag
== LTO_symtab_variable
)
1862 int decl_index
= streamer_read_uhwi (ib
);
1864 = lto_file_decl_data_get_var_decl (file_data
, decl_index
);
1865 vec_safe_push (offload_vars
, var_decl
);
/* Any other tag in the offload table means a corrupt input file.  */
1868 fatal_error ("invalid offload table in %s", file_data
->file_name
);
1870 tag
= streamer_read_enum (ib
, LTO_symtab_tags
, LTO_symtab_last_tag
);
1873 lto_destroy_simple_input_block (file_data
, LTO_section_offload_table
,
/* NOTE(review): original line numbers below are non-contiguous; the
   return type line was lost in extraction.  Text kept byte-identical.
   A summary is needed only for clones that record parameter changes
   (tree_map) or skipped arguments.  */
1878 /* True when we need optimization summary for NODE. */
1881 output_cgraph_opt_summary_p (struct cgraph_node
*node
)
1883 return (node
->clone_of
1884 && (node
->clone
.tree_map
1885 || node
->clone
.args_to_skip
1886 || node
->clone
.combined_args_to_skip
));
/* NOTE(review): the body between the signature and the next function was
   lost in extraction; from the ATTRIBUTE_UNUSED parameters, presumably
   an intentionally empty placeholder -- confirm against upstream.  */
1889 /* Output optimization summary for EDGE to OB. */
1891 output_edge_opt_summary (struct output_block
*ob ATTRIBUTE_UNUSED
,
1892 struct cgraph_edge
*edge ATTRIBUTE_UNUSED
)
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include declarations of index/i/bi, the else arms paired with
   the "streamer_write_uhwi (ob, 0)" lines, and braces.  Text kept
   byte-identical.  Streams the clone info (args_to_skip bitmaps and the
   parameter replacement map) consumed by input_node_opt_summary.  */
1896 /* Output optimization summary for NODE to OB. */
1899 output_node_opt_summary (struct output_block
*ob
,
1900 struct cgraph_node
*node
,
1901 lto_symtab_encoder_t encoder
)
1905 struct ipa_replace_map
*map
;
1906 struct bitpack_d bp
;
1908 struct cgraph_edge
*e
;
1910 if (node
->clone
.args_to_skip
)
1912 streamer_write_uhwi (ob
, bitmap_count_bits (node
->clone
.args_to_skip
));
1913 EXECUTE_IF_SET_IN_BITMAP (node
->clone
.args_to_skip
, 0, index
, bi
)
1914 streamer_write_uhwi (ob
, index
);
1917 streamer_write_uhwi (ob
, 0);
1918 if (node
->clone
.combined_args_to_skip
)
1920 streamer_write_uhwi (ob
, bitmap_count_bits (node
->clone
.combined_args_to_skip
));
1921 EXECUTE_IF_SET_IN_BITMAP (node
->clone
.combined_args_to_skip
, 0, index
, bi
)
1922 streamer_write_uhwi (ob
, index
);
1925 streamer_write_uhwi (ob
, 0);
1926 streamer_write_uhwi (ob
, vec_safe_length (node
->clone
.tree_map
));
1927 FOR_EACH_VEC_SAFE_ELT (node
->clone
.tree_map
, i
, map
)
1929 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1930 mechanism to store function local declarations into summaries. */
1931 gcc_assert (!map
->old_tree
);
1932 streamer_write_uhwi (ob
, map
->parm_num
);
1933 gcc_assert (EXPR_LOCATION (map
->new_tree
) == UNKNOWN_LOCATION
);
1934 stream_write_tree (ob
, map
->new_tree
, true);
1935 bp
= bitpack_create (ob
->main_stream
);
1936 bp_pack_value (&bp
, map
->replace_p
, 1);
1937 bp_pack_value (&bp
, map
->ref_p
, 1);
1938 streamer_write_bitpack (&bp
);
/* Edge summaries are emitted only for nodes in this partition.  */
1941 if (lto_symtab_encoder_in_partition_p (encoder
, node
))
1943 for (e
= node
->callees
; e
; e
= e
->next_callee
)
1944 output_edge_opt_summary (ob
, e
);
1945 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
1946 output_edge_opt_summary (ob
, e
);
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include declarations of i/n_nodes/count, the count++ in the
   first loop, and braces.  Text kept byte-identical.  Two passes over
   the encoder: first count nodes needing a summary, then emit
   (index, summary) pairs.  */
1950 /* Output optimization summaries stored in callgraph.
1951 At the moment it is the clone info structure. */
1954 output_cgraph_opt_summary (void)
1957 lto_symtab_encoder_t encoder
;
1958 struct output_block
*ob
= create_output_block (LTO_section_cgraph_opt_sum
);
1962 encoder
= ob
->decl_state
->symtab_node_encoder
;
1963 n_nodes
= lto_symtab_encoder_size (encoder
);
1964 for (i
= 0; i
< n_nodes
; i
++)
1966 symtab_node
*node
= lto_symtab_encoder_deref (encoder
, i
);
1967 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (node
);
1968 if (cnode
&& output_cgraph_opt_summary_p (cnode
))
1971 streamer_write_uhwi (ob
, count
);
1972 for (i
= 0; i
< n_nodes
; i
++)
1974 symtab_node
*node
= lto_symtab_encoder_deref (encoder
, i
);
1975 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (node
);
1976 if (cnode
&& output_cgraph_opt_summary_p (cnode
))
1978 streamer_write_uhwi (ob
, i
);
1979 output_node_opt_summary (ob
, cnode
, encoder
);
1982 produce_asm (ob
, NULL
);
1983 destroy_output_block (ob
);
/* NOTE(review): the body between the signature and the next function was
   lost in extraction; from the ATTRIBUTE_UNUSED parameters, presumably
   an intentionally empty placeholder mirroring output_edge_opt_summary
   -- confirm against upstream.  */
1986 /* Input optimisation summary of EDGE. */
1989 input_edge_opt_summary (struct cgraph_edge
*edge ATTRIBUTE_UNUSED
,
1990 struct lto_input_block
*ib_main ATTRIBUTE_UNUSED
)
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include declarations of i/count/bit, `if (count)' guards before
   the BITMAP_GGC_ALLOC lines, and braces.  Text kept byte-identical.
   Reads back exactly what output_node_opt_summary wrote: the two
   args_to_skip bitmaps, then the parameter replacement map.  */
1994 /* Input optimisation summary of NODE. */
1997 input_node_opt_summary (struct cgraph_node
*node
,
1998 struct lto_input_block
*ib_main
,
1999 struct data_in
*data_in
)
2004 struct bitpack_d bp
;
2005 struct cgraph_edge
*e
;
2007 count
= streamer_read_uhwi (ib_main
);
2009 node
->clone
.args_to_skip
= BITMAP_GGC_ALLOC ();
2010 for (i
= 0; i
< count
; i
++)
2012 bit
= streamer_read_uhwi (ib_main
);
2013 bitmap_set_bit (node
->clone
.args_to_skip
, bit
);
2015 count
= streamer_read_uhwi (ib_main
);
2017 node
->clone
.combined_args_to_skip
= BITMAP_GGC_ALLOC ();
2018 for (i
= 0; i
< count
; i
++)
2020 bit
= streamer_read_uhwi (ib_main
);
2021 bitmap_set_bit (node
->clone
.combined_args_to_skip
, bit
);
2023 count
= streamer_read_uhwi (ib_main
);
2024 for (i
= 0; i
< count
; i
++)
2026 struct ipa_replace_map
*map
= ggc_alloc
<ipa_replace_map
> ();
2028 vec_safe_push (node
->clone
.tree_map
, map
);
2029 map
->parm_num
= streamer_read_uhwi (ib_main
);
/* old_tree is never streamed; the writer asserts it is NULL.  */
2030 map
->old_tree
= NULL
;
2031 map
->new_tree
= stream_read_tree (ib_main
, data_in
);
2032 bp
= streamer_read_bitpack (ib_main
);
2033 map
->replace_p
= bp_unpack_value (&bp
, 1);
2034 map
->ref_p
= bp_unpack_value (&bp
, 1);
2036 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2037 input_edge_opt_summary (e
, ib_main
);
2038 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2039 input_edge_opt_summary (e
, ib_main
);
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include declarations of count/i, the lto_input_block constructor
   tail, and braces.  Text kept byte-identical.  Parses the section
   header, sets up the input block and data_in over the string table,
   then reads (node index, summary) pairs.  */
2042 /* Read section in file FILE_DATA of length LEN with data DATA. */
2045 input_cgraph_opt_section (struct lto_file_decl_data
*file_data
,
2046 const char *data
, size_t len
,
2047 vec
<symtab_node
*> nodes
)
2049 const struct lto_function_header
*header
=
2050 (const struct lto_function_header
*) data
;
2051 const int cfg_offset
= sizeof (struct lto_function_header
);
2052 const int main_offset
= cfg_offset
+ header
->cfg_size
;
2053 const int string_offset
= main_offset
+ header
->main_size
;
2054 struct data_in
*data_in
;
2058 lto_input_block
ib_main ((const char *) data
+ main_offset
,
2062 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
2063 header
->string_size
, vNULL
);
2064 count
= streamer_read_uhwi (&ib_main
);
2066 for (i
= 0; i
< count
; i
++)
2068 int ref
= streamer_read_uhwi (&ib_main
);
2069 input_node_opt_summary (dyn_cast
<cgraph_node
*> (nodes
[ref
]),
2072 lto_free_section_data (file_data
, LTO_section_cgraph_opt_sum
, NULL
, data
,
2074 lto_data_in_delete (data_in
);
/* NOTE(review): original line numbers below are non-contiguous; dropped
   lines include the `j' counter, the data/len locals, the NULL-data
   guard, and braces.  Text kept byte-identical.  For each input file,
   fetch the cgraph_opt_sum section (if present) and hand it to
   input_cgraph_opt_section.  */
2077 /* Input optimization summary of cgraph. */
2080 input_cgraph_opt_summary (vec
<symtab_node
*> nodes
)
2082 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
2083 struct lto_file_decl_data
*file_data
;
2086 while ((file_data
= file_data_vec
[j
++]))
2090 lto_get_section_data (file_data
, LTO_section_cgraph_opt_sum
, NULL
,
2094 input_cgraph_opt_section (file_data
, data
, len
, nodes
);