gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2019 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "stringpool.h"
41 #include "attribs.h"
/* True when asm nodes have been output.  */
44 bool asm_nodes_output = false;
46 static void output_cgraph_opt_summary (void);
47 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
49 /* Number of LDPR values known to GCC. */
50 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
/* All node orders are offset by ORDER_BASE.  */
53 static int order_base;
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
57 enum LTO_symtab_tags
59 /* Must leave 0 for the stopper. */
61 /* Cgraph node without body available. */
62 LTO_symtab_unavail_node = 1,
63 /* Cgraph node with function body. */
64 LTO_symtab_analyzed_node,
65 /* Cgraph edges. */
66 LTO_symtab_edge,
67 LTO_symtab_indirect_edge,
68 LTO_symtab_variable,
69 LTO_symtab_last_tag
/* Create a new symtab encoder.
   If FOR_INPUT, the encoder allocates only the data structures needed
   to read the symtab.  */
76 lto_symtab_encoder_t
77 lto_symtab_encoder_new (bool for_input)
79 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
81 if (!for_input)
82 encoder->map = new hash_map<symtab_node *, size_t>;
83 encoder->nodes.create (0);
84 return encoder;
88 /* Delete ENCODER and its components. */
90 void
91 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
93 encoder->nodes.release ();
94 if (encoder->map)
95 delete encoder->map;
96 free (encoder);
100 /* Return the existing reference number of NODE in the symtab encoder in
101 output block OB. Assign a new reference if this is the first time
102 NODE is encoded. */
105 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
106 symtab_node *node)
108 int ref;
110 if (!encoder->map)
112 lto_encoder_entry entry = {node, false, false, false};
114 ref = encoder->nodes.length ();
115 encoder->nodes.safe_push (entry);
116 return ref;
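/* The map stores the reference number biased by one so that a value of
   zero can mean "not present"; hence the +1 when inserting below and the
   -1 when reading an existing slot.  */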
119 size_t *slot = encoder->map->get (node);
120 if (!slot || !*slot)
122 lto_encoder_entry entry = {node, false, false, false};
123 ref = encoder->nodes.length ();
124 if (!slot)
125 encoder->map->put (node, ref + 1);
126 encoder->nodes.safe_push (entry);
128 else
129 ref = *slot - 1;
131 return ref;
134 /* Remove NODE from encoder. */
136 bool
137 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
138 symtab_node *node)
140 int index;
141 lto_encoder_entry last_node;
143 size_t *slot = encoder->map->get (node);
144 if (slot == NULL || !*slot)
145 return false;
147 index = *slot - 1;
148 gcc_checking_assert (encoder->nodes[index].node == node);
150 /* Remove from vector. We do this by swapping node with the last element
151 of the vector. */
152 last_node = encoder->nodes.pop ();
153 if (last_node.node != node)
155 gcc_assert (encoder->map->put (last_node.node, index + 1));
157 /* Move the last element to the original spot of NODE. */
158 encoder->nodes[index] = last_node;
161 /* Remove element from hash table. */
162 encoder->map->remove (node);
163 return true;
167 /* Return TRUE if we should encode the body of NODE (if any). */
169 bool
170 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
171 struct cgraph_node *node)
173 int index = lto_symtab_encoder_lookup (encoder, node);
174 return encoder->nodes[index].body;
177 /* Specify that we encode the body of NODE in this partition. */
179 static void
180 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
181 struct cgraph_node *node)
183 int index = lto_symtab_encoder_encode (encoder, node);
184 gcc_checking_assert (encoder->nodes[index].node == node);
185 encoder->nodes[index].body = true;
/* Return TRUE if we should encode the initializer of NODE (if any).  */
190 bool
191 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
192 varpool_node *node)
194 int index = lto_symtab_encoder_lookup (encoder, node);
195 if (index == LCC_NOT_FOUND)
196 return false;
197 return encoder->nodes[index].initializer;
/* Specify that we should encode the initializer of NODE (if any).  */
202 static void
203 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
204 varpool_node *node)
206 int index = lto_symtab_encoder_lookup (encoder, node);
207 encoder->nodes[index].initializer = true;
210 /* Return TRUE if NODE is in this partition. */
212 bool
213 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
214 symtab_node *node)
216 int index = lto_symtab_encoder_lookup (encoder, node);
217 if (index == LCC_NOT_FOUND)
218 return false;
219 return encoder->nodes[index].in_partition;
222 /* Specify that NODE is in this partition. */
224 void
225 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
226 symtab_node *node)
228 int index = lto_symtab_encoder_encode (encoder, node);
229 encoder->nodes[index].in_partition = true;
232 /* Output the cgraph EDGE to OB using ENCODER. */
234 static void
235 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
236 lto_symtab_encoder_t encoder)
238 unsigned int uid;
239 intptr_t ref;
240 struct bitpack_d bp;
242 if (edge->indirect_unknown_callee)
243 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
244 LTO_symtab_indirect_edge);
245 else
246 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
247 LTO_symtab_edge);
249 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
250 gcc_assert (ref != LCC_NOT_FOUND);
251 streamer_write_hwi_stream (ob->main_stream, ref);
253 if (!edge->indirect_unknown_callee)
255 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
256 gcc_assert (ref != LCC_NOT_FOUND);
257 streamer_write_hwi_stream (ob->main_stream, ref);
260 edge->count.stream_out (ob->main_stream);
262 bp = bitpack_create (ob->main_stream);
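  /* Pick the statement UID to stream: when the caller has no GIMPLE body
     (or is a thunk) reuse the UID assigned at stream-in time, otherwise
     derive it from the call statement, biased by one so that zero can
     stand for "no statement".  */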
263 uid = (!gimple_has_body_p (edge->caller->decl) || edge->caller->thunk.thunk_p
264 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
265 bp_pack_enum (&bp, cgraph_inline_failed_t,
266 CIF_N_REASONS, edge->inline_failed);
267 bp_pack_var_len_unsigned (&bp, uid);
268 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
269 bp_pack_value (&bp, edge->speculative, 1);
270 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
271 gcc_assert (!edge->call_stmt_cannot_inline_p
272 || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
273 bp_pack_value (&bp, edge->can_throw_external, 1);
274 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
275 if (edge->indirect_unknown_callee)
277 int flags = edge->indirect_info->ecf_flags;
278 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
279 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
280 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
281 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
282 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
283 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
284 /* Flags that should not appear on indirect calls. */
285 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
286 | ECF_MAY_BE_ALLOCA
287 | ECF_SIBCALL
288 | ECF_LEAF
289 | ECF_NOVOPS)));
291 streamer_write_bitpack (&bp);
292 if (edge->indirect_unknown_callee)
294 streamer_write_hwi_stream (ob->main_stream,
295 edge->indirect_info->common_target_id);
296 if (edge->indirect_info->common_target_id)
297 streamer_write_hwi_stream
298 (ob->main_stream, edge->indirect_info->common_target_probability);
/* Return true if NODE has references from other partitions.  */
304 bool
305 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
307 int i;
308 struct ipa_ref *ref = NULL;
310 for (i = 0; node->iterate_referring (i, ref); i++)
312 /* Ignore references from non-offloadable nodes while streaming NODE into
313 offload LTO section. */
314 if (!ref->referring->need_lto_streaming)
315 continue;
317 if (ref->referring->in_other_partition
318 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
319 return true;
321 return false;
/* Return true when NODE is reachable from another partition.  */
326 bool
327 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
329 struct cgraph_edge *e;
330 if (!node->definition)
331 return false;
332 if (node->global.inlined_to)
333 return false;
334 for (e = node->callers; e; e = e->next_caller)
336 /* Ignore references from non-offloadable nodes while streaming NODE into
337 offload LTO section. */
338 if (!e->caller->need_lto_streaming)
339 continue;
341 if (e->caller->in_other_partition
342 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
343 return true;
345 return false;
/* Return true if NODE is referenced from this partition.  */
350 bool
351 referenced_from_this_partition_p (symtab_node *node,
352 lto_symtab_encoder_t encoder)
354 int i;
355 struct ipa_ref *ref = NULL;
357 for (i = 0; node->iterate_referring (i, ref); i++)
358 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
359 return true;
360 return false;
/* Return true when NODE is reachable from this partition, i.e. has a caller in it.  */
365 bool
366 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
368 struct cgraph_edge *e;
369 for (e = node->callers; e; e = e->next_caller)
370 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
371 return true;
372 return false;
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  */
383 static void
384 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
385 lto_symtab_encoder_t encoder)
387 unsigned int tag;
388 struct bitpack_d bp;
389 bool boundary_p;
390 intptr_t ref;
391 bool in_other_partition = false;
392 struct cgraph_node *clone_of, *ultimate_clone_of;
393 ipa_opt_pass_d *pass;
394 int i;
395 const char *comdat;
396 const char *section;
397 tree group;
399 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
401 if (node->analyzed && (!boundary_p || node->alias
402 || (node->thunk.thunk_p && !node->global.inlined_to)))
403 tag = LTO_symtab_analyzed_node;
404 else
405 tag = LTO_symtab_unavail_node;
407 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
408 tag);
409 streamer_write_hwi_stream (ob->main_stream, node->order);
/* In WPA mode, we only output part of the call-graph.  Also, we
   fake cgraph node attributes.  There are two cases that we care about.

   Boundary nodes: There are nodes that are not part of SET but are
   called from within SET.  We artificially make them look like
   externally visible nodes with no function body.

   Cherry-picked nodes: These are nodes we pulled from other
   translation units into SET during IPA-inlining.  We make them
   local static nodes to prevent clashes with other local statics.  */
421 if (boundary_p && node->analyzed
422 && node->get_partitioning_class () == SYMBOL_PARTITION)
/* Inline clones cannot be part of the boundary.
   gcc_assert (!node->global.inlined_to);

   FIXME: At the moment they can be, when the partition contains an inline
   clone that is a clone of an inline clone from outside the partition.  We can
   reshape the clone tree and make another node the root, but it
   needs a bit of extra work and will be promptly done by cgraph_remove_node
   after reading back.  */
432 in_other_partition = 1;
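  /* Walk the clone tree (siblings first, then up towards the origin) until we
     find an ancestor that is present in ENCODER; that is the node the streamed
     clone reference will point to.  */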
435 clone_of = node->clone_of;
436 while (clone_of
437 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
438 if (clone_of->prev_sibling_clone)
439 clone_of = clone_of->prev_sibling_clone;
440 else
441 clone_of = clone_of->clone_of;
/* See if the body of the master function is output.  If not, we are seeing only
   a declaration and we do not need to pass down the clone tree.  */
445 ultimate_clone_of = clone_of;
446 while (ultimate_clone_of && ultimate_clone_of->clone_of)
447 ultimate_clone_of = ultimate_clone_of->clone_of;
449 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
450 clone_of = NULL;
452 if (tag == LTO_symtab_analyzed_node)
453 gcc_assert (clone_of || !node->clone_of);
454 if (!clone_of)
455 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
456 else
457 streamer_write_hwi_stream (ob->main_stream, ref);
460 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
461 node->count.stream_out (ob->main_stream);
462 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
464 streamer_write_hwi_stream (ob->main_stream,
465 node->ipa_transforms_to_apply.length ());
466 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
467 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
469 if (tag == LTO_symtab_analyzed_node)
471 if (node->global.inlined_to)
473 ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
474 gcc_assert (ref != LCC_NOT_FOUND);
476 else
477 ref = LCC_NOT_FOUND;
479 streamer_write_hwi_stream (ob->main_stream, ref);
482 group = node->get_comdat_group ();
483 if (group)
484 comdat = IDENTIFIER_POINTER (group);
485 else
486 comdat = "";
487 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
489 if (group)
491 if (node->same_comdat_group)
493 ref = LCC_NOT_FOUND;
494 for (struct symtab_node *n = node->same_comdat_group;
495 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
496 ref = lto_symtab_encoder_lookup (encoder, n);
498 else
499 ref = LCC_NOT_FOUND;
500 streamer_write_hwi_stream (ob->main_stream, ref);
503 section = node->get_section ();
504 if (!section)
505 section = "";
507 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
509 bp = bitpack_create (ob->main_stream);
510 bp_pack_value (&bp, node->local.local, 1);
511 bp_pack_value (&bp, node->externally_visible, 1);
512 bp_pack_value (&bp, node->no_reorder, 1);
513 bp_pack_value (&bp, node->definition, 1);
514 bp_pack_value (&bp, node->local.versionable, 1);
515 bp_pack_value (&bp, node->local.can_change_signature, 1);
516 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
517 bp_pack_value (&bp, node->force_output, 1);
518 bp_pack_value (&bp, node->forced_by_abi, 1);
519 bp_pack_value (&bp, node->unique_name, 1);
520 bp_pack_value (&bp, node->body_removed, 1);
521 bp_pack_value (&bp, node->implicit_section, 1);
522 bp_pack_value (&bp, node->address_taken, 1);
523 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
524 && node->get_partitioning_class () == SYMBOL_PARTITION
525 && (reachable_from_other_partition_p (node, encoder)
526 || referenced_from_other_partition_p (node, encoder)), 1);
527 bp_pack_value (&bp, node->lowered, 1);
528 bp_pack_value (&bp, in_other_partition, 1);
529 bp_pack_value (&bp, node->alias, 1);
530 bp_pack_value (&bp, node->transparent_alias, 1);
531 bp_pack_value (&bp, node->weakref, 1);
532 bp_pack_value (&bp, node->frequency, 2);
533 bp_pack_value (&bp, node->only_called_at_startup, 1);
534 bp_pack_value (&bp, node->only_called_at_exit, 1);
535 bp_pack_value (&bp, node->tm_clone, 1);
536 bp_pack_value (&bp, node->calls_comdat_local, 1);
537 bp_pack_value (&bp, node->icf_merged, 1);
538 bp_pack_value (&bp, node->nonfreeing_fn, 1);
539 bp_pack_value (&bp, node->thunk.thunk_p, 1);
540 bp_pack_value (&bp, node->parallelized_function, 1);
541 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
542 LDPR_NUM_KNOWN,
543 /* When doing incremental link, we will get new resolution
544 info next time we process the file. */
545 flag_incremental_link ? LDPR_UNKNOWN : node->resolution);
546 bp_pack_value (&bp, node->split_part, 1);
547 streamer_write_bitpack (&bp);
548 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
/* Always stream thunk info because we use it in
   ipa_polymorphic_call_context::ipa_polymorphic_call_context
   to properly interpret THIS pointers for thunks that have been converted
   to GIMPLE.  */
554 if (node->definition)
556 streamer_write_uhwi_stream
557 (ob->main_stream,
558 1 + (node->thunk.this_adjusting != 0) * 2
559 + (node->thunk.virtual_offset_p != 0) * 4
560 + (node->thunk.add_pointer_bounds_args != 0) * 8);
561 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
562 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
563 streamer_write_uhwi_stream (ob->main_stream, node->thunk.indirect_offset);
565 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
566 if (DECL_STATIC_CONSTRUCTOR (node->decl))
567 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
568 if (DECL_STATIC_DESTRUCTOR (node->decl))
569 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
572 /* Output the varpool NODE to OB.
573 If NODE is not in SET, then NODE is a boundary. */
575 static void
576 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
577 lto_symtab_encoder_t encoder)
579 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
580 bool encode_initializer_p
581 = (node->definition
582 && lto_symtab_encoder_encode_initializer_p (encoder, node));
583 struct bitpack_d bp;
584 int ref;
585 const char *comdat;
586 const char *section;
587 tree group;
589 gcc_assert (!encode_initializer_p || node->definition);
590 gcc_assert (boundary_p || encode_initializer_p);
592 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
593 LTO_symtab_variable);
594 streamer_write_hwi_stream (ob->main_stream, node->order);
595 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
596 bp = bitpack_create (ob->main_stream);
597 bp_pack_value (&bp, node->externally_visible, 1);
598 bp_pack_value (&bp, node->no_reorder, 1);
599 bp_pack_value (&bp, node->force_output, 1);
600 bp_pack_value (&bp, node->forced_by_abi, 1);
601 bp_pack_value (&bp, node->unique_name, 1);
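  /* A defined non-alias variable whose initializer is not streamed must be
     treated on the reading side as if its body had been removed.  */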
  bp_pack_value (&bp,
                 node->body_removed
                 || (!encode_initializer_p && !node->alias && node->definition),
                 1);
606 bp_pack_value (&bp, node->implicit_section, 1);
607 bp_pack_value (&bp, node->writeonly, 1);
  bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
                 1);
610 bp_pack_value (&bp, node->alias, 1);
611 bp_pack_value (&bp, node->transparent_alias, 1);
612 bp_pack_value (&bp, node->weakref, 1);
613 bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
614 gcc_assert (node->definition || !node->analyzed);
/* Constant pool initializers can be de-unified into individual ltrans units.
   FIXME: Alternatively, at -Os we may want to avoid generating the local
   labels for them and share them across LTRANS partitions.  */
618 if (node->get_partitioning_class () != SYMBOL_PARTITION)
  bp_pack_value (&bp, 0, 1); /* used_from_other_partition.  */
621 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
623 else
625 bp_pack_value (&bp, node->definition
626 && referenced_from_other_partition_p (node, encoder), 1);
627 bp_pack_value (&bp, node->analyzed
628 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
629 /* in_other_partition. */
631 bp_pack_value (&bp, node->tls_model, 3);
632 bp_pack_value (&bp, node->used_by_single_function, 1);
633 bp_pack_value (&bp, node->dynamically_initialized, 1);
634 bp_pack_value (&bp, node->need_bounds_init, 1);
635 streamer_write_bitpack (&bp);
637 group = node->get_comdat_group ();
638 if (group)
639 comdat = IDENTIFIER_POINTER (group);
640 else
641 comdat = "";
642 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
644 if (group)
646 if (node->same_comdat_group)
648 ref = LCC_NOT_FOUND;
649 for (struct symtab_node *n = node->same_comdat_group;
650 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
651 ref = lto_symtab_encoder_lookup (encoder, n);
653 else
654 ref = LCC_NOT_FOUND;
655 streamer_write_hwi_stream (ob->main_stream, ref);
658 section = node->get_section ();
659 if (!section)
660 section = "";
661 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
663 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
664 LDPR_NUM_KNOWN, node->resolution);
/* Output the reference REF to OB using ENCODER.  */
670 static void
671 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
672 lto_symtab_encoder_t encoder)
674 struct bitpack_d bp;
675 int nref;
676 int uid = ref->lto_stmt_uid;
677 struct cgraph_node *node;
679 bp = bitpack_create (ob->main_stream);
680 bp_pack_value (&bp, ref->use, 3);
681 bp_pack_value (&bp, ref->speculative, 1);
682 streamer_write_bitpack (&bp);
683 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
684 gcc_assert (nref != LCC_NOT_FOUND);
685 streamer_write_hwi_stream (ob->main_stream, nref);
687 node = dyn_cast <cgraph_node *> (ref->referring);
688 if (node)
690 if (ref->stmt)
691 uid = gimple_uid (ref->stmt) + 1;
692 streamer_write_hwi_stream (ob->main_stream, uid);
696 /* Stream out profile_summary to OB. */
698 static void
699 output_profile_summary (struct lto_simple_output_block *ob)
701 if (profile_info)
703 /* We do not output num and run_max, they are not used by
704 GCC profile feedback and they are difficult to merge from multiple
705 units. */
706 unsigned runs = (profile_info->runs);
707 streamer_write_uhwi_stream (ob->main_stream, runs);
709 /* IPA-profile computes hot bb threshold based on cumulated
710 whole program profile. We need to stream it down to ltrans. */
711 if (flag_wpa)
712 streamer_write_gcov_count_stream (ob->main_stream,
713 get_hot_bb_threshold ());
715 else
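    /* A zero run count tells the reader that no profile summary is present.  */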
716 streamer_write_uhwi_stream (ob->main_stream, 0);
719 /* Output all callees or indirect outgoing edges. EDGE must be the first such
720 edge. */
722 static void
723 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
724 struct lto_simple_output_block *ob,
725 lto_symtab_encoder_t encoder)
727 if (!edge)
728 return;
/* Output edges in backward direction, so the reconstructed callgraph matches
   and it is easy to associate call sites with the IPA pass summaries.  */
732 while (edge->next_callee)
733 edge = edge->next_callee;
734 for (; edge; edge = edge->prev_callee)
735 lto_output_edge (ob, edge, encoder);
/* Output the IPA references of the nodes in ENCODER.  */
740 static void
741 output_refs (lto_symtab_encoder_t encoder)
743 struct lto_simple_output_block *ob;
744 int count;
745 struct ipa_ref *ref;
747 ob = lto_create_simple_output_block (LTO_section_refs);
749 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
751 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
/* IPA_REF_ALIAS references are always preserved
   in the boundary.  An alias node can't have other references and
   can always be handled as if it's not in the boundary.  */
756 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
757 continue;
759 count = node->ref_list.nreferences ();
760 if (count)
762 streamer_write_gcov_count_stream (ob->main_stream, count);
763 streamer_write_uhwi_stream (ob->main_stream,
764 lto_symtab_encoder_lookup (encoder, node));
765 for (int i = 0; node->iterate_reference (i, ref); i++)
766 lto_output_ref (ob, ref, encoder);
770 streamer_write_uhwi_stream (ob->main_stream, 0);
772 lto_destroy_simple_output_block (ob);
/* Add NODE into ENCODER as well as the nodes it is cloned from.
   Do it in a way so the clone origins appear first.  */
778 static void
779 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
780 bool include_body)
782 if (node->clone_of)
783 add_node_to (encoder, node->clone_of, include_body);
784 else if (include_body)
785 lto_set_symtab_encoder_encode_body (encoder, node);
786 lto_symtab_encoder_encode (encoder, node);
789 /* Add all references in NODE to encoders. */
791 static void
792 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
794 int i;
795 struct ipa_ref *ref = NULL;
796 for (i = 0; node->iterate_reference (i, ref); i++)
797 if (is_a <cgraph_node *> (ref->referred))
798 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
799 else
800 lto_symtab_encoder_encode (encoder, ref->referred);
/* Select what needs to be streamed out.  In regular LTO mode stream everything.
   In offload LTO mode stream only nodes marked as offloadable.  */
805 void
806 select_what_to_stream (void)
808 struct symtab_node *snode;
809 FOR_EACH_SYMBOL (snode)
810 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
/* Find all symbols we want to stream into a given partition and insert them
   into encoders.

   The function actually replaces IN_ENCODER by a new one.  The reason is that
   the streaming code needs a clone's origin to be streamed before the clone
   itself.  This means that we need to insert the nodes in a specific order.
   This order is ignored by the partitioning logic earlier.  */
821 lto_symtab_encoder_t
822 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
824 struct cgraph_edge *edge;
825 int i;
826 lto_symtab_encoder_t encoder;
827 lto_symtab_encoder_iterator lsei;
828 hash_set<void *> reachable_call_targets;
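  /* The boundary is built in several passes: copy the partition's functions and
     variables (masters before clones), pull in read-only initializers useful for
     folding, then add out-of-partition callees and possible devirtualization
     targets, and finally alias targets and thunk callees.  */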
830 encoder = lto_symtab_encoder_new (false);
832 /* Go over all entries in the IN_ENCODER and duplicate them to
833 ENCODER. At the same time insert masters of clones so
834 every master appears before clone. */
835 for (lsei = lsei_start_function_in_partition (in_encoder);
836 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
838 struct cgraph_node *node = lsei_cgraph_node (lsei);
839 if (!node->need_lto_streaming)
840 continue;
841 add_node_to (encoder, node, true);
842 lto_set_symtab_encoder_in_partition (encoder, node);
843 create_references (encoder, node);
845 for (lsei = lsei_start_variable_in_partition (in_encoder);
846 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
848 varpool_node *vnode = lsei_varpool_node (lsei);
850 if (!vnode->need_lto_streaming)
851 continue;
852 lto_set_symtab_encoder_in_partition (encoder, vnode);
853 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
854 create_references (encoder, vnode);
/* Also pickle in the initializers of all referenced read-only variables
   to help folding.  Constant pool variables are not shared, so we must
   pickle those too.  */
859 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
861 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
862 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
864 if (!lto_symtab_encoder_encode_initializer_p (encoder,
865 vnode)
866 && (((vnode->ctor_useable_for_folding_p ()
867 && (!DECL_VIRTUAL_P (vnode->decl)
868 || !flag_wpa
869 || flag_ltrans_devirtualize)))))
871 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
872 create_references (encoder, vnode);
877 /* Go over all the nodes again to include callees that are not in
878 SET. */
879 for (lsei = lsei_start_function_in_partition (encoder);
880 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
882 struct cgraph_node *node = lsei_cgraph_node (lsei);
883 for (edge = node->callees; edge; edge = edge->next_callee)
885 struct cgraph_node *callee = edge->callee;
886 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
888 /* We should have moved all the inlines. */
889 gcc_assert (!callee->global.inlined_to);
890 add_node_to (encoder, callee, false);
893 /* Add all possible targets for late devirtualization. */
894 if (flag_ltrans_devirtualize || !flag_wpa)
895 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
896 if (edge->indirect_info->polymorphic)
898 unsigned int i;
899 void *cache_token;
900 bool final;
901 vec <cgraph_node *>targets
902 = possible_polymorphic_call_targets
903 (edge, &final, &cache_token);
904 if (!reachable_call_targets.add (cache_token))
906 for (i = 0; i < targets.length (); i++)
908 struct cgraph_node *callee = targets[i];
/* Adding external declarations into the unit serves
   no purpose and just increases its boundary.  */
912 if (callee->definition
913 && !lto_symtab_encoder_in_partition_p
914 (encoder, callee))
916 gcc_assert (!callee->global.inlined_to);
917 add_node_to (encoder, callee, false);
/* Be sure to also insert alias targets and thunk callees.  These need
   to stay to aid local calling conventions.  */
925 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
927 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
928 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
930 if (node->alias && node->analyzed)
931 create_references (encoder, node);
932 if (cnode
933 && cnode->thunk.thunk_p && !cnode->global.inlined_to)
934 add_node_to (encoder, cnode->callees->callee, false);
935 while (node->transparent_alias && node->analyzed)
937 node = node->get_alias_target ();
938 if (is_a <cgraph_node *> (node))
939 add_node_to (encoder, dyn_cast <cgraph_node *> (node),
940 false);
941 else
942 lto_symtab_encoder_encode (encoder, node);
945 lto_symtab_encoder_delete (in_encoder);
946 return encoder;
/* Output the part of the symtab that belongs to the current partition.  */
951 void
952 output_symtab (void)
954 struct cgraph_node *node;
955 struct lto_simple_output_block *ob;
956 int i, n_nodes;
957 lto_symtab_encoder_t encoder;
959 if (flag_wpa)
960 output_cgraph_opt_summary ();
962 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
964 output_profile_summary (ob);
966 /* An encoder for cgraph nodes should have been created by
967 ipa_write_summaries_1. */
968 gcc_assert (ob->decl_state->symtab_node_encoder);
969 encoder = ob->decl_state->symtab_node_encoder;
/* Write out the nodes.  We must first output a node and then its clones,
   otherwise at the time of reading back the node there would be nothing to clone
   from.  */
974 n_nodes = lto_symtab_encoder_size (encoder);
975 for (i = 0; i < n_nodes; i++)
977 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
978 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
979 lto_output_node (ob, cnode, encoder);
980 else
981 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
984 /* Go over the nodes in SET again to write edges. */
985 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
987 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
988 if (node
989 && ((node->thunk.thunk_p && !node->global.inlined_to)
990 || lto_symtab_encoder_in_partition_p (encoder, node)))
992 output_outgoing_cgraph_edges (node->callees, ob, encoder);
993 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
997 streamer_write_uhwi_stream (ob->main_stream, 0);
999 lto_destroy_simple_output_block (ob);
/* Emit toplevel asms.
   When doing WPA we must output every asm just once.  Since we do not partition asm
   nodes at all, output them to the first output.  This is kind of a hack, but should
   work well.  */
1005 if (!asm_nodes_output)
1007 asm_nodes_output = true;
1008 lto_output_toplevel_asms ();
1011 output_refs (encoder);
1014 /* Return identifier encoded in IB as a plain string. */
1016 static tree
1017 read_identifier (struct lto_input_block *ib)
1019 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
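  /* Strings are stored NUL-terminated inside the section; the strnlen bound
     keeps the scan inside the block and a missing terminator is diagnosed as a
     section overrun below.  */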
1020 tree id;
1022 if (ib->data[ib->p + len])
1023 lto_section_overrun (ib);
1024 if (!len)
1026 ib->p++;
1027 return NULL;
1029 id = get_identifier (ib->data + ib->p);
1030 ib->p += len + 1;
1031 return id;
1034 /* Return string encoded in IB, NULL if string is empty. */
1036 static const char *
1037 read_string (struct lto_input_block *ib)
1039 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1040 const char *str;
1042 if (ib->data[ib->p + len])
1043 lto_section_overrun (ib);
1044 if (!len)
1046 ib->p++;
1047 return NULL;
1049 str = ib->data + ib->p;
1050 ib->p += len + 1;
1051 return str;
1054 /* Output function/variable tables that will allow libgomp to look up offload
1055 target code.
1056 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1057 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1058 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1060 void
1061 output_offload_tables (void)
1063 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
1064 return;
1066 struct lto_simple_output_block *ob
1067 = lto_create_simple_output_block (LTO_section_offload_table);
1069 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1071 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1072 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1073 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
1074 (*offload_funcs)[i]);
1077 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1079 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1080 LTO_symtab_last_tag, LTO_symtab_variable);
1081 lto_output_var_decl_index (ob->decl_state, ob->main_stream,
1082 (*offload_vars)[i]);
1085 streamer_write_uhwi_stream (ob->main_stream, 0);
1086 lto_destroy_simple_output_block (ob);
1088 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1089 streamed to one partition only. That's why we free offload_funcs and
1090 offload_vars after the first call of output_offload_tables. */
1091 if (flag_wpa)
1093 vec_free (offload_funcs);
1094 vec_free (offload_vars);
1098 /* Verify the partitioning of NODE. */
1100 static inline void
1101 verify_node_partition (symtab_node *node)
1103 if (flag_ltrans)
1104 return;
1106 #ifdef ACCEL_COMPILER
1107 if (node->in_other_partition)
1109 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1110 error_at (DECL_SOURCE_LOCATION (node->decl),
1111 "function %qs has been referenced in offloaded code but"
1112 " hasn%'t been marked to be included in the offloaded code",
1113 node->name ());
1114 else if (VAR_P (node->decl))
1115 error_at (DECL_SOURCE_LOCATION (node->decl),
1116 "variable %qs has been referenced in offloaded code but"
1117 " hasn%'t been marked to be included in the offloaded code",
1118 node->name ());
1119 else
1120 gcc_unreachable ();
1122 #else
1123 gcc_assert (!node->in_other_partition
1124 && !node->used_from_other_partition);
1125 #endif
/* Overwrite the information in NODE based on FILE_DATA, TAG and the flag
   bits in BP.  This is called either to initialize NODE or to replace the
   values in it, for instance because the first time we saw it, the function
   body was not available but now it is.  BP is a bitpack with all the
   bitflags for NODE read from the stream.  */
1135 static void
1136 input_overwrite_node (struct lto_file_decl_data *file_data,
1137 struct cgraph_node *node,
1138 enum LTO_symtab_tags tag,
1139 struct bitpack_d *bp)
1141 node->aux = (void *) tag;
1142 node->lto_file_data = file_data;
1144 node->local.local = bp_unpack_value (bp, 1);
1145 node->externally_visible = bp_unpack_value (bp, 1);
1146 node->no_reorder = bp_unpack_value (bp, 1);
1147 node->definition = bp_unpack_value (bp, 1);
1148 node->local.versionable = bp_unpack_value (bp, 1);
1149 node->local.can_change_signature = bp_unpack_value (bp, 1);
1150 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
1151 node->force_output = bp_unpack_value (bp, 1);
1152 node->forced_by_abi = bp_unpack_value (bp, 1);
1153 node->unique_name = bp_unpack_value (bp, 1);
1154 node->body_removed = bp_unpack_value (bp, 1);
1155 node->implicit_section = bp_unpack_value (bp, 1);
1156 node->address_taken = bp_unpack_value (bp, 1);
1157 node->used_from_other_partition = bp_unpack_value (bp, 1);
1158 node->lowered = bp_unpack_value (bp, 1);
1159 node->analyzed = tag == LTO_symtab_analyzed_node;
1160 node->in_other_partition = bp_unpack_value (bp, 1);
1161 if (node->in_other_partition
      /* Avoid updating the decl when we are seeing just an inline clone.
	 When inlining a function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into a different unit and
	 we might end up streaming an inline clone from another partition
	 to support the clone we are interested in.  */
1169 && (!node->clone_of
1170 || node->clone_of->decl != node->decl))
1172 DECL_EXTERNAL (node->decl) = 1;
1173 TREE_STATIC (node->decl) = 0;
1175 node->alias = bp_unpack_value (bp, 1);
1176 node->transparent_alias = bp_unpack_value (bp, 1);
1177 node->weakref = bp_unpack_value (bp, 1);
1178 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1179 node->only_called_at_startup = bp_unpack_value (bp, 1);
1180 node->only_called_at_exit = bp_unpack_value (bp, 1);
1181 node->tm_clone = bp_unpack_value (bp, 1);
1182 node->calls_comdat_local = bp_unpack_value (bp, 1);
1183 node->icf_merged = bp_unpack_value (bp, 1);
1184 node->nonfreeing_fn = bp_unpack_value (bp, 1);
1185 node->thunk.thunk_p = bp_unpack_value (bp, 1);
1186 node->parallelized_function = bp_unpack_value (bp, 1);
1187 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1188 LDPR_NUM_KNOWN);
1189 node->split_part = bp_unpack_value (bp, 1);
1190 verify_node_partition (node);
/* Return the string the alias is an alias of.  */
1195 static tree
1196 get_alias_symbol (tree decl)
1198 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1199 return get_identifier (TREE_STRING_POINTER
1200 (TREE_VALUE (TREE_VALUE (alias))));
/* Read a node from input block IB.  TAG is the node's tag just read.
   Return the node read or overwritten.  */
1206 static struct cgraph_node *
1207 input_node (struct lto_file_decl_data *file_data,
1208 struct lto_input_block *ib,
1209 enum LTO_symtab_tags tag,
1210 vec<symtab_node *> nodes)
1212 gcc::pass_manager *passes = g->get_passes ();
1213 tree fn_decl;
1214 struct cgraph_node *node;
1215 struct bitpack_d bp;
1216 unsigned decl_index;
1217 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1218 int clone_ref;
1219 int order;
1220 int i, count;
1221 tree group;
1222 const char *section;
1223 order = streamer_read_hwi (ib) + order_base;
1224 clone_ref = streamer_read_hwi (ib);
1226 decl_index = streamer_read_uhwi (ib);
1227 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
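  /* A valid clone_ref means the node was written as a clone of an earlier node
     in the stream; recreate it via create_clone so the clone tree is rebuilt in
     the shape it had on the writing side.  Otherwise a fresh node is created.  */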
1229 if (clone_ref != LCC_NOT_FOUND)
1231 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1232 profile_count::uninitialized (), false,
1233 vNULL, false, NULL, NULL);
1235 else
      /* Declarations of functions can already be merged with a declaration
	 from another input file.  We keep the cgraph unmerged until after streaming
	 of IPA passes is done.  Always forcibly create a fresh node.  */
1240 node = symtab->create_empty ();
1241 node->decl = fn_decl;
1242 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
1243 node->ifunc_resolver = 1;
1244 node->register_symbol ();
1247 node->order = order;
1248 if (order >= symtab->order)
1249 symtab->order = order + 1;
1251 node->count = profile_count::stream_in (ib);
1252 node->count_materialization_scale = streamer_read_hwi (ib);
1254 count = streamer_read_hwi (ib);
1255 node->ipa_transforms_to_apply = vNULL;
1256 for (i = 0; i < count; i++)
1258 opt_pass *pass;
1259 int pid = streamer_read_hwi (ib);
1261 gcc_assert (pid < passes->passes_by_id_size);
1262 pass = passes->passes_by_id[pid];
1263 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1266 if (tag == LTO_symtab_analyzed_node)
1267 ref = streamer_read_hwi (ib);
1269 group = read_identifier (ib);
1270 if (group)
1271 ref2 = streamer_read_hwi (ib);
1273 /* Make sure that we have not read this node before. Nodes that
1274 have already been read will have their tag stored in the 'aux'
1275 field. Since built-in functions can be referenced in multiple
1276 functions, they are expected to be read more than once. */
1277 if (node->aux && !fndecl_built_in_p (node->decl))
1278 internal_error ("bytecode stream: found multiple instances of cgraph "
1279 "node with uid %d", node->get_uid ());
1281 node->tp_first_run = streamer_read_uhwi (ib);
1283 bp = streamer_read_bitpack (ib);
1285 input_overwrite_node (file_data, node, tag, &bp);
1287 /* Store a reference for now, and fix up later to be a pointer. */
1288 node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
1290 if (group)
1292 node->set_comdat_group (group);
1293 /* Store a reference for now, and fix up later to be a pointer. */
1294 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1296 else
1297 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1298 section = read_string (ib);
1299 if (section)
1300 node->set_section_for_node (section);
1302 if (node->definition)
1304 int type = streamer_read_uhwi (ib);
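      /* TYPE mirrors the bitmask written by lto_output_node: bit 0 is always
	 set, bit 1 is this_adjusting, bit 2 is virtual_offset_p and bit 3 is
	 add_pointer_bounds_args.  */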
1305 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1306 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1307 HOST_WIDE_INT indirect_offset = streamer_read_uhwi (ib);
1309 node->thunk.fixed_offset = fixed_offset;
1310 node->thunk.virtual_value = virtual_value;
1311 node->thunk.indirect_offset = indirect_offset;
1312 node->thunk.this_adjusting = (type & 2);
1313 node->thunk.virtual_offset_p = (type & 4);
1314 node->thunk.add_pointer_bounds_args = (type & 8);
1316 if (node->alias && !node->analyzed && node->weakref)
1317 node->alias_target = get_alias_symbol (node->decl);
1318 node->profile_id = streamer_read_hwi (ib);
1319 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1320 node->set_init_priority (streamer_read_hwi (ib));
1321 if (DECL_STATIC_DESTRUCTOR (node->decl))
1322 node->set_fini_priority (streamer_read_hwi (ib));
1324 return node;
/* Read a varpool node from input block IB using the info in FILE_DATA.
   Return the node read or overwritten.  */
1330 static varpool_node *
1331 input_varpool_node (struct lto_file_decl_data *file_data,
1332 struct lto_input_block *ib)
1334 int decl_index;
1335 tree var_decl;
1336 varpool_node *node;
1337 struct bitpack_d bp;
1338 int ref = LCC_NOT_FOUND;
1339 int order;
1340 tree group;
1341 const char *section;
1343 order = streamer_read_hwi (ib) + order_base;
1344 decl_index = streamer_read_uhwi (ib);
1345 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
  /* Declarations can already be merged with a declaration
     from another input file.  We keep the symtab unmerged until after streaming
     of IPA passes is done.  Always forcibly create a fresh node.  */
1350 node = varpool_node::create_empty ();
1351 node->decl = var_decl;
1352 node->register_symbol ();
1354 node->order = order;
1355 if (order >= symtab->order)
1356 symtab->order = order + 1;
1357 node->lto_file_data = file_data;
1359 bp = streamer_read_bitpack (ib);
1360 node->externally_visible = bp_unpack_value (&bp, 1);
1361 node->no_reorder = bp_unpack_value (&bp, 1);
1362 node->force_output = bp_unpack_value (&bp, 1);
1363 node->forced_by_abi = bp_unpack_value (&bp, 1);
1364 node->unique_name = bp_unpack_value (&bp, 1);
1365 node->body_removed = bp_unpack_value (&bp, 1);
1366 node->implicit_section = bp_unpack_value (&bp, 1);
1367 node->writeonly = bp_unpack_value (&bp, 1);
1368 node->definition = bp_unpack_value (&bp, 1);
1369 node->alias = bp_unpack_value (&bp, 1);
1370 node->transparent_alias = bp_unpack_value (&bp, 1);
1371 node->weakref = bp_unpack_value (&bp, 1);
1372 node->analyzed = bp_unpack_value (&bp, 1);
1373 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1374 node->in_other_partition = bp_unpack_value (&bp, 1);
1375 if (node->in_other_partition)
1377 DECL_EXTERNAL (node->decl) = 1;
1378 TREE_STATIC (node->decl) = 0;
1380 if (node->alias && !node->analyzed && node->weakref)
1381 node->alias_target = get_alias_symbol (node->decl);
1382 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1383 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1384 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1385 node->need_bounds_init = bp_unpack_value (&bp, 1);
1386 group = read_identifier (ib);
1387 if (group)
1389 node->set_comdat_group (group);
1390 ref = streamer_read_hwi (ib);
1391 /* Store a reference for now, and fix up later to be a pointer. */
1392 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1394 else
1395 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1396 section = read_string (ib);
1397 if (section)
1398 node->set_section_for_node (section);
1399 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1400 LDPR_NUM_KNOWN);
1401 verify_node_partition (node);
1402 return node;
/* Read a reference from input block IB and attach it to REFERRING_NODE.
   NODES is the vector of previously read nodes used to resolve the
   referred symbol.  */
1408 static void
1409 input_ref (struct lto_input_block *ib,
1410 symtab_node *referring_node,
1411 vec<symtab_node *> nodes)
1413 symtab_node *node = NULL;
1414 struct bitpack_d bp;
1415 enum ipa_ref_use use;
1416 bool speculative;
1417 struct ipa_ref *ref;
1419 bp = streamer_read_bitpack (ib);
1420 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1421 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1422 node = nodes[streamer_read_hwi (ib)];
1423 ref = referring_node->create_reference (node, use);
1424 ref->speculative = speculative;
1425 if (is_a <cgraph_node *> (referring_node))
1426 ref->lto_stmt_uid = streamer_read_hwi (ib);
1429 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1430 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1431 edge being read is indirect (in the sense that it has
1432 indirect_unknown_callee set). */
1434 static void
1435 input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
1436 bool indirect)
1438 struct cgraph_node *caller, *callee;
1439 struct cgraph_edge *edge;
1440 unsigned int stmt_id;
1441 profile_count count;
1442 cgraph_inline_failed_t inline_failed;
1443 struct bitpack_d bp;
1444 int ecf_flags = 0;
1446 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1447 if (caller == NULL || caller->decl == NULL_TREE)
1448 internal_error ("bytecode stream: no caller found while reading edge");
1450 if (!indirect)
1452 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1453 if (callee == NULL || callee->decl == NULL_TREE)
1454 internal_error ("bytecode stream: no callee found while reading edge");
1456 else
1457 callee = NULL;
1459 count = profile_count::stream_in (ib);
1461 bp = streamer_read_bitpack (ib);
1462 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1463 stmt_id = bp_unpack_var_len_unsigned (&bp);
1465 if (indirect)
1466 edge = caller->create_indirect_edge (NULL, 0, count);
1467 else
1468 edge = caller->create_edge (callee, NULL, count);
1470 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1471 edge->speculative = bp_unpack_value (&bp, 1);
1472 edge->lto_stmt_uid = stmt_id;
1473 edge->inline_failed = inline_failed;
1474 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1475 edge->can_throw_external = bp_unpack_value (&bp, 1);
1476 edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
1477 if (indirect)
1479 if (bp_unpack_value (&bp, 1))
1480 ecf_flags |= ECF_CONST;
1481 if (bp_unpack_value (&bp, 1))
1482 ecf_flags |= ECF_PURE;
1483 if (bp_unpack_value (&bp, 1))
1484 ecf_flags |= ECF_NORETURN;
1485 if (bp_unpack_value (&bp, 1))
1486 ecf_flags |= ECF_MALLOC;
1487 if (bp_unpack_value (&bp, 1))
1488 ecf_flags |= ECF_NOTHROW;
1489 if (bp_unpack_value (&bp, 1))
1490 ecf_flags |= ECF_RETURNS_TWICE;
1491 edge->indirect_info->ecf_flags = ecf_flags;
1492 edge->indirect_info->common_target_id = streamer_read_hwi (ib);
1493 if (edge->indirect_info->common_target_id)
1494 edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
1499 /* Read a cgraph from IB using the info in FILE_DATA. */
1501 static vec<symtab_node *>
1502 input_cgraph_1 (struct lto_file_decl_data *file_data,
1503 struct lto_input_block *ib)
1505 enum LTO_symtab_tags tag;
1506 vec<symtab_node *> nodes = vNULL;
1507 symtab_node *node;
1508 unsigned i;
1510 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1511 order_base = symtab->order;
1512 while (tag)
1514 if (tag == LTO_symtab_edge)
1515 input_edge (ib, nodes, false);
1516 else if (tag == LTO_symtab_indirect_edge)
1517 input_edge (ib, nodes, true);
1518 else if (tag == LTO_symtab_variable)
1520 node = input_varpool_node (file_data, ib);
1521 nodes.safe_push (node);
1522 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1524 else
1526 node = input_node (file_data, ib, tag, nodes);
1527 if (node == NULL || node->decl == NULL_TREE)
1528 internal_error ("bytecode stream: found empty cgraph node");
1529 nodes.safe_push (node);
1530 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1533 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1536 lto_input_toplevel_asms (file_data, order_base);
/* AUX pointers should all be non-zero for function nodes read from the stream.  */
1539 if (flag_checking)
1541 FOR_EACH_VEC_ELT (nodes, i, node)
1542 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
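  /* Second pass over the nodes: turn the integer references stored in
     inlined_to and same_comdat_group back into pointers now that every
     node of the unit has been read.  */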
1544 FOR_EACH_VEC_ELT (nodes, i, node)
1546 int ref;
1547 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1549 ref = (int) (intptr_t) cnode->global.inlined_to;
      /* We share declarations of builtins, so we may read the same node twice.  */
1552 if (!node->aux)
1553 continue;
1554 node->aux = NULL;
1556 /* Fixup inlined_to from reference to pointer. */
1557 if (ref != LCC_NOT_FOUND)
1558 dyn_cast<cgraph_node *> (node)->global.inlined_to
1559 = dyn_cast<cgraph_node *> (nodes[ref]);
1560 else
1561 cnode->global.inlined_to = NULL;
1564 ref = (int) (intptr_t) node->same_comdat_group;
1566 /* Fixup same_comdat_group from reference to pointer. */
1567 if (ref != LCC_NOT_FOUND)
1568 node->same_comdat_group = nodes[ref];
1569 else
1570 node->same_comdat_group = NULL;
1572 FOR_EACH_VEC_ELT (nodes, i, node)
1573 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1574 return nodes;
1577 /* Input ipa_refs. */
1579 static void
1580 input_refs (struct lto_input_block *ib,
1581 vec<symtab_node *> nodes)
1583 int count;
1584 int idx;
1585 while (true)
1587 symtab_node *node;
1588 count = streamer_read_uhwi (ib);
1589 if (!count)
1590 break;
1591 idx = streamer_read_uhwi (ib);
1592 node = nodes[idx];
1593 while (count)
1595 input_ref (ib, node, nodes);
1596 count--;
1601 /* Input profile_info from IB. */
1602 static void
1603 input_profile_summary (struct lto_input_block *ib,
1604 struct lto_file_decl_data *file_data)
1606 unsigned int runs = streamer_read_uhwi (ib);
1607 if (runs)
1609 file_data->profile_info.runs = runs;
1611 /* IPA-profile computes hot bb threshold based on cumulated
1612 whole program profile. We need to stream it down to ltrans. */
1613 if (flag_ltrans)
1614 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1619 /* Rescale profile summaries to the same number of runs in the whole unit. */
1621 static void
1622 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1624 struct lto_file_decl_data *file_data;
1625 unsigned int j;
1626 gcov_unsigned_t max_runs = 0;
1627 struct cgraph_node *node;
1628 struct cgraph_edge *edge;
  /* Find the unit with the maximal number of runs.  If we ever get serious about
     roundoff errors, we might also consider computing the least common
     multiple.  */
1633 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1634 if (max_runs < file_data->profile_info.runs)
1635 max_runs = file_data->profile_info.runs;
1637 if (!max_runs)
1638 return;
  /* Simple overflow check.  We probably don't need to support that many train
     runs.  Such a large value probably implies data corruption anyway.  */
1642 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1644 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1645 INT_MAX / REG_BR_PROB_BASE);
1646 return;
1649 profile_info = XCNEW (gcov_summary);
1650 profile_info->runs = max_runs;
  /* If merging already happened at WPA time, we are done.  */
1653 if (flag_ltrans)
1654 return;
1656 /* Now compute count_materialization_scale of each node.
1657 During LTRANS we already have values of count_materialization_scale
1658 computed, so just update them. */
1659 FOR_EACH_FUNCTION (node)
1660 if (node->lto_file_data
1661 && node->lto_file_data->profile_info.runs)
1663 int scale;
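	/* The scale is expressed relative to REG_BR_PROB_BASE; a result of
	   REG_BR_PROB_BASE means the counts already correspond to max_runs
	   and need no adjustment (see the continue below).  */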
1665 scale = RDIV (node->count_materialization_scale * max_runs,
1666 node->lto_file_data->profile_info.runs);
1667 node->count_materialization_scale = scale;
1668 if (scale < 0)
1669 fatal_error (input_location, "Profile information in %s corrupted",
1670 file_data->file_name);
1672 if (scale == REG_BR_PROB_BASE)
1673 continue;
1674 for (edge = node->callees; edge; edge = edge->next_callee)
1675 if (edge->count.ipa ().nonzero_p ())
1676 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1677 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1678 if (edge->count.ipa ().nonzero_p ())
1679 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1680 if (node->count.ipa ().nonzero_p ())
1681 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1685 /* Input and merge the symtab from each of the .o files passed to
1686 lto1. */
1688 void
1689 input_symtab (void)
1691 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1692 struct lto_file_decl_data *file_data;
1693 unsigned int j = 0;
1694 struct cgraph_node *node;
1696 while ((file_data = file_data_vec[j++]))
1698 const char *data;
1699 size_t len;
1700 struct lto_input_block *ib;
1701 vec<symtab_node *> nodes;
1703 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1704 &data, &len);
1705 if (!ib)
1706 fatal_error (input_location,
1707 "cannot find LTO cgraph in %s", file_data->file_name);
1708 input_profile_summary (ib, file_data);
1709 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1710 nodes = input_cgraph_1 (file_data, ib);
1711 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1712 ib, data, len);
1714 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1715 &data, &len);
1716 if (!ib)
1717 fatal_error (input_location, "cannot find LTO section refs in %s",
1718 file_data->file_name);
1719 input_refs (ib, nodes);
1720 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1721 ib, data, len);
1722 if (flag_ltrans)
1723 input_cgraph_opt_summary (nodes);
1724 nodes.release ();
1727 merge_profile_summaries (file_data_vec);
1729 /* Clear out the aux field that was used to store enough state to
1730 tell which nodes should be overwritten. */
1731 FOR_EACH_FUNCTION (node)
1733 /* Some nodes may have been created by cgraph_node. This
1734 happens when the callgraph contains nested functions. If the
1735 node for the parent function was never emitted to the gimple
1736 file, cgraph_node will create a node for it when setting the
1737 context of the nested function. */
1738 if (node->lto_file_data)
1739 node->aux = NULL;
1743 /* Input function/variable tables that will allow libgomp to look up offload
1744 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1746 void
1747 input_offload_tables (bool do_force_output)
1749 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1750 struct lto_file_decl_data *file_data;
1751 unsigned int j = 0;
1753 while ((file_data = file_data_vec[j++]))
1755 const char *data;
1756 size_t len;
1757 struct lto_input_block *ib
1758 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1759 &data, &len);
1760 if (!ib)
1761 continue;
1763 enum LTO_symtab_tags tag
1764 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1765 while (tag)
1767 if (tag == LTO_symtab_unavail_node)
1769 int decl_index = streamer_read_uhwi (ib);
1770 tree fn_decl
1771 = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1772 vec_safe_push (offload_funcs, fn_decl);
1774 /* Prevent IPA from removing fn_decl as unreachable, since there
1775 may be no refs from the parent function to child_fn in offload
1776 LTO mode. */
1777 if (do_force_output)
1778 cgraph_node::get (fn_decl)->mark_force_output ();
1780 else if (tag == LTO_symtab_variable)
1782 int decl_index = streamer_read_uhwi (ib);
1783 tree var_decl
1784 = lto_file_decl_data_get_var_decl (file_data, decl_index);
1785 vec_safe_push (offload_vars, var_decl);
1787 /* Prevent IPA from removing var_decl as unused, since there
1788 may be no refs to var_decl in offload LTO mode. */
1789 if (do_force_output)
1790 varpool_node::get (var_decl)->force_output = 1;
1792 else
1793 fatal_error (input_location,
1794 "invalid offload table in %s", file_data->file_name);
1796 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1799 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1800 ib, data, len);
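
The offload table parsed above is a flat stream of (tag, decl_index) pairs terminated by a zero tag, with LTO_symtab_unavail_node marking an offload function and LTO_symtab_variable an offload variable. Below is an editorial sketch of that framing that replaces the streamer with a plain vector of integers; the tag values mirror enum LTO_symtab_tags, everything else is made up for illustration.

#include <cstdio>
#include <vector>

/* Tag values mirroring enum LTO_symtab_tags; 0 is the terminator.  */
enum { TAG_STOP = 0,
       TAG_FUNC = 1,   /* LTO_symtab_unavail_node */
       TAG_VAR  = 5 }; /* LTO_symtab_variable */

int main ()
{
  /* An encoded table: function decl 3, variable decl 7, stop.  */
  std::vector<unsigned> table = { TAG_FUNC, 3, TAG_VAR, 7, TAG_STOP };

  size_t pos = 0;
  unsigned tag = table[pos++];
  while (tag != TAG_STOP)
    {
      unsigned decl_index = table[pos++];
      if (tag == TAG_FUNC)
        printf ("offload function, decl index %u\n", decl_index);
      else if (tag == TAG_VAR)
        printf ("offload variable, decl index %u\n", decl_index);
      else
        {
          fprintf (stderr, "invalid offload table\n");
          return 1;
        }
      tag = table[pos++];
    }
  return 0;
}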
1804 /* True when we need optimization summary for NODE. */
1806 static int
1807 output_cgraph_opt_summary_p (struct cgraph_node *node)
1809 return ((node->clone_of || node->former_clone_of)
1810 && (node->clone.tree_map
1811 || node->clone.args_to_skip
1812 || node->clone.combined_args_to_skip));
1815 /* Output optimization summary for EDGE to OB. */
1816 static void
1817 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1818 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1822 /* Output optimization summary for NODE to OB. */
1824 static void
1825 output_node_opt_summary (struct output_block *ob,
1826 struct cgraph_node *node,
1827 lto_symtab_encoder_t encoder)
1829 unsigned int index;
1830 bitmap_iterator bi;
1831 struct ipa_replace_map *map;
1832 struct bitpack_d bp;
1833 int i;
1834 struct cgraph_edge *e;
1836 if (node->clone.args_to_skip)
1838 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1839 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1840 streamer_write_uhwi (ob, index);
1842 else
1843 streamer_write_uhwi (ob, 0);
1844 if (node->clone.combined_args_to_skip)
1846 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1847 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1848 streamer_write_uhwi (ob, index);
1850 else
1851 streamer_write_uhwi (ob, 0);
1852 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1853 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1855 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1856 mechanism to store function local declarations into summaries. */
1857 gcc_assert (!map->old_tree);
1858 streamer_write_uhwi (ob, map->parm_num);
1859 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1860 stream_write_tree (ob, map->new_tree, true);
1861 bp = bitpack_create (ob->main_stream);
1862 bp_pack_value (&bp, map->replace_p, 1);
1863 bp_pack_value (&bp, map->ref_p, 1);
1864 streamer_write_bitpack (&bp);
1867 if (lto_symtab_encoder_in_partition_p (encoder, node))
1869 for (e = node->callees; e; e = e->next_callee)
1870 output_edge_opt_summary (ob, e);
1871 for (e = node->indirect_calls; e; e = e->next_callee)
1872 output_edge_opt_summary (ob, e);
1876 /* Output optimization summaries stored in callgraph.
1877 At the moment it is the clone info structure. */
1879 static void
1880 output_cgraph_opt_summary (void)
1882 int i, n_nodes;
1883 lto_symtab_encoder_t encoder;
1884 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1885 unsigned count = 0;
1887 ob->symbol = NULL;
1888 encoder = ob->decl_state->symtab_node_encoder;
1889 n_nodes = lto_symtab_encoder_size (encoder);
1890 for (i = 0; i < n_nodes; i++)
1892 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1893 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1894 if (cnode && output_cgraph_opt_summary_p (cnode))
1895 count++;
1897 streamer_write_uhwi (ob, count);
1898 for (i = 0; i < n_nodes; i++)
1900 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1901 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1902 if (cnode && output_cgraph_opt_summary_p (cnode))
1904 streamer_write_uhwi (ob, i);
1905 output_node_opt_summary (ob, cnode, encoder);
1908 produce_asm (ob, NULL);
1909 destroy_output_block (ob);
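
output_cgraph_opt_summary above makes two passes over the encoder: the first pass counts the nodes for which output_cgraph_opt_summary_p holds and writes that count up front, the second writes each qualifying node's encoder index followed by its summary. A small editorial model of this count-then-emit pattern, with an arbitrary stand-in predicate:

#include <cstdio>
#include <vector>

/* Arbitrary stand-in for output_cgraph_opt_summary_p.  */
static bool want_summary (int node) { return node % 2 == 0; }

int main ()
{
  std::vector<int> nodes = { 10, 11, 12, 13, 14 };  /* encoder contents */
  std::vector<unsigned> stream;                     /* the section body */

  /* Pass 1: count qualifying nodes and emit the count first.  */
  unsigned count = 0;
  for (int n : nodes)
    if (want_summary (n))
      count++;
  stream.push_back (count);

  /* Pass 2: emit each qualifying node's encoder index, then its summary.  */
  for (unsigned i = 0; i < nodes.size (); i++)
    if (want_summary (nodes[i]))
      {
        stream.push_back (i);          /* streamer_write_uhwi (ob, i)   */
        stream.push_back (nodes[i]);   /* output_node_opt_summary (...) */
      }

  for (unsigned v : stream)
    printf ("%u ", v);
  printf ("\n");
  return 0;
}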
1912 /* Input optimization summary of EDGE. */
1914 static void
1915 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1916 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1920 /* Input optimization summary of NODE. */
1922 static void
1923 input_node_opt_summary (struct cgraph_node *node,
1924 struct lto_input_block *ib_main,
1925 struct data_in *data_in)
1927 int i;
1928 int count;
1929 int bit;
1930 struct bitpack_d bp;
1931 struct cgraph_edge *e;
1933 count = streamer_read_uhwi (ib_main);
1934 if (count)
1935 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1936 for (i = 0; i < count; i++)
1938 bit = streamer_read_uhwi (ib_main);
1939 bitmap_set_bit (node->clone.args_to_skip, bit);
1941 count = streamer_read_uhwi (ib_main);
1942 if (count)
1943 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1944 for (i = 0; i < count; i++)
1946 bit = streamer_read_uhwi (ib_main);
1947 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1949 count = streamer_read_uhwi (ib_main);
1950 for (i = 0; i < count; i++)
1952 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
1954 vec_safe_push (node->clone.tree_map, map);
1955 map->parm_num = streamer_read_uhwi (ib_main);
1956 map->old_tree = NULL;
1957 map->new_tree = stream_read_tree (ib_main, data_in);
1958 bp = streamer_read_bitpack (ib_main);
1959 map->replace_p = bp_unpack_value (&bp, 1);
1960 map->ref_p = bp_unpack_value (&bp, 1);
1962 for (e = node->callees; e; e = e->next_callee)
1963 input_edge_opt_summary (e, ib_main);
1964 for (e = node->indirect_calls; e; e = e->next_callee)
1965 input_edge_opt_summary (e, ib_main);
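
input_node_opt_summary reads back exactly what output_node_opt_summary emitted: the number of args_to_skip bits followed by each bit index, the same for combined_args_to_skip, then the tree_map entries, each with a parameter number, a tree and a two-bit bitpack (replace_p, ref_p). The editorial sketch below round-trips just the bitmap part of that layout, with standard containers standing in for GCC bitmaps and the streamer:

#include <cstdio>
#include <set>
#include <vector>

int main ()
{
  /* Writer side: emit the number of set bits, then each bit index,
     as output_node_opt_summary does for args_to_skip.  */
  std::set<unsigned> args_to_skip = { 1, 3 };
  std::vector<unsigned> stream;
  stream.push_back (args_to_skip.size ());
  for (unsigned bit : args_to_skip)
    stream.push_back (bit);

  /* Reader side: mirror of the loop in input_node_opt_summary.  */
  size_t pos = 0;
  unsigned count = stream[pos++];
  std::set<unsigned> decoded;
  for (unsigned i = 0; i < count; i++)
    decoded.insert (stream[pos++]);

  printf ("round-trip ok: %d\n", decoded == args_to_skip);
  return 0;
}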
1968 /* Read section in file FILE_DATA of length LEN with data DATA. */
1970 static void
1971 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1972 const char *data, size_t len,
1973 vec<symtab_node *> nodes)
1975 const struct lto_function_header *header =
1976 (const struct lto_function_header *) data;
1977 const int cfg_offset = sizeof (struct lto_function_header);
1978 const int main_offset = cfg_offset + header->cfg_size;
1979 const int string_offset = main_offset + header->main_size;
1980 struct data_in *data_in;
1981 unsigned int i;
1982 unsigned int count;
1984 lto_input_block ib_main ((const char *) data + main_offset,
1985 header->main_size, file_data->mode_table);
1987 data_in =
1988 lto_data_in_create (file_data, (const char *) data + string_offset,
1989 header->string_size, vNULL);
1990 count = streamer_read_uhwi (&ib_main);
1992 for (i = 0; i < count; i++)
1994 int ref = streamer_read_uhwi (&ib_main);
1995 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
1996 &ib_main, data_in);
1998 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1999 len);
2000 lto_data_in_delete (data_in);
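
The offsets computed at the top of input_cgraph_opt_section follow the generic layout of an LTO function section: the lto_function_header comes first, the CFG stream follows it, the main stream follows the CFG, and the string table follows the main stream. Below is a sketch of the same arithmetic with a hypothetical header struct that mirrors only the size fields used here:

#include <cstdio>

/* Hypothetical mirror of the size fields read from lto_function_header.  */
struct section_header
{
  int cfg_size;
  int main_size;
  int string_size;
};

int main ()
{
  section_header header = { 16, 120, 40 };

  /* Same arithmetic as input_cgraph_opt_section.  */
  int cfg_offset = sizeof (section_header);
  int main_offset = cfg_offset + header.cfg_size;
  int string_offset = main_offset + header.main_size;

  printf ("cfg at %d, main at %d, strings at %d, total %d bytes\n",
          cfg_offset, main_offset, string_offset,
          string_offset + header.string_size);
  return 0;
}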
2003 /* Input optimization summary of cgraph. */
2005 static void
2006 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2008 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2009 struct lto_file_decl_data *file_data;
2010 unsigned int j = 0;
2012 while ((file_data = file_data_vec[j++]))
2014 size_t len;
2015 const char *data =
2016 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2017 &len);
2019 if (data)
2020 input_cgraph_opt_section (file_data, data, len, nodes);