/* Write and read the cgraph to the memory mapped representation of a
   .o file.

   Copyright (C) 2009-2014 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stringpool.h"
29 #include "basic-block.h"
30 #include "tree-ssa-alias.h"
31 #include "internal-fn.h"
32 #include "gimple-expr.h"
33 #include "is-a.h"
34 #include "gimple.h"
35 #include "expr.h"
36 #include "flags.h"
37 #include "params.h"
38 #include "input.h"
39 #include "hashtab.h"
40 #include "hash-set.h"
41 #include "langhooks.h"
42 #include "bitmap.h"
43 #include "function.h"
44 #include "diagnostic-core.h"
45 #include "except.h"
46 #include "timevar.h"
47 #include "lto-streamer.h"
48 #include "data-streamer.h"
49 #include "tree-streamer.h"
50 #include "gcov-io.h"
51 #include "tree-pass.h"
52 #include "profile.h"
53 #include "context.h"
54 #include "pass_manager.h"
55 #include "ipa-utils.h"
56 #include "omp-low.h"
58 /* True when asm nodes has been output. */
59 bool asm_nodes_output = false;
61 static void output_cgraph_opt_summary (void);
62 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
64 /* Number of LDPR values known to GCC. */
65 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
67 /* All node orders are ofsetted by ORDER_BASE. */
68 static int order_base;
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  LTO_symtab_indirect_edge,
  LTO_symtab_variable,
  LTO_symtab_last_tag
};
87 /* Create a new symtab encoder.
88 if FOR_INPUT, the encoder allocate only datastructures needed
89 to read the symtab. */
91 lto_symtab_encoder_t
92 lto_symtab_encoder_new (bool for_input)
94 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
96 if (!for_input)
97 encoder->map = new hash_map<symtab_node *, size_t>;
98 encoder->nodes.create (0);
99 return encoder;
103 /* Delete ENCODER and its components. */
105 void
106 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
108 encoder->nodes.release ();
109 if (encoder->map)
110 delete encoder->map;
111 free (encoder);
115 /* Return the existing reference number of NODE in the symtab encoder in
116 output block OB. Assign a new reference if this is the first time
117 NODE is encoded. */
120 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
121 symtab_node *node)
123 int ref;
125 if (!encoder->map)
127 lto_encoder_entry entry = {node, false, false, false};
129 ref = encoder->nodes.length ();
130 encoder->nodes.safe_push (entry);
131 return ref;
134 size_t *slot = encoder->map->get (node);
135 if (!slot || !*slot)
137 lto_encoder_entry entry = {node, false, false, false};
138 ref = encoder->nodes.length ();
139 if (!slot)
140 encoder->map->put (node, ref + 1);
141 encoder->nodes.safe_push (entry);
143 else
144 ref = *slot - 1;
146 return ref;
149 /* Remove NODE from encoder. */
151 bool
152 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
153 symtab_node *node)
155 int index;
156 lto_encoder_entry last_node;
158 size_t *slot = encoder->map->get (node);
159 if (slot == NULL || !*slot)
160 return false;
162 index = *slot - 1;
163 gcc_checking_assert (encoder->nodes[index].node == node);
165 /* Remove from vector. We do this by swapping node with the last element
166 of the vector. */
167 last_node = encoder->nodes.pop ();
168 if (last_node.node != node)
170 gcc_assert (encoder->map->put (last_node.node, index + 1));
172 /* Move the last element to the original spot of NODE. */
173 encoder->nodes[index] = last_node;
176 /* Remove element from hash table. */
177 encoder->map->remove (node);
178 return true;
182 /* Return TRUE if we should encode initializer of NODE (if any). */
184 bool
185 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
186 struct cgraph_node *node)
188 int index = lto_symtab_encoder_lookup (encoder, node);
189 return encoder->nodes[index].body;
192 /* Return TRUE if we should encode body of NODE (if any). */
194 static void
195 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
196 struct cgraph_node *node)
198 int index = lto_symtab_encoder_encode (encoder, node);
199 gcc_checking_assert (encoder->nodes[index].node == node);
200 encoder->nodes[index].body = true;
203 /* Return TRUE if we should encode initializer of NODE (if any). */
205 bool
206 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
207 varpool_node *node)
209 int index = lto_symtab_encoder_lookup (encoder, node);
210 if (index == LCC_NOT_FOUND)
211 return false;
212 return encoder->nodes[index].initializer;
215 /* Return TRUE if we should encode initializer of NODE (if any). */
217 static void
218 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
219 varpool_node *node)
221 int index = lto_symtab_encoder_lookup (encoder, node);
222 encoder->nodes[index].initializer = true;
225 /* Return TRUE if we should encode initializer of NODE (if any). */
227 bool
228 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
229 symtab_node *node)
231 int index = lto_symtab_encoder_lookup (encoder, node);
232 if (index == LCC_NOT_FOUND)
233 return false;
234 return encoder->nodes[index].in_partition;
237 /* Return TRUE if we should encode body of NODE (if any). */
239 void
240 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
241 symtab_node *node)
243 /* Ignore not needed nodes. */
244 if (!node->need_dump)
245 return;
246 int index = lto_symtab_encoder_encode (encoder, node);
247 encoder->nodes[index].in_partition = true;
250 /* Output the cgraph EDGE to OB using ENCODER. */
252 static void
253 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
254 lto_symtab_encoder_t encoder)
256 unsigned int uid;
257 intptr_t ref;
258 struct bitpack_d bp;
260 if (edge->indirect_unknown_callee)
261 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
262 LTO_symtab_indirect_edge);
263 else
264 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
265 LTO_symtab_edge);
267 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
268 gcc_assert (ref != LCC_NOT_FOUND);
269 streamer_write_hwi_stream (ob->main_stream, ref);
271 if (!edge->indirect_unknown_callee)
273 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
274 gcc_assert (ref != LCC_NOT_FOUND);
275 streamer_write_hwi_stream (ob->main_stream, ref);
278 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
280 bp = bitpack_create (ob->main_stream);
281 uid = (!gimple_has_body_p (edge->caller->decl)
282 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
283 bp_pack_enum (&bp, cgraph_inline_failed_t,
284 CIF_N_REASONS, edge->inline_failed);
285 bp_pack_var_len_unsigned (&bp, uid);
286 bp_pack_var_len_unsigned (&bp, edge->frequency);
287 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
288 bp_pack_value (&bp, edge->speculative, 1);
289 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
290 bp_pack_value (&bp, edge->can_throw_external, 1);
291 if (edge->indirect_unknown_callee)
293 int flags = edge->indirect_info->ecf_flags;
294 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
295 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
296 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
297 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
298 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
299 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
300 /* Flags that should not appear on indirect calls. */
301 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
302 | ECF_MAY_BE_ALLOCA
303 | ECF_SIBCALL
304 | ECF_LEAF
305 | ECF_NOVOPS)));
307 streamer_write_bitpack (&bp);
308 if (edge->indirect_unknown_callee)
310 streamer_write_hwi_stream (ob->main_stream,
311 edge->indirect_info->common_target_id);
312 if (edge->indirect_info->common_target_id)
313 streamer_write_hwi_stream
314 (ob->main_stream, edge->indirect_info->common_target_probability);
318 /* Return if NODE contain references from other partitions. */
320 bool
321 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
323 int i;
324 struct ipa_ref *ref = NULL;
326 for (i = 0; node->iterate_referring (i, ref); i++)
328 if (ref->referring->in_other_partition
329 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
330 return true;
332 return false;
335 /* Return true when node is reachable from other partition. */
337 bool
338 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
340 struct cgraph_edge *e;
341 if (!node->definition)
342 return false;
343 if (node->global.inlined_to)
344 return false;
345 for (e = node->callers; e; e = e->next_caller)
346 if (e->caller->in_other_partition
347 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
348 return true;
349 return false;
352 /* Return if NODE contain references from other partitions. */
354 bool
355 referenced_from_this_partition_p (symtab_node *node,
356 lto_symtab_encoder_t encoder)
358 int i;
359 struct ipa_ref *ref = NULL;
361 for (i = 0; node->iterate_referring (i, ref); i++)
362 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
363 return true;
364 return false;
367 /* Return true when node is reachable from other partition. */
369 bool
370 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
372 struct cgraph_edge *e;
373 for (e = node->callers; e; e = e->next_caller)
374 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
375 return true;
376 return false;
379 /* Output the cgraph NODE to OB. ENCODER is used to find the
380 reference number of NODE->inlined_to. SET is the set of nodes we
381 are writing to the current file. If NODE is not in SET, then NODE
382 is a boundary of a cgraph_node_set and we pretend NODE just has a
383 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
384 that have had their callgraph node written so far. This is used to
385 determine if NODE is a clone of a previously written node. */
387 static void
388 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
389 lto_symtab_encoder_t encoder)
391 unsigned int tag;
392 struct bitpack_d bp;
393 bool boundary_p;
394 intptr_t ref;
395 bool in_other_partition = false;
396 struct cgraph_node *clone_of, *ultimate_clone_of;
397 ipa_opt_pass_d *pass;
398 int i;
399 bool alias_p;
400 const char *comdat;
401 const char *section;
402 tree group;
404 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
406 if (node->analyzed && !boundary_p)
407 tag = LTO_symtab_analyzed_node;
408 else
409 tag = LTO_symtab_unavail_node;
411 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
412 tag);
413 streamer_write_hwi_stream (ob->main_stream, node->order);
415 /* In WPA mode, we only output part of the call-graph. Also, we
416 fake cgraph node attributes. There are two cases that we care.
418 Boundary nodes: There are nodes that are not part of SET but are
419 called from within SET. We artificially make them look like
420 externally visible nodes with no function body.
422 Cherry-picked nodes: These are nodes we pulled from other
423 translation units into SET during IPA-inlining. We make them as
424 local static nodes to prevent clashes with other local statics. */
425 if (boundary_p && node->analyzed
426 && node->get_partitioning_class () == SYMBOL_PARTITION)
428 /* Inline clones can not be part of boundary.
429 gcc_assert (!node->global.inlined_to);
431 FIXME: At the moment they can be, when partition contains an inline
432 clone that is clone of inline clone from outside partition. We can
433 reshape the clone tree and make other tree to be the root, but it
434 needs a bit extra work and will be promplty done by cgraph_remove_node
435 after reading back. */
436 in_other_partition = 1;
439 clone_of = node->clone_of;
440 while (clone_of
441 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
442 if (clone_of->prev_sibling_clone)
443 clone_of = clone_of->prev_sibling_clone;
444 else
445 clone_of = clone_of->clone_of;
447 /* See if body of the master function is output. If not, we are seeing only
448 an declaration and we do not need to pass down clone tree. */
449 ultimate_clone_of = clone_of;
450 while (ultimate_clone_of && ultimate_clone_of->clone_of)
451 ultimate_clone_of = ultimate_clone_of->clone_of;
453 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
454 clone_of = NULL;
456 if (tag == LTO_symtab_analyzed_node)
457 gcc_assert (clone_of || !node->clone_of);
458 if (!clone_of)
459 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
460 else
461 streamer_write_hwi_stream (ob->main_stream, ref);
464 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
465 streamer_write_gcov_count_stream (ob->main_stream, node->count);
466 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
468 streamer_write_hwi_stream (ob->main_stream,
469 node->ipa_transforms_to_apply.length ());
470 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
471 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
473 if (tag == LTO_symtab_analyzed_node)
475 if (node->global.inlined_to)
477 ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
478 gcc_assert (ref != LCC_NOT_FOUND);
480 else
481 ref = LCC_NOT_FOUND;
483 streamer_write_hwi_stream (ob->main_stream, ref);
486 group = node->get_comdat_group ();
487 if (group)
488 comdat = IDENTIFIER_POINTER (group);
489 else
490 comdat = "";
491 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
493 if (group)
495 if (node->same_comdat_group && !boundary_p)
497 ref = lto_symtab_encoder_lookup (encoder,
498 node->same_comdat_group);
499 gcc_assert (ref != LCC_NOT_FOUND);
501 else
502 ref = LCC_NOT_FOUND;
503 streamer_write_hwi_stream (ob->main_stream, ref);
506 section = node->get_section ();
507 if (!section)
508 section = "";
510 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
512 bp = bitpack_create (ob->main_stream);
513 bp_pack_value (&bp, node->local.local, 1);
514 bp_pack_value (&bp, node->externally_visible, 1);
515 bp_pack_value (&bp, node->definition, 1);
516 bp_pack_value (&bp, node->local.versionable, 1);
517 bp_pack_value (&bp, node->local.can_change_signature, 1);
518 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
519 bp_pack_value (&bp, node->force_output, 1);
520 bp_pack_value (&bp, node->forced_by_abi, 1);
521 bp_pack_value (&bp, node->unique_name, 1);
522 bp_pack_value (&bp, node->body_removed, 1);
523 bp_pack_value (&bp, node->implicit_section, 1);
524 bp_pack_value (&bp, node->address_taken, 1);
525 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
526 && node->get_partitioning_class () == SYMBOL_PARTITION
527 && (reachable_from_other_partition_p (node, encoder)
528 || referenced_from_other_partition_p (node, encoder)), 1);
529 bp_pack_value (&bp, node->lowered, 1);
530 bp_pack_value (&bp, in_other_partition, 1);
531 /* Real aliases in a boundary become non-aliases. However we still stream
532 alias info on weakrefs.
533 TODO: We lose a bit of information here - when we know that variable is
534 defined in other unit, we may use the info on aliases to resolve
535 symbol1 != symbol2 type tests that we can do only for locally defined objects
536 otherwise. */
537 alias_p = node->alias && (!boundary_p || node->weakref);
538 bp_pack_value (&bp, alias_p, 1);
539 bp_pack_value (&bp, node->weakref, 1);
540 bp_pack_value (&bp, node->frequency, 2);
541 bp_pack_value (&bp, node->only_called_at_startup, 1);
542 bp_pack_value (&bp, node->only_called_at_exit, 1);
543 bp_pack_value (&bp, node->tm_clone, 1);
544 bp_pack_value (&bp, node->calls_comdat_local, 1);
545 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
546 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
547 LDPR_NUM_KNOWN, node->resolution);
548 streamer_write_bitpack (&bp);
549 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
551 if (node->thunk.thunk_p && !boundary_p)
553 streamer_write_uhwi_stream
554 (ob->main_stream,
555 1 + (node->thunk.this_adjusting != 0) * 2
556 + (node->thunk.virtual_offset_p != 0) * 4);
557 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
558 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
560 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
561 if (DECL_STATIC_CONSTRUCTOR (node->decl))
562 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
563 if (DECL_STATIC_DESTRUCTOR (node->decl))
564 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
567 /* Output the varpool NODE to OB.
568 If NODE is not in SET, then NODE is a boundary. */
570 static void
571 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
572 lto_symtab_encoder_t encoder)
574 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
575 struct bitpack_d bp;
576 int ref;
577 bool alias_p;
578 const char *comdat;
579 const char *section;
580 tree group;
582 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
583 LTO_symtab_variable);
584 streamer_write_hwi_stream (ob->main_stream, node->order);
585 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
586 bp = bitpack_create (ob->main_stream);
587 bp_pack_value (&bp, node->externally_visible, 1);
588 bp_pack_value (&bp, node->force_output, 1);
589 bp_pack_value (&bp, node->forced_by_abi, 1);
590 bp_pack_value (&bp, node->unique_name, 1);
591 bp_pack_value (&bp, node->body_removed, 1);
592 bp_pack_value (&bp, node->implicit_section, 1);
593 bp_pack_value (&bp, node->writeonly, 1);
594 bp_pack_value (&bp, node->definition, 1);
595 alias_p = node->alias && (!boundary_p || node->weakref);
596 bp_pack_value (&bp, alias_p, 1);
597 bp_pack_value (&bp, node->weakref, 1);
598 bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
599 gcc_assert (node->definition || !node->analyzed);
600 /* Constant pool initializers can be de-unified into individual ltrans units.
601 FIXME: Alternatively at -Os we may want to avoid generating for them the local
602 labels and share them across LTRANS partitions. */
603 if (node->get_partitioning_class () != SYMBOL_PARTITION)
605 bp_pack_value (&bp, 0, 1); /* used_from_other_parition. */
606 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
608 else
610 bp_pack_value (&bp, node->definition
611 && referenced_from_other_partition_p (node, encoder), 1);
612 bp_pack_value (&bp, node->analyzed
613 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
614 /* in_other_partition. */
616 bp_pack_value (&bp, node->tls_model, 3);
617 bp_pack_value (&bp, node->used_by_single_function, 1);
618 streamer_write_bitpack (&bp);
620 group = node->get_comdat_group ();
621 if (group)
622 comdat = IDENTIFIER_POINTER (group);
623 else
624 comdat = "";
625 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
627 if (group)
629 if (node->same_comdat_group && !boundary_p)
631 ref = lto_symtab_encoder_lookup (encoder,
632 node->same_comdat_group);
633 gcc_assert (ref != LCC_NOT_FOUND);
635 else
636 ref = LCC_NOT_FOUND;
637 streamer_write_hwi_stream (ob->main_stream, ref);
640 section = node->get_section ();
641 if (!section)
642 section = "";
643 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
645 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
646 LDPR_NUM_KNOWN, node->resolution);
649 /* Output the varpool NODE to OB.
650 If NODE is not in SET, then NODE is a boundary. */
652 static void
653 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
654 lto_symtab_encoder_t encoder)
656 struct bitpack_d bp;
657 int nref;
658 int uid = ref->lto_stmt_uid;
659 struct cgraph_node *node;
661 bp = bitpack_create (ob->main_stream);
662 bp_pack_value (&bp, ref->use, 2);
663 bp_pack_value (&bp, ref->speculative, 1);
664 streamer_write_bitpack (&bp);
665 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
666 gcc_assert (nref != LCC_NOT_FOUND);
667 streamer_write_hwi_stream (ob->main_stream, nref);
669 node = dyn_cast <cgraph_node *> (ref->referring);
670 if (node)
672 if (ref->stmt)
673 uid = gimple_uid (ref->stmt) + 1;
674 streamer_write_hwi_stream (ob->main_stream, uid);
678 /* Stream out profile_summary to OB. */
680 static void
681 output_profile_summary (struct lto_simple_output_block *ob)
683 unsigned h_ix;
684 struct bitpack_d bp;
686 if (profile_info)
688 /* We do not output num and run_max, they are not used by
689 GCC profile feedback and they are difficult to merge from multiple
690 units. */
691 gcc_assert (profile_info->runs);
692 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
693 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
695 /* sum_all is needed for computing the working set with the
696 histogram. */
697 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
699 /* Create and output a bitpack of non-zero histogram entries indices. */
700 bp = bitpack_create (ob->main_stream);
701 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
702 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
703 streamer_write_bitpack (&bp);
704 /* Now stream out only those non-zero entries. */
705 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
707 if (!profile_info->histogram[h_ix].num_counters)
708 continue;
709 streamer_write_gcov_count_stream (ob->main_stream,
710 profile_info->histogram[h_ix].num_counters);
711 streamer_write_gcov_count_stream (ob->main_stream,
712 profile_info->histogram[h_ix].min_value);
713 streamer_write_gcov_count_stream (ob->main_stream,
714 profile_info->histogram[h_ix].cum_value);
716 /* IPA-profile computes hot bb threshold based on cumulated
717 whole program profile. We need to stream it down to ltrans. */
718 if (flag_wpa)
719 streamer_write_gcov_count_stream (ob->main_stream,
720 get_hot_bb_threshold ());
722 else
723 streamer_write_uhwi_stream (ob->main_stream, 0);
726 /* Output all callees or indirect outgoing edges. EDGE must be the first such
727 edge. */
729 static void
730 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
731 struct lto_simple_output_block *ob,
732 lto_symtab_encoder_t encoder)
734 if (!edge)
735 return;
737 /* Output edges in backward direction, so the reconstructed callgraph match
738 and it is easy to associate call sites in the IPA pass summaries. */
739 while (edge->next_callee)
740 edge = edge->next_callee;
741 for (; edge; edge = edge->prev_callee)
742 lto_output_edge (ob, edge, encoder);
745 /* Output the part of the cgraph in SET. */
747 static void
748 output_refs (lto_symtab_encoder_t encoder)
750 lto_symtab_encoder_iterator lsei;
751 struct lto_simple_output_block *ob;
752 int count;
753 struct ipa_ref *ref;
754 int i;
756 ob = lto_create_simple_output_block (LTO_section_refs);
758 for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
759 lsei_next_in_partition (&lsei))
761 symtab_node *node = lsei_node (lsei);
763 count = node->ref_list.nreferences ();
764 if (count)
766 streamer_write_gcov_count_stream (ob->main_stream, count);
767 streamer_write_uhwi_stream (ob->main_stream,
768 lto_symtab_encoder_lookup (encoder, node));
769 for (i = 0; node->iterate_reference (i, ref); i++)
770 lto_output_ref (ob, ref, encoder);
774 streamer_write_uhwi_stream (ob->main_stream, 0);
776 lto_destroy_simple_output_block (ob);
779 /* Add NODE into encoder as well as nodes it is cloned from.
780 Do it in a way so clones appear first. */
782 static void
783 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
784 bool include_body)
786 if (node->clone_of)
787 add_node_to (encoder, node->clone_of, include_body);
788 else if (include_body)
789 lto_set_symtab_encoder_encode_body (encoder, node);
790 lto_symtab_encoder_encode (encoder, node);
793 /* Add all references in NODE to encoders. */
795 static void
796 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
798 int i;
799 struct ipa_ref *ref = NULL;
800 for (i = 0; node->iterate_reference (i, ref); i++)
801 if (is_a <cgraph_node *> (ref->referred))
802 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
803 else
804 lto_symtab_encoder_encode (encoder, ref->referred);
807 /* Select what needs to be dumped. In lto case dump everything.
808 In omp target case only dump stuff makrked with attribute. */
809 void
810 select_what_to_dump (bool is_omp)
812 struct symtab_node *snode;
813 FOR_EACH_SYMBOL(snode)
814 snode->need_dump = !is_omp || lookup_attribute ("omp declare target",
815 DECL_ATTRIBUTES (snode->decl));
818 /* Find all symbols we want to stream into given partition and insert them
819 to encoders.
821 The function actually replaces IN_ENCODER by new one. The reason is that
822 streaming code needs clone's origin to be streamed before clone. This
823 means that we need to insert the nodes in specific order. This order is
824 ignored by the partitioning logic earlier. */
826 lto_symtab_encoder_t
827 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
829 struct cgraph_edge *edge;
830 int i;
831 lto_symtab_encoder_t encoder;
832 lto_symtab_encoder_iterator lsei;
833 hash_set<void *> reachable_call_targets;
835 encoder = lto_symtab_encoder_new (false);
837 /* Go over all entries in the IN_ENCODER and duplicate them to
838 ENCODER. At the same time insert masters of clones so
839 every master appears before clone. */
840 for (lsei = lsei_start_function_in_partition (in_encoder);
841 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
843 struct cgraph_node *node = lsei_cgraph_node (lsei);
844 add_node_to (encoder, node, true);
845 lto_set_symtab_encoder_in_partition (encoder, node);
846 create_references (encoder, node);
847 /* For proper debug info, we need to ship the origins, too. */
848 if (DECL_ABSTRACT_ORIGIN (node->decl))
850 struct cgraph_node *origin_node
851 = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
852 add_node_to (encoder, origin_node, true);
855 for (lsei = lsei_start_variable_in_partition (in_encoder);
856 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
858 varpool_node *vnode = lsei_varpool_node (lsei);
860 lto_set_symtab_encoder_in_partition (encoder, vnode);
861 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
862 create_references (encoder, vnode);
863 /* For proper debug info, we need to ship the origins, too. */
864 if (DECL_ABSTRACT_ORIGIN (vnode->decl))
866 varpool_node *origin_node
867 = varpool_node::get (DECL_ABSTRACT_ORIGIN (vnode->decl));
868 lto_set_symtab_encoder_in_partition (encoder, origin_node);
871 /* Pickle in also the initializer of all referenced readonly variables
872 to help folding. Constant pool variables are not shared, so we must
873 pickle those too. */
874 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
876 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
877 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
879 if (!lto_symtab_encoder_encode_initializer_p (encoder,
880 vnode)
881 && vnode->ctor_useable_for_folding_p ())
883 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
884 create_references (encoder, vnode);
889 /* Go over all the nodes again to include callees that are not in
890 SET. */
891 for (lsei = lsei_start_function_in_partition (encoder);
892 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
894 struct cgraph_node *node = lsei_cgraph_node (lsei);
895 for (edge = node->callees; edge; edge = edge->next_callee)
897 struct cgraph_node *callee = edge->callee;
898 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
900 /* We should have moved all the inlines. */
901 gcc_assert (!callee->global.inlined_to);
902 add_node_to (encoder, callee, false);
905 /* Add all possible targets for late devirtualization. */
906 if (flag_devirtualize)
907 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
908 if (edge->indirect_info->polymorphic)
910 unsigned int i;
911 void *cache_token;
912 bool final;
913 vec <cgraph_node *>targets
914 = possible_polymorphic_call_targets
915 (edge, &final, &cache_token);
916 if (!reachable_call_targets.add (cache_token))
918 for (i = 0; i < targets.length (); i++)
920 struct cgraph_node *callee = targets[i];
922 /* Adding an external declarations into the unit serves
923 no purpose and just increases its boundary. */
924 if (callee->definition
925 && !lto_symtab_encoder_in_partition_p
926 (encoder, callee))
928 gcc_assert (!callee->global.inlined_to);
929 add_node_to (encoder, callee, false);
935 lto_symtab_encoder_delete (in_encoder);
936 return encoder;
939 /* Output the part of the symtab in SET and VSET. */
941 void
942 output_symtab (void)
944 struct cgraph_node *node;
945 struct lto_simple_output_block *ob;
946 lto_symtab_encoder_iterator lsei;
947 int i, n_nodes;
948 lto_symtab_encoder_t encoder;
950 if (flag_wpa)
951 output_cgraph_opt_summary ();
953 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
955 output_profile_summary (ob);
957 /* An encoder for cgraph nodes should have been created by
958 ipa_write_summaries_1. */
959 gcc_assert (ob->decl_state->symtab_node_encoder);
960 encoder = ob->decl_state->symtab_node_encoder;
962 /* Write out the nodes. We must first output a node and then its clones,
963 otherwise at a time reading back the node there would be nothing to clone
964 from. */
965 n_nodes = lto_symtab_encoder_size (encoder);
966 for (i = 0; i < n_nodes; i++)
968 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
969 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
970 lto_output_node (ob, cnode, encoder);
971 else
972 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
975 /* Go over the nodes in SET again to write edges. */
976 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
977 lsei_next_function_in_partition (&lsei))
979 node = lsei_cgraph_node (lsei);
980 output_outgoing_cgraph_edges (node->callees, ob, encoder);
981 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
984 streamer_write_uhwi_stream (ob->main_stream, 0);
986 lto_destroy_simple_output_block (ob);
988 /* Emit toplevel asms.
989 When doing WPA we must output every asm just once. Since we do not partition asm
990 nodes at all, output them to first output. This is kind of hack, but should work
991 well. */
992 if (!asm_nodes_output)
994 asm_nodes_output = true;
995 lto_output_toplevel_asms ();
998 output_refs (encoder);
1001 /* Return identifier encoded in IB as a plain string. */
1003 static tree
1004 read_identifier (struct lto_input_block *ib)
1006 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1007 tree id;
1009 if (ib->data[ib->p + len])
1010 lto_section_overrun (ib);
1011 if (!len)
1013 ib->p++;
1014 return NULL;
1016 id = get_identifier (ib->data + ib->p);
1017 ib->p += len + 1;
1018 return id;
1021 /* Return string encoded in IB, NULL if string is empty. */
1023 static const char *
1024 read_string (struct lto_input_block *ib)
1026 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1027 const char *str;
1029 if (ib->data[ib->p + len])
1030 lto_section_overrun (ib);
1031 if (!len)
1033 ib->p++;
1034 return NULL;
1036 str = ib->data + ib->p;
1037 ib->p += len + 1;
1038 return str;
1041 /* Output function/variable tables that will allow libgomp to look up offload
1042 target code. OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is
1043 filled in ipa_passes. In WHOPR (partitioned) mode during the WPA stage both
1044 OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1046 void
1047 output_offload_tables (void)
1049 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
1050 return;
1052 struct lto_simple_output_block *ob
1053 = lto_create_simple_output_block (LTO_section_offload_table);
1055 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1057 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1058 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1059 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
1060 (*offload_funcs)[i]);
1063 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1065 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1066 LTO_symtab_last_tag, LTO_symtab_variable);
1067 lto_output_var_decl_index (ob->decl_state, ob->main_stream,
1068 (*offload_vars)[i]);
1071 streamer_write_uhwi_stream (ob->main_stream, 0);
1072 lto_destroy_simple_output_block (ob);
1074 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1075 streamed to one partition only. That's why we free offload_funcs and
1076 offload_vars after the first call of output_offload_tables. */
1077 if (flag_wpa)
1079 vec_free (offload_funcs);
1080 vec_free (offload_vars);
1084 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
1085 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
1086 NODE or to replace the values in it, for instance because the first
1087 time we saw it, the function body was not available but now it
1088 is. BP is a bitpack with all the bitflags for NODE read from the
1089 stream. */
1091 static void
1092 input_overwrite_node (struct lto_file_decl_data *file_data,
1093 struct cgraph_node *node,
1094 enum LTO_symtab_tags tag,
1095 struct bitpack_d *bp)
1097 node->aux = (void *) tag;
1098 node->lto_file_data = file_data;
1100 node->local.local = bp_unpack_value (bp, 1);
1101 node->externally_visible = bp_unpack_value (bp, 1);
1102 node->definition = bp_unpack_value (bp, 1);
1103 node->local.versionable = bp_unpack_value (bp, 1);
1104 node->local.can_change_signature = bp_unpack_value (bp, 1);
1105 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
1106 node->force_output = bp_unpack_value (bp, 1);
1107 node->forced_by_abi = bp_unpack_value (bp, 1);
1108 node->unique_name = bp_unpack_value (bp, 1);
1109 node->body_removed = bp_unpack_value (bp, 1);
1110 node->implicit_section = bp_unpack_value (bp, 1);
1111 node->address_taken = bp_unpack_value (bp, 1);
1112 node->used_from_other_partition = bp_unpack_value (bp, 1);
1113 node->lowered = bp_unpack_value (bp, 1);
1114 node->analyzed = tag == LTO_symtab_analyzed_node;
1115 node->in_other_partition = bp_unpack_value (bp, 1);
1116 if (node->in_other_partition
1117 /* Avoid updating decl when we are seeing just inline clone.
1118 When inlining function that has functions already inlined into it,
1119 we produce clones of inline clones.
1121 WPA partitioning might put each clone into different unit and
1122 we might end up streaming inline clone from other partition
1123 to support clone we are interested in. */
1124 && (!node->clone_of
1125 || node->clone_of->decl != node->decl))
1127 DECL_EXTERNAL (node->decl) = 1;
1128 TREE_STATIC (node->decl) = 0;
1130 node->alias = bp_unpack_value (bp, 1);
1131 node->weakref = bp_unpack_value (bp, 1);
1132 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1133 node->only_called_at_startup = bp_unpack_value (bp, 1);
1134 node->only_called_at_exit = bp_unpack_value (bp, 1);
1135 node->tm_clone = bp_unpack_value (bp, 1);
1136 node->calls_comdat_local = bp_unpack_value (bp, 1);
1137 node->thunk.thunk_p = bp_unpack_value (bp, 1);
1138 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1139 LDPR_NUM_KNOWN);
1140 gcc_assert (flag_ltrans
1141 || (!node->in_other_partition
1142 && !node->used_from_other_partition));
1145 /* Return string alias is alias of. */
1147 static tree
1148 get_alias_symbol (tree decl)
1150 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1151 return get_identifier (TREE_STRING_POINTER
1152 (TREE_VALUE (TREE_VALUE (alias))));
1155 /* Read a node from input_block IB. TAG is the node's tag just read.
1156 Return the node read or overwriten. */
1158 static struct cgraph_node *
1159 input_node (struct lto_file_decl_data *file_data,
1160 struct lto_input_block *ib,
1161 enum LTO_symtab_tags tag,
1162 vec<symtab_node *> nodes)
1164 gcc::pass_manager *passes = g->get_passes ();
1165 tree fn_decl;
1166 struct cgraph_node *node;
1167 struct bitpack_d bp;
1168 unsigned decl_index;
1169 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1170 int clone_ref;
1171 int order;
1172 int i, count;
1173 tree group;
1174 const char *section;
1175 order = streamer_read_hwi (ib) + order_base;
1176 clone_ref = streamer_read_hwi (ib);
1178 decl_index = streamer_read_uhwi (ib);
1179 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1181 if (clone_ref != LCC_NOT_FOUND)
1183 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1184 0, CGRAPH_FREQ_BASE, false,
1185 vNULL, false, NULL, NULL);
1187 else
1189 /* Declaration of functions can be already merged with a declaration
1190 from other input file. We keep cgraph unmerged until after streaming
1191 of ipa passes is done. Alays forcingly create a fresh node. */
1192 node = symtab->create_empty ();
1193 node->decl = fn_decl;
1194 node->register_symbol ();
1197 node->order = order;
1198 if (order >= symtab->order)
1199 symtab->order = order + 1;
1201 node->count = streamer_read_gcov_count (ib);
1202 node->count_materialization_scale = streamer_read_hwi (ib);
1204 count = streamer_read_hwi (ib);
1205 node->ipa_transforms_to_apply = vNULL;
1206 for (i = 0; i < count; i++)
1208 opt_pass *pass;
1209 int pid = streamer_read_hwi (ib);
1211 gcc_assert (pid < passes->passes_by_id_size);
1212 pass = passes->passes_by_id[pid];
1213 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1216 if (tag == LTO_symtab_analyzed_node)
1217 ref = streamer_read_hwi (ib);
1219 group = read_identifier (ib);
1220 if (group)
1221 ref2 = streamer_read_hwi (ib);
1223 /* Make sure that we have not read this node before. Nodes that
1224 have already been read will have their tag stored in the 'aux'
1225 field. Since built-in functions can be referenced in multiple
1226 functions, they are expected to be read more than once. */
1227 if (node->aux && !DECL_BUILT_IN (node->decl))
1228 internal_error ("bytecode stream: found multiple instances of cgraph "
1229 "node with uid %d", node->uid);
1231 node->tp_first_run = streamer_read_uhwi (ib);
1233 bp = streamer_read_bitpack (ib);
1235 input_overwrite_node (file_data, node, tag, &bp);
1237 /* Store a reference for now, and fix up later to be a pointer. */
1238 node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
1240 if (group)
1242 node->set_comdat_group (group);
1243 /* Store a reference for now, and fix up later to be a pointer. */
1244 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1246 else
1247 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1248 section = read_string (ib);
1249 if (section)
1250 node->set_section_for_node (section);
1252 if (node->thunk.thunk_p)
1254 int type = streamer_read_uhwi (ib);
1255 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1256 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1258 node->thunk.fixed_offset = fixed_offset;
1259 node->thunk.this_adjusting = (type & 2);
1260 node->thunk.virtual_value = virtual_value;
1261 node->thunk.virtual_offset_p = (type & 4);
1263 if (node->alias && !node->analyzed && node->weakref)
1264 node->alias_target = get_alias_symbol (node->decl);
1265 node->profile_id = streamer_read_hwi (ib);
1266 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1267 node->set_init_priority (streamer_read_hwi (ib));
1268 if (DECL_STATIC_DESTRUCTOR (node->decl))
1269 node->set_fini_priority (streamer_read_hwi (ib));
1270 return node;
1273 /* Read a node from input_block IB. TAG is the node's tag just read.
1274 Return the node read or overwriten. */
1276 static varpool_node *
1277 input_varpool_node (struct lto_file_decl_data *file_data,
1278 struct lto_input_block *ib)
1280 int decl_index;
1281 tree var_decl;
1282 varpool_node *node;
1283 struct bitpack_d bp;
1284 int ref = LCC_NOT_FOUND;
1285 int order;
1286 tree group;
1287 const char *section;
1289 order = streamer_read_hwi (ib) + order_base;
1290 decl_index = streamer_read_uhwi (ib);
1291 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1293 /* Declaration of functions can be already merged with a declaration
1294 from other input file. We keep cgraph unmerged until after streaming
1295 of ipa passes is done. Alays forcingly create a fresh node. */
1296 node = varpool_node::create_empty ();
1297 node->decl = var_decl;
1298 node->register_symbol ();
1300 node->order = order;
1301 if (order >= symtab->order)
1302 symtab->order = order + 1;
1303 node->lto_file_data = file_data;
1305 bp = streamer_read_bitpack (ib);
1306 node->externally_visible = bp_unpack_value (&bp, 1);
1307 node->force_output = bp_unpack_value (&bp, 1);
1308 node->forced_by_abi = bp_unpack_value (&bp, 1);
1309 node->unique_name = bp_unpack_value (&bp, 1);
1310 node->body_removed = bp_unpack_value (&bp, 1);
1311 node->implicit_section = bp_unpack_value (&bp, 1);
1312 node->writeonly = bp_unpack_value (&bp, 1);
1313 node->definition = bp_unpack_value (&bp, 1);
1314 node->alias = bp_unpack_value (&bp, 1);
1315 node->weakref = bp_unpack_value (&bp, 1);
1316 node->analyzed = bp_unpack_value (&bp, 1);
1317 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1318 node->in_other_partition = bp_unpack_value (&bp, 1);
1319 if (node->in_other_partition)
1321 DECL_EXTERNAL (node->decl) = 1;
1322 TREE_STATIC (node->decl) = 0;
1324 if (node->alias && !node->analyzed && node->weakref)
1325 node->alias_target = get_alias_symbol (node->decl);
1326 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1327 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1328 group = read_identifier (ib);
1329 if (group)
1331 node->set_comdat_group (group);
1332 ref = streamer_read_hwi (ib);
1333 /* Store a reference for now, and fix up later to be a pointer. */
1334 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1336 else
1337 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1338 section = read_string (ib);
1339 if (section)
1340 node->set_section_for_node (section);
1341 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1342 LDPR_NUM_KNOWN);
1343 gcc_assert (flag_ltrans
1344 || (!node->in_other_partition
1345 && !node->used_from_other_partition));
1347 return node;
1350 /* Read a node from input_block IB. TAG is the node's tag just read.
1351 Return the node read or overwriten. */
1353 static void
1354 input_ref (struct lto_input_block *ib,
1355 symtab_node *referring_node,
1356 vec<symtab_node *> nodes)
1358 symtab_node *node = NULL;
1359 struct bitpack_d bp;
1360 enum ipa_ref_use use;
1361 bool speculative;
1362 struct ipa_ref *ref;
1364 bp = streamer_read_bitpack (ib);
1365 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1366 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1367 node = nodes[streamer_read_hwi (ib)];
1368 ref = referring_node->create_reference (node, use);
1369 ref->speculative = speculative;
1370 if (is_a <cgraph_node *> (referring_node))
1371 ref->lto_stmt_uid = streamer_read_hwi (ib);
1374 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1375 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1376 edge being read is indirect (in the sense that it has
1377 indirect_unknown_callee set). */
1379 static void
1380 input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
1381 bool indirect)
1383 struct cgraph_node *caller, *callee;
1384 struct cgraph_edge *edge;
1385 unsigned int stmt_id;
1386 gcov_type count;
1387 int freq;
1388 cgraph_inline_failed_t inline_failed;
1389 struct bitpack_d bp;
1390 int ecf_flags = 0;
1392 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1393 if (caller == NULL || caller->decl == NULL_TREE)
1394 internal_error ("bytecode stream: no caller found while reading edge");
1396 if (!indirect)
1398 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1399 if (callee == NULL || callee->decl == NULL_TREE)
1400 internal_error ("bytecode stream: no callee found while reading edge");
1402 else
1403 callee = NULL;
1405 count = streamer_read_gcov_count (ib);
1407 bp = streamer_read_bitpack (ib);
1408 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1409 stmt_id = bp_unpack_var_len_unsigned (&bp);
1410 freq = (int) bp_unpack_var_len_unsigned (&bp);
1412 if (indirect)
1413 edge = caller->create_indirect_edge (NULL, 0, count, freq);
1414 else
1415 edge = caller->create_edge (callee, NULL, count, freq);
1417 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1418 edge->speculative = bp_unpack_value (&bp, 1);
1419 edge->lto_stmt_uid = stmt_id;
1420 edge->inline_failed = inline_failed;
1421 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1422 edge->can_throw_external = bp_unpack_value (&bp, 1);
1423 if (indirect)
1425 if (bp_unpack_value (&bp, 1))
1426 ecf_flags |= ECF_CONST;
1427 if (bp_unpack_value (&bp, 1))
1428 ecf_flags |= ECF_PURE;
1429 if (bp_unpack_value (&bp, 1))
1430 ecf_flags |= ECF_NORETURN;
1431 if (bp_unpack_value (&bp, 1))
1432 ecf_flags |= ECF_MALLOC;
1433 if (bp_unpack_value (&bp, 1))
1434 ecf_flags |= ECF_NOTHROW;
1435 if (bp_unpack_value (&bp, 1))
1436 ecf_flags |= ECF_RETURNS_TWICE;
1437 edge->indirect_info->ecf_flags = ecf_flags;
1438 edge->indirect_info->common_target_id = streamer_read_hwi (ib);
1439 if (edge->indirect_info->common_target_id)
1440 edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
1445 /* Read a cgraph from IB using the info in FILE_DATA. */
1447 static vec<symtab_node *>
1448 input_cgraph_1 (struct lto_file_decl_data *file_data,
1449 struct lto_input_block *ib)
1451 enum LTO_symtab_tags tag;
1452 vec<symtab_node *> nodes = vNULL;
1453 symtab_node *node;
1454 unsigned i;
1456 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1457 order_base = symtab->order;
1458 while (tag)
1460 if (tag == LTO_symtab_edge)
1461 input_edge (ib, nodes, false);
1462 else if (tag == LTO_symtab_indirect_edge)
1463 input_edge (ib, nodes, true);
1464 else if (tag == LTO_symtab_variable)
1466 node = input_varpool_node (file_data, ib);
1467 nodes.safe_push (node);
1468 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1470 else
1472 node = input_node (file_data, ib, tag, nodes);
1473 if (node == NULL || node->decl == NULL_TREE)
1474 internal_error ("bytecode stream: found empty cgraph node");
1475 nodes.safe_push (node);
1476 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1479 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1482 lto_input_toplevel_asms (file_data, order_base);
1484 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1485 #ifdef ENABLE_CHECKING
1486 FOR_EACH_VEC_ELT (nodes, i, node)
1487 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1488 #endif
1489 FOR_EACH_VEC_ELT (nodes, i, node)
1491 int ref;
1492 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1494 ref = (int) (intptr_t) cnode->global.inlined_to;
1496 /* We share declaration of builtins, so we may read same node twice. */
1497 if (!node->aux)
1498 continue;
1499 node->aux = NULL;
1501 /* Fixup inlined_to from reference to pointer. */
1502 if (ref != LCC_NOT_FOUND)
1503 dyn_cast<cgraph_node *> (node)->global.inlined_to
1504 = dyn_cast<cgraph_node *> (nodes[ref]);
1505 else
1506 cnode->global.inlined_to = NULL;
1509 ref = (int) (intptr_t) node->same_comdat_group;
1511 /* Fixup same_comdat_group from reference to pointer. */
1512 if (ref != LCC_NOT_FOUND)
1513 node->same_comdat_group = nodes[ref];
1514 else
1515 node->same_comdat_group = NULL;
1517 FOR_EACH_VEC_ELT (nodes, i, node)
1518 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1519 return nodes;
1522 /* Input ipa_refs. */
1524 static void
1525 input_refs (struct lto_input_block *ib,
1526 vec<symtab_node *> nodes)
1528 int count;
1529 int idx;
1530 while (true)
1532 symtab_node *node;
1533 count = streamer_read_uhwi (ib);
1534 if (!count)
1535 break;
1536 idx = streamer_read_uhwi (ib);
1537 node = nodes[idx];
1538 while (count)
1540 input_ref (ib, node, nodes);
1541 count--;
1547 static struct gcov_ctr_summary lto_gcov_summary;
1549 /* Input profile_info from IB. */
1550 static void
1551 input_profile_summary (struct lto_input_block *ib,
1552 struct lto_file_decl_data *file_data)
1554 unsigned h_ix;
1555 struct bitpack_d bp;
1556 unsigned int runs = streamer_read_uhwi (ib);
1557 if (runs)
1559 file_data->profile_info.runs = runs;
1560 file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
1561 file_data->profile_info.sum_all = streamer_read_gcov_count (ib);
1563 memset (file_data->profile_info.histogram, 0,
1564 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1565 /* Input the bitpack of non-zero histogram indices. */
1566 bp = streamer_read_bitpack (ib);
1567 /* Read in and unpack the full bitpack, flagging non-zero
1568 histogram entries by setting the num_counters non-zero. */
1569 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1571 file_data->profile_info.histogram[h_ix].num_counters
1572 = bp_unpack_value (&bp, 1);
1574 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1576 if (!file_data->profile_info.histogram[h_ix].num_counters)
1577 continue;
1579 file_data->profile_info.histogram[h_ix].num_counters
1580 = streamer_read_gcov_count (ib);
1581 file_data->profile_info.histogram[h_ix].min_value
1582 = streamer_read_gcov_count (ib);
1583 file_data->profile_info.histogram[h_ix].cum_value
1584 = streamer_read_gcov_count (ib);
1586 /* IPA-profile computes hot bb threshold based on cumulated
1587 whole program profile. We need to stream it down to ltrans. */
1588 if (flag_ltrans)
1589 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1594 /* Rescale profile summaries to the same number of runs in the whole unit. */
1596 static void
1597 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1599 struct lto_file_decl_data *file_data;
1600 unsigned int j, h_ix;
1601 gcov_unsigned_t max_runs = 0;
1602 struct cgraph_node *node;
1603 struct cgraph_edge *edge;
1604 gcov_type saved_sum_all = 0;
1605 gcov_ctr_summary *saved_profile_info = 0;
1606 int saved_scale = 0;
1608 /* Find unit with maximal number of runs. If we ever get serious about
1609 roundoff errors, we might also consider computing smallest common
1610 multiply. */
1611 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1612 if (max_runs < file_data->profile_info.runs)
1613 max_runs = file_data->profile_info.runs;
1615 if (!max_runs)
1616 return;
1618 /* Simple overflow check. We probably don't need to support that many train
1619 runs. Such a large value probably imply data corruption anyway. */
1620 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1622 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1623 INT_MAX / REG_BR_PROB_BASE);
1624 return;
1627 profile_info = &lto_gcov_summary;
1628 lto_gcov_summary.runs = max_runs;
1629 lto_gcov_summary.sum_max = 0;
1630 memset (lto_gcov_summary.histogram, 0,
1631 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1633 /* Rescale all units to the maximal number of runs.
1634 sum_max can not be easily merged, as we have no idea what files come from
1635 the same run. We do not use the info anyway, so leave it 0. */
1636 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1637 if (file_data->profile_info.runs)
1639 int scale = GCOV_COMPUTE_SCALE (max_runs,
1640 file_data->profile_info.runs);
1641 lto_gcov_summary.sum_max
1642 = MAX (lto_gcov_summary.sum_max,
1643 apply_scale (file_data->profile_info.sum_max, scale));
1644 lto_gcov_summary.sum_all
1645 = MAX (lto_gcov_summary.sum_all,
1646 apply_scale (file_data->profile_info.sum_all, scale));
1647 /* Save a pointer to the profile_info with the largest
1648 scaled sum_all and the scale for use in merging the
1649 histogram. */
1650 if (!saved_profile_info
1651 || lto_gcov_summary.sum_all > saved_sum_all)
1653 saved_profile_info = &file_data->profile_info;
1654 saved_sum_all = lto_gcov_summary.sum_all;
1655 saved_scale = scale;
1659 gcc_assert (saved_profile_info);
1661 /* Scale up the histogram from the profile that had the largest
1662 scaled sum_all above. */
1663 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1665 /* Scale up the min value as we did the corresponding sum_all
1666 above. Use that to find the new histogram index. */
1667 gcov_type scaled_min
1668 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1669 saved_scale);
1670 /* The new index may be shared with another scaled histogram entry,
1671 so we need to account for a non-zero histogram entry at new_ix. */
1672 unsigned new_ix = gcov_histo_index (scaled_min);
1673 lto_gcov_summary.histogram[new_ix].min_value
1674 = (lto_gcov_summary.histogram[new_ix].num_counters
1675 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1676 : scaled_min);
1677 /* Some of the scaled counter values would ostensibly need to be placed
1678 into different (larger) histogram buckets, but we keep things simple
1679 here and place the scaled cumulative counter value in the bucket
1680 corresponding to the scaled minimum counter value. */
1681 lto_gcov_summary.histogram[new_ix].cum_value
1682 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1683 saved_scale);
1684 lto_gcov_summary.histogram[new_ix].num_counters
1685 += saved_profile_info->histogram[h_ix].num_counters;
1688 /* Watch roundoff errors. */
1689 if (lto_gcov_summary.sum_max < max_runs)
1690 lto_gcov_summary.sum_max = max_runs;
1692 /* If merging already happent at WPA time, we are done. */
1693 if (flag_ltrans)
1694 return;
1696 /* Now compute count_materialization_scale of each node.
1697 During LTRANS we already have values of count_materialization_scale
1698 computed, so just update them. */
1699 FOR_EACH_FUNCTION (node)
1700 if (node->lto_file_data
1701 && node->lto_file_data->profile_info.runs)
1703 int scale;
1705 scale = RDIV (node->count_materialization_scale * max_runs,
1706 node->lto_file_data->profile_info.runs);
1707 node->count_materialization_scale = scale;
1708 if (scale < 0)
1709 fatal_error ("Profile information in %s corrupted",
1710 file_data->file_name);
1712 if (scale == REG_BR_PROB_BASE)
1713 continue;
1714 for (edge = node->callees; edge; edge = edge->next_callee)
1715 edge->count = apply_scale (edge->count, scale);
1716 node->count = apply_scale (node->count, scale);
1720 /* Input and merge the symtab from each of the .o files passed to
1721 lto1. */
1723 void
1724 input_symtab (void)
1726 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1727 struct lto_file_decl_data *file_data;
1728 unsigned int j = 0;
1729 struct cgraph_node *node;
1731 while ((file_data = file_data_vec[j++]))
1733 const char *data;
1734 size_t len;
1735 struct lto_input_block *ib;
1736 vec<symtab_node *> nodes;
1738 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1739 &data, &len);
1740 if (!ib)
1741 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1742 input_profile_summary (ib, file_data);
1743 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1744 nodes = input_cgraph_1 (file_data, ib);
1745 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1746 ib, data, len);
1748 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1749 &data, &len);
1750 if (!ib)
1751 fatal_error ("cannot find LTO section refs in %s",
1752 file_data->file_name);
1753 input_refs (ib, nodes);
1754 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1755 ib, data, len);
1756 if (flag_ltrans)
1757 input_cgraph_opt_summary (nodes);
1758 nodes.release ();
1761 merge_profile_summaries (file_data_vec);
1762 get_working_sets ();
1765 /* Clear out the aux field that was used to store enough state to
1766 tell which nodes should be overwritten. */
1767 FOR_EACH_FUNCTION (node)
1769 /* Some nodes may have been created by cgraph_node. This
1770 happens when the callgraph contains nested functions. If the
1771 node for the parent function was never emitted to the gimple
1772 file, cgraph_node will create a node for it when setting the
1773 context of the nested function. */
1774 if (node->lto_file_data)
1775 node->aux = NULL;
1779 /* Input function/variable tables that will allow libgomp to look up offload
1780 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1782 void
1783 input_offload_tables (void)
1785 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1786 struct lto_file_decl_data *file_data;
1787 unsigned int j = 0;
1789 while ((file_data = file_data_vec[j++]))
1791 const char *data;
1792 size_t len;
1793 struct lto_input_block *ib
1794 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1795 &data, &len);
1796 if (!ib)
1797 continue;
1799 enum LTO_symtab_tags tag
1800 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1801 while (tag)
1803 if (tag == LTO_symtab_unavail_node)
1805 int decl_index = streamer_read_uhwi (ib);
1806 tree fn_decl
1807 = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1808 vec_safe_push (offload_funcs, fn_decl);
1810 else if (tag == LTO_symtab_variable)
1812 int decl_index = streamer_read_uhwi (ib);
1813 tree var_decl
1814 = lto_file_decl_data_get_var_decl (file_data, decl_index);
1815 vec_safe_push (offload_vars, var_decl);
1817 else
1818 fatal_error ("invalid offload table in %s", file_data->file_name);
1820 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1823 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1824 ib, data, len);
1828 /* True when we need optimization summary for NODE. */
1830 static int
1831 output_cgraph_opt_summary_p (struct cgraph_node *node)
1833 return (node->clone_of
1834 && (node->clone.tree_map
1835 || node->clone.args_to_skip
1836 || node->clone.combined_args_to_skip));
1839 /* Output optimization summary for EDGE to OB. */
1840 static void
1841 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1842 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1846 /* Output optimization summary for NODE to OB. */
1848 static void
1849 output_node_opt_summary (struct output_block *ob,
1850 struct cgraph_node *node,
1851 lto_symtab_encoder_t encoder)
1853 unsigned int index;
1854 bitmap_iterator bi;
1855 struct ipa_replace_map *map;
1856 struct bitpack_d bp;
1857 int i;
1858 struct cgraph_edge *e;
1860 if (node->clone.args_to_skip)
1862 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1863 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1864 streamer_write_uhwi (ob, index);
1866 else
1867 streamer_write_uhwi (ob, 0);
1868 if (node->clone.combined_args_to_skip)
1870 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1871 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1872 streamer_write_uhwi (ob, index);
1874 else
1875 streamer_write_uhwi (ob, 0);
1876 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1877 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1879 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1880 mechanism to store function local declarations into summaries. */
1881 gcc_assert (!map->old_tree);
1882 streamer_write_uhwi (ob, map->parm_num);
1883 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1884 stream_write_tree (ob, map->new_tree, true);
1885 bp = bitpack_create (ob->main_stream);
1886 bp_pack_value (&bp, map->replace_p, 1);
1887 bp_pack_value (&bp, map->ref_p, 1);
1888 streamer_write_bitpack (&bp);
1891 if (lto_symtab_encoder_in_partition_p (encoder, node))
1893 for (e = node->callees; e; e = e->next_callee)
1894 output_edge_opt_summary (ob, e);
1895 for (e = node->indirect_calls; e; e = e->next_callee)
1896 output_edge_opt_summary (ob, e);
1900 /* Output optimization summaries stored in callgraph.
1901 At the moment it is the clone info structure. */
1903 static void
1904 output_cgraph_opt_summary (void)
1906 int i, n_nodes;
1907 lto_symtab_encoder_t encoder;
1908 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1909 unsigned count = 0;
1911 ob->symbol = NULL;
1912 encoder = ob->decl_state->symtab_node_encoder;
1913 n_nodes = lto_symtab_encoder_size (encoder);
1914 for (i = 0; i < n_nodes; i++)
1916 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1917 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1918 if (cnode && output_cgraph_opt_summary_p (cnode))
1919 count++;
1921 streamer_write_uhwi (ob, count);
1922 for (i = 0; i < n_nodes; i++)
1924 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1925 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1926 if (cnode && output_cgraph_opt_summary_p (cnode))
1928 streamer_write_uhwi (ob, i);
1929 output_node_opt_summary (ob, cnode, encoder);
1932 produce_asm (ob, NULL);
1933 destroy_output_block (ob);
1936 /* Input optimisation summary of EDGE. */
1938 static void
1939 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1940 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1944 /* Input optimisation summary of NODE. */
1946 static void
1947 input_node_opt_summary (struct cgraph_node *node,
1948 struct lto_input_block *ib_main,
1949 struct data_in *data_in)
1951 int i;
1952 int count;
1953 int bit;
1954 struct bitpack_d bp;
1955 struct cgraph_edge *e;
1957 count = streamer_read_uhwi (ib_main);
1958 if (count)
1959 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1960 for (i = 0; i < count; i++)
1962 bit = streamer_read_uhwi (ib_main);
1963 bitmap_set_bit (node->clone.args_to_skip, bit);
1965 count = streamer_read_uhwi (ib_main);
1966 if (count)
1967 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1968 for (i = 0; i < count; i++)
1970 bit = streamer_read_uhwi (ib_main);
1971 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1973 count = streamer_read_uhwi (ib_main);
1974 for (i = 0; i < count; i++)
1976 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
1978 vec_safe_push (node->clone.tree_map, map);
1979 map->parm_num = streamer_read_uhwi (ib_main);
1980 map->old_tree = NULL;
1981 map->new_tree = stream_read_tree (ib_main, data_in);
1982 bp = streamer_read_bitpack (ib_main);
1983 map->replace_p = bp_unpack_value (&bp, 1);
1984 map->ref_p = bp_unpack_value (&bp, 1);
1986 for (e = node->callees; e; e = e->next_callee)
1987 input_edge_opt_summary (e, ib_main);
1988 for (e = node->indirect_calls; e; e = e->next_callee)
1989 input_edge_opt_summary (e, ib_main);
1992 /* Read section in file FILE_DATA of length LEN with data DATA. */
1994 static void
1995 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1996 const char *data, size_t len,
1997 vec<symtab_node *> nodes)
1999 const struct lto_function_header *header =
2000 (const struct lto_function_header *) data;
2001 const int cfg_offset = sizeof (struct lto_function_header);
2002 const int main_offset = cfg_offset + header->cfg_size;
2003 const int string_offset = main_offset + header->main_size;
2004 struct data_in *data_in;
2005 unsigned int i;
2006 unsigned int count;
2008 lto_input_block ib_main ((const char *) data + main_offset,
2009 header->main_size);
2011 data_in =
2012 lto_data_in_create (file_data, (const char *) data + string_offset,
2013 header->string_size, vNULL);
2014 count = streamer_read_uhwi (&ib_main);
2016 for (i = 0; i < count; i++)
2018 int ref = streamer_read_uhwi (&ib_main);
2019 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2020 &ib_main, data_in);
2022 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2023 len);
2024 lto_data_in_delete (data_in);
2027 /* Input optimization summary of cgraph. */
2029 static void
2030 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2032 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2033 struct lto_file_decl_data *file_data;
2034 unsigned int j = 0;
2036 while ((file_data = file_data_vec[j++]))
2038 size_t len;
2039 const char *data =
2040 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2041 &len);
2043 if (data)
2044 input_cgraph_opt_section (file_data, data, len, nodes);