gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2014 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stringpool.h"
29 #include "basic-block.h"
30 #include "tree-ssa-alias.h"
31 #include "internal-fn.h"
32 #include "gimple-expr.h"
33 #include "is-a.h"
34 #include "gimple.h"
35 #include "expr.h"
36 #include "flags.h"
37 #include "params.h"
38 #include "input.h"
39 #include "hashtab.h"
40 #include "hash-set.h"
41 #include "langhooks.h"
42 #include "bitmap.h"
43 #include "function.h"
44 #include "diagnostic-core.h"
45 #include "except.h"
46 #include "timevar.h"
47 #include "lto-streamer.h"
48 #include "data-streamer.h"
49 #include "tree-streamer.h"
50 #include "gcov-io.h"
51 #include "tree-pass.h"
52 #include "profile.h"
53 #include "context.h"
54 #include "pass_manager.h"
55 #include "ipa-utils.h"
57 /* True when asm nodes have been output. */
58 bool asm_nodes_output = false;
60 static void output_cgraph_opt_summary (void);
61 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
63 /* Number of LDPR values known to GCC. */
64 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
66 /* All node orders are offset by ORDER_BASE. */
67 static int order_base;
69 /* Cgraph streaming is organized as a set of records whose type
70 is indicated by a tag. */
71 enum LTO_symtab_tags
73 /* Must leave 0 for the stopper. */
75 /* Cgraph node without body available. */
76 LTO_symtab_unavail_node = 1,
77 /* Cgraph node with function body. */
78 LTO_symtab_analyzed_node,
79 /* Cgraph edges. */
80 LTO_symtab_edge,
81 LTO_symtab_indirect_edge,
82 LTO_symtab_variable,
83 LTO_symtab_last_tag
86 /* Create a new symtab encoder.
87 If FOR_INPUT, the encoder allocates only the data structures needed
88 to read the symtab. */
90 lto_symtab_encoder_t
91 lto_symtab_encoder_new (bool for_input)
93 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
95 if (!for_input)
96 encoder->map = new hash_map<symtab_node *, size_t>;
97 encoder->nodes.create (0);
98 return encoder;
102 /* Delete ENCODER and its components. */
104 void
105 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
107 encoder->nodes.release ();
108 if (encoder->map)
109 delete encoder->map;
110 free (encoder);
114 /* Return the existing reference number of NODE in ENCODER.
115 Assign a new reference if this is the first time
116 NODE is encoded. */
119 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
120 symtab_node *node)
122 int ref;
124 if (!encoder->map)
126 lto_encoder_entry entry = {node, false, false, false};
128 ref = encoder->nodes.length ();
129 encoder->nodes.safe_push (entry);
130 return ref;
133 size_t *slot = encoder->map->get (node);
134 if (!slot || !*slot)
136 lto_encoder_entry entry = {node, false, false, false};
137 ref = encoder->nodes.length ();
138 if (!slot)
139 encoder->map->put (node, ref + 1);
140 encoder->nodes.safe_push (entry);
142 else
143 ref = *slot - 1;
145 return ref;
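/* Editorial sketch (not part of the original sources): a writer typically
   obtains a reference through the encoder and streams the integer, e.g.

     lto_symtab_encoder_t enc = ob->decl_state->symtab_node_encoder;
     int ref = lto_symtab_encoder_encode (enc, node);
     streamer_write_hwi_stream (ob->main_stream, ref);

   where OB is assumed to be an output block whose decl state carries the
   encoder; on the reader side the same integer indexes the NODES vector
   built by input_cgraph_1.  */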
148 /* Remove NODE from encoder. */
150 bool
151 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
152 symtab_node *node)
154 int index;
155 lto_encoder_entry last_node;
157 size_t *slot = encoder->map->get (node);
158 if (slot == NULL || !*slot)
159 return false;
161 index = *slot - 1;
162 gcc_checking_assert (encoder->nodes[index].node == node);
164 /* Remove from vector. We do this by swapping node with the last element
165 of the vector. */
166 last_node = encoder->nodes.pop ();
167 if (last_node.node != node)
169 gcc_assert (encoder->map->put (last_node.node, index + 1));
171 /* Move the last element to the original spot of NODE. */
172 encoder->nodes[index] = last_node;
175 /* Remove element from hash table. */
176 encoder->map->remove (node);
177 return true;
181 /* Return TRUE if we should encode the body of NODE (if any). */
183 bool
184 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
185 struct cgraph_node *node)
187 int index = lto_symtab_encoder_lookup (encoder, node);
188 return encoder->nodes[index].body;
191 /* Specify that we should encode the body of NODE (if any). */
193 static void
194 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
195 struct cgraph_node *node)
197 int index = lto_symtab_encoder_encode (encoder, node);
198 gcc_checking_assert (encoder->nodes[index].node == node);
199 encoder->nodes[index].body = true;
202 /* Return TRUE if we should encode initializer of NODE (if any). */
204 bool
205 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
206 varpool_node *node)
208 int index = lto_symtab_encoder_lookup (encoder, node);
209 if (index == LCC_NOT_FOUND)
210 return false;
211 return encoder->nodes[index].initializer;
214 /* Specify that we should encode the initializer of NODE (if any). */
216 static void
217 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
218 varpool_node *node)
220 int index = lto_symtab_encoder_lookup (encoder, node);
221 encoder->nodes[index].initializer = true;
224 /* Return TRUE if NODE is in the partition being encoded. */
226 bool
227 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
228 symtab_node *node)
230 int index = lto_symtab_encoder_lookup (encoder, node);
231 if (index == LCC_NOT_FOUND)
232 return false;
233 return encoder->nodes[index].in_partition;
236 /* Specify that NODE is in this partition. */
238 void
239 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
240 symtab_node *node)
242 int index = lto_symtab_encoder_encode (encoder, node);
243 encoder->nodes[index].in_partition = true;
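/* Editorial sketch (illustrative only): the partitioner marks symbols with
   the setters above and the streaming code later queries the flags, e.g.

     lto_set_symtab_encoder_in_partition (encoder, node);
     ...
     if (lto_symtab_encoder_in_partition_p (encoder, node))
       write the full definition, otherwise treat NODE as a boundary symbol.  */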
246 /* Output the cgraph EDGE to OB using ENCODER. */
248 static void
249 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
250 lto_symtab_encoder_t encoder)
252 unsigned int uid;
253 intptr_t ref;
254 struct bitpack_d bp;
256 if (edge->indirect_unknown_callee)
257 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
258 LTO_symtab_indirect_edge);
259 else
260 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
261 LTO_symtab_edge);
263 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
264 gcc_assert (ref != LCC_NOT_FOUND);
265 streamer_write_hwi_stream (ob->main_stream, ref);
267 if (!edge->indirect_unknown_callee)
269 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
270 gcc_assert (ref != LCC_NOT_FOUND);
271 streamer_write_hwi_stream (ob->main_stream, ref);
274 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
276 bp = bitpack_create (ob->main_stream);
277 uid = (!gimple_has_body_p (edge->caller->decl)
278 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
279 bp_pack_enum (&bp, cgraph_inline_failed_t,
280 CIF_N_REASONS, edge->inline_failed);
281 bp_pack_var_len_unsigned (&bp, uid);
282 bp_pack_var_len_unsigned (&bp, edge->frequency);
283 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
284 bp_pack_value (&bp, edge->speculative, 1);
285 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
286 bp_pack_value (&bp, edge->can_throw_external, 1);
287 if (edge->indirect_unknown_callee)
289 int flags = edge->indirect_info->ecf_flags;
290 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
291 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
292 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
293 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
294 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
295 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
296 /* Flags that should not appear on indirect calls. */
297 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
298 | ECF_MAY_BE_ALLOCA
299 | ECF_SIBCALL
300 | ECF_LEAF
301 | ECF_NOVOPS)));
303 streamer_write_bitpack (&bp);
304 if (edge->indirect_unknown_callee)
306 streamer_write_hwi_stream (ob->main_stream,
307 edge->indirect_info->common_target_id);
308 if (edge->indirect_info->common_target_id)
309 streamer_write_hwi_stream
310 (ob->main_stream, edge->indirect_info->common_target_probability);
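/* Editorial note: the record written above must be unpacked in exactly the
   same order by input_edge below; for example the writer's

     bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
     bp_pack_value (&bp, edge->speculative, 1);

   pairs with the reader's

     edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
     edge->speculative = bp_unpack_value (&bp, 1);  */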
314 /* Return TRUE if NODE contains references from other partitions. */
316 bool
317 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
319 int i;
320 struct ipa_ref *ref = NULL;
322 for (i = 0; node->iterate_referring (i, ref); i++)
324 if (ref->referring->in_other_partition
325 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
326 return true;
328 return false;
331 /* Return true when NODE is reachable from another partition. */
333 bool
334 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
336 struct cgraph_edge *e;
337 if (!node->definition)
338 return false;
339 if (node->global.inlined_to)
340 return false;
341 for (e = node->callers; e; e = e->next_caller)
342 if (e->caller->in_other_partition
343 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
344 return true;
345 return false;
348 /* Return TRUE if NODE is referenced from this partition. */
350 bool
351 referenced_from_this_partition_p (symtab_node *node,
352 lto_symtab_encoder_t encoder)
354 int i;
355 struct ipa_ref *ref = NULL;
357 for (i = 0; node->iterate_referring (i, ref); i++)
358 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
359 return true;
360 return false;
363 /* Return true when NODE is reachable from this partition. */
365 bool
366 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
368 struct cgraph_edge *e;
369 for (e = node->callers; e; e = e->next_caller)
370 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
371 return true;
372 return false;
375 /* Output the cgraph NODE to OB. ENCODER is used to find the
376 reference number of NODE->inlined_to and defines the set of nodes we
377 are writing to the current file. If NODE is not in that set, then NODE
378 is a boundary node and we pretend NODE just has a
379 decl and no callees. ENCODER also records which function bodies
380 have been written so far; this is used to
381 determine if NODE is a clone of a previously written node. */
383 static void
384 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
385 lto_symtab_encoder_t encoder)
387 unsigned int tag;
388 struct bitpack_d bp;
389 bool boundary_p;
390 intptr_t ref;
391 bool in_other_partition = false;
392 struct cgraph_node *clone_of, *ultimate_clone_of;
393 ipa_opt_pass_d *pass;
394 int i;
395 bool alias_p;
396 const char *comdat;
397 const char *section;
398 tree group;
400 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
402 if (node->analyzed && !boundary_p)
403 tag = LTO_symtab_analyzed_node;
404 else
405 tag = LTO_symtab_unavail_node;
407 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
408 tag);
409 streamer_write_hwi_stream (ob->main_stream, node->order);
411 /* In WPA mode, we only output part of the call-graph. Also, we
412 fake cgraph node attributes. There are two cases we care about:
414 Boundary nodes: These are nodes that are not part of SET but are
415 called from within SET. We artificially make them look like
416 externally visible nodes with no function body.
418 Cherry-picked nodes: These are nodes we pulled from other
419 translation units into SET during IPA-inlining. We make them
420 local static nodes to prevent clashes with other local statics. */
421 if (boundary_p && node->analyzed
422 && node->get_partitioning_class () == SYMBOL_PARTITION)
424 /* Inline clones cannot be part of the boundary.
425 gcc_assert (!node->global.inlined_to);
427 FIXME: At the moment they can be, when the partition contains an inline
428 clone that is a clone of an inline clone from outside the partition. We can
429 reshape the clone tree and make another node the root, but it
430 needs a bit of extra work and will be promptly done by cgraph_remove_node
431 after reading back. */
432 in_other_partition = 1;
435 clone_of = node->clone_of;
436 while (clone_of
437 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
438 if (clone_of->prev_sibling_clone)
439 clone_of = clone_of->prev_sibling_clone;
440 else
441 clone_of = clone_of->clone_of;
443 /* See if the body of the master function is output. If not, we are seeing only
444 a declaration and we do not need to pass down the clone tree. */
445 ultimate_clone_of = clone_of;
446 while (ultimate_clone_of && ultimate_clone_of->clone_of)
447 ultimate_clone_of = ultimate_clone_of->clone_of;
449 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
450 clone_of = NULL;
452 if (tag == LTO_symtab_analyzed_node)
453 gcc_assert (clone_of || !node->clone_of);
454 if (!clone_of)
455 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
456 else
457 streamer_write_hwi_stream (ob->main_stream, ref);
460 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
461 streamer_write_gcov_count_stream (ob->main_stream, node->count);
462 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
464 streamer_write_hwi_stream (ob->main_stream,
465 node->ipa_transforms_to_apply.length ());
466 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
467 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
469 if (tag == LTO_symtab_analyzed_node)
471 if (node->global.inlined_to)
473 ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
474 gcc_assert (ref != LCC_NOT_FOUND);
476 else
477 ref = LCC_NOT_FOUND;
479 streamer_write_hwi_stream (ob->main_stream, ref);
482 group = node->get_comdat_group ();
483 if (group)
484 comdat = IDENTIFIER_POINTER (group);
485 else
486 comdat = "";
487 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
489 if (group)
491 if (node->same_comdat_group && !boundary_p)
493 ref = lto_symtab_encoder_lookup (encoder,
494 node->same_comdat_group);
495 gcc_assert (ref != LCC_NOT_FOUND);
497 else
498 ref = LCC_NOT_FOUND;
499 streamer_write_hwi_stream (ob->main_stream, ref);
502 section = node->get_section ();
503 if (!section)
504 section = "";
506 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
508 bp = bitpack_create (ob->main_stream);
509 bp_pack_value (&bp, node->local.local, 1);
510 bp_pack_value (&bp, node->externally_visible, 1);
511 bp_pack_value (&bp, node->definition, 1);
512 bp_pack_value (&bp, node->local.versionable, 1);
513 bp_pack_value (&bp, node->local.can_change_signature, 1);
514 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
515 bp_pack_value (&bp, node->force_output, 1);
516 bp_pack_value (&bp, node->forced_by_abi, 1);
517 bp_pack_value (&bp, node->unique_name, 1);
518 bp_pack_value (&bp, node->body_removed, 1);
519 bp_pack_value (&bp, node->implicit_section, 1);
520 bp_pack_value (&bp, node->address_taken, 1);
521 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
522 && node->get_partitioning_class () == SYMBOL_PARTITION
523 && (reachable_from_other_partition_p (node, encoder)
524 || referenced_from_other_partition_p (node, encoder)), 1);
525 bp_pack_value (&bp, node->lowered, 1);
526 bp_pack_value (&bp, in_other_partition, 1);
527 /* Real aliases in a boundary become non-aliases. However we still stream
528 alias info on weakrefs.
529 TODO: We lose a bit of information here - when we know that a variable is
530 defined in another unit, we may use the info on aliases to resolve
531 symbol1 != symbol2 type tests that we can otherwise do only for locally
532 defined objects. */
533 alias_p = node->alias && (!boundary_p || node->weakref);
534 bp_pack_value (&bp, alias_p, 1);
535 bp_pack_value (&bp, node->weakref, 1);
536 bp_pack_value (&bp, node->frequency, 2);
537 bp_pack_value (&bp, node->only_called_at_startup, 1);
538 bp_pack_value (&bp, node->only_called_at_exit, 1);
539 bp_pack_value (&bp, node->tm_clone, 1);
540 bp_pack_value (&bp, node->calls_comdat_local, 1);
541 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
542 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
543 LDPR_NUM_KNOWN, node->resolution);
544 streamer_write_bitpack (&bp);
545 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
547 if (node->thunk.thunk_p && !boundary_p)
549 streamer_write_uhwi_stream
550 (ob->main_stream,
551 1 + (node->thunk.this_adjusting != 0) * 2
552 + (node->thunk.virtual_offset_p != 0) * 4);
553 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
554 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
556 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
557 if (DECL_STATIC_CONSTRUCTOR (node->decl))
558 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
559 if (DECL_STATIC_DESTRUCTOR (node->decl))
560 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
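/* Editorial note: the bitpack built in lto_output_node is consumed bit for
   bit by input_overwrite_node on the reader side; adding, removing or
   reordering any bp_pack_value call here requires the matching change in
   the bp_unpack_value sequence there.  */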
563 /* Output the varpool NODE to OB.
564 If NODE is not in SET, then NODE is a boundary. */
566 static void
567 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
568 lto_symtab_encoder_t encoder)
570 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
571 struct bitpack_d bp;
572 int ref;
573 bool alias_p;
574 const char *comdat;
575 const char *section;
576 tree group;
578 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
579 LTO_symtab_variable);
580 streamer_write_hwi_stream (ob->main_stream, node->order);
581 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
582 bp = bitpack_create (ob->main_stream);
583 bp_pack_value (&bp, node->externally_visible, 1);
584 bp_pack_value (&bp, node->force_output, 1);
585 bp_pack_value (&bp, node->forced_by_abi, 1);
586 bp_pack_value (&bp, node->unique_name, 1);
587 bp_pack_value (&bp, node->body_removed, 1);
588 bp_pack_value (&bp, node->implicit_section, 1);
589 bp_pack_value (&bp, node->writeonly, 1);
590 bp_pack_value (&bp, node->definition, 1);
591 alias_p = node->alias && (!boundary_p || node->weakref);
592 bp_pack_value (&bp, alias_p, 1);
593 bp_pack_value (&bp, node->weakref, 1);
594 bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
595 gcc_assert (node->definition || !node->analyzed);
596 /* Constant pool initializers can be de-unified into individual ltrans units.
597 FIXME: Alternatively, at -Os we may want to avoid generating the local
598 labels for them and share them across LTRANS partitions. */
599 if (node->get_partitioning_class () != SYMBOL_PARTITION)
601 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
602 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
604 else
606 bp_pack_value (&bp, node->definition
607 && referenced_from_other_partition_p (node, encoder), 1);
608 bp_pack_value (&bp, node->analyzed
609 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
610 /* in_other_partition. */
612 bp_pack_value (&bp, node->tls_model, 3);
613 bp_pack_value (&bp, node->used_by_single_function, 1);
614 streamer_write_bitpack (&bp);
616 group = node->get_comdat_group ();
617 if (group)
618 comdat = IDENTIFIER_POINTER (group);
619 else
620 comdat = "";
621 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
623 if (group)
625 if (node->same_comdat_group && !boundary_p)
627 ref = lto_symtab_encoder_lookup (encoder,
628 node->same_comdat_group);
629 gcc_assert (ref != LCC_NOT_FOUND);
631 else
632 ref = LCC_NOT_FOUND;
633 streamer_write_hwi_stream (ob->main_stream, ref);
636 section = node->get_section ();
637 if (!section)
638 section = "";
639 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
641 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
642 LDPR_NUM_KNOWN, node->resolution);
645 /* Output the reference REF to OB using ENCODER. */
648 static void
649 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
650 lto_symtab_encoder_t encoder)
652 struct bitpack_d bp;
653 int nref;
654 int uid = ref->lto_stmt_uid;
655 struct cgraph_node *node;
657 bp = bitpack_create (ob->main_stream);
658 bp_pack_value (&bp, ref->use, 2);
659 bp_pack_value (&bp, ref->speculative, 1);
660 streamer_write_bitpack (&bp);
661 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
662 gcc_assert (nref != LCC_NOT_FOUND);
663 streamer_write_hwi_stream (ob->main_stream, nref);
665 node = dyn_cast <cgraph_node *> (ref->referring);
666 if (node)
668 if (ref->stmt)
669 uid = gimple_uid (ref->stmt) + 1;
670 streamer_write_hwi_stream (ob->main_stream, uid);
674 /* Stream out profile_summary to OB. */
676 static void
677 output_profile_summary (struct lto_simple_output_block *ob)
679 unsigned h_ix;
680 struct bitpack_d bp;
682 if (profile_info)
684 /* We do not output num and run_max; they are not used by
685 GCC profile feedback and they are difficult to merge from multiple
686 units. */
687 gcc_assert (profile_info->runs);
688 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
689 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
691 /* sum_all is needed for computing the working set with the
692 histogram. */
693 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
695 /* Create and output a bitpack of non-zero histogram entry indices. */
696 bp = bitpack_create (ob->main_stream);
697 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
698 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
699 streamer_write_bitpack (&bp);
700 /* Now stream out only those non-zero entries. */
701 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
703 if (!profile_info->histogram[h_ix].num_counters)
704 continue;
705 streamer_write_gcov_count_stream (ob->main_stream,
706 profile_info->histogram[h_ix].num_counters);
707 streamer_write_gcov_count_stream (ob->main_stream,
708 profile_info->histogram[h_ix].min_value);
709 streamer_write_gcov_count_stream (ob->main_stream,
710 profile_info->histogram[h_ix].cum_value);
712 /* IPA-profile computes the hot bb threshold based on the cumulated
713 whole-program profile. We need to stream it down to ltrans. */
714 if (flag_wpa)
715 streamer_write_gcov_count_stream (ob->main_stream,
716 get_hot_bb_threshold ());
718 else
719 streamer_write_uhwi_stream (ob->main_stream, 0);
722 /* Output all callees or indirect outgoing edges. EDGE must be the first such
723 edge. */
725 static void
726 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
727 struct lto_simple_output_block *ob,
728 lto_symtab_encoder_t encoder)
730 if (!edge)
731 return;
733 /* Output edges in backward direction, so the reconstructed callgraph matches
734 and it is easy to associate call sites in the IPA pass summaries. */
735 while (edge->next_callee)
736 edge = edge->next_callee;
737 for (; edge; edge = edge->prev_callee)
738 lto_output_edge (ob, edge, encoder);
741 /* Output the references of all symbols in ENCODER that are in the partition. */
743 static void
744 output_refs (lto_symtab_encoder_t encoder)
746 lto_symtab_encoder_iterator lsei;
747 struct lto_simple_output_block *ob;
748 int count;
749 struct ipa_ref *ref;
750 int i;
752 ob = lto_create_simple_output_block (LTO_section_refs);
754 for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
755 lsei_next_in_partition (&lsei))
757 symtab_node *node = lsei_node (lsei);
759 count = node->ref_list.nreferences ();
760 if (count)
762 streamer_write_gcov_count_stream (ob->main_stream, count);
763 streamer_write_uhwi_stream (ob->main_stream,
764 lto_symtab_encoder_lookup (encoder, node));
765 for (i = 0; node->iterate_reference (i, ref); i++)
766 lto_output_ref (ob, ref, encoder);
770 streamer_write_uhwi_stream (ob->main_stream, 0);
772 lto_destroy_simple_output_block (ob);
775 /* Add NODE to ENCODER as well as the nodes it is cloned from,
776 so that a clone's origin always appears before the clone. */
778 static void
779 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
780 bool include_body)
782 if (node->clone_of)
783 add_node_to (encoder, node->clone_of, include_body);
784 else if (include_body)
785 lto_set_symtab_encoder_encode_body (encoder, node);
786 lto_symtab_encoder_encode (encoder, node);
789 /* Add all references in NODE to encoders. */
791 static void
792 add_references (lto_symtab_encoder_t encoder, symtab_node *node)
794 int i;
795 struct ipa_ref *ref = NULL;
796 for (i = 0; node->iterate_reference (i, ref); i++)
797 if (is_a <cgraph_node *> (ref->referred))
798 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
799 else
800 lto_symtab_encoder_encode (encoder, ref->referred);
803 /* Find all symbols we want to stream into the given partition and insert them
804 into encoders.
806 The function actually replaces IN_ENCODER by a new one. The reason is that
807 the streaming code needs a clone's origin to be streamed before the clone.
808 This means that we need to insert the nodes in a specific order, an order
809 ignored by the partitioning logic earlier. */
811 lto_symtab_encoder_t
812 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
814 struct cgraph_edge *edge;
815 int i;
816 lto_symtab_encoder_t encoder;
817 lto_symtab_encoder_iterator lsei;
818 hash_set<void *> reachable_call_targets;
820 encoder = lto_symtab_encoder_new (false);
822 /* Go over all entries in the IN_ENCODER and duplicate them to
823 ENCODER. At the same time insert masters of clones so
824 every master appears before its clones. */
825 for (lsei = lsei_start_function_in_partition (in_encoder);
826 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
828 struct cgraph_node *node = lsei_cgraph_node (lsei);
829 add_node_to (encoder, node, true);
830 lto_set_symtab_encoder_in_partition (encoder, node);
831 add_references (encoder, node);
832 /* For proper debug info, we need to ship the origins, too. */
833 if (DECL_ABSTRACT_ORIGIN (node->decl))
835 struct cgraph_node *origin_node
836 = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
837 add_node_to (encoder, origin_node, true);
840 for (lsei = lsei_start_variable_in_partition (in_encoder);
841 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
843 varpool_node *vnode = lsei_varpool_node (lsei);
845 lto_set_symtab_encoder_in_partition (encoder, vnode);
846 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
847 add_references (encoder, vnode);
848 /* For proper debug info, we need to ship the origins, too. */
849 if (DECL_ABSTRACT_ORIGIN (vnode->decl))
851 varpool_node *origin_node
852 = varpool_node::get (DECL_ABSTRACT_ORIGIN (vnode->decl));
853 lto_set_symtab_encoder_in_partition (encoder, origin_node);
856 /* Also pickle in the initializers of all referenced readonly variables
857 to help folding. Constant pool variables are not shared, so we must
858 pickle those too. */
859 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
861 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
862 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
864 if (!lto_symtab_encoder_encode_initializer_p (encoder,
865 vnode)
866 && vnode->ctor_useable_for_folding_p ())
868 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
869 add_references (encoder, vnode);
874 /* Go over all the nodes again to include callees that are not in
875 SET. */
876 for (lsei = lsei_start_function_in_partition (encoder);
877 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
879 struct cgraph_node *node = lsei_cgraph_node (lsei);
880 for (edge = node->callees; edge; edge = edge->next_callee)
882 struct cgraph_node *callee = edge->callee;
883 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
885 /* We should have moved all the inlines. */
886 gcc_assert (!callee->global.inlined_to);
887 add_node_to (encoder, callee, false);
890 /* Add all possible targets for late devirtualization. */
891 if (flag_devirtualize)
892 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
893 if (edge->indirect_info->polymorphic)
895 unsigned int i;
896 void *cache_token;
897 bool final;
898 vec <cgraph_node *>targets
899 = possible_polymorphic_call_targets
900 (edge, &final, &cache_token);
901 if (!reachable_call_targets.add (cache_token))
903 for (i = 0; i < targets.length (); i++)
905 struct cgraph_node *callee = targets[i];
907 /* Adding external declarations into the unit serves
908 no purpose and just increases its boundary. */
909 if (callee->definition
910 && !lto_symtab_encoder_in_partition_p
911 (encoder, callee))
913 gcc_assert (!callee->global.inlined_to);
914 add_node_to (encoder, callee, false);
920 lto_symtab_encoder_delete (in_encoder);
921 return encoder;
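/* Usage sketch (editorial, hedged): a caller that owns a partition's raw
   encoder replaces it just before streaming, e.g.

     encoder = compute_ltrans_boundary (encoder);

   The input encoder is deleted by the call, so the old pointer must not be
   used afterwards.  */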
924 /* Output the part of the symtab recorded in the current partition's encoder. */
926 void
927 output_symtab (void)
929 struct cgraph_node *node;
930 struct lto_simple_output_block *ob;
931 lto_symtab_encoder_iterator lsei;
932 int i, n_nodes;
933 lto_symtab_encoder_t encoder;
935 if (flag_wpa)
936 output_cgraph_opt_summary ();
938 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
940 output_profile_summary (ob);
942 /* An encoder for cgraph nodes should have been created by
943 ipa_write_summaries_1. */
944 gcc_assert (ob->decl_state->symtab_node_encoder);
945 encoder = ob->decl_state->symtab_node_encoder;
947 /* Write out the nodes. We must first output a node and then its clones,
948 otherwise at the time of reading back the node there would be nothing to clone
949 from. */
950 n_nodes = lto_symtab_encoder_size (encoder);
951 for (i = 0; i < n_nodes; i++)
953 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
954 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
955 lto_output_node (ob, cnode, encoder);
956 else
957 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
960 /* Go over the nodes in SET again to write edges. */
961 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
962 lsei_next_function_in_partition (&lsei))
964 node = lsei_cgraph_node (lsei);
965 output_outgoing_cgraph_edges (node->callees, ob, encoder);
966 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
969 streamer_write_uhwi_stream (ob->main_stream, 0);
971 lto_destroy_simple_output_block (ob);
973 /* Emit toplevel asms.
974 When doing WPA we must output every asm just once. Since we do not partition asm
975 nodes at all, output them to the first output. This is kind of a hack, but should
976 work well. */
977 if (!asm_nodes_output)
979 asm_nodes_output = true;
980 lto_output_toplevel_asms ();
983 output_refs (encoder);
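/* Editorial summary (not in the original sources) of what output_symtab
   produces: a profile summary, one record per encoded symtab node, the
   outgoing edges of every function in the partition, a terminating zero,
   top-level asms emitted once from the first output, and finally a separate
   LTO_section_refs section written by output_refs.  input_symtab consumes
   the data in the same order.  */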
986 /* Return identifier encoded in IB as a plain string. */
988 static tree
989 read_identifier (struct lto_input_block *ib)
991 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
992 tree id;
994 if (ib->data[ib->p + len])
995 lto_section_overrun (ib);
996 if (!len)
998 ib->p++;
999 return NULL;
1001 id = get_identifier (ib->data + ib->p);
1002 ib->p += len + 1;
1003 return id;
1006 /* Return string encoded in IB, NULL if string is empty. */
1008 static const char *
1009 read_string (struct lto_input_block *ib)
1011 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1012 const char *str;
1014 if (ib->data[ib->p + len])
1015 lto_section_overrun (ib);
1016 if (!len)
1018 ib->p++;
1019 return NULL;
1021 str = ib->data + ib->p;
1022 ib->p += len + 1;
1023 return str;
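/* Editorial note: these helpers decode the nul-terminated strings written by
   the output code above, e.g. the writer side's

     streamer_write_data_stream (ob->main_stream, section,
                                 strlen (section) + 1);

   An empty string stands for "no section" or "no comdat group", which is
   why both readers return NULL for a zero length.  */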
1026 /* Overwrite the information in NODE based on FILE_DATA, TAG and the flags
1027 unpacked from BP. This is called either to initialize
1028 NODE or to replace the values in it, for instance because the first
1029 time we saw it, the function body was not available but now it
1030 is. BP is a bitpack with all the bitflags for NODE read from the
1031 stream. */
1033 static void
1034 input_overwrite_node (struct lto_file_decl_data *file_data,
1035 struct cgraph_node *node,
1036 enum LTO_symtab_tags tag,
1037 struct bitpack_d *bp)
1039 node->aux = (void *) tag;
1040 node->lto_file_data = file_data;
1042 node->local.local = bp_unpack_value (bp, 1);
1043 node->externally_visible = bp_unpack_value (bp, 1);
1044 node->definition = bp_unpack_value (bp, 1);
1045 node->local.versionable = bp_unpack_value (bp, 1);
1046 node->local.can_change_signature = bp_unpack_value (bp, 1);
1047 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
1048 node->force_output = bp_unpack_value (bp, 1);
1049 node->forced_by_abi = bp_unpack_value (bp, 1);
1050 node->unique_name = bp_unpack_value (bp, 1);
1051 node->body_removed = bp_unpack_value (bp, 1);
1052 node->implicit_section = bp_unpack_value (bp, 1);
1053 node->address_taken = bp_unpack_value (bp, 1);
1054 node->used_from_other_partition = bp_unpack_value (bp, 1);
1055 node->lowered = bp_unpack_value (bp, 1);
1056 node->analyzed = tag == LTO_symtab_analyzed_node;
1057 node->in_other_partition = bp_unpack_value (bp, 1);
1058 if (node->in_other_partition
1059 /* Avoid updating the decl when we are seeing just an inline clone.
1060 When inlining a function that has functions already inlined into it,
1061 we produce clones of inline clones.
1063 WPA partitioning might put each clone into a different unit and
1064 we might end up streaming an inline clone from another partition
1065 to support the clone we are interested in. */
1066 && (!node->clone_of
1067 || node->clone_of->decl != node->decl))
1069 DECL_EXTERNAL (node->decl) = 1;
1070 TREE_STATIC (node->decl) = 0;
1072 node->alias = bp_unpack_value (bp, 1);
1073 node->weakref = bp_unpack_value (bp, 1);
1074 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1075 node->only_called_at_startup = bp_unpack_value (bp, 1);
1076 node->only_called_at_exit = bp_unpack_value (bp, 1);
1077 node->tm_clone = bp_unpack_value (bp, 1);
1078 node->calls_comdat_local = bp_unpack_value (bp, 1);
1079 node->thunk.thunk_p = bp_unpack_value (bp, 1);
1080 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1081 LDPR_NUM_KNOWN);
1082 gcc_assert (flag_ltrans
1083 || (!node->in_other_partition
1084 && !node->used_from_other_partition));
1087 /* Return the identifier of the symbol that DECL is an alias of. */
1089 static tree
1090 get_alias_symbol (tree decl)
1092 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1093 return get_identifier (TREE_STRING_POINTER
1094 (TREE_VALUE (TREE_VALUE (alias))));
1097 /* Read a node from input_block IB. TAG is the node's tag just read.
1098 Return the node read or overwritten. */
1100 static struct cgraph_node *
1101 input_node (struct lto_file_decl_data *file_data,
1102 struct lto_input_block *ib,
1103 enum LTO_symtab_tags tag,
1104 vec<symtab_node *> nodes)
1106 gcc::pass_manager *passes = g->get_passes ();
1107 tree fn_decl;
1108 struct cgraph_node *node;
1109 struct bitpack_d bp;
1110 unsigned decl_index;
1111 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1112 int clone_ref;
1113 int order;
1114 int i, count;
1115 tree group;
1116 const char *section;
1118 order = streamer_read_hwi (ib) + order_base;
1119 clone_ref = streamer_read_hwi (ib);
1121 decl_index = streamer_read_uhwi (ib);
1122 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1124 if (clone_ref != LCC_NOT_FOUND)
1126 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1127 0, CGRAPH_FREQ_BASE, false,
1128 vNULL, false, NULL, NULL);
1130 else
1132 /* Declarations of functions can already be merged with a declaration
1133 from another input file. We keep the cgraph unmerged until after streaming
1134 of IPA passes is done. Always forcibly create a fresh node. */
1135 node = cgraph_node::create_empty ();
1136 node->decl = fn_decl;
1137 node->register_symbol ();
1140 node->order = order;
1141 if (order >= symtab_order)
1142 symtab_order = order + 1;
1144 node->count = streamer_read_gcov_count (ib);
1145 node->count_materialization_scale = streamer_read_hwi (ib);
1147 count = streamer_read_hwi (ib);
1148 node->ipa_transforms_to_apply = vNULL;
1149 for (i = 0; i < count; i++)
1151 opt_pass *pass;
1152 int pid = streamer_read_hwi (ib);
1154 gcc_assert (pid < passes->passes_by_id_size);
1155 pass = passes->passes_by_id[pid];
1156 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1159 if (tag == LTO_symtab_analyzed_node)
1160 ref = streamer_read_hwi (ib);
1162 group = read_identifier (ib);
1163 if (group)
1164 ref2 = streamer_read_hwi (ib);
1166 /* Make sure that we have not read this node before. Nodes that
1167 have already been read will have their tag stored in the 'aux'
1168 field. Since built-in functions can be referenced in multiple
1169 functions, they are expected to be read more than once. */
1170 if (node->aux && !DECL_BUILT_IN (node->decl))
1171 internal_error ("bytecode stream: found multiple instances of cgraph "
1172 "node with uid %d", node->uid);
1174 node->tp_first_run = streamer_read_uhwi (ib);
1176 bp = streamer_read_bitpack (ib);
1178 input_overwrite_node (file_data, node, tag, &bp);
1180 /* Store a reference for now, and fix up later to be a pointer. */
1181 node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
1183 if (group)
1185 node->set_comdat_group (group);
1186 /* Store a reference for now, and fix up later to be a pointer. */
1187 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1189 else
1190 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1191 section = read_string (ib);
1192 if (section)
1193 node->set_section_for_node (section);
1195 if (node->thunk.thunk_p)
1197 int type = streamer_read_uhwi (ib);
1198 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1199 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1201 node->thunk.fixed_offset = fixed_offset;
1202 node->thunk.this_adjusting = (type & 2);
1203 node->thunk.virtual_value = virtual_value;
1204 node->thunk.virtual_offset_p = (type & 4);
1206 if (node->alias && !node->analyzed && node->weakref)
1207 node->alias_target = get_alias_symbol (node->decl);
1208 node->profile_id = streamer_read_hwi (ib);
1209 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1210 node->set_init_priority (streamer_read_hwi (ib));
1211 if (DECL_STATIC_DESTRUCTOR (node->decl))
1212 node->set_fini_priority (streamer_read_hwi (ib));
1213 return node;
1216 /* Read a varpool node from input_block IB.
1217 Return the node read or overwritten. */
1219 static varpool_node *
1220 input_varpool_node (struct lto_file_decl_data *file_data,
1221 struct lto_input_block *ib)
1223 int decl_index;
1224 tree var_decl;
1225 varpool_node *node;
1226 struct bitpack_d bp;
1227 int ref = LCC_NOT_FOUND;
1228 int order;
1229 tree group;
1230 const char *section;
1232 order = streamer_read_hwi (ib) + order_base;
1233 decl_index = streamer_read_uhwi (ib);
1234 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1236 /* Declarations of variables can already be merged with a declaration
1237 from another input file. We keep the symtab unmerged until after streaming
1238 of IPA passes is done. Always forcibly create a fresh node. */
1239 node = varpool_node::create_empty ();
1240 node->decl = var_decl;
1241 node->register_symbol ();
1243 node->order = order;
1244 if (order >= symtab_order)
1245 symtab_order = order + 1;
1246 node->lto_file_data = file_data;
1248 bp = streamer_read_bitpack (ib);
1249 node->externally_visible = bp_unpack_value (&bp, 1);
1250 node->force_output = bp_unpack_value (&bp, 1);
1251 node->forced_by_abi = bp_unpack_value (&bp, 1);
1252 node->unique_name = bp_unpack_value (&bp, 1);
1253 node->body_removed = bp_unpack_value (&bp, 1);
1254 node->implicit_section = bp_unpack_value (&bp, 1);
1255 node->writeonly = bp_unpack_value (&bp, 1);
1256 node->definition = bp_unpack_value (&bp, 1);
1257 node->alias = bp_unpack_value (&bp, 1);
1258 node->weakref = bp_unpack_value (&bp, 1);
1259 node->analyzed = bp_unpack_value (&bp, 1);
1260 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1261 node->in_other_partition = bp_unpack_value (&bp, 1);
1262 if (node->in_other_partition)
1264 DECL_EXTERNAL (node->decl) = 1;
1265 TREE_STATIC (node->decl) = 0;
1267 if (node->alias && !node->analyzed && node->weakref)
1268 node->alias_target = get_alias_symbol (node->decl);
1269 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1270 node->used_by_single_function = bp_unpack_value (&bp, 1);
1271 group = read_identifier (ib);
1272 if (group)
1274 node->set_comdat_group (group);
1275 ref = streamer_read_hwi (ib);
1276 /* Store a reference for now, and fix up later to be a pointer. */
1277 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1279 else
1280 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1281 section = read_string (ib);
1282 if (section)
1283 node->set_section_for_node (section);
1284 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1285 LDPR_NUM_KNOWN);
1286 gcc_assert (flag_ltrans
1287 || (!node->in_other_partition
1288 && !node->used_from_other_partition));
1290 return node;
1293 /* Read a reference attached to REFERRING_NODE from input_block IB.
1294 NODES is the vector used to look up the referred symbol. */
1296 static void
1297 input_ref (struct lto_input_block *ib,
1298 symtab_node *referring_node,
1299 vec<symtab_node *> nodes)
1301 symtab_node *node = NULL;
1302 struct bitpack_d bp;
1303 enum ipa_ref_use use;
1304 bool speculative;
1305 struct ipa_ref *ref;
1307 bp = streamer_read_bitpack (ib);
1308 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1309 speculative = bp_unpack_value (&bp, 1);
1310 node = nodes[streamer_read_hwi (ib)];
1311 ref = referring_node->add_reference (node, use);
1312 ref->speculative = speculative;
1313 if (is_a <cgraph_node *> (referring_node))
1314 ref->lto_stmt_uid = streamer_read_hwi (ib);
1317 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1318 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1319 edge being read is indirect (in the sense that it has
1320 indirect_unknown_callee set). */
1322 static void
1323 input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
1324 bool indirect)
1326 struct cgraph_node *caller, *callee;
1327 struct cgraph_edge *edge;
1328 unsigned int stmt_id;
1329 gcov_type count;
1330 int freq;
1331 cgraph_inline_failed_t inline_failed;
1332 struct bitpack_d bp;
1333 int ecf_flags = 0;
1335 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1336 if (caller == NULL || caller->decl == NULL_TREE)
1337 internal_error ("bytecode stream: no caller found while reading edge");
1339 if (!indirect)
1341 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1342 if (callee == NULL || callee->decl == NULL_TREE)
1343 internal_error ("bytecode stream: no callee found while reading edge");
1345 else
1346 callee = NULL;
1348 count = streamer_read_gcov_count (ib);
1350 bp = streamer_read_bitpack (ib);
1351 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1352 stmt_id = bp_unpack_var_len_unsigned (&bp);
1353 freq = (int) bp_unpack_var_len_unsigned (&bp);
1355 if (indirect)
1356 edge = caller->create_indirect_edge (NULL, 0, count, freq);
1357 else
1358 edge = caller->create_edge (callee, NULL, count, freq);
1360 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1361 edge->speculative = bp_unpack_value (&bp, 1);
1362 edge->lto_stmt_uid = stmt_id;
1363 edge->inline_failed = inline_failed;
1364 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1365 edge->can_throw_external = bp_unpack_value (&bp, 1);
1366 if (indirect)
1368 if (bp_unpack_value (&bp, 1))
1369 ecf_flags |= ECF_CONST;
1370 if (bp_unpack_value (&bp, 1))
1371 ecf_flags |= ECF_PURE;
1372 if (bp_unpack_value (&bp, 1))
1373 ecf_flags |= ECF_NORETURN;
1374 if (bp_unpack_value (&bp, 1))
1375 ecf_flags |= ECF_MALLOC;
1376 if (bp_unpack_value (&bp, 1))
1377 ecf_flags |= ECF_NOTHROW;
1378 if (bp_unpack_value (&bp, 1))
1379 ecf_flags |= ECF_RETURNS_TWICE;
1380 edge->indirect_info->ecf_flags = ecf_flags;
1381 edge->indirect_info->common_target_id = streamer_read_hwi (ib);
1382 if (edge->indirect_info->common_target_id)
1383 edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
1388 /* Read a cgraph from IB using the info in FILE_DATA. */
1390 static vec<symtab_node *>
1391 input_cgraph_1 (struct lto_file_decl_data *file_data,
1392 struct lto_input_block *ib)
1394 enum LTO_symtab_tags tag;
1395 vec<symtab_node *> nodes = vNULL;
1396 symtab_node *node;
1397 unsigned i;
1399 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1400 order_base = symtab_order;
1401 while (tag)
1403 if (tag == LTO_symtab_edge)
1404 input_edge (ib, nodes, false);
1405 else if (tag == LTO_symtab_indirect_edge)
1406 input_edge (ib, nodes, true);
1407 else if (tag == LTO_symtab_variable)
1409 node = input_varpool_node (file_data, ib);
1410 nodes.safe_push (node);
1411 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1413 else
1415 node = input_node (file_data, ib, tag, nodes);
1416 if (node == NULL || node->decl == NULL_TREE)
1417 internal_error ("bytecode stream: found empty cgraph node");
1418 nodes.safe_push (node);
1419 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1422 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1425 lto_input_toplevel_asms (file_data, order_base);
1427 /* AUX pointers should all be non-zero for function nodes read from the stream. */
1428 #ifdef ENABLE_CHECKING
1429 FOR_EACH_VEC_ELT (nodes, i, node)
1430 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1431 #endif
1432 FOR_EACH_VEC_ELT (nodes, i, node)
1434 int ref;
1435 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1437 ref = (int) (intptr_t) cnode->global.inlined_to;
1439 /* We share declarations of builtins, so we may read the same node twice. */
1440 if (!node->aux)
1441 continue;
1442 node->aux = NULL;
1444 /* Fixup inlined_to from reference to pointer. */
1445 if (ref != LCC_NOT_FOUND)
1446 dyn_cast<cgraph_node *> (node)->global.inlined_to
1447 = dyn_cast<cgraph_node *> (nodes[ref]);
1448 else
1449 cnode->global.inlined_to = NULL;
1452 ref = (int) (intptr_t) node->same_comdat_group;
1454 /* Fixup same_comdat_group from reference to pointer. */
1455 if (ref != LCC_NOT_FOUND)
1456 node->same_comdat_group = nodes[ref];
1457 else
1458 node->same_comdat_group = NULL;
1460 FOR_EACH_VEC_ELT (nodes, i, node)
1461 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1462 return nodes;
1465 /* Input ipa_refs. */
1467 static void
1468 input_refs (struct lto_input_block *ib,
1469 vec<symtab_node *> nodes)
1471 int count;
1472 int idx;
1473 while (true)
1475 symtab_node *node;
1476 count = streamer_read_uhwi (ib);
1477 if (!count)
1478 break;
1479 idx = streamer_read_uhwi (ib);
1480 node = nodes[idx];
1481 while (count)
1483 input_ref (ib, node, nodes);
1484 count--;
1490 static struct gcov_ctr_summary lto_gcov_summary;
1492 /* Input profile_info from IB. */
1493 static void
1494 input_profile_summary (struct lto_input_block *ib,
1495 struct lto_file_decl_data *file_data)
1497 unsigned h_ix;
1498 struct bitpack_d bp;
1499 unsigned int runs = streamer_read_uhwi (ib);
1500 if (runs)
1502 file_data->profile_info.runs = runs;
1503 file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
1504 file_data->profile_info.sum_all = streamer_read_gcov_count (ib);
1506 memset (file_data->profile_info.histogram, 0,
1507 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1508 /* Input the bitpack of non-zero histogram indices. */
1509 bp = streamer_read_bitpack (ib);
1510 /* Read in and unpack the full bitpack, flagging non-zero
1511 histogram entries by setting the num_counters non-zero. */
1512 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1514 file_data->profile_info.histogram[h_ix].num_counters
1515 = bp_unpack_value (&bp, 1);
1517 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1519 if (!file_data->profile_info.histogram[h_ix].num_counters)
1520 continue;
1522 file_data->profile_info.histogram[h_ix].num_counters
1523 = streamer_read_gcov_count (ib);
1524 file_data->profile_info.histogram[h_ix].min_value
1525 = streamer_read_gcov_count (ib);
1526 file_data->profile_info.histogram[h_ix].cum_value
1527 = streamer_read_gcov_count (ib);
1529 /* IPA-profile computes the hot bb threshold based on the cumulated
1530 whole-program profile. We need to stream it down to ltrans. */
1531 if (flag_ltrans)
1532 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1537 /* Rescale profile summaries to the same number of runs in the whole unit. */
1539 static void
1540 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1542 struct lto_file_decl_data *file_data;
1543 unsigned int j, h_ix;
1544 gcov_unsigned_t max_runs = 0;
1545 struct cgraph_node *node;
1546 struct cgraph_edge *edge;
1547 gcov_type saved_sum_all = 0;
1548 gcov_ctr_summary *saved_profile_info = 0;
1549 int saved_scale = 0;
1551 /* Find the unit with the maximal number of runs. If we ever get serious about
1552 roundoff errors, we might also consider computing the least common
1553 multiple. */
1554 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1555 if (max_runs < file_data->profile_info.runs)
1556 max_runs = file_data->profile_info.runs;
1558 if (!max_runs)
1559 return;
1561 /* Simple overflow check. We probably don't need to support that many train
1562 runs. Such a large value probably implies data corruption anyway. */
1563 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1565 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1566 INT_MAX / REG_BR_PROB_BASE);
1567 return;
1570 profile_info = &lto_gcov_summary;
1571 lto_gcov_summary.runs = max_runs;
1572 lto_gcov_summary.sum_max = 0;
1573 memset (lto_gcov_summary.histogram, 0,
1574 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1576 /* Rescale all units to the maximal number of runs.
1577 sum_max cannot be easily merged, as we have no idea which files come from
1578 the same run. We do not use the info anyway, so leave it 0. */
1579 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1580 if (file_data->profile_info.runs)
1582 int scale = GCOV_COMPUTE_SCALE (max_runs,
1583 file_data->profile_info.runs);
1584 lto_gcov_summary.sum_max
1585 = MAX (lto_gcov_summary.sum_max,
1586 apply_scale (file_data->profile_info.sum_max, scale));
1587 lto_gcov_summary.sum_all
1588 = MAX (lto_gcov_summary.sum_all,
1589 apply_scale (file_data->profile_info.sum_all, scale));
1590 /* Save a pointer to the profile_info with the largest
1591 scaled sum_all and the scale for use in merging the
1592 histogram. */
1593 if (!saved_profile_info
1594 || lto_gcov_summary.sum_all > saved_sum_all)
1596 saved_profile_info = &file_data->profile_info;
1597 saved_sum_all = lto_gcov_summary.sum_all;
1598 saved_scale = scale;
1602 gcc_assert (saved_profile_info);
1604 /* Scale up the histogram from the profile that had the largest
1605 scaled sum_all above. */
1606 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1608 /* Scale up the min value as we did the corresponding sum_all
1609 above. Use that to find the new histogram index. */
1610 gcov_type scaled_min
1611 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1612 saved_scale);
1613 /* The new index may be shared with another scaled histogram entry,
1614 so we need to account for a non-zero histogram entry at new_ix. */
1615 unsigned new_ix = gcov_histo_index (scaled_min);
1616 lto_gcov_summary.histogram[new_ix].min_value
1617 = (lto_gcov_summary.histogram[new_ix].num_counters
1618 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1619 : scaled_min);
1620 /* Some of the scaled counter values would ostensibly need to be placed
1621 into different (larger) histogram buckets, but we keep things simple
1622 here and place the scaled cumulative counter value in the bucket
1623 corresponding to the scaled minimum counter value. */
1624 lto_gcov_summary.histogram[new_ix].cum_value
1625 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1626 saved_scale);
1627 lto_gcov_summary.histogram[new_ix].num_counters
1628 += saved_profile_info->histogram[h_ix].num_counters;
1631 /* Watch roundoff errors. */
1632 if (lto_gcov_summary.sum_max < max_runs)
1633 lto_gcov_summary.sum_max = max_runs;
1635 /* If merging already happened at WPA time, we are done. */
1636 if (flag_ltrans)
1637 return;
1639 /* Now compute count_materialization_scale of each node.
1640 During LTRANS we already have values of count_materialization_scale
1641 computed, so just update them. */
1642 FOR_EACH_FUNCTION (node)
1643 if (node->lto_file_data
1644 && node->lto_file_data->profile_info.runs)
1646 int scale;
1648 scale = RDIV (node->count_materialization_scale * max_runs,
1649 node->lto_file_data->profile_info.runs);
1650 node->count_materialization_scale = scale;
1651 if (scale < 0)
1652 fatal_error ("Profile information in %s corrupted",
1653 file_data->file_name);
1655 if (scale == REG_BR_PROB_BASE)
1656 continue;
1657 for (edge = node->callees; edge; edge = edge->next_callee)
1658 edge->count = apply_scale (edge->count, scale);
1659 node->count = apply_scale (node->count, scale);
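/* Worked example (editorial, hedged): if one unit was trained with 4 runs
   and another with 1, max_runs is 4 and the second unit gets
   scale = GCOV_COMPUTE_SCALE (4, 1); apply_scale (count, scale) then
   multiplies its counters by roughly 4, so both units are expressed as if
   they had seen the same number of runs.  */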
1663 /* Input and merge the symtab from each of the .o files passed to
1664 lto1. */
1666 void
1667 input_symtab (void)
1669 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1670 struct lto_file_decl_data *file_data;
1671 unsigned int j = 0;
1672 struct cgraph_node *node;
1674 while ((file_data = file_data_vec[j++]))
1676 const char *data;
1677 size_t len;
1678 struct lto_input_block *ib;
1679 vec<symtab_node *> nodes;
1681 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1682 &data, &len);
1683 if (!ib)
1684 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1685 input_profile_summary (ib, file_data);
1686 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1687 nodes = input_cgraph_1 (file_data, ib);
1688 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1689 ib, data, len);
1691 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1692 &data, &len);
1693 if (!ib)
1694 fatal_error ("cannot find LTO section refs in %s",
1695 file_data->file_name);
1696 input_refs (ib, nodes);
1697 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1698 ib, data, len);
1699 if (flag_ltrans)
1700 input_cgraph_opt_summary (nodes);
1701 nodes.release ();
1704 merge_profile_summaries (file_data_vec);
1705 get_working_sets ();
1708 /* Clear out the aux field that was used to store enough state to
1709 tell which nodes should be overwritten. */
1710 FOR_EACH_FUNCTION (node)
1712 /* Some nodes may have been created by cgraph_node. This
1713 happens when the callgraph contains nested functions. If the
1714 node for the parent function was never emitted to the gimple
1715 file, cgraph_node will create a node for it when setting the
1716 context of the nested function. */
1717 if (node->lto_file_data)
1718 node->aux = NULL;
1722 /* True when we need optimization summary for NODE. */
1724 static int
1725 output_cgraph_opt_summary_p (struct cgraph_node *node)
1727 return (node->clone_of
1728 && (node->clone.tree_map
1729 || node->clone.args_to_skip
1730 || node->clone.combined_args_to_skip));
1733 /* Output optimization summary for EDGE to OB. */
1734 static void
1735 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1736 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1740 /* Output optimization summary for NODE to OB. */
1742 static void
1743 output_node_opt_summary (struct output_block *ob,
1744 struct cgraph_node *node,
1745 lto_symtab_encoder_t encoder)
1747 unsigned int index;
1748 bitmap_iterator bi;
1749 struct ipa_replace_map *map;
1750 struct bitpack_d bp;
1751 int i;
1752 struct cgraph_edge *e;
1754 if (node->clone.args_to_skip)
1756 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1757 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1758 streamer_write_uhwi (ob, index);
1760 else
1761 streamer_write_uhwi (ob, 0);
1762 if (node->clone.combined_args_to_skip)
1764 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1765 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1766 streamer_write_uhwi (ob, index);
1768 else
1769 streamer_write_uhwi (ob, 0);
1770 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1771 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1773 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1774 mechanism to store function local declarations into summaries. */
1775 gcc_assert (!map->old_tree);
1776 streamer_write_uhwi (ob, map->parm_num);
1777 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1778 stream_write_tree (ob, map->new_tree, true);
1779 bp = bitpack_create (ob->main_stream);
1780 bp_pack_value (&bp, map->replace_p, 1);
1781 bp_pack_value (&bp, map->ref_p, 1);
1782 streamer_write_bitpack (&bp);
1785 if (lto_symtab_encoder_in_partition_p (encoder, node))
1787 for (e = node->callees; e; e = e->next_callee)
1788 output_edge_opt_summary (ob, e);
1789 for (e = node->indirect_calls; e; e = e->next_callee)
1790 output_edge_opt_summary (ob, e);
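/* Editorial note: input_node_opt_summary below reads these records in the
   same order: the args_to_skip bitmap, the combined_args_to_skip bitmap,
   and then the tree_map entries (parm_num, new_tree and the
   replace_p/ref_p bit pair).  */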
1794 /* Output optimization summaries stored in callgraph.
1795 At the moment it is the clone info structure. */
1797 static void
1798 output_cgraph_opt_summary (void)
1800 int i, n_nodes;
1801 lto_symtab_encoder_t encoder;
1802 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1803 unsigned count = 0;
1805 ob->symbol = NULL;
1806 encoder = ob->decl_state->symtab_node_encoder;
1807 n_nodes = lto_symtab_encoder_size (encoder);
1808 for (i = 0; i < n_nodes; i++)
1810 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1811 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1812 if (cnode && output_cgraph_opt_summary_p (cnode))
1813 count++;
1815 streamer_write_uhwi (ob, count);
1816 for (i = 0; i < n_nodes; i++)
1818 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1819 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1820 if (cnode && output_cgraph_opt_summary_p (cnode))
1822 streamer_write_uhwi (ob, i);
1823 output_node_opt_summary (ob, cnode, encoder);
1826 produce_asm (ob, NULL);
1827 destroy_output_block (ob);
1830 /* Input optimisation summary of EDGE. */
1832 static void
1833 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1834 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1838 /* Input optimisation summary of NODE. */
1840 static void
1841 input_node_opt_summary (struct cgraph_node *node,
1842 struct lto_input_block *ib_main,
1843 struct data_in *data_in)
1845 int i;
1846 int count;
1847 int bit;
1848 struct bitpack_d bp;
1849 struct cgraph_edge *e;
1851 count = streamer_read_uhwi (ib_main);
1852 if (count)
1853 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1854 for (i = 0; i < count; i++)
1856 bit = streamer_read_uhwi (ib_main);
1857 bitmap_set_bit (node->clone.args_to_skip, bit);
1859 count = streamer_read_uhwi (ib_main);
1860 if (count)
1861 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1862 for (i = 0; i < count; i++)
1864 bit = streamer_read_uhwi (ib_main);
1865 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1867 count = streamer_read_uhwi (ib_main);
1868 for (i = 0; i < count; i++)
1870 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
1872 vec_safe_push (node->clone.tree_map, map);
1873 map->parm_num = streamer_read_uhwi (ib_main);
1874 map->old_tree = NULL;
1875 map->new_tree = stream_read_tree (ib_main, data_in);
1876 bp = streamer_read_bitpack (ib_main);
1877 map->replace_p = bp_unpack_value (&bp, 1);
1878 map->ref_p = bp_unpack_value (&bp, 1);
1880 for (e = node->callees; e; e = e->next_callee)
1881 input_edge_opt_summary (e, ib_main);
1882 for (e = node->indirect_calls; e; e = e->next_callee)
1883 input_edge_opt_summary (e, ib_main);
1886 /* Read section in file FILE_DATA of length LEN with data DATA. */
1888 static void
1889 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1890 const char *data, size_t len,
1891 vec<symtab_node *> nodes)
1893 const struct lto_function_header *header =
1894 (const struct lto_function_header *) data;
1895 const int cfg_offset = sizeof (struct lto_function_header);
1896 const int main_offset = cfg_offset + header->cfg_size;
1897 const int string_offset = main_offset + header->main_size;
1898 struct data_in *data_in;
1899 unsigned int i;
1900 unsigned int count;
1902 lto_input_block ib_main ((const char *) data + main_offset,
1903 header->main_size);
1905 data_in =
1906 lto_data_in_create (file_data, (const char *) data + string_offset,
1907 header->string_size, vNULL);
1908 count = streamer_read_uhwi (&ib_main);
1910 for (i = 0; i < count; i++)
1912 int ref = streamer_read_uhwi (&ib_main);
1913 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
1914 &ib_main, data_in);
1916 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1917 len);
1918 lto_data_in_delete (data_in);
1921 /* Input optimization summary of cgraph. */
1923 static void
1924 input_cgraph_opt_summary (vec<symtab_node *> nodes)
1926 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1927 struct lto_file_decl_data *file_data;
1928 unsigned int j = 0;
1930 while ((file_data = file_data_vec[j++]))
1932 size_t len;
1933 const char *data =
1934 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1935 &len);
1937 if (data)
1938 input_cgraph_opt_section (file_data, data, len, nodes);