gcc/lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2014 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stringpool.h"
29 #include "basic-block.h"
30 #include "tree-ssa-alias.h"
31 #include "internal-fn.h"
32 #include "gimple-expr.h"
33 #include "is-a.h"
34 #include "gimple.h"
35 #include "expr.h"
36 #include "flags.h"
37 #include "params.h"
38 #include "input.h"
39 #include "hashtab.h"
40 #include "hash-set.h"
41 #include "langhooks.h"
42 #include "bitmap.h"
43 #include "function.h"
44 #include "diagnostic-core.h"
45 #include "except.h"
46 #include "timevar.h"
47 #include "lto-streamer.h"
48 #include "data-streamer.h"
49 #include "tree-streamer.h"
50 #include "gcov-io.h"
51 #include "tree-pass.h"
52 #include "profile.h"
53 #include "context.h"
54 #include "pass_manager.h"
55 #include "ipa-utils.h"
57 /* True when asm nodes have been output. */
58 bool asm_nodes_output = false;
60 static void output_cgraph_opt_summary (void);
61 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
63 /* Number of LDPR values known to GCC. */
64 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
66 /* All node orders are offset by ORDER_BASE. */
67 static int order_base;
69 /* Cgraph streaming is organized as a set of records whose type
70 is indicated by a tag. */
71 enum LTO_symtab_tags
73 /* Must leave 0 for the stopper. */
75 /* Cgraph node without body available. */
76 LTO_symtab_unavail_node = 1,
77 /* Cgraph node with function body. */
78 LTO_symtab_analyzed_node,
79 /* Cgraph edges. */
80 LTO_symtab_edge,
81 LTO_symtab_indirect_edge,
82 LTO_symtab_variable,
83 LTO_symtab_last_tag
86 /* Create a new symtab encoder.
87 If FOR_INPUT, the encoder allocates only the data structures needed
88 to read the symtab. */
90 lto_symtab_encoder_t
91 lto_symtab_encoder_new (bool for_input)
93 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
95 if (!for_input)
96 encoder->map = new hash_map<symtab_node *, size_t>;
97 encoder->nodes.create (0);
98 return encoder;
102 /* Delete ENCODER and its components. */
104 void
105 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
107 encoder->nodes.release ();
108 if (encoder->map)
109 delete encoder->map;
110 free (encoder);
114 /* Return the existing reference number of NODE in the symtab encoder
115 ENCODER. Assign a new reference if this is the first time
116 NODE is encoded. */
119 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
120 symtab_node *node)
122 int ref;
124 if (!encoder->map)
126 lto_encoder_entry entry = {node, false, false, false};
128 ref = encoder->nodes.length ();
129 encoder->nodes.safe_push (entry);
130 return ref;
133 size_t *slot = encoder->map->get (node);
134 if (!slot || !*slot)
136 lto_encoder_entry entry = {node, false, false, false};
137 ref = encoder->nodes.length ();
138 if (!slot)
139 encoder->map->put (node, ref + 1);
140 encoder->nodes.safe_push (entry);
142 else
143 ref = *slot - 1;
145 return ref;
148 /* Remove NODE from encoder. */
150 bool
151 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
152 symtab_node *node)
154 int index;
155 lto_encoder_entry last_node;
157 size_t *slot = encoder->map->get (node);
158 if (slot == NULL || !*slot)
159 return false;
161 index = *slot - 1;
162 gcc_checking_assert (encoder->nodes[index].node == node);
164 /* Remove from vector. We do this by swapping node with the last element
165 of the vector. */
166 last_node = encoder->nodes.pop ();
167 if (last_node.node != node)
169 gcc_assert (encoder->map->put (last_node.node, index + 1));
171 /* Move the last element to the original spot of NODE. */
172 encoder->nodes[index] = last_node;
175 /* Remove element from hash table. */
176 encoder->map->remove (node);
177 return true;
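
/* A minimal standalone sketch of the encoder scheme used above, not GCC's
   actual lto_symtab_encoder API: references are kept in the hash map as
   index + 1 so that 0 can mean "absent", and deletion swaps the last vector
   entry into the vacated slot so both operations stay O(1). All names below
   are illustrative. */

#include <cassert>
#include <cstddef>
#include <unordered_map>
#include <vector>

struct toy_encoder
{
  std::vector<void *> nodes;               /* index -> node */
  std::unordered_map<void *, size_t> map;  /* node  -> index + 1 */

  /* Return the existing reference of NODE, or assign a new one.  */
  int encode (void *node)
  {
    size_t &slot = map[node];              /* 0 means "not present".  */
    if (slot == 0)
      {
        nodes.push_back (node);
        slot = nodes.size ();              /* store index + 1 */
      }
    return (int) (slot - 1);
  }

  /* Remove NODE by moving the last element into its slot.  */
  bool remove (void *node)
  {
    auto it = map.find (node);
    if (it == map.end ())
      return false;
    size_t index = it->second - 1;
    void *last = nodes.back ();
    nodes.pop_back ();
    if (last != node)
      {
        map[last] = index + 1;             /* re-point the moved entry */
        nodes[index] = last;
      }
    map.erase (it);
    return true;
  }
};

int main ()
{
  int a, b;
  toy_encoder e;
  assert (e.encode (&a) == 0 && e.encode (&b) == 1);
  assert (e.encode (&a) == 0);                  /* stable reference */
  assert (e.remove (&a) && e.encode (&b) == 0); /* b moved into slot 0 */
}
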
181 /* Return TRUE if we should encode the body of NODE (if any). */
183 bool
184 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
185 struct cgraph_node *node)
187 int index = lto_symtab_encoder_lookup (encoder, node);
188 return encoder->nodes[index].body;
191 /* Specify that we should encode the body of NODE (if any). */
193 static void
194 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
195 struct cgraph_node *node)
197 int index = lto_symtab_encoder_encode (encoder, node);
198 gcc_checking_assert (encoder->nodes[index].node == node);
199 encoder->nodes[index].body = true;
202 /* Return TRUE if we should encode initializer of NODE (if any). */
204 bool
205 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
206 varpool_node *node)
208 int index = lto_symtab_encoder_lookup (encoder, node);
209 if (index == LCC_NOT_FOUND)
210 return false;
211 return encoder->nodes[index].initializer;
214 /* Specify that we should encode the initializer of NODE (if any). */
216 static void
217 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
218 varpool_node *node)
220 int index = lto_symtab_encoder_lookup (encoder, node);
221 encoder->nodes[index].initializer = true;
224 /* Return TRUE if NODE is in the partition described by ENCODER. */
226 bool
227 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
228 symtab_node *node)
230 int index = lto_symtab_encoder_lookup (encoder, node);
231 if (index == LCC_NOT_FOUND)
232 return false;
233 return encoder->nodes[index].in_partition;
236 /* Specify that NODE is in the partition described by ENCODER. */
238 void
239 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
240 symtab_node *node)
242 int index = lto_symtab_encoder_encode (encoder, node);
243 encoder->nodes[index].in_partition = true;
246 /* Output the cgraph EDGE to OB using ENCODER. */
248 static void
249 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
250 lto_symtab_encoder_t encoder)
252 unsigned int uid;
253 intptr_t ref;
254 struct bitpack_d bp;
256 if (edge->indirect_unknown_callee)
257 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
258 LTO_symtab_indirect_edge);
259 else
260 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
261 LTO_symtab_edge);
263 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
264 gcc_assert (ref != LCC_NOT_FOUND);
265 streamer_write_hwi_stream (ob->main_stream, ref);
267 if (!edge->indirect_unknown_callee)
269 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
270 gcc_assert (ref != LCC_NOT_FOUND);
271 streamer_write_hwi_stream (ob->main_stream, ref);
274 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
276 bp = bitpack_create (ob->main_stream);
277 uid = (!gimple_has_body_p (edge->caller->decl)
278 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
279 bp_pack_enum (&bp, cgraph_inline_failed_t,
280 CIF_N_REASONS, edge->inline_failed);
281 bp_pack_var_len_unsigned (&bp, uid);
282 bp_pack_var_len_unsigned (&bp, edge->frequency);
283 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
284 bp_pack_value (&bp, edge->speculative, 1);
285 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
286 bp_pack_value (&bp, edge->can_throw_external, 1);
287 if (edge->indirect_unknown_callee)
289 int flags = edge->indirect_info->ecf_flags;
290 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
291 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
292 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
293 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
294 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
295 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
296 /* Flags that should not appear on indirect calls. */
297 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
298 | ECF_MAY_BE_ALLOCA
299 | ECF_SIBCALL
300 | ECF_LEAF
301 | ECF_NOVOPS)));
303 streamer_write_bitpack (&bp);
304 if (edge->indirect_unknown_callee)
306 streamer_write_hwi_stream (ob->main_stream,
307 edge->indirect_info->common_target_id);
308 if (edge->indirect_info->common_target_id)
309 streamer_write_hwi_stream
310 (ob->main_stream, edge->indirect_info->common_target_probability);
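
/* A standalone sketch of the bit-packing idea behind bp_pack_value /
   bp_unpack_value used above: boolean flags and small enums are packed
   LSB-first and must be unpacked in exactly the order they were packed.
   This toy packer is not the lto-streamer wire format; word size and names
   are illustrative only. */

#include <cassert>
#include <cstdint>

/* Pack small values LSB-first into one 64-bit word.  */
struct toy_bitpack
{
  uint64_t word = 0;
  unsigned pos = 0;

  void pack (uint64_t val, unsigned nbits)
  {
    assert (pos + nbits <= 64 && val < (uint64_t (1) << nbits));
    word |= val << pos;
    pos += nbits;
  }
};

/* Values must be unpacked in exactly the order they were packed.  */
struct toy_bitunpack
{
  uint64_t word;
  unsigned pos;

  uint64_t unpack (unsigned nbits)
  {
    uint64_t val = (word >> pos) & ((uint64_t (1) << nbits) - 1);
    pos += nbits;
    return val;
  }
};

int main ()
{
  toy_bitpack bp;
  bp.pack (1, 1);        /* e.g. a 1-bit flag such as edge->speculative */
  bp.pack (0, 1);        /* e.g. edge->call_stmt_cannot_inline_p */
  bp.pack (17, 6);       /* e.g. a small enum value */

  toy_bitunpack up = { bp.word, 0 };
  assert (up.unpack (1) == 1);
  assert (up.unpack (1) == 0);
  assert (up.unpack (6) == 17);
}
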
314 /* Return TRUE if NODE is referenced from other partitions. */
316 bool
317 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
319 int i;
320 struct ipa_ref *ref = NULL;
322 for (i = 0; node->iterate_referring (i, ref); i++)
324 if (ref->referring->in_other_partition
325 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
326 return true;
328 return false;
331 /* Return true when NODE is reachable from another partition. */
333 bool
334 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
336 struct cgraph_edge *e;
337 if (!node->definition)
338 return false;
339 if (node->global.inlined_to)
340 return false;
341 for (e = node->callers; e; e = e->next_caller)
342 if (e->caller->in_other_partition
343 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
344 return true;
345 return false;
348 /* Return TRUE if NODE is referenced from this partition. */
350 bool
351 referenced_from_this_partition_p (symtab_node *node,
352 lto_symtab_encoder_t encoder)
354 int i;
355 struct ipa_ref *ref = NULL;
357 for (i = 0; node->iterate_referring (i, ref); i++)
358 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
359 return true;
360 return false;
363 /* Return true when NODE is reachable from this partition. */
365 bool
366 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
368 struct cgraph_edge *e;
369 for (e = node->callers; e; e = e->next_caller)
370 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
371 return true;
372 return false;
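
/* A toy illustration of the boundary predicates above: a node is reachable
   from outside the partition when some caller either is already marked as
   living in another partition or simply is not in the set being streamed.
   The types below are stand-ins, not GCC's cgraph. */

#include <cassert>
#include <set>
#include <vector>

struct toy_node
{
  bool in_other_partition = false;
  std::vector<toy_node *> callers;     /* incoming call edges */
};

/* Analogue of reachable_from_other_partition_p: true when some caller
   is outside the partition SET (or already marked as such).  */
static bool
reachable_from_other_partition (const toy_node *node,
                                const std::set<const toy_node *> &partition)
{
  for (const toy_node *caller : node->callers)
    if (caller->in_other_partition || !partition.count (caller))
      return true;
  return false;
}

int main ()
{
  toy_node a, b, outside;
  b.callers = { &a };
  std::set<const toy_node *> partition = { &a, &b };
  assert (!reachable_from_other_partition (&b, partition));
  b.callers.push_back (&outside);      /* caller not streamed with us */
  assert (reachable_from_other_partition (&b, partition));
}
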
375 /* Output the cgraph NODE to OB. ENCODER is used to find the
376 reference number of NODE->inlined_to and describes the set of nodes we
377 are writing to the current file. If NODE is not in that set, then NODE
378 is a boundary node and we pretend NODE just has a
379 decl and no callees. */
383 static void
384 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
385 lto_symtab_encoder_t encoder)
387 unsigned int tag;
388 struct bitpack_d bp;
389 bool boundary_p;
390 intptr_t ref;
391 bool in_other_partition = false;
392 struct cgraph_node *clone_of, *ultimate_clone_of;
393 ipa_opt_pass_d *pass;
394 int i;
395 bool alias_p;
396 const char *comdat;
397 const char *section;
398 tree group;
400 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
402 if (node->analyzed && !boundary_p)
403 tag = LTO_symtab_analyzed_node;
404 else
405 tag = LTO_symtab_unavail_node;
407 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
408 tag);
409 streamer_write_hwi_stream (ob->main_stream, node->order);
411 /* In WPA mode, we only output part of the call-graph. Also, we
412 fake cgraph node attributes. There are two cases that we care about.
414 Boundary nodes: There are nodes that are not part of SET but are
415 called from within SET. We artificially make them look like
416 externally visible nodes with no function body.
418 Cherry-picked nodes: These are nodes we pulled from other
419 translation units into SET during IPA-inlining. We make them look like
420 local static nodes to prevent clashes with other local statics. */
421 if (boundary_p && node->analyzed
422 && node->get_partitioning_class () == SYMBOL_PARTITION)
424 /* Inline clones cannot be part of the boundary.
425 gcc_assert (!node->global.inlined_to);
427 FIXME: At the moment they can be, when the partition contains an inline
428 clone that is a clone of an inline clone from outside the partition. We can
429 reshape the clone tree and make another node the root, but it
430 needs a bit of extra work and will be promptly done by cgraph_remove_node
431 after reading back. */
432 in_other_partition = 1;
435 clone_of = node->clone_of;
436 while (clone_of
437 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
438 if (clone_of->prev_sibling_clone)
439 clone_of = clone_of->prev_sibling_clone;
440 else
441 clone_of = clone_of->clone_of;
443 /* See if the body of the master function is output. If not, we are seeing only
444 a declaration and we do not need to pass down the clone tree. */
445 ultimate_clone_of = clone_of;
446 while (ultimate_clone_of && ultimate_clone_of->clone_of)
447 ultimate_clone_of = ultimate_clone_of->clone_of;
449 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
450 clone_of = NULL;
452 if (tag == LTO_symtab_analyzed_node)
453 gcc_assert (clone_of || !node->clone_of);
454 if (!clone_of)
455 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
456 else
457 streamer_write_hwi_stream (ob->main_stream, ref);
460 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
461 streamer_write_gcov_count_stream (ob->main_stream, node->count);
462 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
464 streamer_write_hwi_stream (ob->main_stream,
465 node->ipa_transforms_to_apply.length ());
466 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
467 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
469 if (tag == LTO_symtab_analyzed_node)
471 if (node->global.inlined_to)
473 ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
474 gcc_assert (ref != LCC_NOT_FOUND);
476 else
477 ref = LCC_NOT_FOUND;
479 streamer_write_hwi_stream (ob->main_stream, ref);
482 group = node->get_comdat_group ();
483 if (group)
484 comdat = IDENTIFIER_POINTER (group);
485 else
486 comdat = "";
487 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
489 if (group)
491 if (node->same_comdat_group && !boundary_p)
493 ref = lto_symtab_encoder_lookup (encoder,
494 node->same_comdat_group);
495 gcc_assert (ref != LCC_NOT_FOUND);
497 else
498 ref = LCC_NOT_FOUND;
499 streamer_write_hwi_stream (ob->main_stream, ref);
502 section = node->get_section ();
503 if (!section)
504 section = "";
506 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
508 bp = bitpack_create (ob->main_stream);
509 bp_pack_value (&bp, node->local.local, 1);
510 bp_pack_value (&bp, node->externally_visible, 1);
511 bp_pack_value (&bp, node->definition, 1);
512 bp_pack_value (&bp, node->local.versionable, 1);
513 bp_pack_value (&bp, node->local.can_change_signature, 1);
514 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
515 bp_pack_value (&bp, node->force_output, 1);
516 bp_pack_value (&bp, node->forced_by_abi, 1);
517 bp_pack_value (&bp, node->unique_name, 1);
518 bp_pack_value (&bp, node->body_removed, 1);
519 bp_pack_value (&bp, node->implicit_section, 1);
520 bp_pack_value (&bp, node->address_taken, 1);
521 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
522 && node->get_partitioning_class () == SYMBOL_PARTITION
523 && (reachable_from_other_partition_p (node, encoder)
524 || referenced_from_other_partition_p (node, encoder)), 1);
525 bp_pack_value (&bp, node->lowered, 1);
526 bp_pack_value (&bp, in_other_partition, 1);
527 /* Real aliases in a boundary become non-aliases. However we still stream
528 alias info on weakrefs.
529 TODO: We lose a bit of information here - when we know that a variable is
530 defined in another unit, we may use the info on aliases to resolve
531 symbol1 != symbol2 type tests that we otherwise can do only for locally
532 defined objects. */
533 alias_p = node->alias && (!boundary_p || node->weakref);
534 bp_pack_value (&bp, alias_p, 1);
535 bp_pack_value (&bp, node->weakref, 1);
536 bp_pack_value (&bp, node->frequency, 2);
537 bp_pack_value (&bp, node->only_called_at_startup, 1);
538 bp_pack_value (&bp, node->only_called_at_exit, 1);
539 bp_pack_value (&bp, node->tm_clone, 1);
540 bp_pack_value (&bp, node->calls_comdat_local, 1);
541 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
542 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
543 LDPR_NUM_KNOWN, node->resolution);
544 streamer_write_bitpack (&bp);
545 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
547 if (node->thunk.thunk_p && !boundary_p)
549 streamer_write_uhwi_stream
550 (ob->main_stream,
551 1 + (node->thunk.this_adjusting != 0) * 2
552 + (node->thunk.virtual_offset_p != 0) * 4);
553 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
554 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
556 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
557 if (DECL_STATIC_CONSTRUCTOR (node->decl))
558 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
559 if (DECL_STATIC_DESTRUCTOR (node->decl))
560 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
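
/* The thunk data above is streamed as a single unsigned value whose low bits
   encode three facts: bit 0 is always set (so the value is non-zero), bit 1
   is this_adjusting and bit 2 is virtual_offset_p; input_node below recovers
   them with "type & 2" and "type & 4". A sketch of that round trip, using
   only the arithmetic visible in this file: */

#include <cassert>

struct toy_thunk_bits
{
  bool this_adjusting;
  bool virtual_offset_p;
};

/* Mirror of "1 + this_adjusting * 2 + virtual_offset_p * 4" above.  */
static unsigned
encode_thunk_bits (toy_thunk_bits t)
{
  return 1 + (t.this_adjusting ? 2 : 0) + (t.virtual_offset_p ? 4 : 0);
}

static toy_thunk_bits
decode_thunk_bits (unsigned type)
{
  toy_thunk_bits t;
  t.this_adjusting = (type & 2) != 0;
  t.virtual_offset_p = (type & 4) != 0;
  return t;
}

int main ()
{
  toy_thunk_bits t = { true, false };
  toy_thunk_bits u = decode_thunk_bits (encode_thunk_bits (t));
  assert (u.this_adjusting && !u.virtual_offset_p);
}
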
563 /* Output the varpool NODE to OB.
564 If NODE is not in SET, then NODE is a boundary. */
566 static void
567 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
568 lto_symtab_encoder_t encoder)
570 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
571 struct bitpack_d bp;
572 int ref;
573 bool alias_p;
574 const char *comdat;
575 const char *section;
576 tree group;
578 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
579 LTO_symtab_variable);
580 streamer_write_hwi_stream (ob->main_stream, node->order);
581 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
582 bp = bitpack_create (ob->main_stream);
583 bp_pack_value (&bp, node->externally_visible, 1);
584 bp_pack_value (&bp, node->force_output, 1);
585 bp_pack_value (&bp, node->forced_by_abi, 1);
586 bp_pack_value (&bp, node->unique_name, 1);
587 bp_pack_value (&bp, node->body_removed, 1);
588 bp_pack_value (&bp, node->implicit_section, 1);
589 bp_pack_value (&bp, node->writeonly, 1);
590 bp_pack_value (&bp, node->definition, 1);
591 alias_p = node->alias && (!boundary_p || node->weakref);
592 bp_pack_value (&bp, alias_p, 1);
593 bp_pack_value (&bp, node->weakref, 1);
594 bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
595 gcc_assert (node->definition || !node->analyzed);
596 /* Constant pool initializers can be de-unified into individual ltrans units.
597 FIXME: Alternatively, at -Os we may want to avoid generating local labels
598 for them and instead share them across LTRANS partitions. */
599 if (node->get_partitioning_class () != SYMBOL_PARTITION)
601 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
602 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
604 else
606 bp_pack_value (&bp, node->definition
607 && referenced_from_other_partition_p (node, encoder), 1);
608 bp_pack_value (&bp, node->analyzed
609 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
610 /* in_other_partition. */
612 bp_pack_value (&bp, node->tls_model, 3);
613 bp_pack_value (&bp, node->used_by_single_function, 1);
614 streamer_write_bitpack (&bp);
616 group = node->get_comdat_group ();
617 if (group)
618 comdat = IDENTIFIER_POINTER (group);
619 else
620 comdat = "";
621 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
623 if (group)
625 if (node->same_comdat_group && !boundary_p)
627 ref = lto_symtab_encoder_lookup (encoder,
628 node->same_comdat_group);
629 gcc_assert (ref != LCC_NOT_FOUND);
631 else
632 ref = LCC_NOT_FOUND;
633 streamer_write_hwi_stream (ob->main_stream, ref);
636 section = node->get_section ();
637 if (!section)
638 section = "";
639 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
641 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
642 LDPR_NUM_KNOWN, node->resolution);
645 /* Output the reference REF to OB using ENCODER. */
648 static void
649 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
650 lto_symtab_encoder_t encoder)
652 struct bitpack_d bp;
653 int nref;
654 int uid = ref->lto_stmt_uid;
655 struct cgraph_node *node;
657 bp = bitpack_create (ob->main_stream);
658 bp_pack_value (&bp, ref->use, 2);
659 bp_pack_value (&bp, ref->speculative, 1);
660 streamer_write_bitpack (&bp);
661 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
662 gcc_assert (nref != LCC_NOT_FOUND);
663 streamer_write_hwi_stream (ob->main_stream, nref);
665 node = dyn_cast <cgraph_node *> (ref->referring);
666 if (node)
668 if (ref->stmt)
669 uid = gimple_uid (ref->stmt) + 1;
670 streamer_write_hwi_stream (ob->main_stream, uid);
674 /* Stream out profile_summary to OB. */
676 static void
677 output_profile_summary (struct lto_simple_output_block *ob)
679 unsigned h_ix;
680 struct bitpack_d bp;
682 if (profile_info)
684 /* We do not output num and run_max; they are not used by
685 GCC profile feedback and they are difficult to merge from multiple
686 units. */
687 gcc_assert (profile_info->runs);
688 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
689 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
691 /* sum_all is needed for computing the working set with the
692 histogram. */
693 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
695 /* Create and output a bitpack of non-zero histogram entries indices. */
696 bp = bitpack_create (ob->main_stream);
697 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
698 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
699 streamer_write_bitpack (&bp);
700 /* Now stream out only those non-zero entries. */
701 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
703 if (!profile_info->histogram[h_ix].num_counters)
704 continue;
705 streamer_write_gcov_count_stream (ob->main_stream,
706 profile_info->histogram[h_ix].num_counters);
707 streamer_write_gcov_count_stream (ob->main_stream,
708 profile_info->histogram[h_ix].min_value);
709 streamer_write_gcov_count_stream (ob->main_stream,
710 profile_info->histogram[h_ix].cum_value);
712 /* IPA-profile computes hot bb threshold based on cumulated
713 whole program profile. We need to stream it down to ltrans. */
714 if (flag_wpa)
715 streamer_write_gcov_count_stream (ob->main_stream,
716 get_hot_bb_threshold ());
718 else
719 streamer_write_uhwi_stream (ob->main_stream, 0);
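
/* A sketch of the sparse histogram encoding used above: one presence bit per
   bucket is emitted first, then counters only for the non-empty buckets.
   Plain integers stand in for the streamer calls; field names are borrowed
   from the code above but the layout is illustrative. */

#include <cassert>
#include <cstdint>
#include <vector>

struct toy_bucket { uint64_t num_counters, min_value, cum_value; };

/* Write: one presence flag per bucket, then data for non-empty buckets.  */
static void
write_histogram (const std::vector<toy_bucket> &h, std::vector<uint64_t> &out)
{
  for (const toy_bucket &b : h)
    out.push_back (b.num_counters > 0);
  for (const toy_bucket &b : h)
    if (b.num_counters)
      {
        out.push_back (b.num_counters);
        out.push_back (b.min_value);
        out.push_back (b.cum_value);
      }
}

static std::vector<toy_bucket>
read_histogram (size_t nbuckets, const std::vector<uint64_t> &in)
{
  std::vector<toy_bucket> h (nbuckets, toy_bucket {0, 0, 0});
  size_t p = 0;
  for (size_t i = 0; i < nbuckets; i++)
    h[i].num_counters = in[p++];          /* presence flag for now */
  for (size_t i = 0; i < nbuckets; i++)
    if (h[i].num_counters)
      {
        h[i].num_counters = in[p++];
        h[i].min_value = in[p++];
        h[i].cum_value = in[p++];
      }
  return h;
}

int main ()
{
  std::vector<toy_bucket> h (4, toy_bucket {0, 0, 0});
  h[2] = { 5, 10, 1000 };
  std::vector<uint64_t> stream;
  write_histogram (h, stream);
  std::vector<toy_bucket> back = read_histogram (4, stream);
  assert (back[2].num_counters == 5 && back[2].cum_value == 1000);
  assert (back[0].num_counters == 0);
}
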
722 /* Output all callees or indirect outgoing edges. EDGE must be the first such
723 edge. */
725 static void
726 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
727 struct lto_simple_output_block *ob,
728 lto_symtab_encoder_t encoder)
730 if (!edge)
731 return;
733 /* Output edges in backward direction, so the reconstructed callgraph matches
734 and it is easy to associate call sites in the IPA pass summaries. */
735 while (edge->next_callee)
736 edge = edge->next_callee;
737 for (; edge; edge = edge->prev_callee)
738 lto_output_edge (ob, edge, encoder);
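
/* Why the loop above walks to the last callee and then follows prev_callee:
   assuming the reader links each newly created edge at the head of the
   caller's callee list (as cgraph edge creation does, though that code is not
   in this file), writing the list back to front makes the reconstructed list
   come out in the original order. A toy model: */

#include <cassert>
#include <deque>
#include <vector>

/* Writer: emit the callee list back to front (mirrors walking to the last
   next_callee and then following prev_callee above).  */
static std::vector<int>
write_edges (const std::vector<int> &callees)
{
  std::vector<int> stream;
  for (auto it = callees.rbegin (); it != callees.rend (); ++it)
    stream.push_back (*it);
  return stream;
}

/* Reader: each newly created edge is linked at the head of the caller's
   callee list, so reading the reversed stream restores the original order.  */
static std::deque<int>
read_edges (const std::vector<int> &stream)
{
  std::deque<int> callees;
  for (int callee : stream)
    callees.push_front (callee);
  return callees;
}

int main ()
{
  std::vector<int> callees = { 1, 2, 3 };
  std::deque<int> back = read_edges (write_edges (callees));
  assert (std::vector<int> (back.begin (), back.end ()) == callees);
}
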
741 /* Output the references of all nodes in the partition described by ENCODER. */
743 static void
744 output_refs (lto_symtab_encoder_t encoder)
746 lto_symtab_encoder_iterator lsei;
747 struct lto_simple_output_block *ob;
748 int count;
749 struct ipa_ref *ref;
750 int i;
752 ob = lto_create_simple_output_block (LTO_section_refs);
754 for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
755 lsei_next_in_partition (&lsei))
757 symtab_node *node = lsei_node (lsei);
759 count = node->ref_list.nreferences ();
760 if (count)
762 streamer_write_gcov_count_stream (ob->main_stream, count);
763 streamer_write_uhwi_stream (ob->main_stream,
764 lto_symtab_encoder_lookup (encoder, node));
765 for (i = 0; node->iterate_reference (i, ref); i++)
766 lto_output_ref (ob, ref, encoder);
770 streamer_write_uhwi_stream (ob->main_stream, 0);
772 lto_destroy_simple_output_block (ob);
775 /* Add NODE into encoder as well as nodes it is cloned from.
776 Do it in a way so the origin of every clone appears before the clone itself. */
778 static void
779 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
780 bool include_body)
782 if (node->clone_of)
783 add_node_to (encoder, node->clone_of, include_body);
784 else if (include_body)
785 lto_set_symtab_encoder_encode_body (encoder, node);
786 lto_symtab_encoder_encode (encoder, node);
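
/* A toy model of add_node_to's ordering guarantee: the recursion follows
   clone_of links first, so the ultimate origin receives the smallest encoder
   index and only the root of the clone chain has its body marked for
   encoding. Types and the duplicate handling are simplified here. */

#include <cassert>
#include <vector>

struct toy_node
{
  toy_node *clone_of = nullptr;
  bool encode_body = false;
};

/* Analogue of add_node_to: walk clone_of links first so the ultimate
   origin is encoded before any of its clones.  */
static void
toy_add_node_to (std::vector<toy_node *> &encoder, toy_node *node,
                 bool include_body)
{
  if (node->clone_of)
    toy_add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    node->encode_body = true;
  encoder.push_back (node);
}

int main ()
{
  toy_node origin, clone, clone_of_clone;
  clone.clone_of = &origin;
  clone_of_clone.clone_of = &clone;

  std::vector<toy_node *> encoder;
  toy_add_node_to (encoder, &clone_of_clone, true);

  assert (encoder[0] == &origin && encoder[2] == &clone_of_clone);
  assert (origin.encode_body && !clone.encode_body);
}
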
789 /* Add all references in NODE to ENCODER. */
791 static void
792 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
794 int i;
795 struct ipa_ref *ref = NULL;
796 for (i = 0; node->iterate_reference (i, ref); i++)
797 if (is_a <cgraph_node *> (ref->referred))
798 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
799 else
800 lto_symtab_encoder_encode (encoder, ref->referred);
803 /* Find all symbols we want to stream into the given partition and insert
804 them into the encoder.
806 The function actually replaces IN_ENCODER by a new one. The reason is that
807 the streaming code needs a clone's origin to be streamed before the clone
808 itself. This means that we need to insert the nodes in a specific order,
809 an order that is ignored by the partitioning logic earlier. */
811 lto_symtab_encoder_t
812 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
814 struct cgraph_edge *edge;
815 int i;
816 lto_symtab_encoder_t encoder;
817 lto_symtab_encoder_iterator lsei;
818 hash_set<void *> reachable_call_targets;
820 encoder = lto_symtab_encoder_new (false);
822 /* Go over all entries in the IN_ENCODER and duplicate them to
823 ENCODER. At the same time insert masters of clones so
824 every master appears before its clones. */
825 for (lsei = lsei_start_function_in_partition (in_encoder);
826 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
828 struct cgraph_node *node = lsei_cgraph_node (lsei);
829 add_node_to (encoder, node, true);
830 lto_set_symtab_encoder_in_partition (encoder, node);
831 create_references (encoder, node);
832 /* For proper debug info, we need to ship the origins, too. */
833 if (DECL_ABSTRACT_ORIGIN (node->decl))
835 struct cgraph_node *origin_node
836 = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
837 add_node_to (encoder, origin_node, true);
840 for (lsei = lsei_start_variable_in_partition (in_encoder);
841 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
843 varpool_node *vnode = lsei_varpool_node (lsei);
845 lto_set_symtab_encoder_in_partition (encoder, vnode);
846 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
847 create_references (encoder, vnode);
848 /* For proper debug info, we need to ship the origins, too. */
849 if (DECL_ABSTRACT_ORIGIN (vnode->decl))
851 varpool_node *origin_node
852 = varpool_node::get (DECL_ABSTRACT_ORIGIN (vnode->decl));
853 lto_set_symtab_encoder_in_partition (encoder, origin_node);
856 /* Also pickle in the initializers of all referenced readonly variables
857 to help folding. Constant pool variables are not shared, so we must
858 pickle those too. */
859 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
861 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
862 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
864 if (!lto_symtab_encoder_encode_initializer_p (encoder,
865 vnode)
866 && vnode->ctor_useable_for_folding_p ())
868 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
869 create_references (encoder, vnode);
874 /* Go over all the nodes again to include callees that are not in
875 SET. */
876 for (lsei = lsei_start_function_in_partition (encoder);
877 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
879 struct cgraph_node *node = lsei_cgraph_node (lsei);
880 for (edge = node->callees; edge; edge = edge->next_callee)
882 struct cgraph_node *callee = edge->callee;
883 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
885 /* We should have moved all the inlines. */
886 gcc_assert (!callee->global.inlined_to);
887 add_node_to (encoder, callee, false);
890 /* Add all possible targets for late devirtualization. */
891 if (flag_devirtualize)
892 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
893 if (edge->indirect_info->polymorphic)
895 unsigned int i;
896 void *cache_token;
897 bool final;
898 vec <cgraph_node *>targets
899 = possible_polymorphic_call_targets
900 (edge, &final, &cache_token);
901 if (!reachable_call_targets.add (cache_token))
903 for (i = 0; i < targets.length (); i++)
905 struct cgraph_node *callee = targets[i];
907 /* Adding external declarations into the unit serves
908 no purpose and just increases its boundary. */
909 if (callee->definition
910 && !lto_symtab_encoder_in_partition_p
911 (encoder, callee))
913 gcc_assert (!callee->global.inlined_to);
914 add_node_to (encoder, callee, false);
920 lto_symtab_encoder_delete (in_encoder);
921 return encoder;
924 /* Output the part of the symtab in SET and VSET. */
926 void
927 output_symtab (void)
929 struct cgraph_node *node;
930 struct lto_simple_output_block *ob;
931 lto_symtab_encoder_iterator lsei;
932 int i, n_nodes;
933 lto_symtab_encoder_t encoder;
935 if (flag_wpa)
936 output_cgraph_opt_summary ();
938 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
940 output_profile_summary (ob);
942 /* An encoder for cgraph nodes should have been created by
943 ipa_write_summaries_1. */
944 gcc_assert (ob->decl_state->symtab_node_encoder);
945 encoder = ob->decl_state->symtab_node_encoder;
947 /* Write out the nodes. We must first output a node and then its clones;
948 otherwise, when reading back a clone, there would be nothing to clone
949 it from. */
950 n_nodes = lto_symtab_encoder_size (encoder);
951 for (i = 0; i < n_nodes; i++)
953 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
954 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
955 lto_output_node (ob, cnode, encoder);
956 else
957 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
960 /* Go over the nodes in SET again to write edges. */
961 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
962 lsei_next_function_in_partition (&lsei))
964 node = lsei_cgraph_node (lsei);
965 output_outgoing_cgraph_edges (node->callees, ob, encoder);
966 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
969 streamer_write_uhwi_stream (ob->main_stream, 0);
971 lto_destroy_simple_output_block (ob);
973 /* Emit toplevel asms.
974 When doing WPA we must output every asm just once. Since we do not partition asm
975 nodes at all, output them to the first output. This is kind of a hack, but it
976 should work well. */
977 if (!asm_nodes_output)
979 asm_nodes_output = true;
980 lto_output_toplevel_asms ();
983 output_refs (encoder);
986 /* Return identifier encoded in IB as a plain string. */
988 static tree
989 read_identifier (struct lto_input_block *ib)
991 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
992 tree id;
994 if (ib->data[ib->p + len])
995 lto_section_overrun (ib);
996 if (!len)
998 ib->p++;
999 return NULL;
1001 id = get_identifier (ib->data + ib->p);
1002 ib->p += len + 1;
1003 return id;
1006 /* Return string encoded in IB, NULL if string is empty. */
1008 static const char *
1009 read_string (struct lto_input_block *ib)
1011 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1012 const char *str;
1014 if (ib->data[ib->p + len])
1015 lto_section_overrun (ib);
1016 if (!len)
1018 ib->p++;
1019 return NULL;
1021 str = ib->data + ib->p;
1022 ib->p += len + 1;
1023 return str;
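
/* A sketch of the pattern shared by read_identifier and read_string above:
   strings are stored NUL-terminated inside a sized section, strnlen bounds
   the scan, a missing terminator means a corrupt (overrun) stream, and an
   empty string decodes to NULL. lto_section_overrun is replaced by a flag
   here; the buffer type is a stand-in for lto_input_block. */

#include <cassert>
#include <cstring>

struct toy_block
{
  const char *data;
  size_t len;
  size_t p;            /* current read position */
};

/* Return the next NUL-terminated string, NULL for an empty string,
   or set *overrun when the terminator is missing inside the block.  */
static const char *
toy_read_string (toy_block *ib, bool *overrun)
{
  size_t maxlen = ib->len - ib->p - 1;
  size_t len = strnlen (ib->data + ib->p, maxlen);
  if (ib->data[ib->p + len] != '\0')
    {
      *overrun = true;
      return nullptr;
    }
  if (len == 0)
    {
      ib->p++;
      return nullptr;
    }
  const char *str = ib->data + ib->p;
  ib->p += len + 1;
  return str;
}

int main ()
{
  const char data[] = "comdat\0\0.text.hot\0";   /* "comdat", "", ".text.hot" */
  toy_block ib = { data, sizeof (data), 0 };
  bool overrun = false;
  assert (strcmp (toy_read_string (&ib, &overrun), "comdat") == 0);
  assert (toy_read_string (&ib, &overrun) == nullptr && !overrun);
  assert (strcmp (toy_read_string (&ib, &overrun), ".text.hot") == 0);
}
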
1026 /* Overwrite the information in NODE based on FILE_DATA and TAG.
1027 This is called either to initialize
1028 NODE or to replace the values in it, for instance because the first
1029 time we saw it, the function body was not available but now it
1030 is. BP is a bitpack with all the bitflags for NODE read from the
1031 stream. */
1033 static void
1034 input_overwrite_node (struct lto_file_decl_data *file_data,
1035 struct cgraph_node *node,
1036 enum LTO_symtab_tags tag,
1037 struct bitpack_d *bp)
1039 node->aux = (void *) tag;
1040 node->lto_file_data = file_data;
1042 node->local.local = bp_unpack_value (bp, 1);
1043 node->externally_visible = bp_unpack_value (bp, 1);
1044 node->definition = bp_unpack_value (bp, 1);
1045 node->local.versionable = bp_unpack_value (bp, 1);
1046 node->local.can_change_signature = bp_unpack_value (bp, 1);
1047 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
1048 node->force_output = bp_unpack_value (bp, 1);
1049 node->forced_by_abi = bp_unpack_value (bp, 1);
1050 node->unique_name = bp_unpack_value (bp, 1);
1051 node->body_removed = bp_unpack_value (bp, 1);
1052 node->implicit_section = bp_unpack_value (bp, 1);
1053 node->address_taken = bp_unpack_value (bp, 1);
1054 node->used_from_other_partition = bp_unpack_value (bp, 1);
1055 node->lowered = bp_unpack_value (bp, 1);
1056 node->analyzed = tag == LTO_symtab_analyzed_node;
1057 node->in_other_partition = bp_unpack_value (bp, 1);
1058 if (node->in_other_partition
1059 /* Avoid updating the decl when we are seeing just an inline clone.
1060 When inlining a function that has functions already inlined into it,
1061 we produce clones of inline clones.
1063 WPA partitioning might put each clone into a different unit and
1064 we might end up streaming an inline clone from another partition
1065 to support the clone we are interested in. */
1066 && (!node->clone_of
1067 || node->clone_of->decl != node->decl))
1069 DECL_EXTERNAL (node->decl) = 1;
1070 TREE_STATIC (node->decl) = 0;
1072 node->alias = bp_unpack_value (bp, 1);
1073 node->weakref = bp_unpack_value (bp, 1);
1074 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1075 node->only_called_at_startup = bp_unpack_value (bp, 1);
1076 node->only_called_at_exit = bp_unpack_value (bp, 1);
1077 node->tm_clone = bp_unpack_value (bp, 1);
1078 node->calls_comdat_local = bp_unpack_value (bp, 1);
1079 node->thunk.thunk_p = bp_unpack_value (bp, 1);
1080 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1081 LDPR_NUM_KNOWN);
1082 gcc_assert (flag_ltrans
1083 || (!node->in_other_partition
1084 && !node->used_from_other_partition));
1087 /* Return the identifier naming the symbol that DECL is an alias of. */
1089 static tree
1090 get_alias_symbol (tree decl)
1092 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1093 return get_identifier (TREE_STRING_POINTER
1094 (TREE_VALUE (TREE_VALUE (alias))));
1097 /* Read a node from input_block IB. TAG is the node's tag just read.
1098 Return the node read or overwritten. */
1100 static struct cgraph_node *
1101 input_node (struct lto_file_decl_data *file_data,
1102 struct lto_input_block *ib,
1103 enum LTO_symtab_tags tag,
1104 vec<symtab_node *> nodes)
1106 gcc::pass_manager *passes = g->get_passes ();
1107 tree fn_decl;
1108 struct cgraph_node *node;
1109 struct bitpack_d bp;
1110 unsigned decl_index;
1111 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1112 int clone_ref;
1113 int order;
1114 int i, count;
1115 tree group;
1116 const char *section;
1117 order = streamer_read_hwi (ib) + order_base;
1118 clone_ref = streamer_read_hwi (ib);
1120 decl_index = streamer_read_uhwi (ib);
1121 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1123 if (clone_ref != LCC_NOT_FOUND)
1125 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1126 0, CGRAPH_FREQ_BASE, false,
1127 vNULL, false, NULL, NULL);
1129 else
1131 /* The declaration of a function may already have been merged with a
1132 declaration from another input file. We keep the cgraph unmerged until
1133 streaming of the IPA passes is done. Always forcibly create a fresh node. */
1134 node = symtab->create_empty ();
1135 node->decl = fn_decl;
1136 node->register_symbol ();
1139 node->order = order;
1140 if (order >= symtab->order)
1141 symtab->order = order + 1;
1143 node->count = streamer_read_gcov_count (ib);
1144 node->count_materialization_scale = streamer_read_hwi (ib);
1146 count = streamer_read_hwi (ib);
1147 node->ipa_transforms_to_apply = vNULL;
1148 for (i = 0; i < count; i++)
1150 opt_pass *pass;
1151 int pid = streamer_read_hwi (ib);
1153 gcc_assert (pid < passes->passes_by_id_size);
1154 pass = passes->passes_by_id[pid];
1155 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1158 if (tag == LTO_symtab_analyzed_node)
1159 ref = streamer_read_hwi (ib);
1161 group = read_identifier (ib);
1162 if (group)
1163 ref2 = streamer_read_hwi (ib);
1165 /* Make sure that we have not read this node before. Nodes that
1166 have already been read will have their tag stored in the 'aux'
1167 field. Since built-in functions can be referenced in multiple
1168 functions, they are expected to be read more than once. */
1169 if (node->aux && !DECL_BUILT_IN (node->decl))
1170 internal_error ("bytecode stream: found multiple instances of cgraph "
1171 "node with uid %d", node->uid);
1173 node->tp_first_run = streamer_read_uhwi (ib);
1175 bp = streamer_read_bitpack (ib);
1177 input_overwrite_node (file_data, node, tag, &bp);
1179 /* Store a reference for now, and fix up later to be a pointer. */
1180 node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
1182 if (group)
1184 node->set_comdat_group (group);
1185 /* Store a reference for now, and fix up later to be a pointer. */
1186 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1188 else
1189 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1190 section = read_string (ib);
1191 if (section)
1192 node->set_section_for_node (section);
1194 if (node->thunk.thunk_p)
1196 int type = streamer_read_uhwi (ib);
1197 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1198 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1200 node->thunk.fixed_offset = fixed_offset;
1201 node->thunk.this_adjusting = (type & 2);
1202 node->thunk.virtual_value = virtual_value;
1203 node->thunk.virtual_offset_p = (type & 4);
1205 if (node->alias && !node->analyzed && node->weakref)
1206 node->alias_target = get_alias_symbol (node->decl);
1207 node->profile_id = streamer_read_hwi (ib);
1208 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1209 node->set_init_priority (streamer_read_hwi (ib));
1210 if (DECL_STATIC_DESTRUCTOR (node->decl))
1211 node->set_fini_priority (streamer_read_hwi (ib));
1212 return node;
1215 /* Read a varpool node from input_block IB using FILE_DATA.
1216 Return the node read. */
1218 static varpool_node *
1219 input_varpool_node (struct lto_file_decl_data *file_data,
1220 struct lto_input_block *ib)
1222 int decl_index;
1223 tree var_decl;
1224 varpool_node *node;
1225 struct bitpack_d bp;
1226 int ref = LCC_NOT_FOUND;
1227 int order;
1228 tree group;
1229 const char *section;
1231 order = streamer_read_hwi (ib) + order_base;
1232 decl_index = streamer_read_uhwi (ib);
1233 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1235 /* The declaration may already have been merged with a declaration
1236 from another input file. We keep the symbol table unmerged until
1237 streaming of the IPA passes is done. Always forcibly create a fresh node. */
1238 node = varpool_node::create_empty ();
1239 node->decl = var_decl;
1240 node->register_symbol ();
1242 node->order = order;
1243 if (order >= symtab->order)
1244 symtab->order = order + 1;
1245 node->lto_file_data = file_data;
1247 bp = streamer_read_bitpack (ib);
1248 node->externally_visible = bp_unpack_value (&bp, 1);
1249 node->force_output = bp_unpack_value (&bp, 1);
1250 node->forced_by_abi = bp_unpack_value (&bp, 1);
1251 node->unique_name = bp_unpack_value (&bp, 1);
1252 node->body_removed = bp_unpack_value (&bp, 1);
1253 node->implicit_section = bp_unpack_value (&bp, 1);
1254 node->writeonly = bp_unpack_value (&bp, 1);
1255 node->definition = bp_unpack_value (&bp, 1);
1256 node->alias = bp_unpack_value (&bp, 1);
1257 node->weakref = bp_unpack_value (&bp, 1);
1258 node->analyzed = bp_unpack_value (&bp, 1);
1259 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1260 node->in_other_partition = bp_unpack_value (&bp, 1);
1261 if (node->in_other_partition)
1263 DECL_EXTERNAL (node->decl) = 1;
1264 TREE_STATIC (node->decl) = 0;
1266 if (node->alias && !node->analyzed && node->weakref)
1267 node->alias_target = get_alias_symbol (node->decl);
1268 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1269 node->used_by_single_function = bp_unpack_value (&bp, 1);
1270 group = read_identifier (ib);
1271 if (group)
1273 node->set_comdat_group (group);
1274 ref = streamer_read_hwi (ib);
1275 /* Store a reference for now, and fix up later to be a pointer. */
1276 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1278 else
1279 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1280 section = read_string (ib);
1281 if (section)
1282 node->set_section_for_node (section);
1283 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1284 LDPR_NUM_KNOWN);
1285 gcc_assert (flag_ltrans
1286 || (!node->in_other_partition
1287 && !node->used_from_other_partition));
1289 return node;
1292 /* Read a reference for REFERRING_NODE from input_block IB. NODES is the
1293 vector of previously read nodes, used to look up the referred symbol. */
1295 static void
1296 input_ref (struct lto_input_block *ib,
1297 symtab_node *referring_node,
1298 vec<symtab_node *> nodes)
1300 symtab_node *node = NULL;
1301 struct bitpack_d bp;
1302 enum ipa_ref_use use;
1303 bool speculative;
1304 struct ipa_ref *ref;
1306 bp = streamer_read_bitpack (ib);
1307 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1308 speculative = bp_unpack_value (&bp, 1);
1309 node = nodes[streamer_read_hwi (ib)];
1310 ref = referring_node->create_reference (node, use);
1311 ref->speculative = speculative;
1312 if (is_a <cgraph_node *> (referring_node))
1313 ref->lto_stmt_uid = streamer_read_hwi (ib);
1316 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1317 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1318 edge being read is indirect (in the sense that it has
1319 indirect_unknown_callee set). */
1321 static void
1322 input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
1323 bool indirect)
1325 struct cgraph_node *caller, *callee;
1326 struct cgraph_edge *edge;
1327 unsigned int stmt_id;
1328 gcov_type count;
1329 int freq;
1330 cgraph_inline_failed_t inline_failed;
1331 struct bitpack_d bp;
1332 int ecf_flags = 0;
1334 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1335 if (caller == NULL || caller->decl == NULL_TREE)
1336 internal_error ("bytecode stream: no caller found while reading edge");
1338 if (!indirect)
1340 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1341 if (callee == NULL || callee->decl == NULL_TREE)
1342 internal_error ("bytecode stream: no callee found while reading edge");
1344 else
1345 callee = NULL;
1347 count = streamer_read_gcov_count (ib);
1349 bp = streamer_read_bitpack (ib);
1350 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1351 stmt_id = bp_unpack_var_len_unsigned (&bp);
1352 freq = (int) bp_unpack_var_len_unsigned (&bp);
1354 if (indirect)
1355 edge = caller->create_indirect_edge (NULL, 0, count, freq);
1356 else
1357 edge = caller->create_edge (callee, NULL, count, freq);
1359 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1360 edge->speculative = bp_unpack_value (&bp, 1);
1361 edge->lto_stmt_uid = stmt_id;
1362 edge->inline_failed = inline_failed;
1363 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1364 edge->can_throw_external = bp_unpack_value (&bp, 1);
1365 if (indirect)
1367 if (bp_unpack_value (&bp, 1))
1368 ecf_flags |= ECF_CONST;
1369 if (bp_unpack_value (&bp, 1))
1370 ecf_flags |= ECF_PURE;
1371 if (bp_unpack_value (&bp, 1))
1372 ecf_flags |= ECF_NORETURN;
1373 if (bp_unpack_value (&bp, 1))
1374 ecf_flags |= ECF_MALLOC;
1375 if (bp_unpack_value (&bp, 1))
1376 ecf_flags |= ECF_NOTHROW;
1377 if (bp_unpack_value (&bp, 1))
1378 ecf_flags |= ECF_RETURNS_TWICE;
1379 edge->indirect_info->ecf_flags = ecf_flags;
1380 edge->indirect_info->common_target_id = streamer_read_hwi (ib);
1381 if (edge->indirect_info->common_target_id)
1382 edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
1387 /* Read a cgraph from IB using the info in FILE_DATA. */
1389 static vec<symtab_node *>
1390 input_cgraph_1 (struct lto_file_decl_data *file_data,
1391 struct lto_input_block *ib)
1393 enum LTO_symtab_tags tag;
1394 vec<symtab_node *> nodes = vNULL;
1395 symtab_node *node;
1396 unsigned i;
1398 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1399 order_base = symtab->order;
1400 while (tag)
1402 if (tag == LTO_symtab_edge)
1403 input_edge (ib, nodes, false);
1404 else if (tag == LTO_symtab_indirect_edge)
1405 input_edge (ib, nodes, true);
1406 else if (tag == LTO_symtab_variable)
1408 node = input_varpool_node (file_data, ib);
1409 nodes.safe_push (node);
1410 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1412 else
1414 node = input_node (file_data, ib, tag, nodes);
1415 if (node == NULL || node->decl == NULL_TREE)
1416 internal_error ("bytecode stream: found empty cgraph node");
1417 nodes.safe_push (node);
1418 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1421 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1424 lto_input_toplevel_asms (file_data, order_base);
1426 /* AUX pointers should all be non-zero for function nodes read from the stream. */
1427 #ifdef ENABLE_CHECKING
1428 FOR_EACH_VEC_ELT (nodes, i, node)
1429 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1430 #endif
1431 FOR_EACH_VEC_ELT (nodes, i, node)
1433 int ref;
1434 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1436 ref = (int) (intptr_t) cnode->global.inlined_to;
1438 /* We share declarations of builtins, so we may read the same node twice. */
1439 if (!node->aux)
1440 continue;
1441 node->aux = NULL;
1443 /* Fixup inlined_to from reference to pointer. */
1444 if (ref != LCC_NOT_FOUND)
1445 dyn_cast<cgraph_node *> (node)->global.inlined_to
1446 = dyn_cast<cgraph_node *> (nodes[ref]);
1447 else
1448 cnode->global.inlined_to = NULL;
1451 ref = (int) (intptr_t) node->same_comdat_group;
1453 /* Fixup same_comdat_group from reference to pointer. */
1454 if (ref != LCC_NOT_FOUND)
1455 node->same_comdat_group = nodes[ref];
1456 else
1457 node->same_comdat_group = NULL;
1459 FOR_EACH_VEC_ELT (nodes, i, node)
1460 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1461 return nodes;
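
/* A sketch of the "store a reference now, fix up later" pattern used above
   for inlined_to and same_comdat_group: while reading, a node index is
   smuggled into the pointer field through intptr_t, and once all nodes exist
   a second pass replaces the index with the real pointer, with an
   LCC_NOT_FOUND-style sentinel meaning "none". Toy types; the sentinel value
   is illustrative. */

#include <cassert>
#include <cstdint>
#include <vector>

const int TOY_NOT_FOUND = -1;             /* analogue of LCC_NOT_FOUND */

struct toy_node
{
  toy_node *same_group = nullptr;         /* first holds an index, then a pointer */
};

/* Pass 1: record the streamed index directly in the pointer field.  */
static void
read_node (toy_node *node, int streamed_ref)
{
  node->same_group = (toy_node *) (intptr_t) streamed_ref;
}

/* Pass 2: once all nodes are read, turn indices back into pointers.  */
static void
fixup (std::vector<toy_node *> &nodes)
{
  for (toy_node *node : nodes)
    {
      int ref = (int) (intptr_t) node->same_group;
      node->same_group = ref != TOY_NOT_FOUND ? nodes[ref] : nullptr;
    }
}

int main ()
{
  toy_node a, b;
  std::vector<toy_node *> nodes = { &a, &b };
  read_node (&a, 1);                      /* a's group mate is node #1 */
  read_node (&b, TOY_NOT_FOUND);
  fixup (nodes);
  assert (a.same_group == &b && b.same_group == nullptr);
}
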
1464 /* Input ipa_refs. */
1466 static void
1467 input_refs (struct lto_input_block *ib,
1468 vec<symtab_node *> nodes)
1470 int count;
1471 int idx;
1472 while (true)
1474 symtab_node *node;
1475 count = streamer_read_uhwi (ib);
1476 if (!count)
1477 break;
1478 idx = streamer_read_uhwi (ib);
1479 node = nodes[idx];
1480 while (count)
1482 input_ref (ib, node, nodes);
1483 count--;
1489 static struct gcov_ctr_summary lto_gcov_summary;
1491 /* Input profile_info from IB. */
1492 static void
1493 input_profile_summary (struct lto_input_block *ib,
1494 struct lto_file_decl_data *file_data)
1496 unsigned h_ix;
1497 struct bitpack_d bp;
1498 unsigned int runs = streamer_read_uhwi (ib);
1499 if (runs)
1501 file_data->profile_info.runs = runs;
1502 file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
1503 file_data->profile_info.sum_all = streamer_read_gcov_count (ib);
1505 memset (file_data->profile_info.histogram, 0,
1506 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1507 /* Input the bitpack of non-zero histogram indices. */
1508 bp = streamer_read_bitpack (ib);
1509 /* Read in and unpack the full bitpack, flagging non-zero
1510 histogram entries by setting the num_counters non-zero. */
1511 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1513 file_data->profile_info.histogram[h_ix].num_counters
1514 = bp_unpack_value (&bp, 1);
1516 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1518 if (!file_data->profile_info.histogram[h_ix].num_counters)
1519 continue;
1521 file_data->profile_info.histogram[h_ix].num_counters
1522 = streamer_read_gcov_count (ib);
1523 file_data->profile_info.histogram[h_ix].min_value
1524 = streamer_read_gcov_count (ib);
1525 file_data->profile_info.histogram[h_ix].cum_value
1526 = streamer_read_gcov_count (ib);
1528 /* IPA-profile computes hot bb threshold based on cumulated
1529 whole program profile. We need to stream it down to ltrans. */
1530 if (flag_ltrans)
1531 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1536 /* Rescale profile summaries to the same number of runs in the whole unit. */
1538 static void
1539 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1541 struct lto_file_decl_data *file_data;
1542 unsigned int j, h_ix;
1543 gcov_unsigned_t max_runs = 0;
1544 struct cgraph_node *node;
1545 struct cgraph_edge *edge;
1546 gcov_type saved_sum_all = 0;
1547 gcov_ctr_summary *saved_profile_info = 0;
1548 int saved_scale = 0;
1550 /* Find the unit with the maximal number of runs. If we ever get serious
1551 about roundoff errors, we might also consider computing the least common
1552 multiple. */
1553 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1554 if (max_runs < file_data->profile_info.runs)
1555 max_runs = file_data->profile_info.runs;
1557 if (!max_runs)
1558 return;
1560 /* Simple overflow check. We probably don't need to support that many train
1561 runs. Such a large value probably implies data corruption anyway. */
1562 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1564 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1565 INT_MAX / REG_BR_PROB_BASE);
1566 return;
1569 profile_info = &lto_gcov_summary;
1570 lto_gcov_summary.runs = max_runs;
1571 lto_gcov_summary.sum_max = 0;
1572 memset (lto_gcov_summary.histogram, 0,
1573 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1575 /* Rescale all units to the maximal number of runs.
1576 sum_max cannot be easily merged, as we have no idea which files come from
1577 the same run. We do not use the info anyway, so leave it 0. */
1578 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1579 if (file_data->profile_info.runs)
1581 int scale = GCOV_COMPUTE_SCALE (max_runs,
1582 file_data->profile_info.runs);
1583 lto_gcov_summary.sum_max
1584 = MAX (lto_gcov_summary.sum_max,
1585 apply_scale (file_data->profile_info.sum_max, scale));
1586 lto_gcov_summary.sum_all
1587 = MAX (lto_gcov_summary.sum_all,
1588 apply_scale (file_data->profile_info.sum_all, scale));
1589 /* Save a pointer to the profile_info with the largest
1590 scaled sum_all and the scale for use in merging the
1591 histogram. */
1592 if (!saved_profile_info
1593 || lto_gcov_summary.sum_all > saved_sum_all)
1595 saved_profile_info = &file_data->profile_info;
1596 saved_sum_all = lto_gcov_summary.sum_all;
1597 saved_scale = scale;
1601 gcc_assert (saved_profile_info);
1603 /* Scale up the histogram from the profile that had the largest
1604 scaled sum_all above. */
1605 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1607 /* Scale up the min value as we did the corresponding sum_all
1608 above. Use that to find the new histogram index. */
1609 gcov_type scaled_min
1610 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1611 saved_scale);
1612 /* The new index may be shared with another scaled histogram entry,
1613 so we need to account for a non-zero histogram entry at new_ix. */
1614 unsigned new_ix = gcov_histo_index (scaled_min);
1615 lto_gcov_summary.histogram[new_ix].min_value
1616 = (lto_gcov_summary.histogram[new_ix].num_counters
1617 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1618 : scaled_min);
1619 /* Some of the scaled counter values would ostensibly need to be placed
1620 into different (larger) histogram buckets, but we keep things simple
1621 here and place the scaled cumulative counter value in the bucket
1622 corresponding to the scaled minimum counter value. */
1623 lto_gcov_summary.histogram[new_ix].cum_value
1624 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1625 saved_scale);
1626 lto_gcov_summary.histogram[new_ix].num_counters
1627 += saved_profile_info->histogram[h_ix].num_counters;
1630 /* Watch roundoff errors. */
1631 if (lto_gcov_summary.sum_max < max_runs)
1632 lto_gcov_summary.sum_max = max_runs;
1634 /* If merging already happened at WPA time, we are done. */
1635 if (flag_ltrans)
1636 return;
1638 /* Now compute count_materialization_scale of each node.
1639 During LTRANS we already have values of count_materialization_scale
1640 computed, so just update them. */
1641 FOR_EACH_FUNCTION (node)
1642 if (node->lto_file_data
1643 && node->lto_file_data->profile_info.runs)
1645 int scale;
1647 scale = RDIV (node->count_materialization_scale * max_runs,
1648 node->lto_file_data->profile_info.runs);
1649 node->count_materialization_scale = scale;
1650 if (scale < 0)
1651 fatal_error ("Profile information in %s corrupted",
1652 file_data->file_name);
1654 if (scale == REG_BR_PROB_BASE)
1655 continue;
1656 for (edge = node->callees; edge; edge = edge->next_callee)
1657 edge->count = apply_scale (edge->count, scale);
1658 node->count = apply_scale (node->count, scale);
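
/* A sketch of the rescaling arithmetic above, under the assumption (the
   macros are defined elsewhere, not in this file) that GCOV_COMPUTE_SCALE
   yields a fixed-point fraction with denominator REG_BR_PROB_BASE and that
   apply_scale multiplies a count by that fraction with rounding. The base
   constant below is illustrative. */

#include <cassert>
#include <cstdint>

typedef int64_t toy_gcov_type;

const int TOY_PROB_BASE = 10000;          /* stand-in for REG_BR_PROB_BASE */

/* Fixed-point fraction NUM/DEN scaled to TOY_PROB_BASE, rounded.  */
static int
toy_compute_scale (toy_gcov_type num, toy_gcov_type den)
{
  return (int) ((num * TOY_PROB_BASE + den / 2) / den);
}

/* Apply the fraction to a counter, rounding to nearest.  */
static toy_gcov_type
toy_apply_scale (toy_gcov_type count, int scale)
{
  return (count * scale + TOY_PROB_BASE / 2) / TOY_PROB_BASE;
}

int main ()
{
  /* A unit trained with 2 runs rescaled to the 6-run maximum:
     its counters are multiplied by roughly 3.  */
  int scale = toy_compute_scale (6, 2);
  assert (scale == 3 * TOY_PROB_BASE);
  assert (toy_apply_scale (1000, scale) == 3000);

  /* Non-integral ratios round instead of truncating.  */
  scale = toy_compute_scale (3, 2);
  assert (toy_apply_scale (5, scale) == 8);   /* 7.5 rounds up */
}
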
1662 /* Input and merge the symtab from each of the .o files passed to
1663 lto1. */
1665 void
1666 input_symtab (void)
1668 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1669 struct lto_file_decl_data *file_data;
1670 unsigned int j = 0;
1671 struct cgraph_node *node;
1673 while ((file_data = file_data_vec[j++]))
1675 const char *data;
1676 size_t len;
1677 struct lto_input_block *ib;
1678 vec<symtab_node *> nodes;
1680 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1681 &data, &len);
1682 if (!ib)
1683 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1684 input_profile_summary (ib, file_data);
1685 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1686 nodes = input_cgraph_1 (file_data, ib);
1687 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1688 ib, data, len);
1690 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1691 &data, &len);
1692 if (!ib)
1693 fatal_error ("cannot find LTO section refs in %s",
1694 file_data->file_name);
1695 input_refs (ib, nodes);
1696 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1697 ib, data, len);
1698 if (flag_ltrans)
1699 input_cgraph_opt_summary (nodes);
1700 nodes.release ();
1703 merge_profile_summaries (file_data_vec);
1704 get_working_sets ();
1707 /* Clear out the aux field that was used to store enough state to
1708 tell which nodes should be overwritten. */
1709 FOR_EACH_FUNCTION (node)
1711 /* Some nodes may have been created by cgraph_node. This
1712 happens when the callgraph contains nested functions. If the
1713 node for the parent function was never emitted to the gimple
1714 file, cgraph_node will create a node for it when setting the
1715 context of the nested function. */
1716 if (node->lto_file_data)
1717 node->aux = NULL;
1721 /* True when we need optimization summary for NODE. */
1723 static int
1724 output_cgraph_opt_summary_p (struct cgraph_node *node)
1726 return (node->clone_of
1727 && (node->clone.tree_map
1728 || node->clone.args_to_skip
1729 || node->clone.combined_args_to_skip));
1732 /* Output optimization summary for EDGE to OB. */
1733 static void
1734 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1735 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1739 /* Output optimization summary for NODE to OB. */
1741 static void
1742 output_node_opt_summary (struct output_block *ob,
1743 struct cgraph_node *node,
1744 lto_symtab_encoder_t encoder)
1746 unsigned int index;
1747 bitmap_iterator bi;
1748 struct ipa_replace_map *map;
1749 struct bitpack_d bp;
1750 int i;
1751 struct cgraph_edge *e;
1753 if (node->clone.args_to_skip)
1755 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1756 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1757 streamer_write_uhwi (ob, index);
1759 else
1760 streamer_write_uhwi (ob, 0);
1761 if (node->clone.combined_args_to_skip)
1763 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1764 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1765 streamer_write_uhwi (ob, index);
1767 else
1768 streamer_write_uhwi (ob, 0);
1769 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1770 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1772 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1773 mechanism to store function local declarations into summaries. */
1774 gcc_assert (!map->old_tree);
1775 streamer_write_uhwi (ob, map->parm_num);
1776 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1777 stream_write_tree (ob, map->new_tree, true);
1778 bp = bitpack_create (ob->main_stream);
1779 bp_pack_value (&bp, map->replace_p, 1);
1780 bp_pack_value (&bp, map->ref_p, 1);
1781 streamer_write_bitpack (&bp);
1784 if (lto_symtab_encoder_in_partition_p (encoder, node))
1786 for (e = node->callees; e; e = e->next_callee)
1787 output_edge_opt_summary (ob, e);
1788 for (e = node->indirect_calls; e; e = e->next_callee)
1789 output_edge_opt_summary (ob, e);
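
/* A sketch of the args_to_skip encoding used above: a bitmap is streamed as
   the number of set bits followed by the set bit indices, and the reader in
   input_node_opt_summary allocates the bitmap only when the count is
   non-zero. std::set stands in for GCC's bitmap; a vector of integers stands
   in for the streamer. */

#include <cassert>
#include <set>
#include <vector>

/* Writer: number of set bits, then each set bit index.  */
static void
write_bitmap (const std::set<unsigned> &bits, std::vector<unsigned> &out)
{
  out.push_back (bits.size ());
  for (unsigned index : bits)
    out.push_back (index);
}

/* Reader: mirror of the loop in input_node_opt_summary.  */
static std::set<unsigned>
read_bitmap (const std::vector<unsigned> &in, size_t &pos)
{
  std::set<unsigned> bits;
  unsigned count = in[pos++];
  for (unsigned i = 0; i < count; i++)
    bits.insert (in[pos++]);
  return bits;
}

int main ()
{
  std::set<unsigned> args_to_skip = { 0, 2, 5 };
  std::vector<unsigned> stream;
  write_bitmap (args_to_skip, stream);

  size_t pos = 0;
  assert (read_bitmap (stream, pos) == args_to_skip);
  assert (pos == stream.size ());
}
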
1793 /* Output optimization summaries stored in callgraph.
1794 At the moment it is the clone info structure. */
1796 static void
1797 output_cgraph_opt_summary (void)
1799 int i, n_nodes;
1800 lto_symtab_encoder_t encoder;
1801 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1802 unsigned count = 0;
1804 ob->symbol = NULL;
1805 encoder = ob->decl_state->symtab_node_encoder;
1806 n_nodes = lto_symtab_encoder_size (encoder);
1807 for (i = 0; i < n_nodes; i++)
1809 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1810 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1811 if (cnode && output_cgraph_opt_summary_p (cnode))
1812 count++;
1814 streamer_write_uhwi (ob, count);
1815 for (i = 0; i < n_nodes; i++)
1817 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1818 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1819 if (cnode && output_cgraph_opt_summary_p (cnode))
1821 streamer_write_uhwi (ob, i);
1822 output_node_opt_summary (ob, cnode, encoder);
1825 produce_asm (ob, NULL);
1826 destroy_output_block (ob);
1829 /* Input optimisation summary of EDGE. */
1831 static void
1832 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1833 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1837 /* Input optimisation summary of NODE. */
1839 static void
1840 input_node_opt_summary (struct cgraph_node *node,
1841 struct lto_input_block *ib_main,
1842 struct data_in *data_in)
1844 int i;
1845 int count;
1846 int bit;
1847 struct bitpack_d bp;
1848 struct cgraph_edge *e;
1850 count = streamer_read_uhwi (ib_main);
1851 if (count)
1852 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1853 for (i = 0; i < count; i++)
1855 bit = streamer_read_uhwi (ib_main);
1856 bitmap_set_bit (node->clone.args_to_skip, bit);
1858 count = streamer_read_uhwi (ib_main);
1859 if (count)
1860 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1861 for (i = 0; i < count; i++)
1863 bit = streamer_read_uhwi (ib_main);
1864 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1866 count = streamer_read_uhwi (ib_main);
1867 for (i = 0; i < count; i++)
1869 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
1871 vec_safe_push (node->clone.tree_map, map);
1872 map->parm_num = streamer_read_uhwi (ib_main);
1873 map->old_tree = NULL;
1874 map->new_tree = stream_read_tree (ib_main, data_in);
1875 bp = streamer_read_bitpack (ib_main);
1876 map->replace_p = bp_unpack_value (&bp, 1);
1877 map->ref_p = bp_unpack_value (&bp, 1);
1879 for (e = node->callees; e; e = e->next_callee)
1880 input_edge_opt_summary (e, ib_main);
1881 for (e = node->indirect_calls; e; e = e->next_callee)
1882 input_edge_opt_summary (e, ib_main);
1885 /* Read section in file FILE_DATA of length LEN with data DATA. */
1887 static void
1888 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1889 const char *data, size_t len,
1890 vec<symtab_node *> nodes)
1892 const struct lto_function_header *header =
1893 (const struct lto_function_header *) data;
1894 const int cfg_offset = sizeof (struct lto_function_header);
1895 const int main_offset = cfg_offset + header->cfg_size;
1896 const int string_offset = main_offset + header->main_size;
1897 struct data_in *data_in;
1898 unsigned int i;
1899 unsigned int count;
1901 lto_input_block ib_main ((const char *) data + main_offset,
1902 header->main_size);
1904 data_in =
1905 lto_data_in_create (file_data, (const char *) data + string_offset,
1906 header->string_size, vNULL);
1907 count = streamer_read_uhwi (&ib_main);
1909 for (i = 0; i < count; i++)
1911 int ref = streamer_read_uhwi (&ib_main);
1912 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
1913 &ib_main, data_in);
1915 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1916 len);
1917 lto_data_in_delete (data_in);
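
/* A sketch of the offset arithmetic in input_cgraph_opt_section: the section
   starts with a header recording the sizes of its sub-streams, and the main
   and string streams are found by accumulating those sizes after the header.
   The struct below only mimics how lto_function_header is used above; its
   real layout is defined elsewhere. */

#include <cassert>
#include <cstdint>

/* Illustrative stand-in for struct lto_function_header.  */
struct toy_section_header
{
  int32_t cfg_size;
  int32_t main_size;
  int32_t string_size;
};

struct toy_offsets
{
  int cfg_offset, main_offset, string_offset;
};

/* Mirror of the offset computation in input_cgraph_opt_section.  */
static toy_offsets
locate_streams (const toy_section_header &header)
{
  toy_offsets off;
  off.cfg_offset = sizeof (toy_section_header);
  off.main_offset = off.cfg_offset + header.cfg_size;
  off.string_offset = off.main_offset + header.main_size;
  return off;
}

int main ()
{
  toy_section_header header = { 0, 128, 32 };
  toy_offsets off = locate_streams (header);
  assert (off.main_offset == (int) sizeof (toy_section_header));
  assert (off.string_offset == off.main_offset + 128);
}
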
1920 /* Input optimization summary of cgraph. */
1922 static void
1923 input_cgraph_opt_summary (vec<symtab_node *> nodes)
1925 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1926 struct lto_file_decl_data *file_data;
1927 unsigned int j = 0;
1929 while ((file_data = file_data_vec[j++]))
1931 size_t len;
1932 const char *data =
1933 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1934 &len);
1936 if (data)
1937 input_cgraph_opt_section (file_data, data, len, nodes);