/* PR ipa/64481
   gcc/lto-cgraph.c
   (web-viewer extraction header: git blob
    6c6501afc4a150cddcd9d03f4001f1ec30d5b861)  */
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2015 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "stringpool.h"
39 #include "predict.h"
40 #include "hard-reg-set.h"
41 #include "input.h"
42 #include "function.h"
43 #include "basic-block.h"
44 #include "tree-ssa-alias.h"
45 #include "internal-fn.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "expr.h"
50 #include "flags.h"
51 #include "params.h"
52 #include "langhooks.h"
53 #include "bitmap.h"
54 #include "diagnostic-core.h"
55 #include "except.h"
56 #include "timevar.h"
57 #include "hash-map.h"
58 #include "plugin-api.h"
59 #include "ipa-ref.h"
60 #include "cgraph.h"
61 #include "lto-streamer.h"
62 #include "data-streamer.h"
63 #include "tree-streamer.h"
64 #include "gcov-io.h"
65 #include "tree-pass.h"
66 #include "profile.h"
67 #include "context.h"
68 #include "pass_manager.h"
69 #include "ipa-utils.h"
70 #include "omp-low.h"
/* True when asm nodes have been output.  */
73 bool asm_nodes_output = false;
75 static void output_cgraph_opt_summary (void);
76 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
78 /* Number of LDPR values known to GCC. */
79 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
/* All node orders are offset by ORDER_BASE.  */
82 static int order_base;
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  LTO_symtab_indirect_edge,
  LTO_symtab_variable,
  LTO_symtab_last_tag
};
101 /* Create a new symtab encoder.
102 if FOR_INPUT, the encoder allocate only datastructures needed
103 to read the symtab. */
105 lto_symtab_encoder_t
106 lto_symtab_encoder_new (bool for_input)
108 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
110 if (!for_input)
111 encoder->map = new hash_map<symtab_node *, size_t>;
112 encoder->nodes.create (0);
113 return encoder;
117 /* Delete ENCODER and its components. */
119 void
120 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
122 encoder->nodes.release ();
123 if (encoder->map)
124 delete encoder->map;
125 free (encoder);
129 /* Return the existing reference number of NODE in the symtab encoder in
130 output block OB. Assign a new reference if this is the first time
131 NODE is encoded. */
134 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
135 symtab_node *node)
137 int ref;
139 if (!encoder->map)
141 lto_encoder_entry entry = {node, false, false, false};
143 ref = encoder->nodes.length ();
144 encoder->nodes.safe_push (entry);
145 return ref;
148 size_t *slot = encoder->map->get (node);
149 if (!slot || !*slot)
151 lto_encoder_entry entry = {node, false, false, false};
152 ref = encoder->nodes.length ();
153 if (!slot)
154 encoder->map->put (node, ref + 1);
155 encoder->nodes.safe_push (entry);
157 else
158 ref = *slot - 1;
160 return ref;
163 /* Remove NODE from encoder. */
165 bool
166 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
167 symtab_node *node)
169 int index;
170 lto_encoder_entry last_node;
172 size_t *slot = encoder->map->get (node);
173 if (slot == NULL || !*slot)
174 return false;
176 index = *slot - 1;
177 gcc_checking_assert (encoder->nodes[index].node == node);
179 /* Remove from vector. We do this by swapping node with the last element
180 of the vector. */
181 last_node = encoder->nodes.pop ();
182 if (last_node.node != node)
184 gcc_assert (encoder->map->put (last_node.node, index + 1));
186 /* Move the last element to the original spot of NODE. */
187 encoder->nodes[index] = last_node;
190 /* Remove element from hash table. */
191 encoder->map->remove (node);
192 return true;
196 /* Return TRUE if we should encode initializer of NODE (if any). */
198 bool
199 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
200 struct cgraph_node *node)
202 int index = lto_symtab_encoder_lookup (encoder, node);
203 return encoder->nodes[index].body;
206 /* Return TRUE if we should encode body of NODE (if any). */
208 static void
209 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
210 struct cgraph_node *node)
212 int index = lto_symtab_encoder_encode (encoder, node);
213 gcc_checking_assert (encoder->nodes[index].node == node);
214 encoder->nodes[index].body = true;
217 /* Return TRUE if we should encode initializer of NODE (if any). */
219 bool
220 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
221 varpool_node *node)
223 int index = lto_symtab_encoder_lookup (encoder, node);
224 if (index == LCC_NOT_FOUND)
225 return false;
226 return encoder->nodes[index].initializer;
229 /* Return TRUE if we should encode initializer of NODE (if any). */
231 static void
232 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
233 varpool_node *node)
235 int index = lto_symtab_encoder_lookup (encoder, node);
236 encoder->nodes[index].initializer = true;
239 /* Return TRUE if we should encode initializer of NODE (if any). */
241 bool
242 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
243 symtab_node *node)
245 int index = lto_symtab_encoder_lookup (encoder, node);
246 if (index == LCC_NOT_FOUND)
247 return false;
248 return encoder->nodes[index].in_partition;
251 /* Return TRUE if we should encode body of NODE (if any). */
253 void
254 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
255 symtab_node *node)
257 int index = lto_symtab_encoder_encode (encoder, node);
258 encoder->nodes[index].in_partition = true;
261 /* Output the cgraph EDGE to OB using ENCODER. */
263 static void
264 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
265 lto_symtab_encoder_t encoder)
267 unsigned int uid;
268 intptr_t ref;
269 struct bitpack_d bp;
271 if (edge->indirect_unknown_callee)
272 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
273 LTO_symtab_indirect_edge);
274 else
275 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
276 LTO_symtab_edge);
278 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
279 gcc_assert (ref != LCC_NOT_FOUND);
280 streamer_write_hwi_stream (ob->main_stream, ref);
282 if (!edge->indirect_unknown_callee)
284 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
285 gcc_assert (ref != LCC_NOT_FOUND);
286 streamer_write_hwi_stream (ob->main_stream, ref);
289 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
291 bp = bitpack_create (ob->main_stream);
292 uid = (!gimple_has_body_p (edge->caller->decl)
293 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
294 bp_pack_enum (&bp, cgraph_inline_failed_t,
295 CIF_N_REASONS, edge->inline_failed);
296 bp_pack_var_len_unsigned (&bp, uid);
297 bp_pack_var_len_unsigned (&bp, edge->frequency);
298 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
299 bp_pack_value (&bp, edge->speculative, 1);
300 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
301 bp_pack_value (&bp, edge->can_throw_external, 1);
302 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
303 if (edge->indirect_unknown_callee)
305 int flags = edge->indirect_info->ecf_flags;
306 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
307 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
308 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
309 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
310 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
311 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
312 /* Flags that should not appear on indirect calls. */
313 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
314 | ECF_MAY_BE_ALLOCA
315 | ECF_SIBCALL
316 | ECF_LEAF
317 | ECF_NOVOPS)));
319 streamer_write_bitpack (&bp);
320 if (edge->indirect_unknown_callee)
322 streamer_write_hwi_stream (ob->main_stream,
323 edge->indirect_info->common_target_id);
324 if (edge->indirect_info->common_target_id)
325 streamer_write_hwi_stream
326 (ob->main_stream, edge->indirect_info->common_target_probability);
330 /* Return if NODE contain references from other partitions. */
332 bool
333 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
335 int i;
336 struct ipa_ref *ref = NULL;
338 for (i = 0; node->iterate_referring (i, ref); i++)
340 /* Ignore references from non-offloadable nodes while streaming NODE into
341 offload LTO section. */
342 if (!ref->referring->need_lto_streaming)
343 continue;
345 if (ref->referring->in_other_partition
346 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
347 return true;
349 return false;
352 /* Return true when node is reachable from other partition. */
354 bool
355 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
357 struct cgraph_edge *e;
358 if (!node->definition)
359 return false;
360 if (node->global.inlined_to)
361 return false;
362 for (e = node->callers; e; e = e->next_caller)
364 /* Ignore references from non-offloadable nodes while streaming NODE into
365 offload LTO section. */
366 if (!e->caller->need_lto_streaming)
367 continue;
369 if (e->caller->in_other_partition
370 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
371 return true;
373 return false;
376 /* Return if NODE contain references from other partitions. */
378 bool
379 referenced_from_this_partition_p (symtab_node *node,
380 lto_symtab_encoder_t encoder)
382 int i;
383 struct ipa_ref *ref = NULL;
385 for (i = 0; node->iterate_referring (i, ref); i++)
386 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
387 return true;
388 return false;
391 /* Return true when node is reachable from other partition. */
393 bool
394 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
396 struct cgraph_edge *e;
397 for (e = node->callers; e; e = e->next_caller)
398 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
399 return true;
400 return false;
403 /* Output the cgraph NODE to OB. ENCODER is used to find the
404 reference number of NODE->inlined_to. SET is the set of nodes we
405 are writing to the current file. If NODE is not in SET, then NODE
406 is a boundary of a cgraph_node_set and we pretend NODE just has a
407 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
408 that have had their callgraph node written so far. This is used to
409 determine if NODE is a clone of a previously written node. */
411 static void
412 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
413 lto_symtab_encoder_t encoder)
415 unsigned int tag;
416 struct bitpack_d bp;
417 bool boundary_p;
418 intptr_t ref;
419 bool in_other_partition = false;
420 struct cgraph_node *clone_of, *ultimate_clone_of;
421 ipa_opt_pass_d *pass;
422 int i;
423 bool alias_p;
424 const char *comdat;
425 const char *section;
426 tree group;
428 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
430 if (node->analyzed && !boundary_p)
431 tag = LTO_symtab_analyzed_node;
432 else
433 tag = LTO_symtab_unavail_node;
435 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
436 tag);
437 streamer_write_hwi_stream (ob->main_stream, node->order);
439 /* In WPA mode, we only output part of the call-graph. Also, we
440 fake cgraph node attributes. There are two cases that we care.
442 Boundary nodes: There are nodes that are not part of SET but are
443 called from within SET. We artificially make them look like
444 externally visible nodes with no function body.
446 Cherry-picked nodes: These are nodes we pulled from other
447 translation units into SET during IPA-inlining. We make them as
448 local static nodes to prevent clashes with other local statics. */
449 if (boundary_p && node->analyzed
450 && node->get_partitioning_class () == SYMBOL_PARTITION)
452 /* Inline clones can not be part of boundary.
453 gcc_assert (!node->global.inlined_to);
455 FIXME: At the moment they can be, when partition contains an inline
456 clone that is clone of inline clone from outside partition. We can
457 reshape the clone tree and make other tree to be the root, but it
458 needs a bit extra work and will be promplty done by cgraph_remove_node
459 after reading back. */
460 in_other_partition = 1;
463 clone_of = node->clone_of;
464 while (clone_of
465 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
466 if (clone_of->prev_sibling_clone)
467 clone_of = clone_of->prev_sibling_clone;
468 else
469 clone_of = clone_of->clone_of;
471 /* See if body of the master function is output. If not, we are seeing only
472 an declaration and we do not need to pass down clone tree. */
473 ultimate_clone_of = clone_of;
474 while (ultimate_clone_of && ultimate_clone_of->clone_of)
475 ultimate_clone_of = ultimate_clone_of->clone_of;
477 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
478 clone_of = NULL;
480 if (tag == LTO_symtab_analyzed_node)
481 gcc_assert (clone_of || !node->clone_of);
482 if (!clone_of)
483 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
484 else
485 streamer_write_hwi_stream (ob->main_stream, ref);
488 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
489 streamer_write_gcov_count_stream (ob->main_stream, node->count);
490 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
492 streamer_write_hwi_stream (ob->main_stream,
493 node->ipa_transforms_to_apply.length ());
494 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
495 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
497 if (tag == LTO_symtab_analyzed_node)
499 if (node->global.inlined_to)
501 ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
502 gcc_assert (ref != LCC_NOT_FOUND);
504 else
505 ref = LCC_NOT_FOUND;
507 streamer_write_hwi_stream (ob->main_stream, ref);
510 group = node->get_comdat_group ();
511 if (group)
512 comdat = IDENTIFIER_POINTER (group);
513 else
514 comdat = "";
515 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
517 if (group)
519 if (node->same_comdat_group && !boundary_p)
521 ref = lto_symtab_encoder_lookup (encoder,
522 node->same_comdat_group);
523 gcc_assert (ref != LCC_NOT_FOUND);
525 else
526 ref = LCC_NOT_FOUND;
527 streamer_write_hwi_stream (ob->main_stream, ref);
530 section = node->get_section ();
531 if (!section)
532 section = "";
534 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
536 bp = bitpack_create (ob->main_stream);
537 bp_pack_value (&bp, node->local.local, 1);
538 bp_pack_value (&bp, node->externally_visible, 1);
539 bp_pack_value (&bp, node->no_reorder, 1);
540 bp_pack_value (&bp, node->definition, 1);
541 bp_pack_value (&bp, node->local.versionable, 1);
542 bp_pack_value (&bp, node->local.can_change_signature, 1);
543 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
544 bp_pack_value (&bp, node->force_output, 1);
545 bp_pack_value (&bp, node->forced_by_abi, 1);
546 bp_pack_value (&bp, node->unique_name, 1);
547 bp_pack_value (&bp, node->body_removed, 1);
548 bp_pack_value (&bp, node->implicit_section, 1);
549 bp_pack_value (&bp, node->address_taken, 1);
550 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
551 && node->get_partitioning_class () == SYMBOL_PARTITION
552 && (reachable_from_other_partition_p (node, encoder)
553 || referenced_from_other_partition_p (node, encoder)), 1);
554 bp_pack_value (&bp, node->lowered, 1);
555 bp_pack_value (&bp, in_other_partition, 1);
556 /* Real aliases in a boundary become non-aliases. However we still stream
557 alias info on weakrefs.
558 TODO: We lose a bit of information here - when we know that variable is
559 defined in other unit, we may use the info on aliases to resolve
560 symbol1 != symbol2 type tests that we can do only for locally defined objects
561 otherwise. */
562 alias_p = node->alias && (!boundary_p || node->weakref);
563 bp_pack_value (&bp, alias_p, 1);
564 bp_pack_value (&bp, node->weakref, 1);
565 bp_pack_value (&bp, node->frequency, 2);
566 bp_pack_value (&bp, node->only_called_at_startup, 1);
567 bp_pack_value (&bp, node->only_called_at_exit, 1);
568 bp_pack_value (&bp, node->tm_clone, 1);
569 bp_pack_value (&bp, node->calls_comdat_local, 1);
570 bp_pack_value (&bp, node->icf_merged, 1);
571 bp_pack_value (&bp, node->nonfreeing_fn, 1);
572 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
573 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
574 LDPR_NUM_KNOWN, node->resolution);
575 bp_pack_value (&bp, node->instrumentation_clone, 1);
576 streamer_write_bitpack (&bp);
577 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
579 if (node->thunk.thunk_p && !boundary_p)
581 streamer_write_uhwi_stream
582 (ob->main_stream,
583 1 + (node->thunk.this_adjusting != 0) * 2
584 + (node->thunk.virtual_offset_p != 0) * 4
585 + (node->thunk.add_pointer_bounds_args != 0) * 8);
586 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
587 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
589 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
590 if (DECL_STATIC_CONSTRUCTOR (node->decl))
591 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
592 if (DECL_STATIC_DESTRUCTOR (node->decl))
593 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
595 if (node->instrumentation_clone)
596 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->orig_decl);
599 /* Output the varpool NODE to OB.
600 If NODE is not in SET, then NODE is a boundary. */
602 static void
603 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
604 lto_symtab_encoder_t encoder)
606 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
607 struct bitpack_d bp;
608 int ref;
609 bool alias_p;
610 const char *comdat;
611 const char *section;
612 tree group;
614 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
615 LTO_symtab_variable);
616 streamer_write_hwi_stream (ob->main_stream, node->order);
617 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
618 bp = bitpack_create (ob->main_stream);
619 bp_pack_value (&bp, node->externally_visible, 1);
620 bp_pack_value (&bp, node->no_reorder, 1);
621 bp_pack_value (&bp, node->force_output, 1);
622 bp_pack_value (&bp, node->forced_by_abi, 1);
623 bp_pack_value (&bp, node->unique_name, 1);
624 bp_pack_value (&bp, node->body_removed
625 || !lto_symtab_encoder_encode_initializer_p (encoder, node), 1);
626 bp_pack_value (&bp, node->implicit_section, 1);
627 bp_pack_value (&bp, node->writeonly, 1);
628 bp_pack_value (&bp, node->definition, 1);
629 alias_p = node->alias && (!boundary_p || node->weakref);
630 bp_pack_value (&bp, alias_p, 1);
631 bp_pack_value (&bp, node->weakref, 1);
632 bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
633 gcc_assert (node->definition || !node->analyzed);
634 /* Constant pool initializers can be de-unified into individual ltrans units.
635 FIXME: Alternatively at -Os we may want to avoid generating for them the local
636 labels and share them across LTRANS partitions. */
637 if (node->get_partitioning_class () != SYMBOL_PARTITION)
639 bp_pack_value (&bp, 0, 1); /* used_from_other_parition. */
640 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
642 else
644 bp_pack_value (&bp, node->definition
645 && referenced_from_other_partition_p (node, encoder), 1);
646 bp_pack_value (&bp, node->analyzed
647 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
648 /* in_other_partition. */
650 bp_pack_value (&bp, node->tls_model, 3);
651 bp_pack_value (&bp, node->used_by_single_function, 1);
652 bp_pack_value (&bp, node->need_bounds_init, 1);
653 streamer_write_bitpack (&bp);
655 group = node->get_comdat_group ();
656 if (group)
657 comdat = IDENTIFIER_POINTER (group);
658 else
659 comdat = "";
660 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
662 if (group)
664 if (node->same_comdat_group && !boundary_p)
666 ref = lto_symtab_encoder_lookup (encoder,
667 node->same_comdat_group);
668 gcc_assert (ref != LCC_NOT_FOUND);
670 else
671 ref = LCC_NOT_FOUND;
672 streamer_write_hwi_stream (ob->main_stream, ref);
675 section = node->get_section ();
676 if (!section)
677 section = "";
678 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
680 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
681 LDPR_NUM_KNOWN, node->resolution);
684 /* Output the varpool NODE to OB.
685 If NODE is not in SET, then NODE is a boundary. */
687 static void
688 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
689 lto_symtab_encoder_t encoder)
691 struct bitpack_d bp;
692 int nref;
693 int uid = ref->lto_stmt_uid;
694 struct cgraph_node *node;
696 bp = bitpack_create (ob->main_stream);
697 bp_pack_value (&bp, ref->use, 3);
698 bp_pack_value (&bp, ref->speculative, 1);
699 streamer_write_bitpack (&bp);
700 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
701 gcc_assert (nref != LCC_NOT_FOUND);
702 streamer_write_hwi_stream (ob->main_stream, nref);
704 node = dyn_cast <cgraph_node *> (ref->referring);
705 if (node)
707 if (ref->stmt)
708 uid = gimple_uid (ref->stmt) + 1;
709 streamer_write_hwi_stream (ob->main_stream, uid);
713 /* Stream out profile_summary to OB. */
715 static void
716 output_profile_summary (struct lto_simple_output_block *ob)
718 unsigned h_ix;
719 struct bitpack_d bp;
721 if (profile_info)
723 /* We do not output num and run_max, they are not used by
724 GCC profile feedback and they are difficult to merge from multiple
725 units. */
726 gcc_assert (profile_info->runs);
727 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
728 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
730 /* sum_all is needed for computing the working set with the
731 histogram. */
732 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
734 /* Create and output a bitpack of non-zero histogram entries indices. */
735 bp = bitpack_create (ob->main_stream);
736 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
737 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
738 streamer_write_bitpack (&bp);
739 /* Now stream out only those non-zero entries. */
740 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
742 if (!profile_info->histogram[h_ix].num_counters)
743 continue;
744 streamer_write_gcov_count_stream (ob->main_stream,
745 profile_info->histogram[h_ix].num_counters);
746 streamer_write_gcov_count_stream (ob->main_stream,
747 profile_info->histogram[h_ix].min_value);
748 streamer_write_gcov_count_stream (ob->main_stream,
749 profile_info->histogram[h_ix].cum_value);
751 /* IPA-profile computes hot bb threshold based on cumulated
752 whole program profile. We need to stream it down to ltrans. */
753 if (flag_wpa)
754 streamer_write_gcov_count_stream (ob->main_stream,
755 get_hot_bb_threshold ());
757 else
758 streamer_write_uhwi_stream (ob->main_stream, 0);
761 /* Output all callees or indirect outgoing edges. EDGE must be the first such
762 edge. */
764 static void
765 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
766 struct lto_simple_output_block *ob,
767 lto_symtab_encoder_t encoder)
769 if (!edge)
770 return;
772 /* Output edges in backward direction, so the reconstructed callgraph match
773 and it is easy to associate call sites in the IPA pass summaries. */
774 while (edge->next_callee)
775 edge = edge->next_callee;
776 for (; edge; edge = edge->prev_callee)
777 lto_output_edge (ob, edge, encoder);
780 /* Output the part of the cgraph in SET. */
782 static void
783 output_refs (lto_symtab_encoder_t encoder)
785 lto_symtab_encoder_iterator lsei;
786 struct lto_simple_output_block *ob;
787 int count;
788 struct ipa_ref *ref;
789 int i;
791 ob = lto_create_simple_output_block (LTO_section_refs);
793 for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
794 lsei_next_in_partition (&lsei))
796 symtab_node *node = lsei_node (lsei);
798 count = node->ref_list.nreferences ();
799 if (count)
801 streamer_write_gcov_count_stream (ob->main_stream, count);
802 streamer_write_uhwi_stream (ob->main_stream,
803 lto_symtab_encoder_lookup (encoder, node));
804 for (i = 0; node->iterate_reference (i, ref); i++)
805 lto_output_ref (ob, ref, encoder);
809 streamer_write_uhwi_stream (ob->main_stream, 0);
811 lto_destroy_simple_output_block (ob);
814 /* Add NODE into encoder as well as nodes it is cloned from.
815 Do it in a way so clones appear first. */
817 static void
818 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
819 bool include_body)
821 if (node->clone_of)
822 add_node_to (encoder, node->clone_of, include_body);
823 else if (include_body)
824 lto_set_symtab_encoder_encode_body (encoder, node);
825 lto_symtab_encoder_encode (encoder, node);
828 /* Add all references in NODE to encoders. */
830 static void
831 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
833 int i;
834 struct ipa_ref *ref = NULL;
835 for (i = 0; node->iterate_reference (i, ref); i++)
836 if (is_a <cgraph_node *> (ref->referred))
837 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
838 else
839 lto_symtab_encoder_encode (encoder, ref->referred);
842 /* Select what needs to be streamed out. In regular lto mode stream everything.
843 In offload lto mode stream only nodes marked as offloadable. */
844 void
845 select_what_to_stream (bool offload_lto_mode)
847 struct symtab_node *snode;
848 FOR_EACH_SYMBOL (snode)
849 snode->need_lto_streaming = !offload_lto_mode || snode->offloadable;
852 /* Find all symbols we want to stream into given partition and insert them
853 to encoders.
855 The function actually replaces IN_ENCODER by new one. The reason is that
856 streaming code needs clone's origin to be streamed before clone. This
857 means that we need to insert the nodes in specific order. This order is
858 ignored by the partitioning logic earlier. */
860 lto_symtab_encoder_t
861 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
863 struct cgraph_edge *edge;
864 int i;
865 lto_symtab_encoder_t encoder;
866 lto_symtab_encoder_iterator lsei;
867 hash_set<void *> reachable_call_targets;
869 encoder = lto_symtab_encoder_new (false);
871 /* Go over all entries in the IN_ENCODER and duplicate them to
872 ENCODER. At the same time insert masters of clones so
873 every master appears before clone. */
874 for (lsei = lsei_start_function_in_partition (in_encoder);
875 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
877 struct cgraph_node *node = lsei_cgraph_node (lsei);
878 if (!node->need_lto_streaming)
879 continue;
880 add_node_to (encoder, node, true);
881 lto_set_symtab_encoder_in_partition (encoder, node);
882 create_references (encoder, node);
883 /* For proper debug info, we need to ship the origins, too. */
884 if (DECL_ABSTRACT_ORIGIN (node->decl))
886 struct cgraph_node *origin_node
887 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
888 origin_node->used_as_abstract_origin = true;
889 add_node_to (encoder, origin_node, true);
892 for (lsei = lsei_start_variable_in_partition (in_encoder);
893 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
895 varpool_node *vnode = lsei_varpool_node (lsei);
897 if (!vnode->need_lto_streaming)
898 continue;
899 lto_set_symtab_encoder_in_partition (encoder, vnode);
900 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
901 create_references (encoder, vnode);
902 /* For proper debug info, we need to ship the origins, too. */
903 if (DECL_ABSTRACT_ORIGIN (vnode->decl))
905 varpool_node *origin_node
906 = varpool_node::get (DECL_ABSTRACT_ORIGIN (vnode->decl));
907 lto_set_symtab_encoder_in_partition (encoder, origin_node);
910 /* Pickle in also the initializer of all referenced readonly variables
911 to help folding. Constant pool variables are not shared, so we must
912 pickle those too. */
913 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
915 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
916 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
918 if (!lto_symtab_encoder_encode_initializer_p (encoder,
919 vnode)
920 && (((vnode->ctor_useable_for_folding_p ()
921 && (!DECL_VIRTUAL_P (vnode->decl)
922 || !flag_wpa
923 || flag_ltrans_devirtualize))
924 || POINTER_BOUNDS_P (vnode->decl))))
926 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
927 create_references (encoder, vnode);
932 /* Go over all the nodes again to include callees that are not in
933 SET. */
934 for (lsei = lsei_start_function_in_partition (encoder);
935 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
937 struct cgraph_node *node = lsei_cgraph_node (lsei);
938 for (edge = node->callees; edge; edge = edge->next_callee)
940 struct cgraph_node *callee = edge->callee;
941 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
943 /* We should have moved all the inlines. */
944 gcc_assert (!callee->global.inlined_to);
945 add_node_to (encoder, callee, false);
948 /* Add all possible targets for late devirtualization. */
949 if (flag_ltrans_devirtualize || !flag_wpa)
950 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
951 if (edge->indirect_info->polymorphic)
953 unsigned int i;
954 void *cache_token;
955 bool final;
956 vec <cgraph_node *>targets
957 = possible_polymorphic_call_targets
958 (edge, &final, &cache_token);
959 if (!reachable_call_targets.add (cache_token))
961 for (i = 0; i < targets.length (); i++)
963 struct cgraph_node *callee = targets[i];
965 /* Adding an external declarations into the unit serves
966 no purpose and just increases its boundary. */
967 if (callee->definition
968 && !lto_symtab_encoder_in_partition_p
969 (encoder, callee))
971 gcc_assert (!callee->global.inlined_to);
972 add_node_to (encoder, callee, false);
978 lto_symtab_encoder_delete (in_encoder);
979 return encoder;
982 /* Output the part of the symtab in SET and VSET. */
984 void
985 output_symtab (void)
987 struct cgraph_node *node;
988 struct lto_simple_output_block *ob;
989 lto_symtab_encoder_iterator lsei;
990 int i, n_nodes;
991 lto_symtab_encoder_t encoder;
993 if (flag_wpa)
994 output_cgraph_opt_summary ();
996 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
998 output_profile_summary (ob);
1000 /* An encoder for cgraph nodes should have been created by
1001 ipa_write_summaries_1. */
1002 gcc_assert (ob->decl_state->symtab_node_encoder);
1003 encoder = ob->decl_state->symtab_node_encoder;
1005 /* Write out the nodes. We must first output a node and then its clones,
1006 otherwise at a time reading back the node there would be nothing to clone
1007 from. */
1008 n_nodes = lto_symtab_encoder_size (encoder);
1009 for (i = 0; i < n_nodes; i++)
1011 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1012 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1013 lto_output_node (ob, cnode, encoder);
1014 else
1015 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
1018 /* Go over the nodes in SET again to write edges. */
1019 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1020 lsei_next_function_in_partition (&lsei))
1022 node = lsei_cgraph_node (lsei);
1023 output_outgoing_cgraph_edges (node->callees, ob, encoder);
1024 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
1027 streamer_write_uhwi_stream (ob->main_stream, 0);
1029 lto_destroy_simple_output_block (ob);
1031 /* Emit toplevel asms.
1032 When doing WPA we must output every asm just once. Since we do not partition asm
1033 nodes at all, output them to first output. This is kind of hack, but should work
1034 well. */
1035 if (!asm_nodes_output)
1037 asm_nodes_output = true;
1038 lto_output_toplevel_asms ();
1041 output_refs (encoder);
1044 /* Return identifier encoded in IB as a plain string. */
1046 static tree
1047 read_identifier (struct lto_input_block *ib)
1049 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1050 tree id;
1052 if (ib->data[ib->p + len])
1053 lto_section_overrun (ib);
1054 if (!len)
1056 ib->p++;
1057 return NULL;
1059 id = get_identifier (ib->data + ib->p);
1060 ib->p += len + 1;
1061 return id;
1064 /* Return string encoded in IB, NULL if string is empty. */
1066 static const char *
1067 read_string (struct lto_input_block *ib)
1069 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1070 const char *str;
1072 if (ib->data[ib->p + len])
1073 lto_section_overrun (ib);
1074 if (!len)
1076 ib->p++;
1077 return NULL;
1079 str = ib->data + ib->p;
1080 ib->p += len + 1;
1081 return str;
1084 /* Output function/variable tables that will allow libgomp to look up offload
1085 target code.
1086 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1087 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1088 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1090 void
1091 output_offload_tables (void)
1093 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
1094 return;
1096 struct lto_simple_output_block *ob
1097 = lto_create_simple_output_block (LTO_section_offload_table);
1099 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1101 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1102 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1103 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
1104 (*offload_funcs)[i]);
1107 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1109 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1110 LTO_symtab_last_tag, LTO_symtab_variable);
1111 lto_output_var_decl_index (ob->decl_state, ob->main_stream,
1112 (*offload_vars)[i]);
1115 streamer_write_uhwi_stream (ob->main_stream, 0);
1116 lto_destroy_simple_output_block (ob);
1118 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1119 streamed to one partition only. That's why we free offload_funcs and
1120 offload_vars after the first call of output_offload_tables. */
1121 if (flag_wpa)
1123 vec_free (offload_funcs);
1124 vec_free (offload_vars);
1128 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
1129 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
1130 NODE or to replace the values in it, for instance because the first
1131 time we saw it, the function body was not available but now it
1132 is. BP is a bitpack with all the bitflags for NODE read from the
1133 stream. */
1135 static void
1136 input_overwrite_node (struct lto_file_decl_data *file_data,
1137 struct cgraph_node *node,
1138 enum LTO_symtab_tags tag,
1139 struct bitpack_d *bp)
1141 node->aux = (void *) tag;
1142 node->lto_file_data = file_data;
1144 node->local.local = bp_unpack_value (bp, 1);
1145 node->externally_visible = bp_unpack_value (bp, 1);
1146 node->no_reorder = bp_unpack_value (bp, 1);
1147 node->definition = bp_unpack_value (bp, 1);
1148 node->local.versionable = bp_unpack_value (bp, 1);
1149 node->local.can_change_signature = bp_unpack_value (bp, 1);
1150 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
1151 node->force_output = bp_unpack_value (bp, 1);
1152 node->forced_by_abi = bp_unpack_value (bp, 1);
1153 node->unique_name = bp_unpack_value (bp, 1);
1154 node->body_removed = bp_unpack_value (bp, 1);
1155 node->implicit_section = bp_unpack_value (bp, 1);
1156 node->address_taken = bp_unpack_value (bp, 1);
1157 node->used_from_other_partition = bp_unpack_value (bp, 1);
1158 node->lowered = bp_unpack_value (bp, 1);
1159 node->analyzed = tag == LTO_symtab_analyzed_node;
1160 node->in_other_partition = bp_unpack_value (bp, 1);
1161 if (node->in_other_partition
1162 /* Avoid updating decl when we are seeing just inline clone.
1163 When inlining function that has functions already inlined into it,
1164 we produce clones of inline clones.
1166 WPA partitioning might put each clone into different unit and
1167 we might end up streaming inline clone from other partition
1168 to support clone we are interested in. */
1169 && (!node->clone_of
1170 || node->clone_of->decl != node->decl))
1172 DECL_EXTERNAL (node->decl) = 1;
1173 TREE_STATIC (node->decl) = 0;
1175 node->alias = bp_unpack_value (bp, 1);
1176 node->weakref = bp_unpack_value (bp, 1);
1177 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1178 node->only_called_at_startup = bp_unpack_value (bp, 1);
1179 node->only_called_at_exit = bp_unpack_value (bp, 1);
1180 node->tm_clone = bp_unpack_value (bp, 1);
1181 node->calls_comdat_local = bp_unpack_value (bp, 1);
1182 node->icf_merged = bp_unpack_value (bp, 1);
1183 node->nonfreeing_fn = bp_unpack_value (bp, 1);
1184 node->thunk.thunk_p = bp_unpack_value (bp, 1);
1185 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1186 LDPR_NUM_KNOWN);
1187 node->instrumentation_clone = bp_unpack_value (bp, 1);
1188 gcc_assert (flag_ltrans
1189 || (!node->in_other_partition
1190 && !node->used_from_other_partition));
1193 /* Return string alias is alias of. */
1195 static tree
1196 get_alias_symbol (tree decl)
1198 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1199 return get_identifier (TREE_STRING_POINTER
1200 (TREE_VALUE (TREE_VALUE (alias))));
1203 /* Read a node from input_block IB. TAG is the node's tag just read.
1204 Return the node read or overwriten. */
1206 static struct cgraph_node *
1207 input_node (struct lto_file_decl_data *file_data,
1208 struct lto_input_block *ib,
1209 enum LTO_symtab_tags tag,
1210 vec<symtab_node *> nodes)
1212 gcc::pass_manager *passes = g->get_passes ();
1213 tree fn_decl;
1214 struct cgraph_node *node;
1215 struct bitpack_d bp;
1216 unsigned decl_index;
1217 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1218 int clone_ref;
1219 int order;
1220 int i, count;
1221 tree group;
1222 const char *section;
1223 order = streamer_read_hwi (ib) + order_base;
1224 clone_ref = streamer_read_hwi (ib);
1226 decl_index = streamer_read_uhwi (ib);
1227 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1229 if (clone_ref != LCC_NOT_FOUND)
1231 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1232 0, CGRAPH_FREQ_BASE, false,
1233 vNULL, false, NULL, NULL);
1235 else
1237 /* Declaration of functions can be already merged with a declaration
1238 from other input file. We keep cgraph unmerged until after streaming
1239 of ipa passes is done. Alays forcingly create a fresh node. */
1240 node = symtab->create_empty ();
1241 node->decl = fn_decl;
1242 node->register_symbol ();
1245 node->order = order;
1246 if (order >= symtab->order)
1247 symtab->order = order + 1;
1249 node->count = streamer_read_gcov_count (ib);
1250 node->count_materialization_scale = streamer_read_hwi (ib);
1252 count = streamer_read_hwi (ib);
1253 node->ipa_transforms_to_apply = vNULL;
1254 for (i = 0; i < count; i++)
1256 opt_pass *pass;
1257 int pid = streamer_read_hwi (ib);
1259 gcc_assert (pid < passes->passes_by_id_size);
1260 pass = passes->passes_by_id[pid];
1261 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1264 if (tag == LTO_symtab_analyzed_node)
1265 ref = streamer_read_hwi (ib);
1267 group = read_identifier (ib);
1268 if (group)
1269 ref2 = streamer_read_hwi (ib);
1271 /* Make sure that we have not read this node before. Nodes that
1272 have already been read will have their tag stored in the 'aux'
1273 field. Since built-in functions can be referenced in multiple
1274 functions, they are expected to be read more than once. */
1275 if (node->aux && !DECL_BUILT_IN (node->decl))
1276 internal_error ("bytecode stream: found multiple instances of cgraph "
1277 "node with uid %d", node->uid);
1279 node->tp_first_run = streamer_read_uhwi (ib);
1281 bp = streamer_read_bitpack (ib);
1283 input_overwrite_node (file_data, node, tag, &bp);
1285 /* Store a reference for now, and fix up later to be a pointer. */
1286 node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
1288 if (group)
1290 node->set_comdat_group (group);
1291 /* Store a reference for now, and fix up later to be a pointer. */
1292 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1294 else
1295 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1296 section = read_string (ib);
1297 if (section)
1298 node->set_section_for_node (section);
1300 if (node->thunk.thunk_p)
1302 int type = streamer_read_uhwi (ib);
1303 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1304 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1306 node->thunk.fixed_offset = fixed_offset;
1307 node->thunk.this_adjusting = (type & 2);
1308 node->thunk.virtual_value = virtual_value;
1309 node->thunk.virtual_offset_p = (type & 4);
1310 node->thunk.add_pointer_bounds_args = (type & 8);
1312 if (node->alias && !node->analyzed && node->weakref)
1313 node->alias_target = get_alias_symbol (node->decl);
1314 node->profile_id = streamer_read_hwi (ib);
1315 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1316 node->set_init_priority (streamer_read_hwi (ib));
1317 if (DECL_STATIC_DESTRUCTOR (node->decl))
1318 node->set_fini_priority (streamer_read_hwi (ib));
1320 if (node->instrumentation_clone)
1322 decl_index = streamer_read_uhwi (ib);
1323 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1324 node->orig_decl = fn_decl;
1327 return node;
1330 /* Read a node from input_block IB. TAG is the node's tag just read.
1331 Return the node read or overwriten. */
1333 static varpool_node *
1334 input_varpool_node (struct lto_file_decl_data *file_data,
1335 struct lto_input_block *ib)
1337 int decl_index;
1338 tree var_decl;
1339 varpool_node *node;
1340 struct bitpack_d bp;
1341 int ref = LCC_NOT_FOUND;
1342 int order;
1343 tree group;
1344 const char *section;
1346 order = streamer_read_hwi (ib) + order_base;
1347 decl_index = streamer_read_uhwi (ib);
1348 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1350 /* Declaration of functions can be already merged with a declaration
1351 from other input file. We keep cgraph unmerged until after streaming
1352 of ipa passes is done. Alays forcingly create a fresh node. */
1353 node = varpool_node::create_empty ();
1354 node->decl = var_decl;
1355 node->register_symbol ();
1357 node->order = order;
1358 if (order >= symtab->order)
1359 symtab->order = order + 1;
1360 node->lto_file_data = file_data;
1362 bp = streamer_read_bitpack (ib);
1363 node->externally_visible = bp_unpack_value (&bp, 1);
1364 node->no_reorder = bp_unpack_value (&bp, 1);
1365 node->force_output = bp_unpack_value (&bp, 1);
1366 node->forced_by_abi = bp_unpack_value (&bp, 1);
1367 node->unique_name = bp_unpack_value (&bp, 1);
1368 node->body_removed = bp_unpack_value (&bp, 1);
1369 node->implicit_section = bp_unpack_value (&bp, 1);
1370 node->writeonly = bp_unpack_value (&bp, 1);
1371 node->definition = bp_unpack_value (&bp, 1);
1372 node->alias = bp_unpack_value (&bp, 1);
1373 node->weakref = bp_unpack_value (&bp, 1);
1374 node->analyzed = bp_unpack_value (&bp, 1);
1375 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1376 node->in_other_partition = bp_unpack_value (&bp, 1);
1377 if (node->in_other_partition)
1379 DECL_EXTERNAL (node->decl) = 1;
1380 TREE_STATIC (node->decl) = 0;
1382 if (node->alias && !node->analyzed && node->weakref)
1383 node->alias_target = get_alias_symbol (node->decl);
1384 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1385 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1386 node->need_bounds_init = bp_unpack_value (&bp, 1);
1387 group = read_identifier (ib);
1388 if (group)
1390 node->set_comdat_group (group);
1391 ref = streamer_read_hwi (ib);
1392 /* Store a reference for now, and fix up later to be a pointer. */
1393 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1395 else
1396 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1397 section = read_string (ib);
1398 if (section)
1399 node->set_section_for_node (section);
1400 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1401 LDPR_NUM_KNOWN);
1402 gcc_assert (flag_ltrans
1403 || (!node->in_other_partition
1404 && !node->used_from_other_partition));
1406 return node;
1409 /* Read a node from input_block IB. TAG is the node's tag just read.
1410 Return the node read or overwriten. */
1412 static void
1413 input_ref (struct lto_input_block *ib,
1414 symtab_node *referring_node,
1415 vec<symtab_node *> nodes)
1417 symtab_node *node = NULL;
1418 struct bitpack_d bp;
1419 enum ipa_ref_use use;
1420 bool speculative;
1421 struct ipa_ref *ref;
1423 bp = streamer_read_bitpack (ib);
1424 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1425 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1426 node = nodes[streamer_read_hwi (ib)];
1427 ref = referring_node->create_reference (node, use);
1428 ref->speculative = speculative;
1429 if (is_a <cgraph_node *> (referring_node))
1430 ref->lto_stmt_uid = streamer_read_hwi (ib);
1433 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1434 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1435 edge being read is indirect (in the sense that it has
1436 indirect_unknown_callee set). */
1438 static void
1439 input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
1440 bool indirect)
1442 struct cgraph_node *caller, *callee;
1443 struct cgraph_edge *edge;
1444 unsigned int stmt_id;
1445 gcov_type count;
1446 int freq;
1447 cgraph_inline_failed_t inline_failed;
1448 struct bitpack_d bp;
1449 int ecf_flags = 0;
1451 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1452 if (caller == NULL || caller->decl == NULL_TREE)
1453 internal_error ("bytecode stream: no caller found while reading edge");
1455 if (!indirect)
1457 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1458 if (callee == NULL || callee->decl == NULL_TREE)
1459 internal_error ("bytecode stream: no callee found while reading edge");
1461 else
1462 callee = NULL;
1464 count = streamer_read_gcov_count (ib);
1466 bp = streamer_read_bitpack (ib);
1467 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1468 stmt_id = bp_unpack_var_len_unsigned (&bp);
1469 freq = (int) bp_unpack_var_len_unsigned (&bp);
1471 if (indirect)
1472 edge = caller->create_indirect_edge (NULL, 0, count, freq);
1473 else
1474 edge = caller->create_edge (callee, NULL, count, freq);
1476 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1477 edge->speculative = bp_unpack_value (&bp, 1);
1478 edge->lto_stmt_uid = stmt_id;
1479 edge->inline_failed = inline_failed;
1480 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1481 edge->can_throw_external = bp_unpack_value (&bp, 1);
1482 edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
1483 if (indirect)
1485 if (bp_unpack_value (&bp, 1))
1486 ecf_flags |= ECF_CONST;
1487 if (bp_unpack_value (&bp, 1))
1488 ecf_flags |= ECF_PURE;
1489 if (bp_unpack_value (&bp, 1))
1490 ecf_flags |= ECF_NORETURN;
1491 if (bp_unpack_value (&bp, 1))
1492 ecf_flags |= ECF_MALLOC;
1493 if (bp_unpack_value (&bp, 1))
1494 ecf_flags |= ECF_NOTHROW;
1495 if (bp_unpack_value (&bp, 1))
1496 ecf_flags |= ECF_RETURNS_TWICE;
1497 edge->indirect_info->ecf_flags = ecf_flags;
1498 edge->indirect_info->common_target_id = streamer_read_hwi (ib);
1499 if (edge->indirect_info->common_target_id)
1500 edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
1505 /* Read a cgraph from IB using the info in FILE_DATA. */
1507 static vec<symtab_node *>
1508 input_cgraph_1 (struct lto_file_decl_data *file_data,
1509 struct lto_input_block *ib)
1511 enum LTO_symtab_tags tag;
1512 vec<symtab_node *> nodes = vNULL;
1513 symtab_node *node;
1514 unsigned i;
1516 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1517 order_base = symtab->order;
1518 while (tag)
1520 if (tag == LTO_symtab_edge)
1521 input_edge (ib, nodes, false);
1522 else if (tag == LTO_symtab_indirect_edge)
1523 input_edge (ib, nodes, true);
1524 else if (tag == LTO_symtab_variable)
1526 node = input_varpool_node (file_data, ib);
1527 nodes.safe_push (node);
1528 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1530 else
1532 node = input_node (file_data, ib, tag, nodes);
1533 if (node == NULL || node->decl == NULL_TREE)
1534 internal_error ("bytecode stream: found empty cgraph node");
1535 nodes.safe_push (node);
1536 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1539 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1542 lto_input_toplevel_asms (file_data, order_base);
1544 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1545 #ifdef ENABLE_CHECKING
1546 FOR_EACH_VEC_ELT (nodes, i, node)
1547 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1548 #endif
1549 FOR_EACH_VEC_ELT (nodes, i, node)
1551 int ref;
1552 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1554 ref = (int) (intptr_t) cnode->global.inlined_to;
1556 /* We share declaration of builtins, so we may read same node twice. */
1557 if (!node->aux)
1558 continue;
1559 node->aux = NULL;
1561 /* Fixup inlined_to from reference to pointer. */
1562 if (ref != LCC_NOT_FOUND)
1563 dyn_cast<cgraph_node *> (node)->global.inlined_to
1564 = dyn_cast<cgraph_node *> (nodes[ref]);
1565 else
1566 cnode->global.inlined_to = NULL;
1568 /* Compute instrumented_version. */
1569 if (cnode->instrumentation_clone)
1571 gcc_assert (cnode->orig_decl);
1573 cnode->instrumented_version = cgraph_node::get (cnode->orig_decl);
1574 if (cnode->instrumented_version)
1576 /* We may have multiple nodes for a single function which
1577 will be merged later. To have a proper merge we need
1578 to keep instrumentation_version reference between nodes
1579 consistent: each instrumented_version reference should
1580 have proper reverse reference. Thus don't break existing
1581 instrumented_version reference if it already exists. */
1582 if (cnode->instrumented_version->instrumented_version)
1583 cnode->instrumented_version = NULL;
1584 else
1585 cnode->instrumented_version->instrumented_version = cnode;
1588 /* Restore decl names reference. */
1589 if (IDENTIFIER_TRANSPARENT_ALIAS (DECL_ASSEMBLER_NAME (cnode->decl))
1590 && !TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode->decl)))
1591 TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode->decl))
1592 = DECL_ASSEMBLER_NAME (cnode->orig_decl);
1596 ref = (int) (intptr_t) node->same_comdat_group;
1598 /* Fixup same_comdat_group from reference to pointer. */
1599 if (ref != LCC_NOT_FOUND)
1600 node->same_comdat_group = nodes[ref];
1601 else
1602 node->same_comdat_group = NULL;
1604 FOR_EACH_VEC_ELT (nodes, i, node)
1605 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1606 return nodes;
1609 /* Input ipa_refs. */
1611 static void
1612 input_refs (struct lto_input_block *ib,
1613 vec<symtab_node *> nodes)
1615 int count;
1616 int idx;
1617 while (true)
1619 symtab_node *node;
1620 count = streamer_read_uhwi (ib);
1621 if (!count)
1622 break;
1623 idx = streamer_read_uhwi (ib);
1624 node = nodes[idx];
1625 while (count)
1627 input_ref (ib, node, nodes);
1628 count--;
1634 static struct gcov_ctr_summary lto_gcov_summary;
1636 /* Input profile_info from IB. */
1637 static void
1638 input_profile_summary (struct lto_input_block *ib,
1639 struct lto_file_decl_data *file_data)
1641 unsigned h_ix;
1642 struct bitpack_d bp;
1643 unsigned int runs = streamer_read_uhwi (ib);
1644 if (runs)
1646 file_data->profile_info.runs = runs;
1647 file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
1648 file_data->profile_info.sum_all = streamer_read_gcov_count (ib);
1650 memset (file_data->profile_info.histogram, 0,
1651 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1652 /* Input the bitpack of non-zero histogram indices. */
1653 bp = streamer_read_bitpack (ib);
1654 /* Read in and unpack the full bitpack, flagging non-zero
1655 histogram entries by setting the num_counters non-zero. */
1656 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1658 file_data->profile_info.histogram[h_ix].num_counters
1659 = bp_unpack_value (&bp, 1);
1661 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1663 if (!file_data->profile_info.histogram[h_ix].num_counters)
1664 continue;
1666 file_data->profile_info.histogram[h_ix].num_counters
1667 = streamer_read_gcov_count (ib);
1668 file_data->profile_info.histogram[h_ix].min_value
1669 = streamer_read_gcov_count (ib);
1670 file_data->profile_info.histogram[h_ix].cum_value
1671 = streamer_read_gcov_count (ib);
1673 /* IPA-profile computes hot bb threshold based on cumulated
1674 whole program profile. We need to stream it down to ltrans. */
1675 if (flag_ltrans)
1676 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1681 /* Rescale profile summaries to the same number of runs in the whole unit. */
1683 static void
1684 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1686 struct lto_file_decl_data *file_data;
1687 unsigned int j, h_ix;
1688 gcov_unsigned_t max_runs = 0;
1689 struct cgraph_node *node;
1690 struct cgraph_edge *edge;
1691 gcov_type saved_sum_all = 0;
1692 gcov_ctr_summary *saved_profile_info = 0;
1693 int saved_scale = 0;
1695 /* Find unit with maximal number of runs. If we ever get serious about
1696 roundoff errors, we might also consider computing smallest common
1697 multiply. */
1698 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1699 if (max_runs < file_data->profile_info.runs)
1700 max_runs = file_data->profile_info.runs;
1702 if (!max_runs)
1703 return;
1705 /* Simple overflow check. We probably don't need to support that many train
1706 runs. Such a large value probably imply data corruption anyway. */
1707 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1709 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1710 INT_MAX / REG_BR_PROB_BASE);
1711 return;
1714 profile_info = &lto_gcov_summary;
1715 lto_gcov_summary.runs = max_runs;
1716 lto_gcov_summary.sum_max = 0;
1717 memset (lto_gcov_summary.histogram, 0,
1718 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1720 /* Rescale all units to the maximal number of runs.
1721 sum_max can not be easily merged, as we have no idea what files come from
1722 the same run. We do not use the info anyway, so leave it 0. */
1723 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1724 if (file_data->profile_info.runs)
1726 int scale = GCOV_COMPUTE_SCALE (max_runs,
1727 file_data->profile_info.runs);
1728 lto_gcov_summary.sum_max
1729 = MAX (lto_gcov_summary.sum_max,
1730 apply_scale (file_data->profile_info.sum_max, scale));
1731 lto_gcov_summary.sum_all
1732 = MAX (lto_gcov_summary.sum_all,
1733 apply_scale (file_data->profile_info.sum_all, scale));
1734 /* Save a pointer to the profile_info with the largest
1735 scaled sum_all and the scale for use in merging the
1736 histogram. */
1737 if (!saved_profile_info
1738 || lto_gcov_summary.sum_all > saved_sum_all)
1740 saved_profile_info = &file_data->profile_info;
1741 saved_sum_all = lto_gcov_summary.sum_all;
1742 saved_scale = scale;
1746 gcc_assert (saved_profile_info);
1748 /* Scale up the histogram from the profile that had the largest
1749 scaled sum_all above. */
1750 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1752 /* Scale up the min value as we did the corresponding sum_all
1753 above. Use that to find the new histogram index. */
1754 gcov_type scaled_min
1755 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1756 saved_scale);
1757 /* The new index may be shared with another scaled histogram entry,
1758 so we need to account for a non-zero histogram entry at new_ix. */
1759 unsigned new_ix = gcov_histo_index (scaled_min);
1760 lto_gcov_summary.histogram[new_ix].min_value
1761 = (lto_gcov_summary.histogram[new_ix].num_counters
1762 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1763 : scaled_min);
1764 /* Some of the scaled counter values would ostensibly need to be placed
1765 into different (larger) histogram buckets, but we keep things simple
1766 here and place the scaled cumulative counter value in the bucket
1767 corresponding to the scaled minimum counter value. */
1768 lto_gcov_summary.histogram[new_ix].cum_value
1769 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1770 saved_scale);
1771 lto_gcov_summary.histogram[new_ix].num_counters
1772 += saved_profile_info->histogram[h_ix].num_counters;
1775 /* Watch roundoff errors. */
1776 if (lto_gcov_summary.sum_max < max_runs)
1777 lto_gcov_summary.sum_max = max_runs;
1779 /* If merging already happent at WPA time, we are done. */
1780 if (flag_ltrans)
1781 return;
1783 /* Now compute count_materialization_scale of each node.
1784 During LTRANS we already have values of count_materialization_scale
1785 computed, so just update them. */
1786 FOR_EACH_FUNCTION (node)
1787 if (node->lto_file_data
1788 && node->lto_file_data->profile_info.runs)
1790 int scale;
1792 scale = RDIV (node->count_materialization_scale * max_runs,
1793 node->lto_file_data->profile_info.runs);
1794 node->count_materialization_scale = scale;
1795 if (scale < 0)
1796 fatal_error ("Profile information in %s corrupted",
1797 file_data->file_name);
1799 if (scale == REG_BR_PROB_BASE)
1800 continue;
1801 for (edge = node->callees; edge; edge = edge->next_callee)
1802 edge->count = apply_scale (edge->count, scale);
1803 node->count = apply_scale (node->count, scale);
1807 /* Input and merge the symtab from each of the .o files passed to
1808 lto1. */
1810 void
1811 input_symtab (void)
1813 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1814 struct lto_file_decl_data *file_data;
1815 unsigned int j = 0;
1816 struct cgraph_node *node;
1818 while ((file_data = file_data_vec[j++]))
1820 const char *data;
1821 size_t len;
1822 struct lto_input_block *ib;
1823 vec<symtab_node *> nodes;
1825 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1826 &data, &len);
1827 if (!ib)
1828 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1829 input_profile_summary (ib, file_data);
1830 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1831 nodes = input_cgraph_1 (file_data, ib);
1832 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1833 ib, data, len);
1835 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1836 &data, &len);
1837 if (!ib)
1838 fatal_error ("cannot find LTO section refs in %s",
1839 file_data->file_name);
1840 input_refs (ib, nodes);
1841 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1842 ib, data, len);
1843 if (flag_ltrans)
1844 input_cgraph_opt_summary (nodes);
1845 nodes.release ();
1848 merge_profile_summaries (file_data_vec);
1849 get_working_sets ();
1852 /* Clear out the aux field that was used to store enough state to
1853 tell which nodes should be overwritten. */
1854 FOR_EACH_FUNCTION (node)
1856 /* Some nodes may have been created by cgraph_node. This
1857 happens when the callgraph contains nested functions. If the
1858 node for the parent function was never emitted to the gimple
1859 file, cgraph_node will create a node for it when setting the
1860 context of the nested function. */
1861 if (node->lto_file_data)
1862 node->aux = NULL;
1866 /* Input function/variable tables that will allow libgomp to look up offload
1867 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1869 void
1870 input_offload_tables (void)
1872 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1873 struct lto_file_decl_data *file_data;
1874 unsigned int j = 0;
1876 while ((file_data = file_data_vec[j++]))
1878 const char *data;
1879 size_t len;
1880 struct lto_input_block *ib
1881 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1882 &data, &len);
1883 if (!ib)
1884 continue;
1886 enum LTO_symtab_tags tag
1887 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1888 while (tag)
1890 if (tag == LTO_symtab_unavail_node)
1892 int decl_index = streamer_read_uhwi (ib);
1893 tree fn_decl
1894 = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1895 vec_safe_push (offload_funcs, fn_decl);
1897 else if (tag == LTO_symtab_variable)
1899 int decl_index = streamer_read_uhwi (ib);
1900 tree var_decl
1901 = lto_file_decl_data_get_var_decl (file_data, decl_index);
1902 vec_safe_push (offload_vars, var_decl);
1904 else
1905 fatal_error ("invalid offload table in %s", file_data->file_name);
1907 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1910 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1911 ib, data, len);
1915 /* True when we need optimization summary for NODE. */
1917 static int
1918 output_cgraph_opt_summary_p (struct cgraph_node *node)
1920 return (node->clone_of
1921 && (node->clone.tree_map
1922 || node->clone.args_to_skip
1923 || node->clone.combined_args_to_skip));
1926 /* Output optimization summary for EDGE to OB. */
1927 static void
1928 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1929 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1933 /* Output optimization summary for NODE to OB. */
1935 static void
1936 output_node_opt_summary (struct output_block *ob,
1937 struct cgraph_node *node,
1938 lto_symtab_encoder_t encoder)
1940 unsigned int index;
1941 bitmap_iterator bi;
1942 struct ipa_replace_map *map;
1943 struct bitpack_d bp;
1944 int i;
1945 struct cgraph_edge *e;
1947 if (node->clone.args_to_skip)
1949 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1950 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1951 streamer_write_uhwi (ob, index);
1953 else
1954 streamer_write_uhwi (ob, 0);
1955 if (node->clone.combined_args_to_skip)
1957 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1958 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1959 streamer_write_uhwi (ob, index);
1961 else
1962 streamer_write_uhwi (ob, 0);
1963 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1964 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1966 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1967 mechanism to store function local declarations into summaries. */
1968 gcc_assert (!map->old_tree);
1969 streamer_write_uhwi (ob, map->parm_num);
1970 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1971 stream_write_tree (ob, map->new_tree, true);
1972 bp = bitpack_create (ob->main_stream);
1973 bp_pack_value (&bp, map->replace_p, 1);
1974 bp_pack_value (&bp, map->ref_p, 1);
1975 streamer_write_bitpack (&bp);
1978 if (lto_symtab_encoder_in_partition_p (encoder, node))
1980 for (e = node->callees; e; e = e->next_callee)
1981 output_edge_opt_summary (ob, e);
1982 for (e = node->indirect_calls; e; e = e->next_callee)
1983 output_edge_opt_summary (ob, e);
1987 /* Output optimization summaries stored in callgraph.
1988 At the moment it is the clone info structure. */
1990 static void
1991 output_cgraph_opt_summary (void)
1993 int i, n_nodes;
1994 lto_symtab_encoder_t encoder;
1995 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1996 unsigned count = 0;
1998 ob->symbol = NULL;
1999 encoder = ob->decl_state->symtab_node_encoder;
2000 n_nodes = lto_symtab_encoder_size (encoder);
2001 for (i = 0; i < n_nodes; i++)
2003 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2004 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2005 if (cnode && output_cgraph_opt_summary_p (cnode))
2006 count++;
2008 streamer_write_uhwi (ob, count);
2009 for (i = 0; i < n_nodes; i++)
2011 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2012 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2013 if (cnode && output_cgraph_opt_summary_p (cnode))
2015 streamer_write_uhwi (ob, i);
2016 output_node_opt_summary (ob, cnode, encoder);
2019 produce_asm (ob, NULL);
2020 destroy_output_block (ob);
2023 /* Input optimisation summary of EDGE. */
2025 static void
2026 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
2027 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
2031 /* Input optimisation summary of NODE. */
2033 static void
2034 input_node_opt_summary (struct cgraph_node *node,
2035 struct lto_input_block *ib_main,
2036 struct data_in *data_in)
2038 int i;
2039 int count;
2040 int bit;
2041 struct bitpack_d bp;
2042 struct cgraph_edge *e;
2044 count = streamer_read_uhwi (ib_main);
2045 if (count)
2046 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
2047 for (i = 0; i < count; i++)
2049 bit = streamer_read_uhwi (ib_main);
2050 bitmap_set_bit (node->clone.args_to_skip, bit);
2052 count = streamer_read_uhwi (ib_main);
2053 if (count)
2054 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
2055 for (i = 0; i < count; i++)
2057 bit = streamer_read_uhwi (ib_main);
2058 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
2060 count = streamer_read_uhwi (ib_main);
2061 for (i = 0; i < count; i++)
2063 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2065 vec_safe_push (node->clone.tree_map, map);
2066 map->parm_num = streamer_read_uhwi (ib_main);
2067 map->old_tree = NULL;
2068 map->new_tree = stream_read_tree (ib_main, data_in);
2069 bp = streamer_read_bitpack (ib_main);
2070 map->replace_p = bp_unpack_value (&bp, 1);
2071 map->ref_p = bp_unpack_value (&bp, 1);
2073 for (e = node->callees; e; e = e->next_callee)
2074 input_edge_opt_summary (e, ib_main);
2075 for (e = node->indirect_calls; e; e = e->next_callee)
2076 input_edge_opt_summary (e, ib_main);
2079 /* Read section in file FILE_DATA of length LEN with data DATA. */
2081 static void
2082 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2083 const char *data, size_t len,
2084 vec<symtab_node *> nodes)
2086 const struct lto_function_header *header =
2087 (const struct lto_function_header *) data;
2088 const int cfg_offset = sizeof (struct lto_function_header);
2089 const int main_offset = cfg_offset + header->cfg_size;
2090 const int string_offset = main_offset + header->main_size;
2091 struct data_in *data_in;
2092 unsigned int i;
2093 unsigned int count;
2095 lto_input_block ib_main ((const char *) data + main_offset,
2096 header->main_size);
2098 data_in =
2099 lto_data_in_create (file_data, (const char *) data + string_offset,
2100 header->string_size, vNULL);
2101 count = streamer_read_uhwi (&ib_main);
2103 for (i = 0; i < count; i++)
2105 int ref = streamer_read_uhwi (&ib_main);
2106 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2107 &ib_main, data_in);
2109 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2110 len);
2111 lto_data_in_delete (data_in);
2114 /* Input optimization summary of cgraph. */
2116 static void
2117 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2119 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2120 struct lto_file_decl_data *file_data;
2121 unsigned int j = 0;
2123 while ((file_data = file_data_vec[j++]))
2125 size_t len;
2126 const char *data =
2127 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2128 &len);
2130 if (data)
2131 input_cgraph_opt_section (file_data, data, len, nodes);