PR ipa/61548
[official-gcc.git] / gcc / lto-cgraph.c
blobab9524b3b73fd4272ee4992e3bd124db337d0dea
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2015 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "stringpool.h"
39 #include "predict.h"
40 #include "hard-reg-set.h"
41 #include "function.h"
42 #include "basic-block.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-expr.h"
46 #include "is-a.h"
47 #include "gimple.h"
48 #include "hashtab.h"
49 #include "rtl.h"
50 #include "flags.h"
51 #include "statistics.h"
52 #include "real.h"
53 #include "fixed-value.h"
54 #include "insn-config.h"
55 #include "expmed.h"
56 #include "dojump.h"
57 #include "explow.h"
58 #include "calls.h"
59 #include "emit-rtl.h"
60 #include "varasm.h"
61 #include "stmt.h"
62 #include "expr.h"
63 #include "params.h"
64 #include "langhooks.h"
65 #include "bitmap.h"
66 #include "diagnostic-core.h"
67 #include "except.h"
68 #include "timevar.h"
69 #include "hash-map.h"
70 #include "plugin-api.h"
71 #include "ipa-ref.h"
72 #include "cgraph.h"
73 #include "lto-streamer.h"
74 #include "data-streamer.h"
75 #include "tree-streamer.h"
76 #include "gcov-io.h"
77 #include "tree-pass.h"
78 #include "profile.h"
79 #include "context.h"
80 #include "pass_manager.h"
81 #include "ipa-utils.h"
82 #include "omp-low.h"
84 /* True when asm nodes has been output. */
85 bool asm_nodes_output = false;
87 static void output_cgraph_opt_summary (void);
88 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
90 /* Number of LDPR values known to GCC. */
91 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
93 /* All node orders are ofsetted by ORDER_BASE. */
94 static int order_base;
/* Symtab streaming is organized as a sequence of records; each record's
   kind is announced by one of these tags.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  LTO_symtab_indirect_edge,
  LTO_symtab_variable,
  LTO_symtab_last_tag
};
113 /* Create a new symtab encoder.
114 if FOR_INPUT, the encoder allocate only datastructures needed
115 to read the symtab. */
117 lto_symtab_encoder_t
118 lto_symtab_encoder_new (bool for_input)
120 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
122 if (!for_input)
123 encoder->map = new hash_map<symtab_node *, size_t>;
124 encoder->nodes.create (0);
125 return encoder;
129 /* Delete ENCODER and its components. */
131 void
132 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
134 encoder->nodes.release ();
135 if (encoder->map)
136 delete encoder->map;
137 free (encoder);
141 /* Return the existing reference number of NODE in the symtab encoder in
142 output block OB. Assign a new reference if this is the first time
143 NODE is encoded. */
146 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
147 symtab_node *node)
149 int ref;
151 if (!encoder->map)
153 lto_encoder_entry entry = {node, false, false, false};
155 ref = encoder->nodes.length ();
156 encoder->nodes.safe_push (entry);
157 return ref;
160 size_t *slot = encoder->map->get (node);
161 if (!slot || !*slot)
163 lto_encoder_entry entry = {node, false, false, false};
164 ref = encoder->nodes.length ();
165 if (!slot)
166 encoder->map->put (node, ref + 1);
167 encoder->nodes.safe_push (entry);
169 else
170 ref = *slot - 1;
172 return ref;
175 /* Remove NODE from encoder. */
177 bool
178 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
179 symtab_node *node)
181 int index;
182 lto_encoder_entry last_node;
184 size_t *slot = encoder->map->get (node);
185 if (slot == NULL || !*slot)
186 return false;
188 index = *slot - 1;
189 gcc_checking_assert (encoder->nodes[index].node == node);
191 /* Remove from vector. We do this by swapping node with the last element
192 of the vector. */
193 last_node = encoder->nodes.pop ();
194 if (last_node.node != node)
196 gcc_assert (encoder->map->put (last_node.node, index + 1));
198 /* Move the last element to the original spot of NODE. */
199 encoder->nodes[index] = last_node;
202 /* Remove element from hash table. */
203 encoder->map->remove (node);
204 return true;
208 /* Return TRUE if we should encode the body of NODE (if any). */
210 bool
211 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
212 struct cgraph_node *node)
214 int index = lto_symtab_encoder_lookup (encoder, node);
215 return encoder->nodes[index].body;
218 /* Specify that we encode the body of NODE in this partition. */
220 static void
221 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
222 struct cgraph_node *node)
224 int index = lto_symtab_encoder_encode (encoder, node);
225 gcc_checking_assert (encoder->nodes[index].node == node);
226 encoder->nodes[index].body = true;
229 /* Return TRUE if we should encode initializer of NODE (if any). */
231 bool
232 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
233 varpool_node *node)
235 int index = lto_symtab_encoder_lookup (encoder, node);
236 if (index == LCC_NOT_FOUND)
237 return false;
238 return encoder->nodes[index].initializer;
241 /* Specify that we should encode initializer of NODE (if any). */
243 static void
244 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
245 varpool_node *node)
247 int index = lto_symtab_encoder_lookup (encoder, node);
248 encoder->nodes[index].initializer = true;
251 /* Return TRUE if NODE is in this partition. */
253 bool
254 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
255 symtab_node *node)
257 int index = lto_symtab_encoder_lookup (encoder, node);
258 if (index == LCC_NOT_FOUND)
259 return false;
260 return encoder->nodes[index].in_partition;
263 /* Specify that NODE is in this partition. */
265 void
266 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
267 symtab_node *node)
269 int index = lto_symtab_encoder_encode (encoder, node);
270 encoder->nodes[index].in_partition = true;
273 /* Output the cgraph EDGE to OB using ENCODER. */
275 static void
276 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
277 lto_symtab_encoder_t encoder)
279 unsigned int uid;
280 intptr_t ref;
281 struct bitpack_d bp;
283 if (edge->indirect_unknown_callee)
284 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
285 LTO_symtab_indirect_edge);
286 else
287 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
288 LTO_symtab_edge);
290 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
291 gcc_assert (ref != LCC_NOT_FOUND);
292 streamer_write_hwi_stream (ob->main_stream, ref);
294 if (!edge->indirect_unknown_callee)
296 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
297 gcc_assert (ref != LCC_NOT_FOUND);
298 streamer_write_hwi_stream (ob->main_stream, ref);
301 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
303 bp = bitpack_create (ob->main_stream);
304 uid = (!gimple_has_body_p (edge->caller->decl)
305 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
306 bp_pack_enum (&bp, cgraph_inline_failed_t,
307 CIF_N_REASONS, edge->inline_failed);
308 bp_pack_var_len_unsigned (&bp, uid);
309 bp_pack_var_len_unsigned (&bp, edge->frequency);
310 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
311 bp_pack_value (&bp, edge->speculative, 1);
312 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
313 bp_pack_value (&bp, edge->can_throw_external, 1);
314 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
315 if (edge->indirect_unknown_callee)
317 int flags = edge->indirect_info->ecf_flags;
318 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
319 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
320 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
321 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
322 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
323 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
324 /* Flags that should not appear on indirect calls. */
325 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
326 | ECF_MAY_BE_ALLOCA
327 | ECF_SIBCALL
328 | ECF_LEAF
329 | ECF_NOVOPS)));
331 streamer_write_bitpack (&bp);
332 if (edge->indirect_unknown_callee)
334 streamer_write_hwi_stream (ob->main_stream,
335 edge->indirect_info->common_target_id);
336 if (edge->indirect_info->common_target_id)
337 streamer_write_hwi_stream
338 (ob->main_stream, edge->indirect_info->common_target_probability);
342 /* Return if NODE contain references from other partitions. */
344 bool
345 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
347 int i;
348 struct ipa_ref *ref = NULL;
350 for (i = 0; node->iterate_referring (i, ref); i++)
352 /* Ignore references from non-offloadable nodes while streaming NODE into
353 offload LTO section. */
354 if (!ref->referring->need_lto_streaming)
355 continue;
357 if (ref->referring->in_other_partition
358 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
359 return true;
361 return false;
364 /* Return true when node is reachable from other partition. */
366 bool
367 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
369 struct cgraph_edge *e;
370 if (!node->definition)
371 return false;
372 if (node->global.inlined_to)
373 return false;
374 for (e = node->callers; e; e = e->next_caller)
376 /* Ignore references from non-offloadable nodes while streaming NODE into
377 offload LTO section. */
378 if (!e->caller->need_lto_streaming)
379 continue;
381 if (e->caller->in_other_partition
382 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
383 return true;
385 return false;
388 /* Return if NODE contain references from other partitions. */
390 bool
391 referenced_from_this_partition_p (symtab_node *node,
392 lto_symtab_encoder_t encoder)
394 int i;
395 struct ipa_ref *ref = NULL;
397 for (i = 0; node->iterate_referring (i, ref); i++)
398 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
399 return true;
400 return false;
403 /* Return true when node is reachable from other partition. */
405 bool
406 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
408 struct cgraph_edge *e;
409 for (e = node->callers; e; e = e->next_caller)
410 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
411 return true;
412 return false;
415 /* Output the cgraph NODE to OB. ENCODER is used to find the
416 reference number of NODE->inlined_to. SET is the set of nodes we
417 are writing to the current file. If NODE is not in SET, then NODE
418 is a boundary of a cgraph_node_set and we pretend NODE just has a
419 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
420 that have had their callgraph node written so far. This is used to
421 determine if NODE is a clone of a previously written node. */
423 static void
424 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
425 lto_symtab_encoder_t encoder)
427 unsigned int tag;
428 struct bitpack_d bp;
429 bool boundary_p;
430 intptr_t ref;
431 bool in_other_partition = false;
432 struct cgraph_node *clone_of, *ultimate_clone_of;
433 ipa_opt_pass_d *pass;
434 int i;
435 bool alias_p;
436 const char *comdat;
437 const char *section;
438 tree group;
440 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
442 if (node->analyzed && !boundary_p)
443 tag = LTO_symtab_analyzed_node;
444 else
445 tag = LTO_symtab_unavail_node;
447 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
448 tag);
449 streamer_write_hwi_stream (ob->main_stream, node->order);
451 /* In WPA mode, we only output part of the call-graph. Also, we
452 fake cgraph node attributes. There are two cases that we care.
454 Boundary nodes: There are nodes that are not part of SET but are
455 called from within SET. We artificially make them look like
456 externally visible nodes with no function body.
458 Cherry-picked nodes: These are nodes we pulled from other
459 translation units into SET during IPA-inlining. We make them as
460 local static nodes to prevent clashes with other local statics. */
461 if (boundary_p && node->analyzed
462 && node->get_partitioning_class () == SYMBOL_PARTITION)
464 /* Inline clones can not be part of boundary.
465 gcc_assert (!node->global.inlined_to);
467 FIXME: At the moment they can be, when partition contains an inline
468 clone that is clone of inline clone from outside partition. We can
469 reshape the clone tree and make other tree to be the root, but it
470 needs a bit extra work and will be promplty done by cgraph_remove_node
471 after reading back. */
472 in_other_partition = 1;
475 clone_of = node->clone_of;
476 while (clone_of
477 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
478 if (clone_of->prev_sibling_clone)
479 clone_of = clone_of->prev_sibling_clone;
480 else
481 clone_of = clone_of->clone_of;
483 /* See if body of the master function is output. If not, we are seeing only
484 an declaration and we do not need to pass down clone tree. */
485 ultimate_clone_of = clone_of;
486 while (ultimate_clone_of && ultimate_clone_of->clone_of)
487 ultimate_clone_of = ultimate_clone_of->clone_of;
489 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
490 clone_of = NULL;
492 if (tag == LTO_symtab_analyzed_node)
493 gcc_assert (clone_of || !node->clone_of);
494 if (!clone_of)
495 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
496 else
497 streamer_write_hwi_stream (ob->main_stream, ref);
500 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
501 streamer_write_gcov_count_stream (ob->main_stream, node->count);
502 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
504 streamer_write_hwi_stream (ob->main_stream,
505 node->ipa_transforms_to_apply.length ());
506 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
507 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
509 if (tag == LTO_symtab_analyzed_node)
511 if (node->global.inlined_to)
513 ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
514 gcc_assert (ref != LCC_NOT_FOUND);
516 else
517 ref = LCC_NOT_FOUND;
519 streamer_write_hwi_stream (ob->main_stream, ref);
522 group = node->get_comdat_group ();
523 if (group)
524 comdat = IDENTIFIER_POINTER (group);
525 else
526 comdat = "";
527 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
529 if (group)
531 if (node->same_comdat_group && !boundary_p)
533 ref = lto_symtab_encoder_lookup (encoder,
534 node->same_comdat_group);
535 gcc_assert (ref != LCC_NOT_FOUND);
537 else
538 ref = LCC_NOT_FOUND;
539 streamer_write_hwi_stream (ob->main_stream, ref);
542 section = node->get_section ();
543 if (!section)
544 section = "";
546 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
548 bp = bitpack_create (ob->main_stream);
549 bp_pack_value (&bp, node->local.local, 1);
550 bp_pack_value (&bp, node->externally_visible, 1);
551 bp_pack_value (&bp, node->no_reorder, 1);
552 bp_pack_value (&bp, node->definition, 1);
553 bp_pack_value (&bp, node->local.versionable, 1);
554 bp_pack_value (&bp, node->local.can_change_signature, 1);
555 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
556 bp_pack_value (&bp, node->force_output, 1);
557 bp_pack_value (&bp, node->forced_by_abi, 1);
558 bp_pack_value (&bp, node->unique_name, 1);
559 bp_pack_value (&bp, node->body_removed, 1);
560 bp_pack_value (&bp, node->implicit_section, 1);
561 bp_pack_value (&bp, node->address_taken, 1);
562 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
563 && node->get_partitioning_class () == SYMBOL_PARTITION
564 && (reachable_from_other_partition_p (node, encoder)
565 || referenced_from_other_partition_p (node, encoder)), 1);
566 bp_pack_value (&bp, node->lowered, 1);
567 bp_pack_value (&bp, in_other_partition, 1);
568 /* Real aliases in a boundary become non-aliases. However we still stream
569 alias info on weakrefs.
570 TODO: We lose a bit of information here - when we know that variable is
571 defined in other unit, we may use the info on aliases to resolve
572 symbol1 != symbol2 type tests that we can do only for locally defined objects
573 otherwise. */
574 alias_p = node->alias && (!boundary_p || node->weakref);
575 bp_pack_value (&bp, alias_p, 1);
576 bp_pack_value (&bp, node->weakref, 1);
577 bp_pack_value (&bp, node->frequency, 2);
578 bp_pack_value (&bp, node->only_called_at_startup, 1);
579 bp_pack_value (&bp, node->only_called_at_exit, 1);
580 bp_pack_value (&bp, node->tm_clone, 1);
581 bp_pack_value (&bp, node->calls_comdat_local, 1);
582 bp_pack_value (&bp, node->icf_merged, 1);
583 bp_pack_value (&bp, node->nonfreeing_fn, 1);
584 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
585 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
586 LDPR_NUM_KNOWN, node->resolution);
587 bp_pack_value (&bp, node->instrumentation_clone, 1);
588 streamer_write_bitpack (&bp);
589 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
591 if (node->thunk.thunk_p && !boundary_p)
593 streamer_write_uhwi_stream
594 (ob->main_stream,
595 1 + (node->thunk.this_adjusting != 0) * 2
596 + (node->thunk.virtual_offset_p != 0) * 4
597 + (node->thunk.add_pointer_bounds_args != 0) * 8);
598 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
599 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
601 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
602 if (DECL_STATIC_CONSTRUCTOR (node->decl))
603 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
604 if (DECL_STATIC_DESTRUCTOR (node->decl))
605 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
607 if (node->instrumentation_clone)
608 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->orig_decl);
611 /* Output the varpool NODE to OB.
612 If NODE is not in SET, then NODE is a boundary. */
614 static void
615 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
616 lto_symtab_encoder_t encoder)
618 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
619 struct bitpack_d bp;
620 int ref;
621 bool alias_p;
622 const char *comdat;
623 const char *section;
624 tree group;
626 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
627 LTO_symtab_variable);
628 streamer_write_hwi_stream (ob->main_stream, node->order);
629 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
630 bp = bitpack_create (ob->main_stream);
631 bp_pack_value (&bp, node->externally_visible, 1);
632 bp_pack_value (&bp, node->no_reorder, 1);
633 bp_pack_value (&bp, node->force_output, 1);
634 bp_pack_value (&bp, node->forced_by_abi, 1);
635 bp_pack_value (&bp, node->unique_name, 1);
636 bp_pack_value (&bp, node->body_removed
637 || !lto_symtab_encoder_encode_initializer_p (encoder, node), 1);
638 bp_pack_value (&bp, node->implicit_section, 1);
639 bp_pack_value (&bp, node->writeonly, 1);
640 bp_pack_value (&bp, node->definition, 1);
641 alias_p = node->alias && (!boundary_p || node->weakref);
642 bp_pack_value (&bp, alias_p, 1);
643 bp_pack_value (&bp, node->weakref, 1);
644 bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
645 gcc_assert (node->definition || !node->analyzed);
646 /* Constant pool initializers can be de-unified into individual ltrans units.
647 FIXME: Alternatively at -Os we may want to avoid generating for them the local
648 labels and share them across LTRANS partitions. */
649 if (node->get_partitioning_class () != SYMBOL_PARTITION)
651 bp_pack_value (&bp, 0, 1); /* used_from_other_parition. */
652 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
654 else
656 bp_pack_value (&bp, node->definition
657 && referenced_from_other_partition_p (node, encoder), 1);
658 bp_pack_value (&bp, node->analyzed
659 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
660 /* in_other_partition. */
662 bp_pack_value (&bp, node->tls_model, 3);
663 bp_pack_value (&bp, node->used_by_single_function, 1);
664 bp_pack_value (&bp, node->need_bounds_init, 1);
665 streamer_write_bitpack (&bp);
667 group = node->get_comdat_group ();
668 if (group)
669 comdat = IDENTIFIER_POINTER (group);
670 else
671 comdat = "";
672 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
674 if (group)
676 if (node->same_comdat_group && !boundary_p)
678 ref = lto_symtab_encoder_lookup (encoder,
679 node->same_comdat_group);
680 gcc_assert (ref != LCC_NOT_FOUND);
682 else
683 ref = LCC_NOT_FOUND;
684 streamer_write_hwi_stream (ob->main_stream, ref);
687 section = node->get_section ();
688 if (!section)
689 section = "";
690 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
692 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
693 LDPR_NUM_KNOWN, node->resolution);
696 /* Output the varpool NODE to OB.
697 If NODE is not in SET, then NODE is a boundary. */
699 static void
700 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
701 lto_symtab_encoder_t encoder)
703 struct bitpack_d bp;
704 int nref;
705 int uid = ref->lto_stmt_uid;
706 struct cgraph_node *node;
708 bp = bitpack_create (ob->main_stream);
709 bp_pack_value (&bp, ref->use, 3);
710 bp_pack_value (&bp, ref->speculative, 1);
711 streamer_write_bitpack (&bp);
712 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
713 gcc_assert (nref != LCC_NOT_FOUND);
714 streamer_write_hwi_stream (ob->main_stream, nref);
716 node = dyn_cast <cgraph_node *> (ref->referring);
717 if (node)
719 if (ref->stmt)
720 uid = gimple_uid (ref->stmt) + 1;
721 streamer_write_hwi_stream (ob->main_stream, uid);
725 /* Stream out profile_summary to OB. */
727 static void
728 output_profile_summary (struct lto_simple_output_block *ob)
730 unsigned h_ix;
731 struct bitpack_d bp;
733 if (profile_info)
735 /* We do not output num and run_max, they are not used by
736 GCC profile feedback and they are difficult to merge from multiple
737 units. */
738 gcc_assert (profile_info->runs);
739 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
740 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
742 /* sum_all is needed for computing the working set with the
743 histogram. */
744 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
746 /* Create and output a bitpack of non-zero histogram entries indices. */
747 bp = bitpack_create (ob->main_stream);
748 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
749 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
750 streamer_write_bitpack (&bp);
751 /* Now stream out only those non-zero entries. */
752 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
754 if (!profile_info->histogram[h_ix].num_counters)
755 continue;
756 streamer_write_gcov_count_stream (ob->main_stream,
757 profile_info->histogram[h_ix].num_counters);
758 streamer_write_gcov_count_stream (ob->main_stream,
759 profile_info->histogram[h_ix].min_value);
760 streamer_write_gcov_count_stream (ob->main_stream,
761 profile_info->histogram[h_ix].cum_value);
763 /* IPA-profile computes hot bb threshold based on cumulated
764 whole program profile. We need to stream it down to ltrans. */
765 if (flag_wpa)
766 streamer_write_gcov_count_stream (ob->main_stream,
767 get_hot_bb_threshold ());
769 else
770 streamer_write_uhwi_stream (ob->main_stream, 0);
773 /* Output all callees or indirect outgoing edges. EDGE must be the first such
774 edge. */
776 static void
777 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
778 struct lto_simple_output_block *ob,
779 lto_symtab_encoder_t encoder)
781 if (!edge)
782 return;
784 /* Output edges in backward direction, so the reconstructed callgraph match
785 and it is easy to associate call sites in the IPA pass summaries. */
786 while (edge->next_callee)
787 edge = edge->next_callee;
788 for (; edge; edge = edge->prev_callee)
789 lto_output_edge (ob, edge, encoder);
792 /* Output the part of the cgraph in SET. */
794 static void
795 output_refs (lto_symtab_encoder_t encoder)
797 lto_symtab_encoder_iterator lsei;
798 struct lto_simple_output_block *ob;
799 int count;
800 struct ipa_ref *ref;
801 int i;
803 ob = lto_create_simple_output_block (LTO_section_refs);
805 for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
806 lsei_next_in_partition (&lsei))
808 symtab_node *node = lsei_node (lsei);
810 count = node->ref_list.nreferences ();
811 if (count)
813 streamer_write_gcov_count_stream (ob->main_stream, count);
814 streamer_write_uhwi_stream (ob->main_stream,
815 lto_symtab_encoder_lookup (encoder, node));
816 for (i = 0; node->iterate_reference (i, ref); i++)
817 lto_output_ref (ob, ref, encoder);
821 streamer_write_uhwi_stream (ob->main_stream, 0);
823 lto_destroy_simple_output_block (ob);
826 /* Add NODE into encoder as well as nodes it is cloned from.
827 Do it in a way so clones appear first. */
829 static void
830 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
831 bool include_body)
833 if (node->clone_of)
834 add_node_to (encoder, node->clone_of, include_body);
835 else if (include_body)
836 lto_set_symtab_encoder_encode_body (encoder, node);
837 lto_symtab_encoder_encode (encoder, node);
840 /* Add all references in NODE to encoders. */
842 static void
843 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
845 int i;
846 struct ipa_ref *ref = NULL;
847 for (i = 0; node->iterate_reference (i, ref); i++)
848 if (is_a <cgraph_node *> (ref->referred))
849 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
850 else
851 lto_symtab_encoder_encode (encoder, ref->referred);
854 /* Select what needs to be streamed out. In regular lto mode stream everything.
855 In offload lto mode stream only nodes marked as offloadable. */
856 void
857 select_what_to_stream (void)
859 struct symtab_node *snode;
860 FOR_EACH_SYMBOL (snode)
861 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
864 /* Find all symbols we want to stream into given partition and insert them
865 to encoders.
867 The function actually replaces IN_ENCODER by new one. The reason is that
868 streaming code needs clone's origin to be streamed before clone. This
869 means that we need to insert the nodes in specific order. This order is
870 ignored by the partitioning logic earlier. */
872 lto_symtab_encoder_t
873 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
875 struct cgraph_edge *edge;
876 int i;
877 lto_symtab_encoder_t encoder;
878 lto_symtab_encoder_iterator lsei;
879 hash_set<void *> reachable_call_targets;
881 encoder = lto_symtab_encoder_new (false);
883 /* Go over all entries in the IN_ENCODER and duplicate them to
884 ENCODER. At the same time insert masters of clones so
885 every master appears before clone. */
886 for (lsei = lsei_start_function_in_partition (in_encoder);
887 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
889 struct cgraph_node *node = lsei_cgraph_node (lsei);
890 if (!node->need_lto_streaming)
891 continue;
892 add_node_to (encoder, node, true);
893 lto_set_symtab_encoder_in_partition (encoder, node);
894 create_references (encoder, node);
895 /* For proper debug info, we need to ship the origins, too. */
896 if (DECL_ABSTRACT_ORIGIN (node->decl))
898 struct cgraph_node *origin_node
899 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
900 origin_node->used_as_abstract_origin = true;
901 add_node_to (encoder, origin_node, true);
904 for (lsei = lsei_start_variable_in_partition (in_encoder);
905 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
907 varpool_node *vnode = lsei_varpool_node (lsei);
909 if (!vnode->need_lto_streaming)
910 continue;
911 lto_set_symtab_encoder_in_partition (encoder, vnode);
912 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
913 create_references (encoder, vnode);
914 /* For proper debug info, we need to ship the origins, too. */
915 if (DECL_ABSTRACT_ORIGIN (vnode->decl))
917 varpool_node *origin_node
918 = varpool_node::get (DECL_ABSTRACT_ORIGIN (vnode->decl));
919 lto_set_symtab_encoder_in_partition (encoder, origin_node);
922 /* Pickle in also the initializer of all referenced readonly variables
923 to help folding. Constant pool variables are not shared, so we must
924 pickle those too. */
925 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
927 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
928 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
930 if (!lto_symtab_encoder_encode_initializer_p (encoder,
931 vnode)
932 && (((vnode->ctor_useable_for_folding_p ()
933 && (!DECL_VIRTUAL_P (vnode->decl)
934 || !flag_wpa
935 || flag_ltrans_devirtualize))
936 || POINTER_BOUNDS_P (vnode->decl))))
938 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
939 create_references (encoder, vnode);
944 /* Go over all the nodes again to include callees that are not in
945 SET. */
946 for (lsei = lsei_start_function_in_partition (encoder);
947 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
949 struct cgraph_node *node = lsei_cgraph_node (lsei);
950 for (edge = node->callees; edge; edge = edge->next_callee)
952 struct cgraph_node *callee = edge->callee;
953 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
955 /* We should have moved all the inlines. */
956 gcc_assert (!callee->global.inlined_to);
957 add_node_to (encoder, callee, false);
960 /* Add all possible targets for late devirtualization. */
961 if (flag_ltrans_devirtualize || !flag_wpa)
962 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
963 if (edge->indirect_info->polymorphic)
965 unsigned int i;
966 void *cache_token;
967 bool final;
968 vec <cgraph_node *>targets
969 = possible_polymorphic_call_targets
970 (edge, &final, &cache_token);
971 if (!reachable_call_targets.add (cache_token))
973 for (i = 0; i < targets.length (); i++)
975 struct cgraph_node *callee = targets[i];
977 /* Adding an external declarations into the unit serves
978 no purpose and just increases its boundary. */
979 if (callee->definition
980 && !lto_symtab_encoder_in_partition_p
981 (encoder, callee))
983 gcc_assert (!callee->global.inlined_to);
984 add_node_to (encoder, callee, false);
990 lto_symtab_encoder_delete (in_encoder);
991 return encoder;
/* Output the part of the symtab in SET and VSET.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  lto_symtab_encoder_iterator lsei;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  /* Optimization summaries (clone info) are produced only at WPA time.  */
  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	lto_output_node (ob, cnode, encoder);
      else
	lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
    }

  /* Go over the nodes in SET again to write edges.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      output_outgoing_cgraph_edges (node->callees, ob, encoder);
      output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
    }

  /* Terminate the node/edge stream with a zero tag.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1056 /* Return identifier encoded in IB as a plain string. */
1058 static tree
1059 read_identifier (struct lto_input_block *ib)
1061 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1062 tree id;
1064 if (ib->data[ib->p + len])
1065 lto_section_overrun (ib);
1066 if (!len)
1068 ib->p++;
1069 return NULL;
1071 id = get_identifier (ib->data + ib->p);
1072 ib->p += len + 1;
1073 return id;
1076 /* Return string encoded in IB, NULL if string is empty. */
1078 static const char *
1079 read_string (struct lto_input_block *ib)
1081 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1082 const char *str;
1084 if (ib->data[ib->p + len])
1085 lto_section_overrun (ib);
1086 if (!len)
1088 ib->p++;
1089 return NULL;
1091 str = ib->data + ib->p;
1092 ib->p += len + 1;
1093 return str;
/* Output function/variable tables that will allow libgomp to look up offload
   target code.
   OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
   varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
   both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.  */

void
output_offload_tables (void)
{
  /* Nothing to stream when this unit has no offloaded code at all.  */
  if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  /* Each entry is a tag followed by a decl index; functions use the
     LTO_symtab_unavail_node tag, variables LTO_symtab_variable.  */
  for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				(*offload_funcs)[i]);
    }

  for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
    {
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream,
				 (*offload_vars)[i]);
    }

  /* Terminate the table with a zero tag.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (offload_funcs);
      vec_free (offload_vars);
    }
}
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.  The unpack sequence below must stay in sync with the pack
   sequence on the writer side (lto_output_node).  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX; input_cgraph_1 uses it to tell which nodes
     were actually read from this stream and clears it afterwards.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* ANALYZED is derived from the tag rather than streamed.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The body lives in another partition: mark the decl as external
	 here so this partition only references it.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->instrumentation_clone = bp_unpack_value (bp, 1);
  /* Cross-partition flags may only appear when reading at LTRANS time.  */
  gcc_assert (flag_ltrans
	      || (!node->in_other_partition
		  && !node->used_from_other_partition));
}
1205 /* Return string alias is alias of. */
1207 static tree
1208 get_alias_symbol (tree decl)
1210 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1211 return get_identifier (TREE_STRING_POINTER
1212 (TREE_VALUE (TREE_VALUE (alias))));
/* Read a node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwriten.  NODES is the vector of nodes
   read so far, used to resolve the clone-of back reference.  The read
   order below must mirror the write order in lto_output_node.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* Orders are rebased so nodes from different files do not collide.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* This node is a clone of an already-read node.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	 0, CGRAPH_FREQ_BASE, false,
	 vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = streamer_read_gcov_count (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied, encoded
     as pass ids.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes stream an inlined-to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->uid);

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  /* Thunk-specific fields: TYPE is a small flag word.  */
  if (node->thunk.thunk_p)
    {
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
      node->thunk.add_pointer_bounds_args = (type & 8);
    }
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  /* Instrumentation clones carry a reference to the original decl.  */
  if (node->instrumentation_clone)
    {
      decl_index = streamer_read_uhwi (ib);
      fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
      node->orig_decl = fn_decl;
    }

  return node;
}
1342 /* Read a node from input_block IB. TAG is the node's tag just read.
1343 Return the node read or overwriten. */
1345 static varpool_node *
1346 input_varpool_node (struct lto_file_decl_data *file_data,
1347 struct lto_input_block *ib)
1349 int decl_index;
1350 tree var_decl;
1351 varpool_node *node;
1352 struct bitpack_d bp;
1353 int ref = LCC_NOT_FOUND;
1354 int order;
1355 tree group;
1356 const char *section;
1358 order = streamer_read_hwi (ib) + order_base;
1359 decl_index = streamer_read_uhwi (ib);
1360 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1362 /* Declaration of functions can be already merged with a declaration
1363 from other input file. We keep cgraph unmerged until after streaming
1364 of ipa passes is done. Alays forcingly create a fresh node. */
1365 node = varpool_node::create_empty ();
1366 node->decl = var_decl;
1367 node->register_symbol ();
1369 node->order = order;
1370 if (order >= symtab->order)
1371 symtab->order = order + 1;
1372 node->lto_file_data = file_data;
1374 bp = streamer_read_bitpack (ib);
1375 node->externally_visible = bp_unpack_value (&bp, 1);
1376 node->no_reorder = bp_unpack_value (&bp, 1);
1377 node->force_output = bp_unpack_value (&bp, 1);
1378 node->forced_by_abi = bp_unpack_value (&bp, 1);
1379 node->unique_name = bp_unpack_value (&bp, 1);
1380 node->body_removed = bp_unpack_value (&bp, 1);
1381 node->implicit_section = bp_unpack_value (&bp, 1);
1382 node->writeonly = bp_unpack_value (&bp, 1);
1383 node->definition = bp_unpack_value (&bp, 1);
1384 node->alias = bp_unpack_value (&bp, 1);
1385 node->weakref = bp_unpack_value (&bp, 1);
1386 node->analyzed = bp_unpack_value (&bp, 1);
1387 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1388 node->in_other_partition = bp_unpack_value (&bp, 1);
1389 if (node->in_other_partition)
1391 DECL_EXTERNAL (node->decl) = 1;
1392 TREE_STATIC (node->decl) = 0;
1394 if (node->alias && !node->analyzed && node->weakref)
1395 node->alias_target = get_alias_symbol (node->decl);
1396 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1397 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1398 node->need_bounds_init = bp_unpack_value (&bp, 1);
1399 group = read_identifier (ib);
1400 if (group)
1402 node->set_comdat_group (group);
1403 ref = streamer_read_hwi (ib);
1404 /* Store a reference for now, and fix up later to be a pointer. */
1405 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1407 else
1408 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1409 section = read_string (ib);
1410 if (section)
1411 node->set_section_for_node (section);
1412 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1413 LDPR_NUM_KNOWN);
1414 gcc_assert (flag_ltrans
1415 || (!node->in_other_partition
1416 && !node->used_from_other_partition));
1418 return node;
1421 /* Read a node from input_block IB. TAG is the node's tag just read.
1422 Return the node read or overwriten. */
1424 static void
1425 input_ref (struct lto_input_block *ib,
1426 symtab_node *referring_node,
1427 vec<symtab_node *> nodes)
1429 symtab_node *node = NULL;
1430 struct bitpack_d bp;
1431 enum ipa_ref_use use;
1432 bool speculative;
1433 struct ipa_ref *ref;
1435 bp = streamer_read_bitpack (ib);
1436 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1437 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1438 node = nodes[streamer_read_hwi (ib)];
1439 ref = referring_node->create_reference (node, use);
1440 ref->speculative = speculative;
1441 if (is_a <cgraph_node *> (referring_node))
1442 ref->lto_stmt_uid = streamer_read_hwi (ib);
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).  */

static void
input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  /* Direct edges stream an explicit callee; indirect ones do not.  */
  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = streamer_read_gcov_count (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count, freq);
  else
    edge = caller->create_edge (callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Unpack the ECF flags of the (unknown) callee one bit each, in
	 the same order the writer packed them.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
      /* A nonzero common target id is followed by its probability.  */
      edge->indirect_info->common_target_id = streamer_read_hwi (ib);
      if (edge->indirect_info->common_target_id)
	edge->indirect_info->common_target_probability
	  = streamer_read_hwi (ib);
    }
}
/* Read a cgraph from IB using the info in FILE_DATA.  Returns the
   vector of all nodes read, in stream order; references stored as
   indices during reading (inlined_to, same_comdat_group) are fixed up
   to pointers before returning.  */

static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node *> nodes = vNULL;
  symtab_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  order_base = symtab->order;
  /* A zero tag terminates the node/edge stream.  */
  while (tag)
    {
      if (tag == LTO_symtab_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
	input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
	{
	  node = input_varpool_node (file_data, ib);
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}
      else
	{
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, order_base);

  /* AUX pointers should be all non-zero for function nodes read from the stream.  */
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (nodes, i, node)
    gcc_assert (node->aux || !is_a <cgraph_node *> (node));
#endif
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  ref = (int) (intptr_t) cnode->global.inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->aux)
	    continue;
	  node->aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    dyn_cast<cgraph_node *> (node)->global.inlined_to
	      = dyn_cast<cgraph_node *> (nodes[ref]);
	  else
	    cnode->global.inlined_to = NULL;

	  /* Compute instrumented_version.  */
	  if (cnode->instrumentation_clone)
	    {
	      gcc_assert (cnode->orig_decl);

	      cnode->instrumented_version = cgraph_node::get (cnode->orig_decl);
	      if (cnode->instrumented_version)
		{
		  /* We may have multiple nodes for a single function which
		     will be merged later.  To have a proper merge we need
		     to keep instrumentation_version reference between nodes
		     consistent: each instrumented_version reference should
		     have proper reverse reference.  Thus don't break existing
		     instrumented_version reference if it already exists.  */
		  if (cnode->instrumented_version->instrumented_version)
		    cnode->instrumented_version = NULL;
		  else
		    cnode->instrumented_version->instrumented_version = cnode;
		}

	      /* Restore decl names reference.  */
	      if (IDENTIFIER_TRANSPARENT_ALIAS (DECL_ASSEMBLER_NAME (cnode->decl))
		  && !TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode->decl)))
		TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode->decl))
		  = DECL_ASSEMBLER_NAME (cnode->orig_decl);
	    }
	}

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = nodes[ref];
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark function nodes so input_symtab can later tell which nodes
     came from a gimple file (see the aux-clearing loop there).  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
  return nodes;
}
1621 /* Input ipa_refs. */
1623 static void
1624 input_refs (struct lto_input_block *ib,
1625 vec<symtab_node *> nodes)
1627 int count;
1628 int idx;
1629 while (true)
1631 symtab_node *node;
1632 count = streamer_read_uhwi (ib);
1633 if (!count)
1634 break;
1635 idx = streamer_read_uhwi (ib);
1636 node = nodes[idx];
1637 while (count)
1639 input_ref (ib, node, nodes);
1640 count--;
/* Merged whole-unit profile summary; filled in by
   merge_profile_summaries, which also points PROFILE_INFO at it.  */
static struct gcov_ctr_summary lto_gcov_summary;
/* Input profile_info from IB.  A zero run count means FILE_DATA has no
   profile and nothing further is streamed for it.  */
static void
input_profile_summary (struct lto_input_block *ib,
		       struct lto_file_decl_data *file_data)
{
  unsigned h_ix;
  struct bitpack_d bp;
  unsigned int runs = streamer_read_uhwi (ib);
  if (runs)
    {
      file_data->profile_info.runs = runs;
      file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
      file_data->profile_info.sum_all = streamer_read_gcov_count (ib);

      memset (file_data->profile_info.histogram, 0,
	      sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
      /* Input the bitpack of non-zero histogram indices.  */
      bp = streamer_read_bitpack (ib);
      /* Read in and unpack the full bitpack, flagging non-zero
	 histogram entries by setting the num_counters non-zero.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  file_data->profile_info.histogram[h_ix].num_counters
	    = bp_unpack_value (&bp, 1);
	}
      /* Only the flagged (non-empty) buckets have counters streamed.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  if (!file_data->profile_info.histogram[h_ix].num_counters)
	    continue;

	  file_data->profile_info.histogram[h_ix].num_counters
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].min_value
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].cum_value
	    = streamer_read_gcov_count (ib);
	}
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_ltrans)
	set_hot_bb_threshold (streamer_read_gcov_count (ib));
    }
}
1693 /* Rescale profile summaries to the same number of runs in the whole unit. */
1695 static void
1696 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1698 struct lto_file_decl_data *file_data;
1699 unsigned int j, h_ix;
1700 gcov_unsigned_t max_runs = 0;
1701 struct cgraph_node *node;
1702 struct cgraph_edge *edge;
1703 gcov_type saved_sum_all = 0;
1704 gcov_ctr_summary *saved_profile_info = 0;
1705 int saved_scale = 0;
1707 /* Find unit with maximal number of runs. If we ever get serious about
1708 roundoff errors, we might also consider computing smallest common
1709 multiply. */
1710 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1711 if (max_runs < file_data->profile_info.runs)
1712 max_runs = file_data->profile_info.runs;
1714 if (!max_runs)
1715 return;
1717 /* Simple overflow check. We probably don't need to support that many train
1718 runs. Such a large value probably imply data corruption anyway. */
1719 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1721 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1722 INT_MAX / REG_BR_PROB_BASE);
1723 return;
1726 profile_info = &lto_gcov_summary;
1727 lto_gcov_summary.runs = max_runs;
1728 lto_gcov_summary.sum_max = 0;
1729 memset (lto_gcov_summary.histogram, 0,
1730 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1732 /* Rescale all units to the maximal number of runs.
1733 sum_max can not be easily merged, as we have no idea what files come from
1734 the same run. We do not use the info anyway, so leave it 0. */
1735 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1736 if (file_data->profile_info.runs)
1738 int scale = GCOV_COMPUTE_SCALE (max_runs,
1739 file_data->profile_info.runs);
1740 lto_gcov_summary.sum_max
1741 = MAX (lto_gcov_summary.sum_max,
1742 apply_scale (file_data->profile_info.sum_max, scale));
1743 lto_gcov_summary.sum_all
1744 = MAX (lto_gcov_summary.sum_all,
1745 apply_scale (file_data->profile_info.sum_all, scale));
1746 /* Save a pointer to the profile_info with the largest
1747 scaled sum_all and the scale for use in merging the
1748 histogram. */
1749 if (!saved_profile_info
1750 || lto_gcov_summary.sum_all > saved_sum_all)
1752 saved_profile_info = &file_data->profile_info;
1753 saved_sum_all = lto_gcov_summary.sum_all;
1754 saved_scale = scale;
1758 gcc_assert (saved_profile_info);
1760 /* Scale up the histogram from the profile that had the largest
1761 scaled sum_all above. */
1762 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1764 /* Scale up the min value as we did the corresponding sum_all
1765 above. Use that to find the new histogram index. */
1766 gcov_type scaled_min
1767 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1768 saved_scale);
1769 /* The new index may be shared with another scaled histogram entry,
1770 so we need to account for a non-zero histogram entry at new_ix. */
1771 unsigned new_ix = gcov_histo_index (scaled_min);
1772 lto_gcov_summary.histogram[new_ix].min_value
1773 = (lto_gcov_summary.histogram[new_ix].num_counters
1774 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1775 : scaled_min);
1776 /* Some of the scaled counter values would ostensibly need to be placed
1777 into different (larger) histogram buckets, but we keep things simple
1778 here and place the scaled cumulative counter value in the bucket
1779 corresponding to the scaled minimum counter value. */
1780 lto_gcov_summary.histogram[new_ix].cum_value
1781 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1782 saved_scale);
1783 lto_gcov_summary.histogram[new_ix].num_counters
1784 += saved_profile_info->histogram[h_ix].num_counters;
1787 /* Watch roundoff errors. */
1788 if (lto_gcov_summary.sum_max < max_runs)
1789 lto_gcov_summary.sum_max = max_runs;
1791 /* If merging already happent at WPA time, we are done. */
1792 if (flag_ltrans)
1793 return;
1795 /* Now compute count_materialization_scale of each node.
1796 During LTRANS we already have values of count_materialization_scale
1797 computed, so just update them. */
1798 FOR_EACH_FUNCTION (node)
1799 if (node->lto_file_data
1800 && node->lto_file_data->profile_info.runs)
1802 int scale;
1804 scale = RDIV (node->count_materialization_scale * max_runs,
1805 node->lto_file_data->profile_info.runs);
1806 node->count_materialization_scale = scale;
1807 if (scale < 0)
1808 fatal_error (input_location, "Profile information in %s corrupted",
1809 file_data->file_name);
1811 if (scale == REG_BR_PROB_BASE)
1812 continue;
1813 for (edge = node->callees; edge; edge = edge->next_callee)
1814 edge->count = apply_scale (edge->count, scale);
1815 node->count = apply_scale (node->count, scale);
/* Input and merge the symtab from each of the .o files passed to
   lto1.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      vec<symtab_node *> nodes;

      /* The symtab section is mandatory in every LTO object.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error (input_location,
		     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* References are streamed in a separate section.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error (input_location, "cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);
  get_working_sets ();

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1879 /* Input function/variable tables that will allow libgomp to look up offload
1880 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1882 void
1883 input_offload_tables (void)
1885 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1886 struct lto_file_decl_data *file_data;
1887 unsigned int j = 0;
1889 while ((file_data = file_data_vec[j++]))
1891 const char *data;
1892 size_t len;
1893 struct lto_input_block *ib
1894 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1895 &data, &len);
1896 if (!ib)
1897 continue;
1899 enum LTO_symtab_tags tag
1900 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1901 while (tag)
1903 if (tag == LTO_symtab_unavail_node)
1905 int decl_index = streamer_read_uhwi (ib);
1906 tree fn_decl
1907 = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1908 vec_safe_push (offload_funcs, fn_decl);
1910 else if (tag == LTO_symtab_variable)
1912 int decl_index = streamer_read_uhwi (ib);
1913 tree var_decl
1914 = lto_file_decl_data_get_var_decl (file_data, decl_index);
1915 vec_safe_push (offload_vars, var_decl);
1917 else
1918 fatal_error (input_location,
1919 "invalid offload table in %s", file_data->file_name);
1921 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1924 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1925 ib, data, len);
1929 /* True when we need optimization summary for NODE. */
1931 static int
1932 output_cgraph_opt_summary_p (struct cgraph_node *node)
1934 return (node->clone_of
1935 && (node->clone.tree_map
1936 || node->clone.args_to_skip
1937 || node->clone.combined_args_to_skip));
/* Output optimization summary for EDGE to OB.
   Currently no per-edge data is streamed; the function is kept so
   output_node_opt_summary can walk all edges uniformly and so edge
   summaries can be added later without restructuring callers.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
/* Output optimization summary for NODE to OB.  Streams the clone info:
   the args_to_skip and combined_args_to_skip bitmaps, the tree_map
   parameter replacements, and (for nodes inside the partition) the
   per-edge summaries.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 lto_symtab_encoder_t encoder)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  /* Each bitmap is streamed as a population count followed by the set
     bit indices; an absent bitmap is streamed as a zero count.  */
  if (node->clone.args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
    {
      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
	 mechanism to store function local declarations into summaries.  */
      gcc_assert (!map->old_tree);
      streamer_write_uhwi (ob, map->parm_num);
      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
      stream_write_tree (ob, map->new_tree, true);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  /* Edge summaries are only streamed for nodes in the partition.  */
  if (lto_symtab_encoder_in_partition_p (encoder, node))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
2001 /* Output optimization summaries stored in callgraph.
2002 At the moment it is the clone info structure. */
2004 static void
2005 output_cgraph_opt_summary (void)
2007 int i, n_nodes;
2008 lto_symtab_encoder_t encoder;
2009 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
2010 unsigned count = 0;
2012 ob->symbol = NULL;
2013 encoder = ob->decl_state->symtab_node_encoder;
2014 n_nodes = lto_symtab_encoder_size (encoder);
2015 for (i = 0; i < n_nodes; i++)
2017 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2018 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2019 if (cnode && output_cgraph_opt_summary_p (cnode))
2020 count++;
2022 streamer_write_uhwi (ob, count);
2023 for (i = 0; i < n_nodes; i++)
2025 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2026 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2027 if (cnode && output_cgraph_opt_summary_p (cnode))
2029 streamer_write_uhwi (ob, i);
2030 output_node_opt_summary (ob, cnode, encoder);
2033 produce_asm (ob, NULL);
2034 destroy_output_block (ob);
/* Input optimisation summary of EDGE.
   Currently nothing is streamed per edge, so nothing is read; kept in
   lockstep with output_edge_opt_summary so the format can grow.  */
static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
2045 /* Input optimisation summary of NODE. */
2047 static void
2048 input_node_opt_summary (struct cgraph_node *node,
2049 struct lto_input_block *ib_main,
2050 struct data_in *data_in)
2052 int i;
2053 int count;
2054 int bit;
2055 struct bitpack_d bp;
2056 struct cgraph_edge *e;
2058 count = streamer_read_uhwi (ib_main);
2059 if (count)
2060 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
2061 for (i = 0; i < count; i++)
2063 bit = streamer_read_uhwi (ib_main);
2064 bitmap_set_bit (node->clone.args_to_skip, bit);
2066 count = streamer_read_uhwi (ib_main);
2067 if (count)
2068 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
2069 for (i = 0; i < count; i++)
2071 bit = streamer_read_uhwi (ib_main);
2072 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
2074 count = streamer_read_uhwi (ib_main);
2075 for (i = 0; i < count; i++)
2077 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2079 vec_safe_push (node->clone.tree_map, map);
2080 map->parm_num = streamer_read_uhwi (ib_main);
2081 map->old_tree = NULL;
2082 map->new_tree = stream_read_tree (ib_main, data_in);
2083 bp = streamer_read_bitpack (ib_main);
2084 map->replace_p = bp_unpack_value (&bp, 1);
2085 map->ref_p = bp_unpack_value (&bp, 1);
2087 for (e = node->callees; e; e = e->next_callee)
2088 input_edge_opt_summary (e, ib_main);
2089 for (e = node->indirect_calls; e; e = e->next_callee)
2090 input_edge_opt_summary (e, ib_main);
2093 /* Read section in file FILE_DATA of length LEN with data DATA. */
2095 static void
2096 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2097 const char *data, size_t len,
2098 vec<symtab_node *> nodes)
2100 const struct lto_function_header *header =
2101 (const struct lto_function_header *) data;
2102 const int cfg_offset = sizeof (struct lto_function_header);
2103 const int main_offset = cfg_offset + header->cfg_size;
2104 const int string_offset = main_offset + header->main_size;
2105 struct data_in *data_in;
2106 unsigned int i;
2107 unsigned int count;
2109 lto_input_block ib_main ((const char *) data + main_offset,
2110 header->main_size);
2112 data_in =
2113 lto_data_in_create (file_data, (const char *) data + string_offset,
2114 header->string_size, vNULL);
2115 count = streamer_read_uhwi (&ib_main);
2117 for (i = 0; i < count; i++)
2119 int ref = streamer_read_uhwi (&ib_main);
2120 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2121 &ib_main, data_in);
2123 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2124 len);
2125 lto_data_in_delete (data_in);
2128 /* Input optimization summary of cgraph. */
2130 static void
2131 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2133 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2134 struct lto_file_decl_data *file_data;
2135 unsigned int j = 0;
2137 while ((file_data = file_data_vec[j++]))
2139 size_t len;
2140 const char *data =
2141 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2142 &len);
2144 if (data)
2145 input_cgraph_opt_section (file_data, data, len, nodes);