1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2015 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "stringpool.h"
39 #include "predict.h"
40 #include "hard-reg-set.h"
41 #include "function.h"
42 #include "basic-block.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-expr.h"
46 #include "is-a.h"
47 #include "gimple.h"
48 #include "hashtab.h"
49 #include "rtl.h"
50 #include "flags.h"
51 #include "statistics.h"
52 #include "real.h"
53 #include "fixed-value.h"
54 #include "insn-config.h"
55 #include "expmed.h"
56 #include "dojump.h"
57 #include "explow.h"
58 #include "calls.h"
59 #include "emit-rtl.h"
60 #include "varasm.h"
61 #include "stmt.h"
62 #include "expr.h"
63 #include "params.h"
64 #include "langhooks.h"
65 #include "bitmap.h"
66 #include "diagnostic-core.h"
67 #include "except.h"
68 #include "timevar.h"
69 #include "hash-map.h"
70 #include "plugin-api.h"
71 #include "ipa-ref.h"
72 #include "cgraph.h"
73 #include "lto-streamer.h"
74 #include "data-streamer.h"
75 #include "tree-streamer.h"
76 #include "gcov-io.h"
77 #include "tree-pass.h"
78 #include "profile.h"
79 #include "context.h"
80 #include "pass_manager.h"
81 #include "ipa-utils.h"
82 #include "omp-low.h"
84 /* True when asm nodes have been output. */
85 bool asm_nodes_output = false;
87 static void output_cgraph_opt_summary (void);
88 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
90 /* Number of LDPR values known to GCC. */
91 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
93 /* All node orders are offset by ORDER_BASE. */
94 static int order_base;
96 /* Cgraph streaming is organized as a set of records whose type
97 is indicated by a tag. */
98 enum LTO_symtab_tags
100 /* Must leave 0 for the stopper. */
102 /* Cgraph node without body available. */
103 LTO_symtab_unavail_node = 1,
104 /* Cgraph node with function body. */
105 LTO_symtab_analyzed_node,
106 /* Cgraph edges. */
107 LTO_symtab_edge,
108 LTO_symtab_indirect_edge,
109 LTO_symtab_variable,
110 LTO_symtab_last_tag
113 /* Create a new symtab encoder.
114 If FOR_INPUT, the encoder allocates only the data structures needed
115 to read the symtab. */
117 lto_symtab_encoder_t
118 lto_symtab_encoder_new (bool for_input)
120 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
122 if (!for_input)
123 encoder->map = new hash_map<symtab_node *, size_t>;
124 encoder->nodes.create (0);
125 return encoder;
129 /* Delete ENCODER and its components. */
131 void
132 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
134 encoder->nodes.release ();
135 if (encoder->map)
136 delete encoder->map;
137 free (encoder);
141 /* Return the existing reference number of NODE in the symtab
142 encoder ENCODER. Assign a new reference if this is the first time
143 NODE is encoded. */
146 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
147 symtab_node *node)
149 int ref;
151 if (!encoder->map)
153 lto_encoder_entry entry = {node, false, false, false};
155 ref = encoder->nodes.length ();
156 encoder->nodes.safe_push (entry);
157 return ref;
160 size_t *slot = encoder->map->get (node);
161 if (!slot || !*slot)
163 lto_encoder_entry entry = {node, false, false, false};
164 ref = encoder->nodes.length ();
165 if (!slot)
166 encoder->map->put (node, ref + 1);
167 encoder->nodes.safe_push (entry);
169 else
170 ref = *slot - 1;
172 return ref;
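/* Note on the function above: the on-the-side map stores REF + 1 so that a
   value of 0 can mean "no entry"; readers of the map therefore subtract 1
   (see the "*slot - 1" above).  Input encoders have no map at all and simply
   append without de-duplication.  */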
175 /* Remove NODE from encoder. */
177 bool
178 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
179 symtab_node *node)
181 int index;
182 lto_encoder_entry last_node;
184 size_t *slot = encoder->map->get (node);
185 if (slot == NULL || !*slot)
186 return false;
188 index = *slot - 1;
189 gcc_checking_assert (encoder->nodes[index].node == node);
191 /* Remove from vector. We do this by swapping node with the last element
192 of the vector. */
193 last_node = encoder->nodes.pop ();
194 if (last_node.node != node)
196 gcc_assert (encoder->map->put (last_node.node, index + 1));
198 /* Move the last element to the original spot of NODE. */
199 encoder->nodes[index] = last_node;
202 /* Remove element from hash table. */
203 encoder->map->remove (node);
204 return true;
208 /* Return TRUE if we should encode the body of NODE (if any). */
210 bool
211 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
212 struct cgraph_node *node)
214 int index = lto_symtab_encoder_lookup (encoder, node);
215 return encoder->nodes[index].body;
218 /* Specify that we encode the body of NODE in this partition. */
220 static void
221 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
222 struct cgraph_node *node)
224 int index = lto_symtab_encoder_encode (encoder, node);
225 gcc_checking_assert (encoder->nodes[index].node == node);
226 encoder->nodes[index].body = true;
229 /* Return TRUE if we should encode the initializer of NODE (if any). */
231 bool
232 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
233 varpool_node *node)
235 int index = lto_symtab_encoder_lookup (encoder, node);
236 if (index == LCC_NOT_FOUND)
237 return false;
238 return encoder->nodes[index].initializer;
241 /* Specify that we should encode the initializer of NODE (if any). */
243 static void
244 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
245 varpool_node *node)
247 int index = lto_symtab_encoder_lookup (encoder, node);
248 encoder->nodes[index].initializer = true;
251 /* Return TRUE if NODE is in this partition. */
253 bool
254 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
255 symtab_node *node)
257 int index = lto_symtab_encoder_lookup (encoder, node);
258 if (index == LCC_NOT_FOUND)
259 return false;
260 return encoder->nodes[index].in_partition;
263 /* Specify that NODE is in this partition. */
265 void
266 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
267 symtab_node *node)
269 int index = lto_symtab_encoder_encode (encoder, node);
270 encoder->nodes[index].in_partition = true;
273 /* Output the cgraph EDGE to OB using ENCODER. */
275 static void
276 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
277 lto_symtab_encoder_t encoder)
279 unsigned int uid;
280 intptr_t ref;
281 struct bitpack_d bp;
283 if (edge->indirect_unknown_callee)
284 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
285 LTO_symtab_indirect_edge);
286 else
287 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
288 LTO_symtab_edge);
290 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
291 gcc_assert (ref != LCC_NOT_FOUND);
292 streamer_write_hwi_stream (ob->main_stream, ref);
294 if (!edge->indirect_unknown_callee)
296 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
297 gcc_assert (ref != LCC_NOT_FOUND);
298 streamer_write_hwi_stream (ob->main_stream, ref);
301 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
303 bp = bitpack_create (ob->main_stream);
304 uid = (!gimple_has_body_p (edge->caller->decl)
305 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
306 bp_pack_enum (&bp, cgraph_inline_failed_t,
307 CIF_N_REASONS, edge->inline_failed);
308 bp_pack_var_len_unsigned (&bp, uid);
309 bp_pack_var_len_unsigned (&bp, edge->frequency);
310 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
311 bp_pack_value (&bp, edge->speculative, 1);
312 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
313 bp_pack_value (&bp, edge->can_throw_external, 1);
314 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
315 if (edge->indirect_unknown_callee)
317 int flags = edge->indirect_info->ecf_flags;
318 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
319 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
320 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
321 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
322 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
323 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
324 /* Flags that should not appear on indirect calls. */
325 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
326 | ECF_MAY_BE_ALLOCA
327 | ECF_SIBCALL
328 | ECF_LEAF
329 | ECF_NOVOPS)));
331 streamer_write_bitpack (&bp);
332 if (edge->indirect_unknown_callee)
334 streamer_write_hwi_stream (ob->main_stream,
335 edge->indirect_info->common_target_id);
336 if (edge->indirect_info->common_target_id)
337 streamer_write_hwi_stream
338 (ob->main_stream, edge->indirect_info->common_target_probability);
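/* Note on the encoding above: the statement uid is streamed as
   gimple_uid + 1 when the caller body is available (so a value of zero can
   stand for "no statement"); otherwise the previously recorded lto_stmt_uid
   is used.  For indirect edges the ECF flags are packed one bit at a time
   and reconstructed in input_edge.  */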
342 /* Return true if NODE contains references from other partitions. */
344 bool
345 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
347 int i;
348 struct ipa_ref *ref = NULL;
350 for (i = 0; node->iterate_referring (i, ref); i++)
352 /* Ignore references from non-offloadable nodes while streaming NODE into
353 offload LTO section. */
354 if (!ref->referring->need_lto_streaming)
355 continue;
357 if (ref->referring->in_other_partition
358 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
359 return true;
361 return false;
364 /* Return true when NODE is reachable from another partition. */
366 bool
367 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
369 struct cgraph_edge *e;
370 if (!node->definition)
371 return false;
372 if (node->global.inlined_to)
373 return false;
374 for (e = node->callers; e; e = e->next_caller)
376 /* Ignore references from non-offloadable nodes while streaming NODE into
377 offload LTO section. */
378 if (!e->caller->need_lto_streaming)
379 continue;
381 if (e->caller->in_other_partition
382 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
383 return true;
385 return false;
388 /* Return true if NODE is referenced from this partition. */
390 bool
391 referenced_from_this_partition_p (symtab_node *node,
392 lto_symtab_encoder_t encoder)
394 int i;
395 struct ipa_ref *ref = NULL;
397 for (i = 0; node->iterate_referring (i, ref); i++)
398 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
399 return true;
400 return false;
403 /* Return true when NODE is reachable from this partition. */
405 bool
406 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
408 struct cgraph_edge *e;
409 for (e = node->callers; e; e = e->next_caller)
410 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
411 return true;
412 return false;
415 /* Output the cgraph NODE to OB. ENCODER is used to find the
416 reference numbers of NODE and of the symbols it refers to. If NODE
417 is not in the partition described by ENCODER, then NODE is a
418 boundary node and we pretend it just has a decl and no callees. */
423 static void
424 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
425 lto_symtab_encoder_t encoder)
427 unsigned int tag;
428 struct bitpack_d bp;
429 bool boundary_p;
430 intptr_t ref;
431 bool in_other_partition = false;
432 struct cgraph_node *clone_of, *ultimate_clone_of;
433 ipa_opt_pass_d *pass;
434 int i;
435 const char *comdat;
436 const char *section;
437 tree group;
439 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
441 if (node->analyzed && (!boundary_p || node->alias || node->thunk.thunk_p))
442 tag = LTO_symtab_analyzed_node;
443 else
444 tag = LTO_symtab_unavail_node;
446 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
447 tag);
448 streamer_write_hwi_stream (ob->main_stream, node->order);
450 /* In WPA mode, we only output part of the call-graph. Also, we
451 fake cgraph node attributes. There are two cases that we care about:
453 Boundary nodes: There are nodes that are not part of SET but are
454 called from within SET. We artificially make them look like
455 externally visible nodes with no function body.
457 Cherry-picked nodes: These are nodes we pulled from other
458 translation units into SET during IPA-inlining. We make them look
459 like local static nodes to prevent clashes with other local statics. */
460 if (boundary_p && node->analyzed
461 && node->get_partitioning_class () == SYMBOL_PARTITION)
463 /* Inline clones cannot be part of the boundary.
464 gcc_assert (!node->global.inlined_to);
466 FIXME: At the moment they can be, when the partition contains an inline
467 clone that is a clone of an inline clone from outside the partition. We can
468 reshape the clone tree and make another node the root, but it
469 needs a bit of extra work and will be promptly done by cgraph_remove_node
470 after reading back. */
471 in_other_partition = 1;
474 clone_of = node->clone_of;
475 while (clone_of
476 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
477 if (clone_of->prev_sibling_clone)
478 clone_of = clone_of->prev_sibling_clone;
479 else
480 clone_of = clone_of->clone_of;
482 /* See if the body of the master function is output. If not, we are seeing only
483 a declaration and we do not need to pass down the clone tree. */
484 ultimate_clone_of = clone_of;
485 while (ultimate_clone_of && ultimate_clone_of->clone_of)
486 ultimate_clone_of = ultimate_clone_of->clone_of;
488 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
489 clone_of = NULL;
491 if (tag == LTO_symtab_analyzed_node)
492 gcc_assert (clone_of || !node->clone_of);
493 if (!clone_of)
494 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
495 else
496 streamer_write_hwi_stream (ob->main_stream, ref);
499 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
500 streamer_write_gcov_count_stream (ob->main_stream, node->count);
501 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
503 streamer_write_hwi_stream (ob->main_stream,
504 node->ipa_transforms_to_apply.length ());
505 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
506 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
508 if (tag == LTO_symtab_analyzed_node)
510 if (node->global.inlined_to)
512 ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
513 gcc_assert (ref != LCC_NOT_FOUND);
515 else
516 ref = LCC_NOT_FOUND;
518 streamer_write_hwi_stream (ob->main_stream, ref);
521 group = node->get_comdat_group ();
522 if (group)
523 comdat = IDENTIFIER_POINTER (group);
524 else
525 comdat = "";
526 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
528 if (group)
530 if (node->same_comdat_group && !boundary_p)
532 ref = lto_symtab_encoder_lookup (encoder,
533 node->same_comdat_group);
534 gcc_assert (ref != LCC_NOT_FOUND);
536 else
537 ref = LCC_NOT_FOUND;
538 streamer_write_hwi_stream (ob->main_stream, ref);
541 section = node->get_section ();
542 if (!section)
543 section = "";
545 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
547 bp = bitpack_create (ob->main_stream);
548 bp_pack_value (&bp, node->local.local, 1);
549 bp_pack_value (&bp, node->externally_visible, 1);
550 bp_pack_value (&bp, node->no_reorder, 1);
551 bp_pack_value (&bp, node->definition, 1);
552 bp_pack_value (&bp, node->local.versionable, 1);
553 bp_pack_value (&bp, node->local.can_change_signature, 1);
554 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
555 bp_pack_value (&bp, node->force_output, 1);
556 bp_pack_value (&bp, node->forced_by_abi, 1);
557 bp_pack_value (&bp, node->unique_name, 1);
558 bp_pack_value (&bp, node->body_removed, 1);
559 bp_pack_value (&bp, node->implicit_section, 1);
560 bp_pack_value (&bp, node->address_taken, 1);
561 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
562 && node->get_partitioning_class () == SYMBOL_PARTITION
563 && (reachable_from_other_partition_p (node, encoder)
564 || referenced_from_other_partition_p (node, encoder)), 1);
565 bp_pack_value (&bp, node->lowered, 1);
566 bp_pack_value (&bp, in_other_partition, 1);
567 bp_pack_value (&bp, node->alias, 1);
568 bp_pack_value (&bp, node->weakref, 1);
569 bp_pack_value (&bp, node->frequency, 2);
570 bp_pack_value (&bp, node->only_called_at_startup, 1);
571 bp_pack_value (&bp, node->only_called_at_exit, 1);
572 bp_pack_value (&bp, node->tm_clone, 1);
573 bp_pack_value (&bp, node->calls_comdat_local, 1);
574 bp_pack_value (&bp, node->icf_merged, 1);
575 bp_pack_value (&bp, node->nonfreeing_fn, 1);
576 bp_pack_value (&bp, node->thunk.thunk_p, 1);
577 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
578 LDPR_NUM_KNOWN, node->resolution);
579 bp_pack_value (&bp, node->instrumentation_clone, 1);
580 streamer_write_bitpack (&bp);
581 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
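/* Pack the thunk flags below into a single word: bit 0 is always set as a
   marker, bit 1 is this_adjusting, bit 2 is virtual_offset_p and bit 3 is
   add_pointer_bounds_args; input_node tests the same bits when reading.  */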
583 if (node->thunk.thunk_p)
585 streamer_write_uhwi_stream
586 (ob->main_stream,
587 1 + (node->thunk.this_adjusting != 0) * 2
588 + (node->thunk.virtual_offset_p != 0) * 4
589 + (node->thunk.add_pointer_bounds_args != 0) * 8);
590 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
591 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
593 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
594 if (DECL_STATIC_CONSTRUCTOR (node->decl))
595 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
596 if (DECL_STATIC_DESTRUCTOR (node->decl))
597 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
599 if (node->instrumentation_clone)
600 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->orig_decl);
603 /* Output the varpool NODE to OB.
604 If NODE is not in SET, then NODE is a boundary. */
606 static void
607 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
608 lto_symtab_encoder_t encoder)
610 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
611 bool encode_initializer_p
612 = (node->definition
613 && lto_symtab_encoder_encode_initializer_p (encoder, node));
614 struct bitpack_d bp;
615 int ref;
616 const char *comdat;
617 const char *section;
618 tree group;
620 gcc_assert (!encode_initializer_p || node->definition);
621 gcc_assert (boundary_p || encode_initializer_p);
623 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
624 LTO_symtab_variable);
625 streamer_write_hwi_stream (ob->main_stream, node->order);
626 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
627 bp = bitpack_create (ob->main_stream);
628 bp_pack_value (&bp, node->externally_visible, 1);
629 bp_pack_value (&bp, node->no_reorder, 1);
630 bp_pack_value (&bp, node->force_output, 1);
631 bp_pack_value (&bp, node->forced_by_abi, 1);
632 bp_pack_value (&bp, node->unique_name, 1);
633 bp_pack_value (&bp,
634 node->body_removed
635 || (!encode_initializer_p && !node->alias && node->definition),
637 bp_pack_value (&bp, node->implicit_section, 1);
638 bp_pack_value (&bp, node->writeonly, 1);
639 bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
641 bp_pack_value (&bp, node->alias, 1);
642 bp_pack_value (&bp, node->weakref, 1);
643 bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
644 gcc_assert (node->definition || !node->analyzed);
645 /* Constant pool initializers can be de-unified into individual ltrans units.
646 FIXME: Alternatively, at -Os we may want to avoid generating the local
647 labels for them and share them across LTRANS partitions. */
648 if (node->get_partitioning_class () != SYMBOL_PARTITION)
650 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
651 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
653 else
655 bp_pack_value (&bp, node->definition
656 && referenced_from_other_partition_p (node, encoder), 1);
657 bp_pack_value (&bp, node->analyzed
658 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
659 /* in_other_partition. */
661 bp_pack_value (&bp, node->tls_model, 3);
662 bp_pack_value (&bp, node->used_by_single_function, 1);
663 bp_pack_value (&bp, node->need_bounds_init, 1);
664 streamer_write_bitpack (&bp);
666 group = node->get_comdat_group ();
667 if (group)
668 comdat = IDENTIFIER_POINTER (group);
669 else
670 comdat = "";
671 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
673 if (group)
675 if (node->same_comdat_group && !boundary_p)
677 ref = lto_symtab_encoder_lookup (encoder,
678 node->same_comdat_group);
679 gcc_assert (ref != LCC_NOT_FOUND);
681 else
682 ref = LCC_NOT_FOUND;
683 streamer_write_hwi_stream (ob->main_stream, ref);
686 section = node->get_section ();
687 if (!section)
688 section = "";
689 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
691 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
692 LDPR_NUM_KNOWN, node->resolution);
695 /* Output the IPA reference REF to OB using ENCODER. */
698 static void
699 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
700 lto_symtab_encoder_t encoder)
702 struct bitpack_d bp;
703 int nref;
704 int uid = ref->lto_stmt_uid;
705 struct cgraph_node *node;
707 bp = bitpack_create (ob->main_stream);
708 bp_pack_value (&bp, ref->use, 3);
709 bp_pack_value (&bp, ref->speculative, 1);
710 streamer_write_bitpack (&bp);
711 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
712 gcc_assert (nref != LCC_NOT_FOUND);
713 streamer_write_hwi_stream (ob->main_stream, nref);
715 node = dyn_cast <cgraph_node *> (ref->referring);
716 if (node)
718 if (ref->stmt)
719 uid = gimple_uid (ref->stmt) + 1;
720 streamer_write_hwi_stream (ob->main_stream, uid);
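/* Note: only references coming from a cgraph node carry a statement uid;
   as in lto_output_edge, gimple_uid + 1 is used when a statement is attached
   so that zero can stand for "no statement".  */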
724 /* Stream out profile_summary to OB. */
726 static void
727 output_profile_summary (struct lto_simple_output_block *ob)
729 unsigned h_ix;
730 struct bitpack_d bp;
732 if (profile_info)
734 /* We do not output num and run_max, they are not used by
735 GCC profile feedback and they are difficult to merge from multiple
736 units. */
737 gcc_assert (profile_info->runs);
738 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
739 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
741 /* sum_all is needed for computing the working set with the
742 histogram. */
743 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
745 /* Create and output a bitpack of non-zero histogram entry indices. */
746 bp = bitpack_create (ob->main_stream);
747 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
748 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
749 streamer_write_bitpack (&bp);
750 /* Now stream out only those non-zero entries. */
751 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
753 if (!profile_info->histogram[h_ix].num_counters)
754 continue;
755 streamer_write_gcov_count_stream (ob->main_stream,
756 profile_info->histogram[h_ix].num_counters);
757 streamer_write_gcov_count_stream (ob->main_stream,
758 profile_info->histogram[h_ix].min_value);
759 streamer_write_gcov_count_stream (ob->main_stream,
760 profile_info->histogram[h_ix].cum_value);
762 /* IPA-profile computes hot bb threshold based on cumulated
763 whole program profile. We need to stream it down to ltrans. */
764 if (flag_wpa)
765 streamer_write_gcov_count_stream (ob->main_stream,
766 get_hot_bb_threshold ());
768 else
769 streamer_write_uhwi_stream (ob->main_stream, 0);
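/* A run count of zero (the "else" branch above) tells the reader that no
   profile summary follows; input_profile_summary only reads the remaining
   fields when the run count is non-zero.  */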
772 /* Output all callees or indirect outgoing edges. EDGE must be the first such
773 edge. */
775 static void
776 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
777 struct lto_simple_output_block *ob,
778 lto_symtab_encoder_t encoder)
780 if (!edge)
781 return;
783 /* Output edges in backward direction, so the reconstructed callgraph matches
784 and it is easy to associate call sites in the IPA pass summaries. */
785 while (edge->next_callee)
786 edge = edge->next_callee;
787 for (; edge; edge = edge->prev_callee)
788 lto_output_edge (ob, edge, encoder);
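/* Streaming in reverse presumably works because the reader creates each new
   edge at the head of the caller's callee list, so the original order is
   restored after input.  */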
791 /* Output the references for all symbols in ENCODER that are in the partition (and for aliases). */
793 static void
794 output_refs (lto_symtab_encoder_t encoder)
796 struct lto_simple_output_block *ob;
797 int count;
798 struct ipa_ref *ref;
800 ob = lto_create_simple_output_block (LTO_section_refs);
802 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
804 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
806 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
807 continue;
809 count = node->ref_list.nreferences ();
810 if (count)
812 streamer_write_gcov_count_stream (ob->main_stream, count);
813 streamer_write_uhwi_stream (ob->main_stream,
814 lto_symtab_encoder_lookup (encoder, node));
815 for (int i = 0; node->iterate_reference (i, ref); i++)
816 lto_output_ref (ob, ref, encoder);
820 streamer_write_uhwi_stream (ob->main_stream, 0);
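  /* The zero written above is read by input_refs as a reference count of
     zero and terminates the section.  */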
822 lto_destroy_simple_output_block (ob);
825 /* Add NODE into encoder as well as nodes it is cloned from.
826 Do it in a way so that masters appear before their clones. */
828 static void
829 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
830 bool include_body)
832 if (node->clone_of)
833 add_node_to (encoder, node->clone_of, include_body);
834 else if (include_body)
835 lto_set_symtab_encoder_encode_body (encoder, node);
836 lto_symtab_encoder_encode (encoder, node);
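/* Note: the recursion above encodes the node a clone was cloned from before
   the clone itself, so masters get lower references than their clones, and
   the body flag is only set on the ultimate master.  */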
839 /* Add all references in NODE to encoders. */
841 static void
842 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
844 int i;
845 struct ipa_ref *ref = NULL;
846 for (i = 0; node->iterate_reference (i, ref); i++)
847 if (is_a <cgraph_node *> (ref->referred))
848 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
849 else
850 lto_symtab_encoder_encode (encoder, ref->referred);
853 /* Select what needs to be streamed out. In regular LTO mode stream everything;
854 in offload LTO mode stream only nodes marked as offloadable. */
855 void
856 select_what_to_stream (void)
858 struct symtab_node *snode;
859 FOR_EACH_SYMBOL (snode)
860 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
863 /* Find all symbols we want to stream into a given partition and insert them
864 into encoders.
866 The function actually replaces IN_ENCODER by a new one. The reason is that
867 the streaming code needs a clone's origin to be streamed before the clone. This
868 means that we need to insert the nodes in a specific order. This order is
869 ignored by the partitioning logic earlier. */
871 lto_symtab_encoder_t
872 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
874 struct cgraph_edge *edge;
875 int i;
876 lto_symtab_encoder_t encoder;
877 lto_symtab_encoder_iterator lsei;
878 hash_set<void *> reachable_call_targets;
880 encoder = lto_symtab_encoder_new (false);
882 /* Go over all entries in the IN_ENCODER and duplicate them to
883 ENCODER. At the same time insert masters of clones so that
884 every master appears before its clones. */
885 for (lsei = lsei_start_function_in_partition (in_encoder);
886 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
888 struct cgraph_node *node = lsei_cgraph_node (lsei);
889 if (!node->need_lto_streaming)
890 continue;
891 add_node_to (encoder, node, true);
892 lto_set_symtab_encoder_in_partition (encoder, node);
893 create_references (encoder, node);
894 /* For proper debug info, we need to ship the origins, too. */
895 if (DECL_ABSTRACT_ORIGIN (node->decl))
897 struct cgraph_node *origin_node
898 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
899 origin_node->used_as_abstract_origin = true;
900 add_node_to (encoder, origin_node, true);
903 for (lsei = lsei_start_variable_in_partition (in_encoder);
904 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
906 varpool_node *vnode = lsei_varpool_node (lsei);
908 if (!vnode->need_lto_streaming)
909 continue;
910 lto_set_symtab_encoder_in_partition (encoder, vnode);
911 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
912 create_references (encoder, vnode);
913 /* For proper debug info, we need to ship the origins, too. */
914 if (DECL_ABSTRACT_ORIGIN (vnode->decl))
916 varpool_node *origin_node
917 = varpool_node::get (DECL_ABSTRACT_ORIGIN (vnode->decl));
918 lto_set_symtab_encoder_in_partition (encoder, origin_node);
921 /* Also pickle in the initializers of all referenced read-only variables
922 to help folding. Constant pool variables are not shared, so we must
923 pickle those too. */
924 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
926 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
927 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
929 if (!lto_symtab_encoder_encode_initializer_p (encoder,
930 vnode)
931 && (((vnode->ctor_useable_for_folding_p ()
932 && (!DECL_VIRTUAL_P (vnode->decl)
933 || !flag_wpa
934 || flag_ltrans_devirtualize))
935 || POINTER_BOUNDS_P (vnode->decl))))
937 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
938 create_references (encoder, vnode);
943 /* Go over all the nodes again to include callees that are not in
944 the partition. */
945 for (lsei = lsei_start_function_in_partition (encoder);
946 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
948 struct cgraph_node *node = lsei_cgraph_node (lsei);
949 for (edge = node->callees; edge; edge = edge->next_callee)
951 struct cgraph_node *callee = edge->callee;
952 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
954 /* We should have moved all the inlines. */
955 gcc_assert (!callee->global.inlined_to);
956 add_node_to (encoder, callee, false);
959 /* Add all possible targets for late devirtualization. */
960 if (flag_ltrans_devirtualize || !flag_wpa)
961 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
962 if (edge->indirect_info->polymorphic)
964 unsigned int i;
965 void *cache_token;
966 bool final;
967 vec <cgraph_node *>targets
968 = possible_polymorphic_call_targets
969 (edge, &final, &cache_token);
970 if (!reachable_call_targets.add (cache_token))
972 for (i = 0; i < targets.length (); i++)
974 struct cgraph_node *callee = targets[i];
976 /* Adding external declarations into the unit serves
977 no purpose and just increases its boundary. */
978 if (callee->definition
979 && !lto_symtab_encoder_in_partition_p
980 (encoder, callee))
982 gcc_assert (!callee->global.inlined_to);
983 add_node_to (encoder, callee, false);
989 /* Be sure to also insert alias targets and thunk callees. These need
990 to stay to aid local calling conventions. */
991 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
993 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
994 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
996 if (node->alias && node->analyzed)
997 create_references (encoder, node);
998 if (cnode
999 && cnode->thunk.thunk_p)
1000 add_node_to (encoder, cnode->callees->callee, false);
1002 lto_symtab_encoder_delete (in_encoder);
1003 return encoder;
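/* To summarize compute_ltrans_boundary: it copies the partition's functions
   and variables (plus abstract origins) into a fresh encoder, pickles the
   initializers of referenced read-only and constant-pool variables, then
   pulls in out-of-partition callees, possible polymorphic call targets,
   alias targets and thunk callees as boundary nodes.  */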
1006 /* Output the part of the symtab covered by the symtab node encoder. */
1008 void
1009 output_symtab (void)
1011 struct cgraph_node *node;
1012 struct lto_simple_output_block *ob;
1013 int i, n_nodes;
1014 lto_symtab_encoder_t encoder;
1016 if (flag_wpa)
1017 output_cgraph_opt_summary ();
1019 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
1021 output_profile_summary (ob);
1023 /* An encoder for cgraph nodes should have been created by
1024 ipa_write_summaries_1. */
1025 gcc_assert (ob->decl_state->symtab_node_encoder);
1026 encoder = ob->decl_state->symtab_node_encoder;
1028 /* Write out the nodes. We must first output a node and then its clones,
1029 otherwise, when reading the node back, there would be nothing to clone
1030 from. */
1031 n_nodes = lto_symtab_encoder_size (encoder);
1032 for (i = 0; i < n_nodes; i++)
1034 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1035 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1036 lto_output_node (ob, cnode, encoder);
1037 else
1038 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
1041 /* Go over the nodes again to write edges. */
1042 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
1044 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
1045 if (node
1046 && (node->thunk.thunk_p
1047 || lto_symtab_encoder_in_partition_p (encoder, node)))
1049 output_outgoing_cgraph_edges (node->callees, ob, encoder);
1050 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
1054 streamer_write_uhwi_stream (ob->main_stream, 0);
1056 lto_destroy_simple_output_block (ob);
1058 /* Emit toplevel asms.
1059 When doing WPA we must output every asm just once. Since we do not partition asm
1060 nodes at all, output them to the first output. This is kind of a hack, but should
1061 work well. */
1062 if (!asm_nodes_output)
1064 asm_nodes_output = true;
1065 lto_output_toplevel_asms ();
1068 output_refs (encoder);
1071 /* Return identifier encoded in IB as a plain string. */
1073 static tree
1074 read_identifier (struct lto_input_block *ib)
1076 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1077 tree id;
1079 if (ib->data[ib->p + len])
1080 lto_section_overrun (ib);
1081 if (!len)
1083 ib->p++;
1084 return NULL;
1086 id = get_identifier (ib->data + ib->p);
1087 ib->p += len + 1;
1088 return id;
1091 /* Return string encoded in IB, NULL if string is empty. */
1093 static const char *
1094 read_string (struct lto_input_block *ib)
1096 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1097 const char *str;
1099 if (ib->data[ib->p + len])
1100 lto_section_overrun (ib);
1101 if (!len)
1103 ib->p++;
1104 return NULL;
1106 str = ib->data + ib->p;
1107 ib->p += len + 1;
1108 return str;
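/* Both helpers above rely on the writer streaming strings NUL-terminated
   (see the strlen () + 1 in the data-stream writes); an empty string is
   decoded as NULL.  */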
1111 /* Output function/variable tables that will allow libgomp to look up offload
1112 target code.
1113 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1114 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1115 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1117 void
1118 output_offload_tables (void)
1120 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
1121 return;
1123 struct lto_simple_output_block *ob
1124 = lto_create_simple_output_block (LTO_section_offload_table);
1126 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1128 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1129 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1130 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
1131 (*offload_funcs)[i]);
1134 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1136 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1137 LTO_symtab_last_tag, LTO_symtab_variable);
1138 lto_output_var_decl_index (ob->decl_state, ob->main_stream,
1139 (*offload_vars)[i]);
1142 streamer_write_uhwi_stream (ob->main_stream, 0);
1143 lto_destroy_simple_output_block (ob);
1145 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1146 streamed to one partition only. That's why we free offload_funcs and
1147 offload_vars after the first call of output_offload_tables. */
1148 if (flag_wpa)
1150 vec_free (offload_funcs);
1151 vec_free (offload_vars);
1155 /* Overwrite the information in NODE based on FILE_DATA and TAG. This is
1156 called either to initialize NODE or to replace the values in it, for
1157 instance because the first time we saw it, the function body was not
1158 available but now it is. BP is a bitpack with all the bitflags for
1159 NODE read from the stream. */
1162 static void
1163 input_overwrite_node (struct lto_file_decl_data *file_data,
1164 struct cgraph_node *node,
1165 enum LTO_symtab_tags tag,
1166 struct bitpack_d *bp)
1168 node->aux = (void *) tag;
1169 node->lto_file_data = file_data;
1171 node->local.local = bp_unpack_value (bp, 1);
1172 node->externally_visible = bp_unpack_value (bp, 1);
1173 node->no_reorder = bp_unpack_value (bp, 1);
1174 node->definition = bp_unpack_value (bp, 1);
1175 node->local.versionable = bp_unpack_value (bp, 1);
1176 node->local.can_change_signature = bp_unpack_value (bp, 1);
1177 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
1178 node->force_output = bp_unpack_value (bp, 1);
1179 node->forced_by_abi = bp_unpack_value (bp, 1);
1180 node->unique_name = bp_unpack_value (bp, 1);
1181 node->body_removed = bp_unpack_value (bp, 1);
1182 node->implicit_section = bp_unpack_value (bp, 1);
1183 node->address_taken = bp_unpack_value (bp, 1);
1184 node->used_from_other_partition = bp_unpack_value (bp, 1);
1185 node->lowered = bp_unpack_value (bp, 1);
1186 node->analyzed = tag == LTO_symtab_analyzed_node;
1187 node->in_other_partition = bp_unpack_value (bp, 1);
1188 if (node->in_other_partition
1189 /* Avoid updating the decl when we are seeing just an inline clone.
1190 When inlining a function that has functions already inlined into it,
1191 we produce clones of inline clones.
1193 WPA partitioning might put each clone into a different unit and
1194 we might end up streaming an inline clone from another partition
1195 to support the clone we are interested in. */
1196 && (!node->clone_of
1197 || node->clone_of->decl != node->decl))
1199 DECL_EXTERNAL (node->decl) = 1;
1200 TREE_STATIC (node->decl) = 0;
1202 node->alias = bp_unpack_value (bp, 1);
1203 node->weakref = bp_unpack_value (bp, 1);
1204 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1205 node->only_called_at_startup = bp_unpack_value (bp, 1);
1206 node->only_called_at_exit = bp_unpack_value (bp, 1);
1207 node->tm_clone = bp_unpack_value (bp, 1);
1208 node->calls_comdat_local = bp_unpack_value (bp, 1);
1209 node->icf_merged = bp_unpack_value (bp, 1);
1210 node->nonfreeing_fn = bp_unpack_value (bp, 1);
1211 node->thunk.thunk_p = bp_unpack_value (bp, 1);
1212 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1213 LDPR_NUM_KNOWN);
1214 node->instrumentation_clone = bp_unpack_value (bp, 1);
1215 gcc_assert (flag_ltrans
1216 || (!node->in_other_partition
1217 && !node->used_from_other_partition));
1220 /* Return the identifier of the symbol that DECL is an alias of. */
1222 static tree
1223 get_alias_symbol (tree decl)
1225 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1226 return get_identifier (TREE_STRING_POINTER
1227 (TREE_VALUE (TREE_VALUE (alias))));
1230 /* Read a node from input_block IB. TAG is the node's tag just read.
1231 Return the node read or overwritten. */
1233 static struct cgraph_node *
1234 input_node (struct lto_file_decl_data *file_data,
1235 struct lto_input_block *ib,
1236 enum LTO_symtab_tags tag,
1237 vec<symtab_node *> nodes)
1239 gcc::pass_manager *passes = g->get_passes ();
1240 tree fn_decl;
1241 struct cgraph_node *node;
1242 struct bitpack_d bp;
1243 unsigned decl_index;
1244 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1245 int clone_ref;
1246 int order;
1247 int i, count;
1248 tree group;
1249 const char *section;
1250 order = streamer_read_hwi (ib) + order_base;
1251 clone_ref = streamer_read_hwi (ib);
1253 decl_index = streamer_read_uhwi (ib);
1254 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1256 if (clone_ref != LCC_NOT_FOUND)
1258 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1259 0, CGRAPH_FREQ_BASE, false,
1260 vNULL, false, NULL, NULL);
1262 else
1264 /* Declarations of functions may already be merged with a declaration
1265 from another input file. We keep the cgraph unmerged until after streaming
1266 of IPA passes is done. Always forcibly create a fresh node. */
1267 node = symtab->create_empty ();
1268 node->decl = fn_decl;
1269 node->register_symbol ();
1272 node->order = order;
1273 if (order >= symtab->order)
1274 symtab->order = order + 1;
1276 node->count = streamer_read_gcov_count (ib);
1277 node->count_materialization_scale = streamer_read_hwi (ib);
1279 count = streamer_read_hwi (ib);
1280 node->ipa_transforms_to_apply = vNULL;
1281 for (i = 0; i < count; i++)
1283 opt_pass *pass;
1284 int pid = streamer_read_hwi (ib);
1286 gcc_assert (pid < passes->passes_by_id_size);
1287 pass = passes->passes_by_id[pid];
1288 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1291 if (tag == LTO_symtab_analyzed_node)
1292 ref = streamer_read_hwi (ib);
1294 group = read_identifier (ib);
1295 if (group)
1296 ref2 = streamer_read_hwi (ib);
1298 /* Make sure that we have not read this node before. Nodes that
1299 have already been read will have their tag stored in the 'aux'
1300 field. Since built-in functions can be referenced in multiple
1301 functions, they are expected to be read more than once. */
1302 if (node->aux && !DECL_BUILT_IN (node->decl))
1303 internal_error ("bytecode stream: found multiple instances of cgraph "
1304 "node with uid %d", node->uid);
1306 node->tp_first_run = streamer_read_uhwi (ib);
1308 bp = streamer_read_bitpack (ib);
1310 input_overwrite_node (file_data, node, tag, &bp);
1312 /* Store a reference for now, and fix up later to be a pointer. */
1313 node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
1315 if (group)
1317 node->set_comdat_group (group);
1318 /* Store a reference for now, and fix up later to be a pointer. */
1319 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1321 else
1322 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1323 section = read_string (ib);
1324 if (section)
1325 node->set_section_for_node (section);
1327 if (node->thunk.thunk_p)
1329 int type = streamer_read_uhwi (ib);
1330 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1331 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1333 node->thunk.fixed_offset = fixed_offset;
1334 node->thunk.this_adjusting = (type & 2);
1335 node->thunk.virtual_value = virtual_value;
1336 node->thunk.virtual_offset_p = (type & 4);
1337 node->thunk.add_pointer_bounds_args = (type & 8);
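/* The bits tested above mirror the packing in lto_output_node: bit 1 is
   this_adjusting, bit 2 is virtual_offset_p, bit 3 is add_pointer_bounds_args.  */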
1339 if (node->alias && !node->analyzed && node->weakref)
1340 node->alias_target = get_alias_symbol (node->decl);
1341 node->profile_id = streamer_read_hwi (ib);
1342 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1343 node->set_init_priority (streamer_read_hwi (ib));
1344 if (DECL_STATIC_DESTRUCTOR (node->decl))
1345 node->set_fini_priority (streamer_read_hwi (ib));
1347 if (node->instrumentation_clone)
1349 decl_index = streamer_read_uhwi (ib);
1350 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1351 node->orig_decl = fn_decl;
1354 return node;
1357 /* Read a varpool node from input_block IB.
1358 Return the node read or overwritten. */
1360 static varpool_node *
1361 input_varpool_node (struct lto_file_decl_data *file_data,
1362 struct lto_input_block *ib)
1364 int decl_index;
1365 tree var_decl;
1366 varpool_node *node;
1367 struct bitpack_d bp;
1368 int ref = LCC_NOT_FOUND;
1369 int order;
1370 tree group;
1371 const char *section;
1373 order = streamer_read_hwi (ib) + order_base;
1374 decl_index = streamer_read_uhwi (ib);
1375 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1377 /* Declarations of variables may already be merged with a declaration
1378 from another input file. We keep the symtab unmerged until after streaming
1379 of IPA passes is done. Always forcibly create a fresh node. */
1380 node = varpool_node::create_empty ();
1381 node->decl = var_decl;
1382 node->register_symbol ();
1384 node->order = order;
1385 if (order >= symtab->order)
1386 symtab->order = order + 1;
1387 node->lto_file_data = file_data;
1389 bp = streamer_read_bitpack (ib);
1390 node->externally_visible = bp_unpack_value (&bp, 1);
1391 node->no_reorder = bp_unpack_value (&bp, 1);
1392 node->force_output = bp_unpack_value (&bp, 1);
1393 node->forced_by_abi = bp_unpack_value (&bp, 1);
1394 node->unique_name = bp_unpack_value (&bp, 1);
1395 node->body_removed = bp_unpack_value (&bp, 1);
1396 node->implicit_section = bp_unpack_value (&bp, 1);
1397 node->writeonly = bp_unpack_value (&bp, 1);
1398 node->definition = bp_unpack_value (&bp, 1);
1399 node->alias = bp_unpack_value (&bp, 1);
1400 node->weakref = bp_unpack_value (&bp, 1);
1401 node->analyzed = bp_unpack_value (&bp, 1);
1402 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1403 node->in_other_partition = bp_unpack_value (&bp, 1);
1404 if (node->in_other_partition)
1406 DECL_EXTERNAL (node->decl) = 1;
1407 TREE_STATIC (node->decl) = 0;
1409 if (node->alias && !node->analyzed && node->weakref)
1410 node->alias_target = get_alias_symbol (node->decl);
1411 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1412 node->used_by_single_function = bp_unpack_value (&bp, 1);
1413 node->need_bounds_init = bp_unpack_value (&bp, 1);
1414 group = read_identifier (ib);
1415 if (group)
1417 node->set_comdat_group (group);
1418 ref = streamer_read_hwi (ib);
1419 /* Store a reference for now, and fix up later to be a pointer. */
1420 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1422 else
1423 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1424 section = read_string (ib);
1425 if (section)
1426 node->set_section_for_node (section);
1427 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1428 LDPR_NUM_KNOWN);
1429 gcc_assert (flag_ltrans
1430 || (!node->in_other_partition
1431 && !node->used_from_other_partition));
1433 return node;
1436 /* Read an IPA reference from input_block IB and attach it to REFERRING_NODE.
1437 NODES is the vector of previously read nodes used to look up the referred symbol. */
1439 static void
1440 input_ref (struct lto_input_block *ib,
1441 symtab_node *referring_node,
1442 vec<symtab_node *> nodes)
1444 symtab_node *node = NULL;
1445 struct bitpack_d bp;
1446 enum ipa_ref_use use;
1447 bool speculative;
1448 struct ipa_ref *ref;
1450 bp = streamer_read_bitpack (ib);
1451 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1452 speculative = bp_unpack_value (&bp, 1);
1453 node = nodes[streamer_read_hwi (ib)];
1454 ref = referring_node->create_reference (node, use);
1455 ref->speculative = speculative;
1456 if (is_a <cgraph_node *> (referring_node))
1457 ref->lto_stmt_uid = streamer_read_hwi (ib);
1460 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1461 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1462 edge being read is indirect (in the sense that it has
1463 indirect_unknown_callee set). */
1465 static void
1466 input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
1467 bool indirect)
1469 struct cgraph_node *caller, *callee;
1470 struct cgraph_edge *edge;
1471 unsigned int stmt_id;
1472 gcov_type count;
1473 int freq;
1474 cgraph_inline_failed_t inline_failed;
1475 struct bitpack_d bp;
1476 int ecf_flags = 0;
1478 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1479 if (caller == NULL || caller->decl == NULL_TREE)
1480 internal_error ("bytecode stream: no caller found while reading edge");
1482 if (!indirect)
1484 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1485 if (callee == NULL || callee->decl == NULL_TREE)
1486 internal_error ("bytecode stream: no callee found while reading edge");
1488 else
1489 callee = NULL;
1491 count = streamer_read_gcov_count (ib);
1493 bp = streamer_read_bitpack (ib);
1494 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1495 stmt_id = bp_unpack_var_len_unsigned (&bp);
1496 freq = (int) bp_unpack_var_len_unsigned (&bp);
1498 if (indirect)
1499 edge = caller->create_indirect_edge (NULL, 0, count, freq);
1500 else
1501 edge = caller->create_edge (callee, NULL, count, freq);
1503 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1504 edge->speculative = bp_unpack_value (&bp, 1);
1505 edge->lto_stmt_uid = stmt_id;
1506 edge->inline_failed = inline_failed;
1507 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1508 edge->can_throw_external = bp_unpack_value (&bp, 1);
1509 edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
1510 if (indirect)
1512 if (bp_unpack_value (&bp, 1))
1513 ecf_flags |= ECF_CONST;
1514 if (bp_unpack_value (&bp, 1))
1515 ecf_flags |= ECF_PURE;
1516 if (bp_unpack_value (&bp, 1))
1517 ecf_flags |= ECF_NORETURN;
1518 if (bp_unpack_value (&bp, 1))
1519 ecf_flags |= ECF_MALLOC;
1520 if (bp_unpack_value (&bp, 1))
1521 ecf_flags |= ECF_NOTHROW;
1522 if (bp_unpack_value (&bp, 1))
1523 ecf_flags |= ECF_RETURNS_TWICE;
1524 edge->indirect_info->ecf_flags = ecf_flags;
1525 edge->indirect_info->common_target_id = streamer_read_hwi (ib);
1526 if (edge->indirect_info->common_target_id)
1527 edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
1532 /* Read a cgraph from IB using the info in FILE_DATA. */
1534 static vec<symtab_node *>
1535 input_cgraph_1 (struct lto_file_decl_data *file_data,
1536 struct lto_input_block *ib)
1538 enum LTO_symtab_tags tag;
1539 vec<symtab_node *> nodes = vNULL;
1540 symtab_node *node;
1541 unsigned i;
1543 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1544 order_base = symtab->order;
1545 while (tag)
1547 if (tag == LTO_symtab_edge)
1548 input_edge (ib, nodes, false);
1549 else if (tag == LTO_symtab_indirect_edge)
1550 input_edge (ib, nodes, true);
1551 else if (tag == LTO_symtab_variable)
1553 node = input_varpool_node (file_data, ib);
1554 nodes.safe_push (node);
1555 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1557 else
1559 node = input_node (file_data, ib, tag, nodes);
1560 if (node == NULL || node->decl == NULL_TREE)
1561 internal_error ("bytecode stream: found empty cgraph node");
1562 nodes.safe_push (node);
1563 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1566 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1569 lto_input_toplevel_asms (file_data, order_base);
1571 /* AUX pointers should all be non-zero for function nodes read from the stream. */
1572 #ifdef ENABLE_CHECKING
1573 FOR_EACH_VEC_ELT (nodes, i, node)
1574 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1575 #endif
1576 FOR_EACH_VEC_ELT (nodes, i, node)
1578 int ref;
1579 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1581 ref = (int) (intptr_t) cnode->global.inlined_to;
1583 /* We share declarations of builtins, so we may read the same node twice. */
1584 if (!node->aux)
1585 continue;
1586 node->aux = NULL;
1588 /* Fixup inlined_to from reference to pointer. */
1589 if (ref != LCC_NOT_FOUND)
1590 dyn_cast<cgraph_node *> (node)->global.inlined_to
1591 = dyn_cast<cgraph_node *> (nodes[ref]);
1592 else
1593 cnode->global.inlined_to = NULL;
1595 /* Compute instrumented_version. */
1596 if (cnode->instrumentation_clone)
1598 gcc_assert (cnode->orig_decl);
1600 cnode->instrumented_version = cgraph_node::get (cnode->orig_decl);
1601 if (cnode->instrumented_version)
1603 /* We may have multiple nodes for a single function which
1604 will be merged later. To have a proper merge we need
1605 to keep the instrumented_version reference between nodes
1606 consistent: each instrumented_version reference should
1607 have a proper reverse reference. Thus don't break an existing
1608 instrumented_version reference if it already exists. */
1609 if (cnode->instrumented_version->instrumented_version)
1610 cnode->instrumented_version = NULL;
1611 else
1612 cnode->instrumented_version->instrumented_version = cnode;
1615 /* Restore decl names reference. */
1616 if (IDENTIFIER_TRANSPARENT_ALIAS (DECL_ASSEMBLER_NAME (cnode->decl))
1617 && !TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode->decl)))
1618 TREE_CHAIN (DECL_ASSEMBLER_NAME (cnode->decl))
1619 = DECL_ASSEMBLER_NAME (cnode->orig_decl);
1623 ref = (int) (intptr_t) node->same_comdat_group;
1625 /* Fixup same_comdat_group from reference to pointer. */
1626 if (ref != LCC_NOT_FOUND)
1627 node->same_comdat_group = nodes[ref];
1628 else
1629 node->same_comdat_group = NULL;
1631 FOR_EACH_VEC_ELT (nodes, i, node)
1632 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1633 return nodes;
1636 /* Input ipa_refs. */
1638 static void
1639 input_refs (struct lto_input_block *ib,
1640 vec<symtab_node *> nodes)
1642 int count;
1643 int idx;
1644 while (true)
1646 symtab_node *node;
1647 count = streamer_read_uhwi (ib);
1648 if (!count)
1649 break;
1650 idx = streamer_read_uhwi (ib);
1651 node = nodes[idx];
1652 while (count)
1654 input_ref (ib, node, nodes);
1655 count--;
1661 static struct gcov_ctr_summary lto_gcov_summary;
1663 /* Input profile_info from IB. */
1664 static void
1665 input_profile_summary (struct lto_input_block *ib,
1666 struct lto_file_decl_data *file_data)
1668 unsigned h_ix;
1669 struct bitpack_d bp;
1670 unsigned int runs = streamer_read_uhwi (ib);
1671 if (runs)
1673 file_data->profile_info.runs = runs;
1674 file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
1675 file_data->profile_info.sum_all = streamer_read_gcov_count (ib);
1677 memset (file_data->profile_info.histogram, 0,
1678 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1679 /* Input the bitpack of non-zero histogram indices. */
1680 bp = streamer_read_bitpack (ib);
1681 /* Read in and unpack the full bitpack, flagging non-zero
1682 histogram entries by setting the num_counters non-zero. */
1683 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1685 file_data->profile_info.histogram[h_ix].num_counters
1686 = bp_unpack_value (&bp, 1);
1688 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1690 if (!file_data->profile_info.histogram[h_ix].num_counters)
1691 continue;
1693 file_data->profile_info.histogram[h_ix].num_counters
1694 = streamer_read_gcov_count (ib);
1695 file_data->profile_info.histogram[h_ix].min_value
1696 = streamer_read_gcov_count (ib);
1697 file_data->profile_info.histogram[h_ix].cum_value
1698 = streamer_read_gcov_count (ib);
1700 /* IPA-profile computes hot bb threshold based on cumulated
1701 whole program profile. We need to stream it down to ltrans. */
1702 if (flag_ltrans)
1703 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1708 /* Rescale profile summaries to the same number of runs in the whole unit. */
1710 static void
1711 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1713 struct lto_file_decl_data *file_data;
1714 unsigned int j, h_ix;
1715 gcov_unsigned_t max_runs = 0;
1716 struct cgraph_node *node;
1717 struct cgraph_edge *edge;
1718 gcov_type saved_sum_all = 0;
1719 gcov_ctr_summary *saved_profile_info = 0;
1720 int saved_scale = 0;
1722 /* Find the unit with the maximal number of runs. If we ever get serious about
1723 roundoff errors, we might also consider computing the least common
1724 multiple. */
1725 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1726 if (max_runs < file_data->profile_info.runs)
1727 max_runs = file_data->profile_info.runs;
1729 if (!max_runs)
1730 return;
1732 /* Simple overflow check. We probably don't need to support that many train
1733 runs. Such a large value probably implies data corruption anyway. */
1734 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1736 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1737 INT_MAX / REG_BR_PROB_BASE);
1738 return;
1741 profile_info = &lto_gcov_summary;
1742 lto_gcov_summary.runs = max_runs;
1743 lto_gcov_summary.sum_max = 0;
1744 memset (lto_gcov_summary.histogram, 0,
1745 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1747 /* Rescale all units to the maximal number of runs.
1748 sum_max cannot be easily merged, as we have no idea which files come from
1749 the same run. We do not use the info anyway, so leave it 0. */
1750 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1751 if (file_data->profile_info.runs)
1753 int scale = GCOV_COMPUTE_SCALE (max_runs,
1754 file_data->profile_info.runs);
1755 lto_gcov_summary.sum_max
1756 = MAX (lto_gcov_summary.sum_max,
1757 apply_scale (file_data->profile_info.sum_max, scale));
1758 lto_gcov_summary.sum_all
1759 = MAX (lto_gcov_summary.sum_all,
1760 apply_scale (file_data->profile_info.sum_all, scale));
1761 /* Save a pointer to the profile_info with the largest
1762 scaled sum_all and the scale for use in merging the
1763 histogram. */
1764 if (!saved_profile_info
1765 || lto_gcov_summary.sum_all > saved_sum_all)
1767 saved_profile_info = &file_data->profile_info;
1768 saved_sum_all = lto_gcov_summary.sum_all;
1769 saved_scale = scale;
1773 gcc_assert (saved_profile_info);
1775 /* Scale up the histogram from the profile that had the largest
1776 scaled sum_all above. */
1777 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1779 /* Scale up the min value as we did the corresponding sum_all
1780 above. Use that to find the new histogram index. */
1781 gcov_type scaled_min
1782 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1783 saved_scale);
1784 /* The new index may be shared with another scaled histogram entry,
1785 so we need to account for a non-zero histogram entry at new_ix. */
1786 unsigned new_ix = gcov_histo_index (scaled_min);
1787 lto_gcov_summary.histogram[new_ix].min_value
1788 = (lto_gcov_summary.histogram[new_ix].num_counters
1789 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1790 : scaled_min);
1791 /* Some of the scaled counter values would ostensibly need to be placed
1792 into different (larger) histogram buckets, but we keep things simple
1793 here and place the scaled cumulative counter value in the bucket
1794 corresponding to the scaled minimum counter value. */
1795 lto_gcov_summary.histogram[new_ix].cum_value
1796 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1797 saved_scale);
1798 lto_gcov_summary.histogram[new_ix].num_counters
1799 += saved_profile_info->histogram[h_ix].num_counters;
1802 /* Watch roundoff errors. */
1803 if (lto_gcov_summary.sum_max < max_runs)
1804 lto_gcov_summary.sum_max = max_runs;
1806 /* If merging already happent at WPA time, we are done. */
1807 if (flag_ltrans)
1808 return;
1810 /* Now compute count_materialization_scale of each node.
1811 During LTRANS we already have values of count_materialization_scale
1812 computed, so just update them. */
1813 FOR_EACH_FUNCTION (node)
1814 if (node->lto_file_data
1815 && node->lto_file_data->profile_info.runs)
1817 int scale;
1819 scale = RDIV (node->count_materialization_scale * max_runs,
1820 node->lto_file_data->profile_info.runs);
1821 node->count_materialization_scale = scale;
1822 if (scale < 0)
1823 fatal_error (input_location, "Profile information in %s corrupted",
1824 file_data->file_name);
1826 if (scale == REG_BR_PROB_BASE)
1827 continue;
1828 for (edge = node->callees; edge; edge = edge->next_callee)
1829 edge->count = apply_scale (edge->count, scale);
1830 node->count = apply_scale (node->count, scale);
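
/* Reading order per LTO object file in input_symtab below: the
   LTO_section_symtab_nodes section (profile summary, then the symbol table
   nodes and edges), then the LTO_section_refs section (IPA references), and,
   when running the LTRANS stage, the cgraph optimization summary.  Profile
   summaries gathered along the way are merged afterwards.  */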
/* Input and merge the symtab from each of the .o files passed to
   lto1.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      vec<symtab_node *> nodes;

      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
                                          &data, &len);
      if (!ib)
        fatal_error (input_location,
                     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
                                      ib, data, len);

      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
                                          &data, &len);
      if (!ib)
        fatal_error (input_location, "cannot find LTO section refs in %s",
                     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
                                      ib, data, len);
      if (flag_ltrans)
        input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);
  get_working_sets ();

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
         happens when the callgraph contains nested functions.  If the
         node for the parent function was never emitted to the gimple
         file, cgraph_node will create a node for it when setting the
         context of the nested function.  */
      if (node->lto_file_data)
        node->aux = NULL;
    }
}
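
/* The offload table read below is a flat sequence of (tag, decl index)
   records terminated by a zero tag: function decls are recorded under
   LTO_symtab_unavail_node and variable decls under LTO_symtab_variable.
   Files without an LTO_section_offload_table section are simply skipped.  */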
/* Input function/variable tables that will allow libgomp to look up offload
   target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS.  */

void
input_offload_tables (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
        = lto_create_simple_input_block (file_data, LTO_section_offload_table,
                                         &data, &len);
      if (!ib)
        continue;

      enum LTO_symtab_tags tag
        = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
      while (tag)
        {
          if (tag == LTO_symtab_unavail_node)
            {
              int decl_index = streamer_read_uhwi (ib);
              tree fn_decl
                = lto_file_decl_data_get_fn_decl (file_data, decl_index);
              vec_safe_push (offload_funcs, fn_decl);
            }
          else if (tag == LTO_symtab_variable)
            {
              int decl_index = streamer_read_uhwi (ib);
              tree var_decl
                = lto_file_decl_data_get_var_decl (file_data, decl_index);
              vec_safe_push (offload_vars, var_decl);
            }
          else
            fatal_error (input_location,
                         "invalid offload table in %s", file_data->file_name);

          tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
        }

      lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
                                      ib, data, len);
    }
}
/* True when we need optimization summary for NODE.  */

static int
output_cgraph_opt_summary_p (struct cgraph_node *node)
{
  return (node->clone_of
          && (node->clone.tree_map
              || node->clone.args_to_skip
              || node->clone.combined_args_to_skip));
}
/* Output optimization summary for EDGE to OB.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
                         struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
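
/* The per-node clone summary written by output_node_opt_summary and read
   back by input_node_opt_summary consists of: the number of args_to_skip
   bits followed by their indices, the same for combined_args_to_skip, and
   the length of the tree_map vector followed by one record per replacement
   (parameter number, replacement tree, and a bitpack of replace_p/ref_p).  */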
/* Output optimization summary for NODE to OB.  */

static void
output_node_opt_summary (struct output_block *ob,
                         struct cgraph_node *node,
                         lto_symtab_encoder_t encoder)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  if (node->clone.args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
        streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
        streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
    {
      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
         mechanism to store function local declarations into summaries.  */
      gcc_assert (!map->old_tree);
      streamer_write_uhwi (ob, map->parm_num);
      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
      stream_write_tree (ob, map->new_tree, true);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  if (lto_symtab_encoder_in_partition_p (encoder, node))
    {
      for (e = node->callees; e; e = e->next_callee)
        output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
        output_edge_opt_summary (ob, e);
    }
}
/* Output optimization summaries stored in callgraph.
   At the moment it is the clone info structure.  */

static void
output_cgraph_opt_summary (void)
{
  int i, n_nodes;
  lto_symtab_encoder_t encoder;
  struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
  unsigned count = 0;

  ob->symbol = NULL;
  encoder = ob->decl_state->symtab_node_encoder;
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
        count++;
    }
  streamer_write_uhwi (ob, count);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
        {
          streamer_write_uhwi (ob, i);
          output_node_opt_summary (ob, cnode, encoder);
        }
    }
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
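
/* The reading side below mirrors the writer above: the section starts with
   the number of nodes that have a clone summary, followed by pairs of
   (encoder index, node summary) that input_cgraph_opt_section resolves
   against the NODES vector.  */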
/* Input optimization summary of EDGE.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
                        struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
/* Input optimization summary of NODE.  */

static void
input_node_opt_summary (struct cgraph_node *node,
                        struct lto_input_block *ib_main,
                        struct data_in *data_in)
{
  int i;
  int count;
  int bit;
  struct bitpack_d bp;
  struct cgraph_edge *e;

  count = streamer_read_uhwi (ib_main);
  if (count)
    node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.args_to_skip, bit);
    }
  count = streamer_read_uhwi (ib_main);
  if (count)
    node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.combined_args_to_skip, bit);
    }
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();

      vec_safe_push (node->clone.tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      map->old_tree = NULL;
      map->new_tree = stream_read_tree (ib_main, data_in);
      bp = streamer_read_bitpack (ib_main);
      map->replace_p = bp_unpack_value (&bp, 1);
      map->ref_p = bp_unpack_value (&bp, 1);
    }
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
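
/* The LTO_section_cgraph_opt_sum data starts with an lto_function_header;
   the main stream begins header->cfg_size bytes after the header and is
   header->main_size bytes long, followed by the string table of
   header->string_size bytes.  The offset computations below decode exactly
   that layout.  */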
/* Read the cgraph optimization summary section in file FILE_DATA, of
   length LEN, with data DATA.  */

static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
                          const char *data, size_t len,
                          vec<symtab_node *> nodes)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
                        header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      int ref = streamer_read_uhwi (&ib_main);
      input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
                              &ib_main, data_in);
    }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
/* Input optimization summary of cgraph.  */

static void
input_cgraph_opt_summary (vec<symtab_node *> nodes)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data =
        lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
                              &len);

      if (data)
        input_cgraph_opt_section (file_data, data, len, nodes);
    }
}