gcc/
[official-gcc.git] / gcc / lto-cgraph.c
blob77605f70109241633dde3e9bf4e9779c6ea73ce6
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2017 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "ipa-chkp.h"
42 /* True when asm nodes have been output. */
43 bool asm_nodes_output = false;
45 static void output_cgraph_opt_summary (void);
46 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
48 /* Number of LDPR values known to GCC. */
49 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
51 /* All node orders are offset by ORDER_BASE. */
52 static int order_base;
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  LTO_symtab_indirect_edge,
  LTO_symtab_variable,
  LTO_symtab_last_tag
};
71 /* Create a new symtab encoder.
72 if FOR_INPUT, the encoder allocate only datastructures needed
73 to read the symtab. */
75 lto_symtab_encoder_t
76 lto_symtab_encoder_new (bool for_input)
78 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
80 if (!for_input)
81 encoder->map = new hash_map<symtab_node *, size_t>;
82 encoder->nodes.create (0);
83 return encoder;
87 /* Delete ENCODER and its components. */
89 void
90 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
92 encoder->nodes.release ();
93 if (encoder->map)
94 delete encoder->map;
95 free (encoder);
99 /* Return the existing reference number of NODE in the symtab encoder in
100 output block OB. Assign a new reference if this is the first time
101 NODE is encoded. */
104 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
105 symtab_node *node)
107 int ref;
109 if (!encoder->map)
111 lto_encoder_entry entry = {node, false, false, false};
113 ref = encoder->nodes.length ();
114 encoder->nodes.safe_push (entry);
115 return ref;
118 size_t *slot = encoder->map->get (node);
119 if (!slot || !*slot)
121 lto_encoder_entry entry = {node, false, false, false};
122 ref = encoder->nodes.length ();
123 if (!slot)
124 encoder->map->put (node, ref + 1);
125 encoder->nodes.safe_push (entry);
127 else
128 ref = *slot - 1;
130 return ref;
133 /* Remove NODE from encoder. */
135 bool
136 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
137 symtab_node *node)
139 int index;
140 lto_encoder_entry last_node;
142 size_t *slot = encoder->map->get (node);
143 if (slot == NULL || !*slot)
144 return false;
146 index = *slot - 1;
147 gcc_checking_assert (encoder->nodes[index].node == node);
149 /* Remove from vector. We do this by swapping node with the last element
150 of the vector. */
151 last_node = encoder->nodes.pop ();
152 if (last_node.node != node)
154 gcc_assert (encoder->map->put (last_node.node, index + 1));
156 /* Move the last element to the original spot of NODE. */
157 encoder->nodes[index] = last_node;
160 /* Remove element from hash table. */
161 encoder->map->remove (node);
162 return true;
166 /* Return TRUE if we should encode the body of NODE (if any). */
168 bool
169 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
170 struct cgraph_node *node)
172 int index = lto_symtab_encoder_lookup (encoder, node);
173 return encoder->nodes[index].body;
176 /* Specify that we encode the body of NODE in this partition. */
178 static void
179 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
180 struct cgraph_node *node)
182 int index = lto_symtab_encoder_encode (encoder, node);
183 gcc_checking_assert (encoder->nodes[index].node == node);
184 encoder->nodes[index].body = true;
187 /* Return TRUE if we should encode initializer of NODE (if any). */
189 bool
190 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
191 varpool_node *node)
193 int index = lto_symtab_encoder_lookup (encoder, node);
194 if (index == LCC_NOT_FOUND)
195 return false;
196 return encoder->nodes[index].initializer;
199 /* Specify that we should encode initializer of NODE (if any). */
201 static void
202 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
203 varpool_node *node)
205 int index = lto_symtab_encoder_lookup (encoder, node);
206 encoder->nodes[index].initializer = true;
209 /* Return TRUE if NODE is in this partition. */
211 bool
212 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
213 symtab_node *node)
215 int index = lto_symtab_encoder_lookup (encoder, node);
216 if (index == LCC_NOT_FOUND)
217 return false;
218 return encoder->nodes[index].in_partition;
221 /* Specify that NODE is in this partition. */
223 void
224 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
225 symtab_node *node)
227 int index = lto_symtab_encoder_encode (encoder, node);
228 encoder->nodes[index].in_partition = true;
231 /* Output the cgraph EDGE to OB using ENCODER. */
233 static void
234 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
235 lto_symtab_encoder_t encoder)
237 unsigned int uid;
238 intptr_t ref;
239 struct bitpack_d bp;
/* The record tag tells the reader whether a callee reference follows.  */
241 if (edge->indirect_unknown_callee)
242 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
243 LTO_symtab_indirect_edge);
244 else
245 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
246 LTO_symtab_edge);
/* Caller (and for direct edges the callee) must already be in ENCODER.  */
248 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
249 gcc_assert (ref != LCC_NOT_FOUND);
250 streamer_write_hwi_stream (ob->main_stream, ref);
252 if (!edge->indirect_unknown_callee)
254 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
255 gcc_assert (ref != LCC_NOT_FOUND);
256 streamer_write_hwi_stream (ob->main_stream, ref);
259 streamer_write_gcov_count_stream (ob->main_stream, edge->count);
261 bp = bitpack_create (ob->main_stream);
/* Use the gimple statement uid (biased by 1) when the caller has a body;
   otherwise fall back to the uid recorded at stream-in time.  */
262 uid = (!gimple_has_body_p (edge->caller->decl) || edge->caller->thunk.thunk_p
263 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
264 bp_pack_enum (&bp, cgraph_inline_failed_t,
265 CIF_N_REASONS, edge->inline_failed);
266 bp_pack_var_len_unsigned (&bp, uid);
267 bp_pack_var_len_unsigned (&bp, edge->frequency);
268 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
269 bp_pack_value (&bp, edge->speculative, 1);
270 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
271 gcc_assert (!edge->call_stmt_cannot_inline_p
272 || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
273 bp_pack_value (&bp, edge->can_throw_external, 1);
274 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
/* Indirect edges additionally carry the ECF flags of the call, one bit
   each, in this fixed order.  */
275 if (edge->indirect_unknown_callee)
277 int flags = edge->indirect_info->ecf_flags;
278 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
279 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
280 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
281 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
282 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
283 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
284 /* Flags that should not appear on indirect calls. */
285 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
286 | ECF_MAY_BE_ALLOCA
287 | ECF_SIBCALL
288 | ECF_LEAF
289 | ECF_NOVOPS)));
291 streamer_write_bitpack (&bp);
/* For indirect edges also stream the profile-feedback common target.  */
292 if (edge->indirect_unknown_callee)
294 streamer_write_hwi_stream (ob->main_stream,
295 edge->indirect_info->common_target_id);
296 if (edge->indirect_info->common_target_id)
297 streamer_write_hwi_stream
298 (ob->main_stream, edge->indirect_info->common_target_probability);
302 /* Return if NODE contain references from other partitions. */
304 bool
305 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
307 int i;
308 struct ipa_ref *ref = NULL;
310 for (i = 0; node->iterate_referring (i, ref); i++)
312 /* Ignore references from non-offloadable nodes while streaming NODE into
313 offload LTO section. */
314 if (!ref->referring->need_lto_streaming)
315 continue;
317 if (ref->referring->in_other_partition
318 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
319 return true;
321 return false;
324 /* Return true when node is reachable from other partition. */
326 bool
327 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
329 struct cgraph_edge *e;
330 if (!node->definition)
331 return false;
332 if (node->global.inlined_to)
333 return false;
334 for (e = node->callers; e; e = e->next_caller)
336 /* Ignore references from non-offloadable nodes while streaming NODE into
337 offload LTO section. */
338 if (!e->caller->need_lto_streaming)
339 continue;
341 if (e->caller->in_other_partition
342 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
343 return true;
345 return false;
348 /* Return if NODE contain references from other partitions. */
350 bool
351 referenced_from_this_partition_p (symtab_node *node,
352 lto_symtab_encoder_t encoder)
354 int i;
355 struct ipa_ref *ref = NULL;
357 for (i = 0; node->iterate_referring (i, ref); i++)
358 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
359 return true;
360 return false;
363 /* Return true when node is reachable from other partition. */
365 bool
366 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
368 struct cgraph_edge *e;
369 for (e = node->callers; e; e = e->next_caller)
370 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
371 return true;
372 return false;
375 /* Output the cgraph NODE to OB. ENCODER is used to find the
376 reference number of NODE->inlined_to. SET is the set of nodes we
377 are writing to the current file. If NODE is not in SET, then NODE
378 is a boundary of a cgraph_node_set and we pretend NODE just has a
379 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
380 that have had their callgraph node written so far. This is used to
381 determine if NODE is a clone of a previously written node. */
383 static void
384 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
385 lto_symtab_encoder_t encoder)
387 unsigned int tag;
388 struct bitpack_d bp;
389 bool boundary_p;
390 intptr_t ref;
391 bool in_other_partition = false;
392 struct cgraph_node *clone_of, *ultimate_clone_of;
393 ipa_opt_pass_d *pass;
394 int i;
395 const char *comdat;
396 const char *section;
397 tree group;
399 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
/* Choose the record tag: analyzed (body or alias/thunk available) vs.
   unavailable declaration-only node.  */
401 if (node->analyzed && (!boundary_p || node->alias
402 || (node->thunk.thunk_p && !node->global.inlined_to)))
403 tag = LTO_symtab_analyzed_node;
404 else
405 tag = LTO_symtab_unavail_node;
407 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
408 tag);
409 streamer_write_hwi_stream (ob->main_stream, node->order);
411 /* In WPA mode, we only output part of the call-graph. Also, we
412 fake cgraph node attributes. There are two cases that we care.
414 Boundary nodes: There are nodes that are not part of SET but are
415 called from within SET. We artificially make them look like
416 externally visible nodes with no function body.
418 Cherry-picked nodes: These are nodes we pulled from other
419 translation units into SET during IPA-inlining. We make them as
420 local static nodes to prevent clashes with other local statics. */
421 if (boundary_p && node->analyzed
422 && node->get_partitioning_class () == SYMBOL_PARTITION)
424 /* Inline clones can not be part of boundary.
425 gcc_assert (!node->global.inlined_to);
427 FIXME: At the moment they can be, when partition contains an inline
428 clone that is clone of inline clone from outside partition. We can
429 reshape the clone tree and make other tree to be the root, but it
430 needs a bit extra work and will be promptly done by cgraph_remove_node
431 after reading back. */
432 in_other_partition = 1;
/* Walk the clone tree towards the root and find the nearest ancestor
   that is present in ENCODER; REF keeps its reference number.  */
435 clone_of = node->clone_of;
436 while (clone_of
437 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
438 if (clone_of->prev_sibling_clone)
439 clone_of = clone_of->prev_sibling_clone;
440 else
441 clone_of = clone_of->clone_of;
443 /* See if body of the master function is output. If not, we are seeing only
444 a declaration and we do not need to pass down clone tree. */
445 ultimate_clone_of = clone_of;
446 while (ultimate_clone_of && ultimate_clone_of->clone_of)
447 ultimate_clone_of = ultimate_clone_of->clone_of;
449 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
450 clone_of = NULL;
452 if (tag == LTO_symtab_analyzed_node)
453 gcc_assert (clone_of || !node->clone_of);
454 if (!clone_of)
455 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
456 else
457 streamer_write_hwi_stream (ob->main_stream, ref);
460 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
461 streamer_write_gcov_count_stream (ob->main_stream, node->count);
462 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
/* Stream the list of IPA transform passes still to be applied: length
   first, then each pass's static number.  */
464 streamer_write_hwi_stream (ob->main_stream,
465 node->ipa_transforms_to_apply.length ());
466 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
467 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
469 if (tag == LTO_symtab_analyzed_node)
471 if (node->global.inlined_to)
473 ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
474 gcc_assert (ref != LCC_NOT_FOUND);
476 else
477 ref = LCC_NOT_FOUND;
479 streamer_write_hwi_stream (ob->main_stream, ref);
/* Comdat group is streamed as a NUL-terminated string ("" if none),
   followed by a reference to one other member of the same group.  */
482 group = node->get_comdat_group ();
483 if (group)
484 comdat = IDENTIFIER_POINTER (group);
485 else
486 comdat = "";
487 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
489 if (group)
491 if (node->same_comdat_group)
493 ref = LCC_NOT_FOUND;
494 for (struct symtab_node *n = node->same_comdat_group;
495 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
496 ref = lto_symtab_encoder_lookup (encoder, n);
498 else
499 ref = LCC_NOT_FOUND;
500 streamer_write_hwi_stream (ob->main_stream, ref);
503 section = node->get_section ();
504 if (!section)
505 section = "";
507 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
/* Pack all boolean node flags into a single bitpack; order must match
   the reader exactly.  */
509 bp = bitpack_create (ob->main_stream);
510 bp_pack_value (&bp, node->local.local, 1);
511 bp_pack_value (&bp, node->externally_visible, 1);
512 bp_pack_value (&bp, node->no_reorder, 1);
513 bp_pack_value (&bp, node->definition, 1);
514 bp_pack_value (&bp, node->local.versionable, 1);
515 bp_pack_value (&bp, node->local.can_change_signature, 1);
516 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
517 bp_pack_value (&bp, node->force_output, 1);
518 bp_pack_value (&bp, node->forced_by_abi, 1);
519 bp_pack_value (&bp, node->unique_name, 1);
520 bp_pack_value (&bp, node->body_removed, 1);
521 bp_pack_value (&bp, node->implicit_section, 1);
522 bp_pack_value (&bp, node->address_taken, 1);
/* "used_from_other_partition" bit.  */
523 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
524 && node->get_partitioning_class () == SYMBOL_PARTITION
525 && (reachable_from_other_partition_p (node, encoder)
526 || referenced_from_other_partition_p (node, encoder)), 1);
527 bp_pack_value (&bp, node->lowered, 1);
528 bp_pack_value (&bp, in_other_partition, 1);
529 bp_pack_value (&bp, node->alias, 1);
530 bp_pack_value (&bp, node->transparent_alias, 1);
531 bp_pack_value (&bp, node->weakref, 1);
532 bp_pack_value (&bp, node->frequency, 2);
533 bp_pack_value (&bp, node->only_called_at_startup, 1);
534 bp_pack_value (&bp, node->only_called_at_exit, 1);
535 bp_pack_value (&bp, node->tm_clone, 1);
536 bp_pack_value (&bp, node->calls_comdat_local, 1);
537 bp_pack_value (&bp, node->icf_merged, 1);
538 bp_pack_value (&bp, node->nonfreeing_fn, 1);
539 bp_pack_value (&bp, node->thunk.thunk_p, 1);
540 bp_pack_value (&bp, node->parallelized_function, 1);
541 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
542 LDPR_NUM_KNOWN, node->resolution);
543 bp_pack_value (&bp, node->instrumentation_clone, 1);
544 bp_pack_value (&bp, node->split_part, 1);
545 streamer_write_bitpack (&bp);
546 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
/* Thunk info: bit 0 marks the record, bits 1-3 encode this_adjusting,
   virtual_offset_p and add_pointer_bounds_args respectively.  */
548 if (node->thunk.thunk_p)
550 streamer_write_uhwi_stream
551 (ob->main_stream,
552 1 + (node->thunk.this_adjusting != 0) * 2
553 + (node->thunk.virtual_offset_p != 0) * 4
554 + (node->thunk.add_pointer_bounds_args != 0) * 8);
555 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
556 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
558 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
559 if (DECL_STATIC_CONSTRUCTOR (node->decl))
560 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
561 if (DECL_STATIC_DESTRUCTOR (node->decl))
562 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
564 if (node->instrumentation_clone)
565 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->orig_decl);
568 /* Output the varpool NODE to OB.
569 If NODE is not in SET, then NODE is a boundary. */
571 static void
572 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
573 lto_symtab_encoder_t encoder)
575 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
576 bool encode_initializer_p
577 = (node->definition
578 && lto_symtab_encoder_encode_initializer_p (encoder, node));
579 struct bitpack_d bp;
580 int ref;
581 const char *comdat;
582 const char *section;
583 tree group;
585 gcc_assert (!encode_initializer_p || node->definition);
586 gcc_assert (boundary_p || encode_initializer_p);
588 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
589 LTO_symtab_variable);
590 streamer_write_hwi_stream (ob->main_stream, node->order);
591 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
/* Pack all boolean flags; order must match the reader exactly.  */
592 bp = bitpack_create (ob->main_stream);
593 bp_pack_value (&bp, node->externally_visible, 1);
594 bp_pack_value (&bp, node->no_reorder, 1);
595 bp_pack_value (&bp, node->force_output, 1);
596 bp_pack_value (&bp, node->forced_by_abi, 1);
597 bp_pack_value (&bp, node->unique_name, 1);
/* body_removed: also set when a definition exists but its initializer
   is not streamed into this partition.  */
598 bp_pack_value (&bp,
599 node->body_removed
600 || (!encode_initializer_p && !node->alias && node->definition),
602 bp_pack_value (&bp, node->implicit_section, 1);
603 bp_pack_value (&bp, node->writeonly, 1);
604 bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
606 bp_pack_value (&bp, node->alias, 1);
607 bp_pack_value (&bp, node->transparent_alias, 1);
608 bp_pack_value (&bp, node->weakref, 1);
609 bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
610 gcc_assert (node->definition || !node->analyzed);
611 /* Constant pool initializers can be de-unified into individual ltrans units.
612 FIXME: Alternatively at -Os we may want to avoid generating for them the local
613 labels and share them across LTRANS partitions. */
614 if (node->get_partitioning_class () != SYMBOL_PARTITION)
616 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
617 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
619 else
621 bp_pack_value (&bp, node->definition
622 && referenced_from_other_partition_p (node, encoder), 1);
623 bp_pack_value (&bp, node->analyzed
624 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
625 /* in_other_partition. */
627 bp_pack_value (&bp, node->tls_model, 3);
628 bp_pack_value (&bp, node->used_by_single_function, 1);
629 bp_pack_value (&bp, node->dynamically_initialized, 1);
630 bp_pack_value (&bp, node->need_bounds_init, 1);
631 streamer_write_bitpack (&bp);
/* Comdat group name ("" if none) plus a reference to a fellow member.  */
633 group = node->get_comdat_group ();
634 if (group)
635 comdat = IDENTIFIER_POINTER (group);
636 else
637 comdat = "";
638 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
640 if (group)
642 if (node->same_comdat_group)
644 ref = LCC_NOT_FOUND;
645 for (struct symtab_node *n = node->same_comdat_group;
646 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
647 ref = lto_symtab_encoder_lookup (encoder, n);
649 else
650 ref = LCC_NOT_FOUND;
651 streamer_write_hwi_stream (ob->main_stream, ref);
654 section = node->get_section ();
655 if (!section)
656 section = "";
657 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
659 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
660 LDPR_NUM_KNOWN, node->resolution);
663 /* Output the IPA reference REF to OB using ENCODER.  */
666 static void
667 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
668 lto_symtab_encoder_t encoder)
670 struct bitpack_d bp;
671 int nref;
672 int uid = ref->lto_stmt_uid;
673 struct cgraph_node *node;
675 bp = bitpack_create (ob->main_stream);
676 bp_pack_value (&bp, ref->use, 3);
677 bp_pack_value (&bp, ref->speculative, 1);
678 streamer_write_bitpack (&bp);
/* The referred symbol must already be in ENCODER.  */
679 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
680 gcc_assert (nref != LCC_NOT_FOUND);
681 streamer_write_hwi_stream (ob->main_stream, nref);
/* For references originating in a function, also stream the uid of the
   referring statement (gimple uid biased by 1 when available).  */
683 node = dyn_cast <cgraph_node *> (ref->referring);
684 if (node)
686 if (ref->stmt)
687 uid = gimple_uid (ref->stmt) + 1;
688 streamer_write_hwi_stream (ob->main_stream, uid);
692 /* Stream out profile_summary to OB. */
694 static void
695 output_profile_summary (struct lto_simple_output_block *ob)
697 unsigned h_ix;
698 struct bitpack_d bp;
700 if (profile_info)
702 /* We do not output num and run_max, they are not used by
703 GCC profile feedback and they are difficult to merge from multiple
704 units. */
705 gcc_assert (profile_info->runs);
706 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
707 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
709 /* sum_all is needed for computing the working set with the
710 histogram. */
711 streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
713 /* Create and output a bitpack of non-zero histogram entries indices. */
714 bp = bitpack_create (ob->main_stream);
715 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
716 bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
717 streamer_write_bitpack (&bp);
718 /* Now stream out only those non-zero entries. */
719 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
721 if (!profile_info->histogram[h_ix].num_counters)
722 continue;
723 streamer_write_gcov_count_stream (ob->main_stream,
724 profile_info->histogram[h_ix].num_counters);
725 streamer_write_gcov_count_stream (ob->main_stream,
726 profile_info->histogram[h_ix].min_value);
727 streamer_write_gcov_count_stream (ob->main_stream,
728 profile_info->histogram[h_ix].cum_value);
730 /* IPA-profile computes hot bb threshold based on cumulated
731 whole program profile. We need to stream it down to ltrans. */
732 if (flag_wpa)
733 streamer_write_gcov_count_stream (ob->main_stream,
734 get_hot_bb_threshold ());
/* No profile available: a zero run count marks the summary as absent.  */
736 else
737 streamer_write_uhwi_stream (ob->main_stream, 0);
740 /* Output all callees or indirect outgoing edges. EDGE must be the first such
741 edge. */
743 static void
744 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
745 struct lto_simple_output_block *ob,
746 lto_symtab_encoder_t encoder)
748 if (!edge)
749 return;
751 /* Output edges in backward direction, so the reconstructed callgraph match
752 and it is easy to associate call sites in the IPA pass summaries. */
753 while (edge->next_callee)
754 edge = edge->next_callee;
755 for (; edge; edge = edge->prev_callee)
756 lto_output_edge (ob, edge, encoder);
759 /* Output the IPA references of all nodes in ENCODER to the refs section. */
761 static void
762 output_refs (lto_symtab_encoder_t encoder)
764 struct lto_simple_output_block *ob;
765 int count;
766 struct ipa_ref *ref;
768 ob = lto_create_simple_output_block (LTO_section_refs);
770 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
772 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
774 /* IPA_REF_ALIAS and IPA_REF_CHKP references are always preserved
775 in the boundary. Alias node can't have other references and
776 can be always handled as if it's not in the boundary. */
777 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
779 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
780 /* Output IPA_REF_CHKP reference. */
781 if (cnode
782 && cnode->instrumented_version
783 && !cnode->instrumentation_clone)
/* A node has at most one IPA_REF_CHKP reference, hence the break.  */
785 for (int i = 0; node->iterate_reference (i, ref); i++)
786 if (ref->use == IPA_REF_CHKP)
788 if (lto_symtab_encoder_lookup (encoder, ref->referred)
789 != LCC_NOT_FOUND)
791 int nref = lto_symtab_encoder_lookup (encoder, node);
792 streamer_write_gcov_count_stream (ob->main_stream, 1);
793 streamer_write_uhwi_stream (ob->main_stream, nref);
794 lto_output_ref (ob, ref, encoder);
796 break;
799 continue;
/* In-partition (or alias) node: stream reference count, the node's own
   reference number, then each reference record.  */
802 count = node->ref_list.nreferences ();
803 if (count)
805 streamer_write_gcov_count_stream (ob->main_stream, count);
806 streamer_write_uhwi_stream (ob->main_stream,
807 lto_symtab_encoder_lookup (encoder, node));
808 for (int i = 0; node->iterate_reference (i, ref); i++)
809 lto_output_ref (ob, ref, encoder);
/* Zero-count record terminates the section.  */
813 streamer_write_uhwi_stream (ob->main_stream, 0);
815 lto_destroy_simple_output_block (ob);
818 /* Add NODE into encoder as well as nodes it is cloned from.
819 Do it in a way so clones appear first. */
821 static void
822 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
823 bool include_body)
825 if (node->clone_of)
826 add_node_to (encoder, node->clone_of, include_body);
827 else if (include_body)
828 lto_set_symtab_encoder_encode_body (encoder, node);
829 lto_symtab_encoder_encode (encoder, node);
832 /* Add all references in NODE to encoders. */
834 static void
835 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
837 int i;
838 struct ipa_ref *ref = NULL;
839 for (i = 0; node->iterate_reference (i, ref); i++)
840 if (is_a <cgraph_node *> (ref->referred))
841 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
842 else
843 lto_symtab_encoder_encode (encoder, ref->referred);
846 /* Select what needs to be streamed out. In regular lto mode stream everything.
847 In offload lto mode stream only nodes marked as offloadable. */
848 void
849 select_what_to_stream (void)
851 struct symtab_node *snode;
852 FOR_EACH_SYMBOL (snode)
853 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
856 /* Find all symbols we want to stream into given partition and insert them
857 to encoders.
859 The function actually replaces IN_ENCODER by new one. The reason is that
860 streaming code needs clone's origin to be streamed before clone. This
861 means that we need to insert the nodes in specific order. This order is
862 ignored by the partitioning logic earlier. */
864 lto_symtab_encoder_t
865 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
867 struct cgraph_edge *edge;
868 int i;
869 lto_symtab_encoder_t encoder;
870 lto_symtab_encoder_iterator lsei;
871 hash_set<void *> reachable_call_targets;
873 encoder = lto_symtab_encoder_new (false);
875 /* Go over all entries in the IN_ENCODER and duplicate them to
876 ENCODER. At the same time insert masters of clones so
877 every master appears before clone. */
878 for (lsei = lsei_start_function_in_partition (in_encoder);
879 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
881 struct cgraph_node *node = lsei_cgraph_node (lsei);
882 if (!node->need_lto_streaming)
883 continue;
884 add_node_to (encoder, node, true);
885 lto_set_symtab_encoder_in_partition (encoder, node);
886 create_references (encoder, node);
/* Variables in the partition get their initializers streamed.  */
888 for (lsei = lsei_start_variable_in_partition (in_encoder);
889 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
891 varpool_node *vnode = lsei_varpool_node (lsei);
893 if (!vnode->need_lto_streaming)
894 continue;
895 lto_set_symtab_encoder_in_partition (encoder, vnode);
896 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
897 create_references (encoder, vnode);
899 /* Pickle in also the initializer of all referenced readonly variables
900 to help folding. Constant pool variables are not shared, so we must
901 pickle those too. */
902 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
904 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
905 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
907 if (!lto_symtab_encoder_encode_initializer_p (encoder,
908 vnode)
909 && (((vnode->ctor_useable_for_folding_p ()
910 && (!DECL_VIRTUAL_P (vnode->decl)
911 || !flag_wpa
912 || flag_ltrans_devirtualize))
913 || POINTER_BOUNDS_P (vnode->decl))))
915 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
916 create_references (encoder, vnode);
921 /* Go over all the nodes again to include callees that are not in
922 SET. */
923 for (lsei = lsei_start_function_in_partition (encoder);
924 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
926 struct cgraph_node *node = lsei_cgraph_node (lsei);
927 for (edge = node->callees; edge; edge = edge->next_callee)
929 struct cgraph_node *callee = edge->callee;
930 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
932 /* We should have moved all the inlines. */
933 gcc_assert (!callee->global.inlined_to);
934 add_node_to (encoder, callee, false);
937 /* Add all possible targets for late devirtualization. */
938 if (flag_ltrans_devirtualize || !flag_wpa)
939 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
940 if (edge->indirect_info->polymorphic)
942 unsigned int i;
943 void *cache_token;
944 bool final;
945 vec <cgraph_node *>targets
946 = possible_polymorphic_call_targets
947 (edge, &final, &cache_token);
/* cache_token dedups target lists shared by several call sites.  */
948 if (!reachable_call_targets.add (cache_token))
950 for (i = 0; i < targets.length (); i++)
952 struct cgraph_node *callee = targets[i];
954 /* Adding an external declarations into the unit serves
955 no purpose and just increases its boundary. */
956 if (callee->definition
957 && !lto_symtab_encoder_in_partition_p
958 (encoder, callee))
960 gcc_assert (!callee->global.inlined_to);
961 add_node_to (encoder, callee, false);
967 /* Be sure to also insert alias targets and thunk callees. These need
968 to stay to aid local calling conventions. */
969 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
971 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
972 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
974 if (node->alias && node->analyzed)
975 create_references (encoder, node);
976 if (cnode
977 && cnode->thunk.thunk_p && !cnode->global.inlined_to)
978 add_node_to (encoder, cnode->callees->callee, false);
/* Follow transparent alias chains to their ultimate target.  */
979 while (node->transparent_alias && node->analyzed)
981 node = node->get_alias_target ();
982 if (is_a <cgraph_node *> (node))
983 add_node_to (encoder, dyn_cast <cgraph_node *> (node),
984 false);
985 else
986 lto_symtab_encoder_encode (encoder, node);
/* IN_ENCODER is consumed; callers must use the returned encoder.  */
989 lto_symtab_encoder_delete (in_encoder);
990 return encoder;
993 /* Output the part of the symtab in SET and VSET. */
995 void
996 output_symtab (void)
998 struct cgraph_node *node;
999 struct lto_simple_output_block *ob;
1000 int i, n_nodes;
1001 lto_symtab_encoder_t encoder;
1003 if (flag_wpa)
1004 output_cgraph_opt_summary ();
1006 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
1008 output_profile_summary (ob);
1010 /* An encoder for cgraph nodes should have been created by
1011 ipa_write_summaries_1. */
1012 gcc_assert (ob->decl_state->symtab_node_encoder);
1013 encoder = ob->decl_state->symtab_node_encoder;
1015 /* Write out the nodes. We must first output a node and then its clones,
1016 otherwise at a time reading back the node there would be nothing to clone
1017 from. */
1018 n_nodes = lto_symtab_encoder_size (encoder);
1019 for (i = 0; i < n_nodes; i++)
1021 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1022 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1023 lto_output_node (ob, cnode, encoder);
1024 else
1025 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
1028 /* Go over the nodes in SET again to write edges. */
1029 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
1031 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
1032 if (node
1033 && ((node->thunk.thunk_p && !node->global.inlined_to)
1034 || lto_symtab_encoder_in_partition_p (encoder, node)))
1036 output_outgoing_cgraph_edges (node->callees, ob, encoder);
1037 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
/* A zero terminates the stream of node/edge records.  */
1041 streamer_write_uhwi_stream (ob->main_stream, 0);
1043 lto_destroy_simple_output_block (ob);
1045 /* Emit toplevel asms.
1046 When doing WPA we must output every asm just once. Since we do not partition asm
1047 nodes at all, output them to first output. This is kind of hack, but should work
1048 well. */
1049 if (!asm_nodes_output)
1051 asm_nodes_output = true;
1052 lto_output_toplevel_asms ();
1055 output_refs (encoder);
1058 /* Return identifier encoded in IB as a plain string. */
1060 static tree
1061 read_identifier (struct lto_input_block *ib)
1063 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1064 tree id;
1066 if (ib->data[ib->p + len])
1067 lto_section_overrun (ib);
1068 if (!len)
1070 ib->p++;
1071 return NULL;
1073 id = get_identifier (ib->data + ib->p);
1074 ib->p += len + 1;
1075 return id;
1078 /* Return string encoded in IB, NULL if string is empty. */
1080 static const char *
1081 read_string (struct lto_input_block *ib)
1083 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1084 const char *str;
1086 if (ib->data[ib->p + len])
1087 lto_section_overrun (ib);
1088 if (!len)
1090 ib->p++;
1091 return NULL;
1093 str = ib->data + ib->p;
1094 ib->p += len + 1;
1095 return str;
1098 /* Output function/variable tables that will allow libgomp to look up offload
1099 target code.
1100 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1101 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1102 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1104 void
1105 output_offload_tables (void)
1107 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
1108 return;
1110 struct lto_simple_output_block *ob
1111 = lto_create_simple_output_block (LTO_section_offload_table);
1113 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1115 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1116 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1117 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
1118 (*offload_funcs)[i]);
1121 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1123 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1124 LTO_symtab_last_tag, LTO_symtab_variable);
1125 lto_output_var_decl_index (ob->decl_state, ob->main_stream,
1126 (*offload_vars)[i]);
1129 streamer_write_uhwi_stream (ob->main_stream, 0);
1130 lto_destroy_simple_output_block (ob);
1132 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1133 streamed to one partition only. That's why we free offload_funcs and
1134 offload_vars after the first call of output_offload_tables. */
1135 if (flag_wpa)
1137 vec_free (offload_funcs);
1138 vec_free (offload_vars);
/* Overwrite the information in NODE based on FILE_DATA, TAG and the
   flag bits in BP.  This is called either to initialize NODE or to
   replace the values in it, for instance because the first time we saw
   it, the function body was not available but now it is.  BP is a
   bitpack with all the bitflags for NODE read from the stream.  */
static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX so input_cgraph_1 can later tell that this
     node came from the stream (AUX is checked and cleared there).  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  /* The sequence of bp_unpack_value calls below must exactly mirror the
     bp_pack_value sequence on the writer side — do not reorder.  */
  node->local.local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* Analyzed-ness is implied by the record tag, not a stream bit.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in. */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The body lives in some other partition; make the decl behave
	 like an external reference here.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->transparent_alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  /* Frequency uses two bits; everything else here is a single flag.  */
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->instrumentation_clone = bp_unpack_value (bp, 1);
  node->split_part = bp_unpack_value (bp, 1);
  /* Cross-partition flags only make sense during LTRANS.  */
  gcc_assert (flag_ltrans
	      || (!node->in_other_partition
		  && !node->used_from_other_partition));
}
1210 /* Return string alias is alias of. */
1212 static tree
1213 get_alias_symbol (tree decl)
1215 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1216 return get_identifier (TREE_STRING_POINTER
1217 (TREE_VALUE (TREE_VALUE (alias))));
/* Read a cgraph node from input_block IB.  TAG is the node's tag just
   read.  Return the node read or overwritten.  */
static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* The read sequence below must exactly mirror lto_output_node; do not
     reorder any of the streamer_read calls.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* A clone record: materialize the node by cloning the previously
	 read node it refers to (index into NODES).  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	0, CGRAPH_FREQ_BASE, false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = streamer_read_gcov_count (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Reconstruct the list of IPA transform passes still to be applied to
     this node's body, by pass id.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes carry an inlined_to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->uid);

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  if (node->thunk.thunk_p)
    {
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      /* TYPE is a bit mask of thunk properties; bit 2/4/8 decode the
	 flags packed on the writer side.  */
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
      node->thunk.add_pointer_bounds_args = (type & 8);
    }
  /* Non-analyzed weakref aliases keep their target by name only.  */
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  /* Ctor/dtor priorities are streamed only when the decl has them.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  /* Instrumentation clones (Pointer Bounds Checker) also carry the
     original decl they were cloned from.  */
  if (node->instrumentation_clone)
    {
      decl_index = streamer_read_uhwi (ib);
      fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
      node->orig_decl = fn_decl;
    }

  return node;
}
/* Read a varpool node from input_block IB using the declarations in
   FILE_DATA.  Return the node read.  */
1350 static varpool_node *
1351 input_varpool_node (struct lto_file_decl_data *file_data,
1352 struct lto_input_block *ib)
1354 int decl_index;
1355 tree var_decl;
1356 varpool_node *node;
1357 struct bitpack_d bp;
1358 int ref = LCC_NOT_FOUND;
1359 int order;
1360 tree group;
1361 const char *section;
1363 order = streamer_read_hwi (ib) + order_base;
1364 decl_index = streamer_read_uhwi (ib);
1365 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1367 /* Declaration of functions can be already merged with a declaration
1368 from other input file. We keep cgraph unmerged until after streaming
1369 of ipa passes is done. Alays forcingly create a fresh node. */
1370 node = varpool_node::create_empty ();
1371 node->decl = var_decl;
1372 node->register_symbol ();
1374 node->order = order;
1375 if (order >= symtab->order)
1376 symtab->order = order + 1;
1377 node->lto_file_data = file_data;
1379 bp = streamer_read_bitpack (ib);
1380 node->externally_visible = bp_unpack_value (&bp, 1);
1381 node->no_reorder = bp_unpack_value (&bp, 1);
1382 node->force_output = bp_unpack_value (&bp, 1);
1383 node->forced_by_abi = bp_unpack_value (&bp, 1);
1384 node->unique_name = bp_unpack_value (&bp, 1);
1385 node->body_removed = bp_unpack_value (&bp, 1);
1386 node->implicit_section = bp_unpack_value (&bp, 1);
1387 node->writeonly = bp_unpack_value (&bp, 1);
1388 node->definition = bp_unpack_value (&bp, 1);
1389 node->alias = bp_unpack_value (&bp, 1);
1390 node->transparent_alias = bp_unpack_value (&bp, 1);
1391 node->weakref = bp_unpack_value (&bp, 1);
1392 node->analyzed = bp_unpack_value (&bp, 1);
1393 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1394 node->in_other_partition = bp_unpack_value (&bp, 1);
1395 if (node->in_other_partition)
1397 DECL_EXTERNAL (node->decl) = 1;
1398 TREE_STATIC (node->decl) = 0;
1400 if (node->alias && !node->analyzed && node->weakref)
1401 node->alias_target = get_alias_symbol (node->decl);
1402 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1403 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1404 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1405 node->need_bounds_init = bp_unpack_value (&bp, 1);
1406 group = read_identifier (ib);
1407 if (group)
1409 node->set_comdat_group (group);
1410 ref = streamer_read_hwi (ib);
1411 /* Store a reference for now, and fix up later to be a pointer. */
1412 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1414 else
1415 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1416 section = read_string (ib);
1417 if (section)
1418 node->set_section_for_node (section);
1419 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1420 LDPR_NUM_KNOWN);
1421 gcc_assert (flag_ltrans
1422 || (!node->in_other_partition
1423 && !node->used_from_other_partition));
1425 return node;
/* Read an ipa_ref attached to REFERRING_NODE from input_block IB.
   NODES is the vector of previously read symtab nodes used to decode
   the referred-to node.  */
1431 static void
1432 input_ref (struct lto_input_block *ib,
1433 symtab_node *referring_node,
1434 vec<symtab_node *> nodes)
1436 symtab_node *node = NULL;
1437 struct bitpack_d bp;
1438 enum ipa_ref_use use;
1439 bool speculative;
1440 struct ipa_ref *ref;
1442 bp = streamer_read_bitpack (ib);
1443 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1444 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1445 node = nodes[streamer_read_hwi (ib)];
1446 ref = referring_node->create_reference (node, use);
1447 ref->speculative = speculative;
1448 if (is_a <cgraph_node *> (referring_node))
1449 ref->lto_stmt_uid = streamer_read_hwi (ib);
1452 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1453 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1454 edge being read is indirect (in the sense that it has
1455 indirect_unknown_callee set). */
static void
input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  /* Caller (and, for direct edges, callee) are encoded as indices into
     the NODES vector of previously read symtab nodes.  */
  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = streamer_read_gcov_count (ib);

  /* The unpack order below must mirror lto_output_edge exactly.  */
  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count, freq);
  else
    edge = caller->create_edge (callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Indirect edges additionally carry the callee's ECF flags (one
	 bit per flag) and the common-target profile hints.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
      edge->indirect_info->common_target_id = streamer_read_hwi (ib);
      /* The probability is streamed only when a common target exists.  */
      if (edge->indirect_info->common_target_id)
	edge->indirect_info->common_target_probability
	  = streamer_read_hwi (ib);
    }
}
1524 /* Read a cgraph from IB using the info in FILE_DATA. */
static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node *> nodes = vNULL;
  symtab_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  /* All node orders in this file are biased by the current global
     order, so files concatenate without order clashes.  */
  order_base = symtab->order;
  /* Records are read until the terminating zero tag.  Edge records
     refer back (by index) to nodes already pushed on NODES.  */
  while (tag)
    {
      if (tag == LTO_symtab_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
	input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
	{
	  node = input_varpool_node (file_data, ib);
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}
      else
	{
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, order_base);

  /* AUX pointers should be all non-zero for function nodes read from the
     stream (input_overwrite_node stored the record tag there).  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (nodes, i, node)
	gcc_assert (node->aux || !is_a <cgraph_node *> (node));
    }
  /* Second pass: turn the integer references stashed in pointer fields
     during reading back into real pointers, now that all nodes exist.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  ref = (int) (intptr_t) cnode->global.inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->aux)
	    continue;
	  node->aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    dyn_cast<cgraph_node *> (node)->global.inlined_to
	      = dyn_cast<cgraph_node *> (nodes[ref]);
	  else
	    cnode->global.inlined_to = NULL;

	  /* Compute instrumented_version.  */
	  if (cnode->instrumentation_clone)
	    {
	      gcc_assert (cnode->orig_decl);

	      cnode->instrumented_version = cgraph_node::get (cnode->orig_decl);
	      if (cnode->instrumented_version)
		{
		  /* We may have multiple nodes for a single function which
		     will be merged later.  To have a proper merge we need
		     to keep instrumentation_version reference between nodes
		     consistent: each instrumented_version reference should
		     have proper reverse reference.  Thus don't break existing
		     instrumented_version reference if it already exists.  */
		  if (cnode->instrumented_version->instrumented_version)
		    cnode->instrumented_version = NULL;
		  else
		    cnode->instrumented_version->instrumented_version = cnode;
		}

	      /* Restore decl names reference except for wrapper functions.  */
	      if (!chkp_wrap_function (cnode->orig_decl))
		{
		  tree name = DECL_ASSEMBLER_NAME (cnode->decl);
		  IDENTIFIER_TRANSPARENT_ALIAS (name) = 1;
		  TREE_CHAIN (name) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
		}
	    }
	}

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = nodes[ref];
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark function nodes in AUX for the caller (input_symtab clears
     it again for nodes that came from a file).  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
  return nodes;
}
1631 /* Input ipa_refs. */
1633 static void
1634 input_refs (struct lto_input_block *ib,
1635 vec<symtab_node *> nodes)
1637 int count;
1638 int idx;
1639 while (true)
1641 symtab_node *node;
1642 count = streamer_read_uhwi (ib);
1643 if (!count)
1644 break;
1645 idx = streamer_read_uhwi (ib);
1646 node = nodes[idx];
1647 while (count)
1649 input_ref (ib, node, nodes);
1650 count--;
/* Whole-unit profile summary built by merge_profile_summaries from the
   per-file summaries; the global profile_info is pointed at it.  */
static struct gcov_ctr_summary lto_gcov_summary;
1658 /* Input profile_info from IB. */
static void
input_profile_summary (struct lto_input_block *ib,
		       struct lto_file_decl_data *file_data)
{
  unsigned h_ix;
  struct bitpack_d bp;
  /* A zero run count means no profile data was streamed for this file;
     everything below is conditional on it.  */
  unsigned int runs = streamer_read_uhwi (ib);
  if (runs)
    {
      file_data->profile_info.runs = runs;
      file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
      file_data->profile_info.sum_all = streamer_read_gcov_count (ib);

      memset (file_data->profile_info.histogram, 0,
	      sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
      /* Input the bitpack of non-zero histogram indices.  */
      bp = streamer_read_bitpack (ib);
      /* Read in and unpack the full bitpack, flagging non-zero
	 histogram entries by setting the num_counters non-zero.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  file_data->profile_info.histogram[h_ix].num_counters
	    = bp_unpack_value (&bp, 1);
	}
      /* Only the entries flagged above carry streamed counter data.  */
      for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
	{
	  if (!file_data->profile_info.histogram[h_ix].num_counters)
	    continue;

	  file_data->profile_info.histogram[h_ix].num_counters
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].min_value
	    = streamer_read_gcov_count (ib);
	  file_data->profile_info.histogram[h_ix].cum_value
	    = streamer_read_gcov_count (ib);
	}
      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_ltrans)
	set_hot_bb_threshold (streamer_read_gcov_count (ib));
    }
}
1703 /* Rescale profile summaries to the same number of runs in the whole unit. */
1705 static void
1706 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1708 struct lto_file_decl_data *file_data;
1709 unsigned int j, h_ix;
1710 gcov_unsigned_t max_runs = 0;
1711 struct cgraph_node *node;
1712 struct cgraph_edge *edge;
1713 gcov_type saved_sum_all = 0;
1714 gcov_ctr_summary *saved_profile_info = 0;
1715 int saved_scale = 0;
1717 /* Find unit with maximal number of runs. If we ever get serious about
1718 roundoff errors, we might also consider computing smallest common
1719 multiply. */
1720 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1721 if (max_runs < file_data->profile_info.runs)
1722 max_runs = file_data->profile_info.runs;
1724 if (!max_runs)
1725 return;
1727 /* Simple overflow check. We probably don't need to support that many train
1728 runs. Such a large value probably imply data corruption anyway. */
1729 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1731 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1732 INT_MAX / REG_BR_PROB_BASE);
1733 return;
1736 profile_info = &lto_gcov_summary;
1737 lto_gcov_summary.runs = max_runs;
1738 lto_gcov_summary.sum_max = 0;
1739 memset (lto_gcov_summary.histogram, 0,
1740 sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1742 /* Rescale all units to the maximal number of runs.
1743 sum_max can not be easily merged, as we have no idea what files come from
1744 the same run. We do not use the info anyway, so leave it 0. */
1745 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1746 if (file_data->profile_info.runs)
1748 int scale = GCOV_COMPUTE_SCALE (max_runs,
1749 file_data->profile_info.runs);
1750 lto_gcov_summary.sum_max
1751 = MAX (lto_gcov_summary.sum_max,
1752 apply_scale (file_data->profile_info.sum_max, scale));
1753 lto_gcov_summary.sum_all
1754 = MAX (lto_gcov_summary.sum_all,
1755 apply_scale (file_data->profile_info.sum_all, scale));
1756 /* Save a pointer to the profile_info with the largest
1757 scaled sum_all and the scale for use in merging the
1758 histogram. */
1759 if (!saved_profile_info
1760 || lto_gcov_summary.sum_all > saved_sum_all)
1762 saved_profile_info = &file_data->profile_info;
1763 saved_sum_all = lto_gcov_summary.sum_all;
1764 saved_scale = scale;
1768 gcc_assert (saved_profile_info);
1770 /* Scale up the histogram from the profile that had the largest
1771 scaled sum_all above. */
1772 for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1774 /* Scale up the min value as we did the corresponding sum_all
1775 above. Use that to find the new histogram index. */
1776 gcov_type scaled_min
1777 = apply_scale (saved_profile_info->histogram[h_ix].min_value,
1778 saved_scale);
1779 /* The new index may be shared with another scaled histogram entry,
1780 so we need to account for a non-zero histogram entry at new_ix. */
1781 unsigned new_ix = gcov_histo_index (scaled_min);
1782 lto_gcov_summary.histogram[new_ix].min_value
1783 = (lto_gcov_summary.histogram[new_ix].num_counters
1784 ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
1785 : scaled_min);
1786 /* Some of the scaled counter values would ostensibly need to be placed
1787 into different (larger) histogram buckets, but we keep things simple
1788 here and place the scaled cumulative counter value in the bucket
1789 corresponding to the scaled minimum counter value. */
1790 lto_gcov_summary.histogram[new_ix].cum_value
1791 += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
1792 saved_scale);
1793 lto_gcov_summary.histogram[new_ix].num_counters
1794 += saved_profile_info->histogram[h_ix].num_counters;
1797 /* Watch roundoff errors. */
1798 if (lto_gcov_summary.sum_max < max_runs)
1799 lto_gcov_summary.sum_max = max_runs;
1801 /* If merging already happent at WPA time, we are done. */
1802 if (flag_ltrans)
1803 return;
1805 /* Now compute count_materialization_scale of each node.
1806 During LTRANS we already have values of count_materialization_scale
1807 computed, so just update them. */
1808 FOR_EACH_FUNCTION (node)
1809 if (node->lto_file_data
1810 && node->lto_file_data->profile_info.runs)
1812 int scale;
1814 scale = RDIV (node->count_materialization_scale * max_runs,
1815 node->lto_file_data->profile_info.runs);
1816 node->count_materialization_scale = scale;
1817 if (scale < 0)
1818 fatal_error (input_location, "Profile information in %s corrupted",
1819 file_data->file_name);
1821 if (scale == REG_BR_PROB_BASE)
1822 continue;
1823 for (edge = node->callees; edge; edge = edge->next_callee)
1824 edge->count = apply_scale (edge->count, scale);
1825 node->count = apply_scale (node->count, scale);
1829 /* Input and merge the symtab from each of the .o files passed to
1830 lto1. */
1832 void
1833 input_symtab (void)
1835 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1836 struct lto_file_decl_data *file_data;
1837 unsigned int j = 0;
1838 struct cgraph_node *node;
1840 while ((file_data = file_data_vec[j++]))
1842 const char *data;
1843 size_t len;
1844 struct lto_input_block *ib;
1845 vec<symtab_node *> nodes;
1847 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1848 &data, &len);
1849 if (!ib)
1850 fatal_error (input_location,
1851 "cannot find LTO cgraph in %s", file_data->file_name);
1852 input_profile_summary (ib, file_data);
1853 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1854 nodes = input_cgraph_1 (file_data, ib);
1855 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1856 ib, data, len);
1858 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1859 &data, &len);
1860 if (!ib)
1861 fatal_error (input_location, "cannot find LTO section refs in %s",
1862 file_data->file_name);
1863 input_refs (ib, nodes);
1864 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1865 ib, data, len);
1866 if (flag_ltrans)
1867 input_cgraph_opt_summary (nodes);
1868 nodes.release ();
1871 merge_profile_summaries (file_data_vec);
1873 if (!flag_auto_profile)
1874 get_working_sets ();
1877 /* Clear out the aux field that was used to store enough state to
1878 tell which nodes should be overwritten. */
1879 FOR_EACH_FUNCTION (node)
1881 /* Some nodes may have been created by cgraph_node. This
1882 happens when the callgraph contains nested functions. If the
1883 node for the parent function was never emitted to the gimple
1884 file, cgraph_node will create a node for it when setting the
1885 context of the nested function. */
1886 if (node->lto_file_data)
1887 node->aux = NULL;
1891 /* Input function/variable tables that will allow libgomp to look up offload
1892 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1894 void
1895 input_offload_tables (bool do_force_output)
1897 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1898 struct lto_file_decl_data *file_data;
1899 unsigned int j = 0;
1901 while ((file_data = file_data_vec[j++]))
1903 const char *data;
1904 size_t len;
1905 struct lto_input_block *ib
1906 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1907 &data, &len);
1908 if (!ib)
1909 continue;
1911 enum LTO_symtab_tags tag
1912 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1913 while (tag)
1915 if (tag == LTO_symtab_unavail_node)
1917 int decl_index = streamer_read_uhwi (ib);
1918 tree fn_decl
1919 = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1920 vec_safe_push (offload_funcs, fn_decl);
1922 /* Prevent IPA from removing fn_decl as unreachable, since there
1923 may be no refs from the parent function to child_fn in offload
1924 LTO mode. */
1925 if (do_force_output)
1926 cgraph_node::get (fn_decl)->mark_force_output ();
1928 else if (tag == LTO_symtab_variable)
1930 int decl_index = streamer_read_uhwi (ib);
1931 tree var_decl
1932 = lto_file_decl_data_get_var_decl (file_data, decl_index);
1933 vec_safe_push (offload_vars, var_decl);
1935 /* Prevent IPA from removing var_decl as unused, since there
1936 may be no refs to var_decl in offload LTO mode. */
1937 if (do_force_output)
1938 varpool_node::get (var_decl)->force_output = 1;
1940 else
1941 fatal_error (input_location,
1942 "invalid offload table in %s", file_data->file_name);
1944 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1947 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1948 ib, data, len);
1952 /* True when we need optimization summary for NODE. */
1954 static int
1955 output_cgraph_opt_summary_p (struct cgraph_node *node)
1957 return (node->clone_of
1958 && (node->clone.tree_map
1959 || node->clone.args_to_skip
1960 || node->clone.combined_args_to_skip));
/* Output optimization summary for EDGE to OB.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: no per-edge optimization data is streamed at
     present.  The hook is still called from output_node_opt_summary so a
     future per-edge summary only needs to fill this in; it must stay in
     sync with input_edge_opt_summary.  */
}
/* Output optimization summary for NODE to OB.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 lto_symtab_encoder_t encoder)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  /* Stream args_to_skip as a bit count followed by the set bit indices;
     an absent bitmap is encoded as count 0.  The layout must mirror the
     reads in input_node_opt_summary exactly.  */
  if (node->clone.args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  /* Same encoding for combined_args_to_skip.  */
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  /* Replacement maps: length prefix, then one record per map.  */
  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
    {
      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
	 mechanism to store function local declarations into summaries.  */
      gcc_assert (!map->old_tree);
      streamer_write_uhwi (ob, map->parm_num);
      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
      stream_write_tree (ob, map->new_tree, true);
      /* Pack the two boolean flags into a single bitpack word.  */
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  /* Edge summaries are only emitted for nodes in the current partition
     (currently the per-edge writer is a no-op placeholder).  */
  if (lto_symtab_encoder_in_partition_p (encoder, node))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
/* Output optimization summaries stored in callgraph.
   At the moment it is the clone info structure.  */

static void
output_cgraph_opt_summary (void)
{
  int i, n_nodes;
  lto_symtab_encoder_t encoder;
  struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
  unsigned count = 0;

  ob->symbol = NULL;
  encoder = ob->decl_state->symtab_node_encoder;
  n_nodes = lto_symtab_encoder_size (encoder);
  /* First pass: count nodes that have a summary worth streaming, so the
     reader knows up front how many records follow.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	count++;
    }
  streamer_write_uhwi (ob, count);
  /* Second pass: emit each summary prefixed by the node's encoder index,
     which the reader uses to look the node back up.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	{
	  streamer_write_uhwi (ob, i);
	  output_node_opt_summary (ob, cnode, encoder);
	}
    }
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Input optimisation summary of EDGE.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: the writer (output_edge_opt_summary) streams no
     per-edge data, so there is nothing to read.  Must stay in sync with
     that function if per-edge data is ever added.  */
}
/* Input optimisation summary of NODE.  */

static void
input_node_opt_summary (struct cgraph_node *node,
			struct lto_input_block *ib_main,
			struct data_in *data_in)
{
  int i;
  int count;
  int bit;
  struct bitpack_d bp;
  struct cgraph_edge *e;

  /* args_to_skip: a bit count followed by that many bit indices; count 0
     means the bitmap was absent.  Mirrors output_node_opt_summary.  */
  count = streamer_read_uhwi (ib_main);
  if (count)
    node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.args_to_skip, bit);
    }
  /* Same encoding for combined_args_to_skip.  */
  count = streamer_read_uhwi (ib_main);
  if (count)
    node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.combined_args_to_skip, bit);
    }
  /* Replacement maps: length prefix, then one record per map.  */
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();

      vec_safe_push (node->clone.tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      /* old_tree is never streamed (the writer asserts it is NULL).  */
      map->old_tree = NULL;
      map->new_tree = stream_read_tree (ib_main, data_in);
      /* The two boolean flags were packed into one bitpack word.  */
      bp = streamer_read_bitpack (ib_main);
      map->replace_p = bp_unpack_value (&bp, 1);
      map->ref_p = bp_unpack_value (&bp, 1);
    }
  /* Per-edge summaries (currently empty placeholders on both sides).  */
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
			  const char *data, size_t len,
			  vec<symtab_node *> nodes)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* Section layout: header, CFG stream, main stream, string table.
     Compute the byte offset of each region from the header's sizes.  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  /* Record count written by output_cgraph_opt_summary.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      /* Each record is prefixed with the node's encoder index, used to
	 map back into NODES.  */
      int ref = streamer_read_uhwi (&ib_main);
      input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
			      &ib_main, data_in);
    }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
2151 /* Input optimization summary of cgraph. */
2153 static void
2154 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2156 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2157 struct lto_file_decl_data *file_data;
2158 unsigned int j = 0;
2160 while ((file_data = file_data_vec[j++]))
2162 size_t len;
2163 const char *data =
2164 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2165 &len);
2167 if (data)
2168 input_cgraph_opt_section (file_data, data, len, nodes);