gcc/lto-cgraph.cc
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2023 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "omp-general.h"
41 #include "stringpool.h"
42 #include "attribs.h"
43 #include "alloc-pool.h"
44 #include "symbol-summary.h"
45 #include "symtab-thunks.h"
46 #include "symtab-clones.h"
48 /* True when asm nodes have been output. */
49 bool asm_nodes_output = false;
51 static void output_cgraph_opt_summary (void);
52 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
57 /* Cgraph streaming is organized as a set of records whose type
58 is indicated by a tag. */
59 enum LTO_symtab_tags
61 /* Must leave 0 for the stopper. */
63 /* Cgraph node without body available. */
64 LTO_symtab_unavail_node = 1,
65 /* Cgraph node with function body. */
66 LTO_symtab_analyzed_node,
67 /* Cgraph edges. */
68 LTO_symtab_edge,
69 LTO_symtab_indirect_edge,
70 LTO_symtab_variable,
71 LTO_symtab_last_tag
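/* Illustrative sketch, not part of the original file: the symtab section
   is a sequence of tagged records terminated by the 0 stopper, so a reader
   dispatches on the tag roughly the way input_cgraph_1 below does:

     enum LTO_symtab_tags tag
       = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
     while (tag)
       {
         if (tag == LTO_symtab_edge || tag == LTO_symtab_indirect_edge)
           ... read an edge record ...
         else if (tag == LTO_symtab_variable)
           ... read a varpool node record ...
         else
           ... read a cgraph node record ...
         tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
       }  */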
74 /* Create a new symtab encoder.
75 If FOR_INPUT, the encoder allocates only the data structures needed
76 to read the symtab. */
78 lto_symtab_encoder_t
79 lto_symtab_encoder_new (bool for_input)
81 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
83 if (!for_input)
84 encoder->map = new hash_map<symtab_node *, size_t>;
85 encoder->nodes.create (0);
86 return encoder;
90 /* Delete ENCODER and its components. */
92 void
93 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
95 encoder->nodes.release ();
96 if (encoder->map)
97 delete encoder->map;
98 free (encoder);
102 /* Return the existing reference number of NODE in the symtab ENCODER.
103 Assign a new reference if this is the first time
104 NODE is encoded. */
107 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
108 symtab_node *node)
110 int ref;
112 if (!encoder->map)
114 lto_encoder_entry entry = {node, false, false, false};
116 ref = encoder->nodes.length ();
117 encoder->nodes.safe_push (entry);
118 return ref;
121 size_t *slot = encoder->map->get (node);
122 if (!slot || !*slot)
124 lto_encoder_entry entry = {node, false, false, false};
125 ref = encoder->nodes.length ();
126 if (!slot)
127 encoder->map->put (node, ref + 1);
128 encoder->nodes.safe_push (entry);
130 else
131 ref = *slot - 1;
133 return ref;
136 /* Remove NODE from encoder. */
138 bool
139 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
140 symtab_node *node)
142 int index;
143 lto_encoder_entry last_node;
145 size_t *slot = encoder->map->get (node);
146 if (slot == NULL || !*slot)
147 return false;
149 index = *slot - 1;
150 gcc_checking_assert (encoder->nodes[index].node == node);
152 /* Remove from vector. We do this by swapping node with the last element
153 of the vector. */
154 last_node = encoder->nodes.pop ();
155 if (last_node.node != node)
157 gcc_assert (encoder->map->put (last_node.node, index + 1));
159 /* Move the last element to the original spot of NODE. */
160 encoder->nodes[index] = last_node;
163 /* Remove element from hash table. */
164 encoder->map->remove (node);
165 return true;
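/* Illustrative sketch, not part of the original file: typical lifetime of an
   output encoder; NODE here is a hypothetical symtab node:

     lto_symtab_encoder_t encoder = lto_symtab_encoder_new (false);
     lto_set_symtab_encoder_in_partition (encoder, node);
     int ref = lto_symtab_encoder_lookup (encoder, node);
     gcc_assert (ref != LCC_NOT_FOUND
                 && lto_symtab_encoder_in_partition_p (encoder, node));
     ...
     lto_symtab_encoder_delete (encoder);

   An input encoder is created with lto_symtab_encoder_new (true) instead and
   has no node-to-reference map.  */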
169 /* Return TRUE if we should encode the body of NODE (if any). */
171 bool
172 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
173 struct cgraph_node *node)
175 int index = lto_symtab_encoder_lookup (encoder, node);
176 return encoder->nodes[index].body;
179 /* Specify that we encode the body of NODE in this partition. */
181 static void
182 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
183 struct cgraph_node *node)
185 int index = lto_symtab_encoder_encode (encoder, node);
186 gcc_checking_assert (encoder->nodes[index].node == node);
187 encoder->nodes[index].body = true;
190 /* Return TRUE if we should encode initializer of NODE (if any). */
192 bool
193 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
194 varpool_node *node)
196 int index = lto_symtab_encoder_lookup (encoder, node);
197 if (index == LCC_NOT_FOUND)
198 return false;
199 return encoder->nodes[index].initializer;
202 /* Specify that we should encode initializer of NODE (if any). */
204 static void
205 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
206 varpool_node *node)
208 int index = lto_symtab_encoder_lookup (encoder, node);
209 encoder->nodes[index].initializer = true;
212 /* Return TRUE if NODE is in this partition. */
214 bool
215 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
216 symtab_node *node)
218 int index = lto_symtab_encoder_lookup (encoder, node);
219 if (index == LCC_NOT_FOUND)
220 return false;
221 return encoder->nodes[index].in_partition;
224 /* Specify that NODE is in this partition. */
226 void
227 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
228 symtab_node *node)
230 int index = lto_symtab_encoder_encode (encoder, node);
231 encoder->nodes[index].in_partition = true;
234 /* Output the cgraph EDGE to OB using ENCODER. */
236 static void
237 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
238 lto_symtab_encoder_t encoder)
240 unsigned int uid;
241 intptr_t ref;
242 struct bitpack_d bp;
244 if (edge->indirect_unknown_callee)
245 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
246 LTO_symtab_indirect_edge);
247 else
248 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
249 LTO_symtab_edge);
251 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
252 gcc_assert (ref != LCC_NOT_FOUND);
253 streamer_write_hwi_stream (ob->main_stream, ref);
255 if (!edge->indirect_unknown_callee)
257 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
258 gcc_assert (ref != LCC_NOT_FOUND);
259 streamer_write_hwi_stream (ob->main_stream, ref);
262 edge->count.stream_out (ob->main_stream);
264 bp = bitpack_create (ob->main_stream);
265 uid = !edge->call_stmt ? edge->lto_stmt_uid
266 : gimple_uid (edge->call_stmt) + 1;
267 bp_pack_enum (&bp, cgraph_inline_failed_t,
268 CIF_N_REASONS, edge->inline_failed);
269 gcc_checking_assert (uid || edge->caller->thunk);
270 bp_pack_var_len_unsigned (&bp, uid);
271 bp_pack_value (&bp, edge->speculative_id, 16);
272 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
273 bp_pack_value (&bp, edge->speculative, 1);
274 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
275 gcc_assert (!edge->call_stmt_cannot_inline_p
276 || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
277 bp_pack_value (&bp, edge->can_throw_external, 1);
278 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
279 if (edge->indirect_unknown_callee)
281 int flags = edge->indirect_info->ecf_flags;
282 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
283 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
284 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
285 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
286 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
287 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
288 /* Flags that should not appear on indirect calls. */
289 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
290 | ECF_MAY_BE_ALLOCA
291 | ECF_SIBCALL
292 | ECF_LEAF
293 | ECF_NOVOPS)));
295 bp_pack_value (&bp, edge->indirect_info->num_speculative_call_targets,
296 16);
298 streamer_write_bitpack (&bp);
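/* Illustrative note, not part of the original file: every field packed above
   must be unpacked in the same order and with the same width by input_edge,
   e.g. the start of its bitpack is decoded as

     inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
     stmt_id = bp_unpack_var_len_unsigned (&bp);
     speculative_id = bp_unpack_value (&bp, 16);
     edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);

   mirroring the bp_pack_* calls in this function.  */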
301 /* Return true if NODE contains references from other partitions. */
303 bool
304 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
306 int i;
307 struct ipa_ref *ref = NULL;
309 for (i = 0; node->iterate_referring (i, ref); i++)
311 /* Ignore references from non-offloadable nodes while streaming NODE into
312 offload LTO section. */
313 if (!ref->referring->need_lto_streaming)
314 continue;
316 if (ref->referring->in_other_partition
317 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
318 return true;
320 return false;
323 /* Return true when NODE is reachable from another partition. */
325 bool
326 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
328 struct cgraph_edge *e;
329 if (!node->definition)
330 return false;
331 if (node->inlined_to)
332 return false;
333 for (e = node->callers; e; e = e->next_caller)
335 /* Ignore references from non-offloadable nodes while streaming NODE into
336 offload LTO section. */
337 if (!e->caller->need_lto_streaming)
338 continue;
340 if (e->caller->in_other_partition
341 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
342 return true;
344 return false;
347 /* Return true if NODE is referenced from within this partition. */
349 bool
350 referenced_from_this_partition_p (symtab_node *node,
351 lto_symtab_encoder_t encoder)
353 int i;
354 struct ipa_ref *ref = NULL;
356 for (i = 0; node->iterate_referring (i, ref); i++)
357 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
358 return true;
359 return false;
362 /* Return true when NODE is reachable from within this partition. */
364 bool
365 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
367 struct cgraph_edge *e;
368 for (e = node->callers; e; e = e->next_caller)
369 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
370 return true;
371 return false;
374 /* Output the cgraph NODE to OB. ENCODER is used to find the
375 reference number of NODE->inlined_to and to determine the set of nodes
376 we are writing to the current file. If NODE is not in that set, then NODE
377 is a boundary node and we pretend NODE just has a
378 decl and no callees. ENCODER also records which nodes have had their
379 bodies written so far; this is used to
380 determine if NODE is a clone of a previously written node. */
382 static void
383 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
384 lto_symtab_encoder_t encoder)
386 unsigned int tag;
387 struct bitpack_d bp;
388 bool boundary_p;
389 intptr_t ref;
390 bool in_other_partition = false;
391 struct cgraph_node *clone_of, *ultimate_clone_of;
392 ipa_opt_pass_d *pass;
393 int i;
394 const char *comdat;
395 const char *section;
396 tree group;
398 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
400 if (node->analyzed && (!boundary_p || node->alias
401 || (node->thunk && !node->inlined_to)))
402 tag = LTO_symtab_analyzed_node;
403 else
404 tag = LTO_symtab_unavail_node;
406 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
407 tag);
408 streamer_write_hwi_stream (ob->main_stream, node->order);
410 /* In WPA mode, we only output part of the call-graph. Also, we
411 fake cgraph node attributes. There are two cases that we care about.
413 Boundary nodes: There are nodes that are not part of SET but are
414 called from within SET. We artificially make them look like
415 externally visible nodes with no function body.
417 Cherry-picked nodes: These are nodes we pulled from other
418 translation units into SET during IPA-inlining. We make them look like
419 local static nodes to prevent clashes with other local statics. */
420 if (boundary_p && node->analyzed
421 && node->get_partitioning_class () == SYMBOL_PARTITION)
423 /* Inline clones cannot be part of boundary.
424 gcc_assert (!node->inlined_to);
426 FIXME: At the moment they can be, when partition contains an inline
427 clone that is a clone of an inline clone from outside the partition. We can
428 reshape the clone tree and make another tree the root, but it
429 needs a bit of extra work and will be promptly done by cgraph_remove_node
430 after reading back. */
431 in_other_partition = 1;
433 else if (UNLIKELY (lto_stream_offload_p
434 && lookup_attribute ("omp target device_ancestor_host",
435 DECL_ATTRIBUTES (node->decl))))
436 /* This symbol is only used as an argument to IFN_GOMP_TARGET_REV; this IFN
437 is ignored on the ACCEL_COMPILER. Thus, mark it as in_other_partition to silence
438 the verify_node_partition diagnostic. */
439 in_other_partition = 1;
441 clone_of = node->clone_of;
442 while (clone_of
443 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
444 if (clone_of->prev_sibling_clone)
445 clone_of = clone_of->prev_sibling_clone;
446 else
447 clone_of = clone_of->clone_of;
449 /* See if the body of the master function is output. If not, we are seeing only
450 a declaration and we do not need to pass down the clone tree. */
451 ultimate_clone_of = clone_of;
452 while (ultimate_clone_of && ultimate_clone_of->clone_of)
453 ultimate_clone_of = ultimate_clone_of->clone_of;
455 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
456 clone_of = NULL;
458 if (tag == LTO_symtab_analyzed_node)
459 gcc_assert (clone_of || !node->clone_of);
460 if (!clone_of)
461 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
462 else
463 streamer_write_hwi_stream (ob->main_stream, ref);
466 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream, node->decl);
467 node->count.stream_out (ob->main_stream);
468 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
470 streamer_write_hwi_stream (ob->main_stream,
471 node->ipa_transforms_to_apply.length ());
472 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
473 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
475 if (tag == LTO_symtab_analyzed_node)
477 if (node->inlined_to)
479 ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
480 gcc_assert (ref != LCC_NOT_FOUND);
482 else
483 ref = LCC_NOT_FOUND;
485 streamer_write_hwi_stream (ob->main_stream, ref);
488 group = node->get_comdat_group ();
489 if (group)
490 comdat = IDENTIFIER_POINTER (group);
491 else
492 comdat = "";
493 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
495 if (group)
497 if (node->same_comdat_group)
499 ref = LCC_NOT_FOUND;
500 for (struct symtab_node *n = node->same_comdat_group;
501 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
502 ref = lto_symtab_encoder_lookup (encoder, n);
504 else
505 ref = LCC_NOT_FOUND;
506 streamer_write_hwi_stream (ob->main_stream, ref);
509 section = node->get_section ();
510 if (!section)
511 section = "";
513 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
515 bp = bitpack_create (ob->main_stream);
516 bp_pack_value (&bp, node->local, 1);
517 bp_pack_value (&bp, node->externally_visible, 1);
518 bp_pack_value (&bp, node->no_reorder, 1);
519 bp_pack_value (&bp, node->definition, 1);
520 bp_pack_value (&bp, node->versionable, 1);
521 bp_pack_value (&bp, node->can_change_signature, 1);
522 bp_pack_value (&bp, node->redefined_extern_inline, 1);
523 bp_pack_value (&bp, node->force_output, 1);
524 bp_pack_value (&bp, node->forced_by_abi, 1);
525 bp_pack_value (&bp, node->unique_name, 1);
526 bp_pack_value (&bp, node->body_removed, 1);
527 bp_pack_value (&bp, node->semantic_interposition, 1);
528 bp_pack_value (&bp, node->implicit_section, 1);
529 bp_pack_value (&bp, node->address_taken, 1);
530 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
531 && node->get_partitioning_class () == SYMBOL_PARTITION
532 && (reachable_from_other_partition_p (node, encoder)
533 || referenced_from_other_partition_p (node, encoder)), 1);
534 bp_pack_value (&bp, node->lowered, 1);
535 bp_pack_value (&bp, in_other_partition, 1);
536 bp_pack_value (&bp, node->alias, 1);
537 bp_pack_value (&bp, node->transparent_alias, 1);
538 bp_pack_value (&bp, node->weakref, 1);
539 bp_pack_value (&bp, node->symver, 1);
540 bp_pack_value (&bp, node->frequency, 2);
541 bp_pack_value (&bp, node->only_called_at_startup, 1);
542 bp_pack_value (&bp, node->only_called_at_exit, 1);
543 bp_pack_value (&bp, node->tm_clone, 1);
544 bp_pack_value (&bp, node->calls_comdat_local, 1);
545 bp_pack_value (&bp, node->icf_merged, 1);
546 bp_pack_value (&bp, node->nonfreeing_fn, 1);
547 bp_pack_value (&bp, node->merged_comdat, 1);
548 bp_pack_value (&bp, node->merged_extern_inline, 1);
549 bp_pack_value (&bp, node->thunk, 1);
550 bp_pack_value (&bp, node->parallelized_function, 1);
551 bp_pack_value (&bp, node->declare_variant_alt, 1);
552 bp_pack_value (&bp, node->calls_declare_variant_alt, 1);
554 /* Always stream thunk info because we use it in
555 ipa_polymorphic_call_context::ipa_polymorphic_call_context
556 to properly interpret THIS pointers for thunks that have been converted
557 to GIMPLE. */
558 struct thunk_info *thunk = node->definition ? thunk_info::get (node) : NULL;
560 bp_pack_value (&bp, thunk != NULL, 1);
562 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
563 LDPR_NUM_KNOWN,
564 /* When doing an incremental link, we will get new resolution
565 info the next time we process the file. */
566 flag_incremental_link == INCREMENTAL_LINK_LTO
567 ? LDPR_UNKNOWN : node->resolution);
568 bp_pack_value (&bp, node->split_part, 1);
569 streamer_write_bitpack (&bp);
570 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
572 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
573 streamer_write_hwi_stream (ob->main_stream, node->unit_id);
574 if (DECL_STATIC_CONSTRUCTOR (node->decl))
575 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
576 if (DECL_STATIC_DESTRUCTOR (node->decl))
577 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
579 if (thunk)
580 thunk_info::get (node)->stream_out (ob);
583 /* Output the varpool NODE to OB.
584 If NODE is not in SET, then NODE is a boundary. */
586 static void
587 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
588 lto_symtab_encoder_t encoder)
590 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
591 bool encode_initializer_p
592 = (node->definition
593 && lto_symtab_encoder_encode_initializer_p (encoder, node));
594 struct bitpack_d bp;
595 int ref;
596 const char *comdat;
597 const char *section;
598 tree group;
600 gcc_assert (!encode_initializer_p || node->definition);
601 gcc_assert (boundary_p || encode_initializer_p);
603 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
604 LTO_symtab_variable);
605 streamer_write_hwi_stream (ob->main_stream, node->order);
606 lto_output_var_decl_ref (ob->decl_state, ob->main_stream, node->decl);
607 bp = bitpack_create (ob->main_stream);
608 bp_pack_value (&bp, node->externally_visible, 1);
609 bp_pack_value (&bp, node->no_reorder, 1);
610 bp_pack_value (&bp, node->force_output, 1);
611 bp_pack_value (&bp, node->forced_by_abi, 1);
612 bp_pack_value (&bp, node->unique_name, 1);
613 bp_pack_value (&bp,
614 node->body_removed
615 || (!encode_initializer_p && !node->alias && node->definition),
617 bp_pack_value (&bp, node->semantic_interposition, 1);
618 bp_pack_value (&bp, node->implicit_section, 1);
619 bp_pack_value (&bp, node->writeonly, 1);
620 bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
622 bp_pack_value (&bp, node->alias, 1);
623 bp_pack_value (&bp, node->transparent_alias, 1);
624 bp_pack_value (&bp, node->weakref, 1);
625 bp_pack_value (&bp, node->symver, 1);
626 bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
627 gcc_assert (node->definition || !node->analyzed);
628 /* Constant pool initializers can be de-unified into individual ltrans units.
629 FIXME: Alternatively, at -Os we may want to avoid generating the local
630 labels for them and share them across LTRANS partitions. */
631 if (node->get_partitioning_class () != SYMBOL_PARTITION)
633 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
634 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
636 else
638 bp_pack_value (&bp, node->definition
639 && referenced_from_other_partition_p (node, encoder), 1);
640 bp_pack_value (&bp, node->analyzed
641 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
642 /* in_other_partition. */
644 bp_pack_value (&bp, node->tls_model, 3);
645 bp_pack_value (&bp, node->used_by_single_function, 1);
646 bp_pack_value (&bp, node->dynamically_initialized, 1);
647 streamer_write_bitpack (&bp);
649 group = node->get_comdat_group ();
650 if (group)
651 comdat = IDENTIFIER_POINTER (group);
652 else
653 comdat = "";
654 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
656 if (group)
658 if (node->same_comdat_group)
660 ref = LCC_NOT_FOUND;
661 for (struct symtab_node *n = node->same_comdat_group;
662 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
663 ref = lto_symtab_encoder_lookup (encoder, n);
665 else
666 ref = LCC_NOT_FOUND;
667 streamer_write_hwi_stream (ob->main_stream, ref);
670 section = node->get_section ();
671 if (!section)
672 section = "";
673 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
675 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
676 LDPR_NUM_KNOWN, node->resolution);
679 /* Output the reference REF to OB using ENCODER. */
682 static void
683 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
684 lto_symtab_encoder_t encoder)
686 struct bitpack_d bp;
687 int nref;
688 int uid = !ref->stmt ? ref->lto_stmt_uid : gimple_uid (ref->stmt) + 1;
689 struct cgraph_node *node;
691 bp = bitpack_create (ob->main_stream);
692 bp_pack_value (&bp, ref->use, 3);
693 bp_pack_value (&bp, ref->speculative, 1);
694 streamer_write_bitpack (&bp);
695 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
696 gcc_assert (nref != LCC_NOT_FOUND);
697 streamer_write_hwi_stream (ob->main_stream, nref);
699 node = dyn_cast <cgraph_node *> (ref->referring);
700 if (node)
702 if (ref->stmt)
703 uid = gimple_uid (ref->stmt) + 1;
704 streamer_write_hwi_stream (ob->main_stream, uid);
705 bp_pack_value (&bp, ref->speculative_id, 16);
706 streamer_write_bitpack (&bp);
710 /* Stream out profile_summary to OB. */
712 static void
713 output_profile_summary (struct lto_simple_output_block *ob)
715 if (profile_info)
717 /* We do not output num and run_max; they are not used by
718 GCC profile feedback and are difficult to merge from multiple
719 units. */
720 unsigned runs = (profile_info->runs);
721 streamer_write_uhwi_stream (ob->main_stream, runs);
723 /* IPA-profile computes the hot bb threshold based on the cumulated
724 whole program profile. We need to stream it down to ltrans. */
725 if (flag_wpa)
726 streamer_write_gcov_count_stream (ob->main_stream,
727 get_hot_bb_threshold ());
729 else
730 streamer_write_uhwi_stream (ob->main_stream, 0);
733 /* Output all callees or indirect outgoing edges. EDGE must be the first such
734 edge. */
736 static void
737 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
738 struct lto_simple_output_block *ob,
739 lto_symtab_encoder_t encoder)
741 if (!edge)
742 return;
744 /* Output edges in backward direction, so the reconstructed callgraph matches
745 and it is easy to associate call sites with the IPA pass summaries. */
746 while (edge->next_callee)
747 edge = edge->next_callee;
748 for (; edge; edge = edge->prev_callee)
749 lto_output_edge (ob, edge, encoder);
752 /* Output the references of the nodes in ENCODER. */
754 static void
755 output_refs (lto_symtab_encoder_t encoder)
757 struct lto_simple_output_block *ob;
758 int count;
759 struct ipa_ref *ref;
761 ob = lto_create_simple_output_block (LTO_section_refs);
763 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
765 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
767 /* IPA_REF_ALIAS references are always preserved
768 in the boundary. An alias node can't have other references and
769 can always be handled as if it's not in the boundary. */
770 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
771 continue;
773 count = node->ref_list.nreferences ();
774 if (count)
776 streamer_write_gcov_count_stream (ob->main_stream, count);
777 streamer_write_uhwi_stream (ob->main_stream,
778 lto_symtab_encoder_lookup (encoder, node));
779 for (int i = 0; node->iterate_reference (i, ref); i++)
780 lto_output_ref (ob, ref, encoder);
782 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
783 if (cnode->declare_variant_alt)
784 omp_lto_output_declare_variant_alt (ob, cnode, encoder);
787 streamer_write_uhwi_stream (ob->main_stream, 0);
789 lto_destroy_simple_output_block (ob);
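/* Illustrative sketch, not part of the original file: the LTO_section_refs
   stream written above consists of one block per node that has references,

     <number of references> <encoder index of the referring node>
     <reference record> ... <reference record>

   and is terminated by a single 0 count, which is how input_refs below
   knows when to stop.  */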
792 /* Add NODE into the encoder as well as the nodes it is cloned from.
793 Do it in a way so that a clone's origin always appears before the clone. */
795 static void
796 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
797 bool include_body)
799 if (node->clone_of)
800 add_node_to (encoder, node->clone_of, include_body);
801 if (include_body)
802 lto_set_symtab_encoder_encode_body (encoder, node);
803 lto_symtab_encoder_encode (encoder, node);
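/* Illustrative example, not part of the original file: for a hypothetical
   clone chain where clone2->clone_of == clone1 and clone1->clone_of == master,
   the recursion above encodes the nodes in the order

     add_node_to (encoder, clone2, true);
       => master is encoded first, then clone1, then clone2

   so a clone's origin always precedes the clone in the encoder.  */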
806 /* Add all references in NODE to encoders. */
808 static void
809 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
811 int i;
812 struct ipa_ref *ref = NULL;
813 for (i = 0; node->iterate_reference (i, ref); i++)
814 if (is_a <cgraph_node *> (ref->referred))
815 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
816 else
817 lto_symtab_encoder_encode (encoder, ref->referred);
820 /* Select what needs to be streamed out. In regular lto mode stream everything.
821 In offload lto mode stream only nodes marked as offloadable. */
822 void
823 select_what_to_stream (void)
825 struct symtab_node *snode;
826 FOR_EACH_SYMBOL (snode)
827 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
830 /* Find all symbols we want to stream into the given partition and insert them
831 into the encoder.
833 The function actually replaces IN_ENCODER by a new one. The reason is that
834 the streaming code needs a clone's origin to be streamed before the clone.
835 This means that we need to insert the nodes in a specific order, an order
836 that is ignored by the partitioning logic earlier. */
838 lto_symtab_encoder_t
839 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
841 struct cgraph_edge *edge;
842 int i;
843 lto_symtab_encoder_t encoder;
844 lto_symtab_encoder_iterator lsei;
845 hash_set<void *> reachable_call_targets;
847 encoder = lto_symtab_encoder_new (false);
849 /* Go over all entries in the IN_ENCODER and duplicate them to
850 ENCODER. At the same time insert masters of clones so
851 every master appears before its clones. */
852 for (lsei = lsei_start_function_in_partition (in_encoder);
853 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
855 struct cgraph_node *node = lsei_cgraph_node (lsei);
856 if (!node->need_lto_streaming)
857 continue;
858 add_node_to (encoder, node, true);
859 lto_set_symtab_encoder_in_partition (encoder, node);
860 create_references (encoder, node);
862 for (lsei = lsei_start_variable_in_partition (in_encoder);
863 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
865 varpool_node *vnode = lsei_varpool_node (lsei);
867 if (!vnode->need_lto_streaming)
868 continue;
869 lto_set_symtab_encoder_in_partition (encoder, vnode);
870 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
871 create_references (encoder, vnode);
873 /* Also pickle in the initializers of all referenced read-only variables
874 to help folding. Constant pool variables are not shared, so we must
875 pickle those too. */
876 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
878 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
879 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
881 if (!lto_symtab_encoder_encode_initializer_p (encoder,
882 vnode)
883 && (((vnode->ctor_useable_for_folding_p ()
884 && (!DECL_VIRTUAL_P (vnode->decl)
885 || !flag_wpa
886 || flag_ltrans_devirtualize)))))
888 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
889 create_references (encoder, vnode);
894 /* Go over all the nodes again to include callees that are not in
895 SET. */
896 for (lsei = lsei_start_function_in_partition (encoder);
897 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
899 struct cgraph_node *node = lsei_cgraph_node (lsei);
900 for (edge = node->callees; edge; edge = edge->next_callee)
902 struct cgraph_node *callee = edge->callee;
903 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
905 /* We should have moved all the inlines. */
906 gcc_assert (!callee->inlined_to);
907 add_node_to (encoder, callee, false);
910 /* Add all possible targets for late devirtualization. */
911 if (flag_ltrans_devirtualize || !flag_wpa)
912 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
913 if (edge->indirect_info->polymorphic)
915 unsigned int i;
916 void *cache_token;
917 bool final;
918 vec <cgraph_node *>targets
919 = possible_polymorphic_call_targets
920 (edge, &final, &cache_token);
921 if (cache_token != NULL
922 && !reachable_call_targets.add (cache_token))
924 for (i = 0; i < targets.length (); i++)
926 struct cgraph_node *callee = targets[i];
928 /* Adding external declarations into the unit serves
929 no purpose and just increases its boundary. */
930 if (callee->definition
931 && !lto_symtab_encoder_in_partition_p
932 (encoder, callee))
934 gcc_assert (!callee->inlined_to);
935 add_node_to (encoder, callee, false);
941 /* Be sure to also insert alias targets and thunk callees. These need
942 to stay to aid local calling conventions. */
943 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
945 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
946 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
948 if (node->alias && node->analyzed)
949 create_references (encoder, node);
950 if (cnode
951 && cnode->thunk && !cnode->inlined_to)
952 add_node_to (encoder, cnode->callees->callee, false);
953 while (node->transparent_alias && node->analyzed)
955 node = node->get_alias_target ();
956 if (is_a <cgraph_node *> (node))
957 add_node_to (encoder, dyn_cast <cgraph_node *> (node),
958 false);
959 else
960 lto_symtab_encoder_encode (encoder, node);
963 lto_symtab_encoder_delete (in_encoder);
964 return encoder;
967 /* Output the part of the symtab in SET and VSET. */
969 void
970 output_symtab (void)
972 struct cgraph_node *node;
973 struct lto_simple_output_block *ob;
974 int i, n_nodes;
975 lto_symtab_encoder_t encoder;
977 if (flag_wpa)
978 output_cgraph_opt_summary ();
980 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
982 output_profile_summary (ob);
984 /* An encoder for cgraph nodes should have been created by
985 ipa_write_summaries_1. */
986 gcc_assert (ob->decl_state->symtab_node_encoder);
987 encoder = ob->decl_state->symtab_node_encoder;
989 /* Write out the nodes. We must first output a node and then its clones,
990 otherwise at the time of reading back the node there would be nothing to clone
991 from. */
992 n_nodes = lto_symtab_encoder_size (encoder);
993 for (i = 0; i < n_nodes; i++)
995 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
996 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
997 lto_output_node (ob, cnode, encoder);
998 else
999 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
1002 /* Go over the nodes in SET again to write edges. */
1003 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
1005 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
1006 if (node
1007 && ((node->thunk && !node->inlined_to)
1008 || lto_symtab_encoder_in_partition_p (encoder, node)))
1010 output_outgoing_cgraph_edges (node->callees, ob, encoder);
1011 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
1015 streamer_write_uhwi_stream (ob->main_stream, 0);
1017 lto_destroy_simple_output_block (ob);
1019 /* Emit toplevel asms.
1020 When doing WPA we must output every asm just once. Since we do not partition asm
1021 nodes at all, output them to the first output. This is kind of a hack, but
1022 should work well. */
1023 if (!asm_nodes_output && !lto_stream_offload_p)
1025 asm_nodes_output = true;
1026 lto_output_toplevel_asms ();
1029 output_refs (encoder);
1032 /* Return identifier encoded in IB as a plain string. */
1034 static tree
1035 read_identifier (class lto_input_block *ib)
1037 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1038 tree id;
1040 if (ib->data[ib->p + len])
1041 lto_section_overrun (ib);
1042 if (!len)
1044 ib->p++;
1045 return NULL;
1047 id = get_identifier (ib->data + ib->p);
1048 ib->p += len + 1;
1049 return id;
1052 /* Return string encoded in IB, NULL if string is empty. */
1054 static const char *
1055 read_string (class lto_input_block *ib)
1057 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1058 const char *str;
1060 if (ib->data[ib->p + len])
1061 lto_section_overrun (ib);
1062 if (!len)
1064 ib->p++;
1065 return NULL;
1067 str = ib->data + ib->p;
1068 ib->p += len + 1;
1069 return str;
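/* Illustrative example, not part of the original file: identifiers and
   strings are stored in the section as NUL-terminated byte sequences. A
   comdat group "foo" written by lto_output_node as

     streamer_write_data_stream (ob->main_stream, "foo", strlen ("foo") + 1);

   reads back via read_identifier as get_identifier ("foo"), while an empty
   group is a single '\0' byte and reads back as NULL.  */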
1072 /* Output function/variable tables that will allow libgomp to look up offload
1073 target code.
1074 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1075 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1076 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1078 void
1079 output_offload_tables (void)
1081 bool output_requires = (flag_openmp
1082 && (omp_requires_mask & OMP_REQUIRES_TARGET_USED) != 0);
1083 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars)
1084 && !output_requires)
1085 return;
1087 struct lto_simple_output_block *ob
1088 = lto_create_simple_output_block (LTO_section_offload_table);
1090 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1092 symtab_node *node = symtab_node::get ((*offload_funcs)[i]);
1093 if (!node)
1094 continue;
1095 node->force_output = true;
1096 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1097 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1098 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream,
1099 (*offload_funcs)[i]);
1102 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1104 symtab_node *node = symtab_node::get ((*offload_vars)[i]);
1105 if (!node)
1106 continue;
1107 node->force_output = true;
1108 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1109 LTO_symtab_last_tag, LTO_symtab_variable);
1110 lto_output_var_decl_ref (ob->decl_state, ob->main_stream,
1111 (*offload_vars)[i]);
1114 if (output_requires)
1116 HOST_WIDE_INT val = ((HOST_WIDE_INT) omp_requires_mask
1117 & (OMP_REQUIRES_UNIFIED_ADDRESS
1118 | OMP_REQUIRES_UNIFIED_SHARED_MEMORY
1119 | OMP_REQUIRES_REVERSE_OFFLOAD
1120 | OMP_REQUIRES_TARGET_USED));
1121 /* (Mis)use LTO_symtab_edge for this variable. */
1122 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1123 LTO_symtab_last_tag, LTO_symtab_edge);
1124 streamer_write_hwi_stream (ob->main_stream, val);
1127 streamer_write_uhwi_stream (ob->main_stream, 0);
1128 lto_destroy_simple_output_block (ob);
1130 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1131 streamed to one partition only. That's why we free offload_funcs and
1132 offload_vars after the first call of output_offload_tables. */
1133 if (flag_wpa)
1135 vec_free (offload_funcs);
1136 vec_free (offload_vars);
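/* Illustrative sketch, not part of the original file: the offload table
   section written above is a flat sequence of tagged records,

     LTO_symtab_unavail_node <function decl ref>   (one per offload function)
     LTO_symtab_variable <variable decl ref>       (one per offload variable)
     LTO_symtab_edge <omp_requires_mask value>     (only if requires output)
     0                                             (stopper)

   which input_offload_tables below decodes using the same tags.  */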
1140 /* Verify the partitioning of NODE. */
1142 static inline void
1143 verify_node_partition (symtab_node *node)
1145 if (flag_ltrans)
1146 return;
1148 #ifdef ACCEL_COMPILER
1149 if (node->in_other_partition)
1151 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1153 if (lookup_attribute ("omp target device_ancestor_host",
1154 DECL_ATTRIBUTES (node->decl)) != NULL)
1155 return;
1156 error_at (DECL_SOURCE_LOCATION (node->decl),
1157 "function %qs has been referenced in offloaded code but"
1158 " hasn%'t been marked to be included in the offloaded code",
1159 node->name ());
1161 else if (VAR_P (node->decl))
1162 error_at (DECL_SOURCE_LOCATION (node->decl),
1163 "variable %qs has been referenced in offloaded code but"
1164 " hasn%'t been marked to be included in the offloaded code",
1165 node->name ());
1166 else
1167 gcc_unreachable ();
1169 #else
1170 gcc_assert (!node->in_other_partition
1171 && !node->used_from_other_partition);
1172 #endif
1175 /* Overwrite the information in NODE based on FILE_DATA, TAG and the
1176 bitflags in BP. This is called either to initialize
1177 NODE or to replace the values in it, for instance because the first
1178 time we saw it, the function body was not available but now it
1179 is. BP is a bitpack with all the bitflags for NODE read from the
1180 stream. Set *HAS_THUNK_INFO to indicate whether thunk info should
1181 be streamed in. */
1183 static void
1184 input_overwrite_node (struct lto_file_decl_data *file_data,
1185 struct cgraph_node *node,
1186 enum LTO_symtab_tags tag,
1187 struct bitpack_d *bp, bool *has_thunk_info)
1189 node->aux = (void *) tag;
1190 node->lto_file_data = file_data;
1192 node->local = bp_unpack_value (bp, 1);
1193 node->externally_visible = bp_unpack_value (bp, 1);
1194 node->no_reorder = bp_unpack_value (bp, 1);
1195 node->definition = bp_unpack_value (bp, 1);
1196 node->versionable = bp_unpack_value (bp, 1);
1197 node->can_change_signature = bp_unpack_value (bp, 1);
1198 node->redefined_extern_inline = bp_unpack_value (bp, 1);
1199 node->force_output = bp_unpack_value (bp, 1);
1200 node->forced_by_abi = bp_unpack_value (bp, 1);
1201 node->unique_name = bp_unpack_value (bp, 1);
1202 node->body_removed = bp_unpack_value (bp, 1);
1203 node->semantic_interposition = bp_unpack_value (bp, 1);
1204 node->implicit_section = bp_unpack_value (bp, 1);
1205 node->address_taken = bp_unpack_value (bp, 1);
1206 node->used_from_other_partition = bp_unpack_value (bp, 1);
1207 node->lowered = bp_unpack_value (bp, 1);
1208 node->analyzed = tag == LTO_symtab_analyzed_node;
1209 node->in_other_partition = bp_unpack_value (bp, 1);
1210 if (node->in_other_partition
1211 /* Avoid updating the decl when we are seeing just an inline clone.
1212 When inlining a function that has functions already inlined into it,
1213 we produce clones of inline clones.
1215 WPA partitioning might put each clone into a different unit and
1216 we might end up streaming an inline clone from another partition
1217 to support the clone we are interested in. */
1218 && (!node->clone_of
1219 || node->clone_of->decl != node->decl))
1221 DECL_EXTERNAL (node->decl) = 1;
1222 TREE_STATIC (node->decl) = 0;
1224 node->alias = bp_unpack_value (bp, 1);
1225 node->transparent_alias = bp_unpack_value (bp, 1);
1226 node->weakref = bp_unpack_value (bp, 1);
1227 node->symver = bp_unpack_value (bp, 1);
1228 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1229 node->only_called_at_startup = bp_unpack_value (bp, 1);
1230 node->only_called_at_exit = bp_unpack_value (bp, 1);
1231 node->tm_clone = bp_unpack_value (bp, 1);
1232 node->calls_comdat_local = bp_unpack_value (bp, 1);
1233 node->icf_merged = bp_unpack_value (bp, 1);
1234 node->nonfreeing_fn = bp_unpack_value (bp, 1);
1235 node->merged_comdat = bp_unpack_value (bp, 1);
1236 node->merged_extern_inline = bp_unpack_value (bp, 1);
1237 node->thunk = bp_unpack_value (bp, 1);
1238 node->parallelized_function = bp_unpack_value (bp, 1);
1239 node->declare_variant_alt = bp_unpack_value (bp, 1);
1240 node->calls_declare_variant_alt = bp_unpack_value (bp, 1);
1241 *has_thunk_info = bp_unpack_value (bp, 1);
1242 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1243 LDPR_NUM_KNOWN);
1244 node->split_part = bp_unpack_value (bp, 1);
1245 verify_node_partition (node);
1248 /* Return the identifier of the symbol that DECL is an alias of. */
1250 static tree
1251 get_alias_symbol (tree decl)
1253 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1254 return get_identifier (TREE_STRING_POINTER
1255 (TREE_VALUE (TREE_VALUE (alias))));
1258 /* Read a node from input_block IB. TAG is the node's tag just read.
1259 Return the node read or overwritten. */
1261 static struct cgraph_node *
1262 input_node (struct lto_file_decl_data *file_data,
1263 class lto_input_block *ib,
1264 enum LTO_symtab_tags tag,
1265 vec<symtab_node *> nodes)
1267 gcc::pass_manager *passes = g->get_passes ();
1268 tree fn_decl;
1269 struct cgraph_node *node;
1270 struct bitpack_d bp;
1271 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1272 int clone_ref;
1273 int order;
1274 int i, count;
1275 tree group;
1276 const char *section;
1277 order = streamer_read_hwi (ib) + file_data->order_base;
1278 clone_ref = streamer_read_hwi (ib);
1279 bool has_thunk_info;
1281 fn_decl = lto_input_fn_decl_ref (ib, file_data);
1283 if (clone_ref != LCC_NOT_FOUND)
1285 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1286 profile_count::uninitialized (), false,
1287 vNULL, false, NULL, NULL);
1289 else
1291 /* Declarations of functions can already be merged with a declaration
1292 from another input file. We keep the cgraph unmerged until after streaming
1293 of IPA passes is done. Always forcibly create a fresh node. */
1294 node = symtab->create_empty ();
1295 node->decl = fn_decl;
1296 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
1297 node->ifunc_resolver = 1;
1298 node->register_symbol ();
1301 node->order = order;
1302 if (order >= symtab->order)
1303 symtab->order = order + 1;
1305 node->count = profile_count::stream_in (ib);
1306 node->count_materialization_scale = streamer_read_hwi (ib);
1308 count = streamer_read_hwi (ib);
1309 node->ipa_transforms_to_apply = vNULL;
1310 for (i = 0; i < count; i++)
1312 opt_pass *pass;
1313 int pid = streamer_read_hwi (ib);
1315 gcc_assert (pid < passes->passes_by_id_size);
1316 pass = passes->passes_by_id[pid];
1317 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1320 if (tag == LTO_symtab_analyzed_node)
1321 ref = streamer_read_hwi (ib);
1323 group = read_identifier (ib);
1324 if (group)
1325 ref2 = streamer_read_hwi (ib);
1327 /* Make sure that we have not read this node before. Nodes that
1328 have already been read will have their tag stored in the 'aux'
1329 field. Since built-in functions can be referenced in multiple
1330 functions, they are expected to be read more than once. */
1331 if (node->aux && !fndecl_built_in_p (node->decl))
1332 internal_error ("bytecode stream: found multiple instances of cgraph "
1333 "node with uid %d", node->get_uid ());
1335 node->tp_first_run = streamer_read_uhwi (ib);
1337 bp = streamer_read_bitpack (ib);
1339 input_overwrite_node (file_data, node, tag, &bp, &has_thunk_info);
1341 /* Store a reference for now, and fix up later to be a pointer. */
1342 node->inlined_to = (cgraph_node *) (intptr_t) ref;
1344 if (group)
1346 node->set_comdat_group (group);
1347 /* Store a reference for now, and fix up later to be a pointer. */
1348 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1350 else
1351 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1352 section = read_string (ib);
1353 if (section)
1354 node->set_section_for_node (section);
1356 if (node->alias && !node->analyzed && node->weakref)
1357 node->alias_target = get_alias_symbol (node->decl);
1358 node->profile_id = streamer_read_hwi (ib);
1359 node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
1360 if (symtab->max_unit < node->unit_id)
1361 symtab->max_unit = node->unit_id;
1362 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1363 node->set_init_priority (streamer_read_hwi (ib));
1364 if (DECL_STATIC_DESTRUCTOR (node->decl))
1365 node->set_fini_priority (streamer_read_hwi (ib));
1367 if (has_thunk_info)
1368 thunk_info::get_create (node)->stream_in (ib);
1370 return node;
1373 /* Read a varpool node from input_block IB using the info in FILE_DATA.
1374 Return the node read or overwritten. */
1376 static varpool_node *
1377 input_varpool_node (struct lto_file_decl_data *file_data,
1378 class lto_input_block *ib)
1380 tree var_decl;
1381 varpool_node *node;
1382 struct bitpack_d bp;
1383 int ref = LCC_NOT_FOUND;
1384 int order;
1385 tree group;
1386 const char *section;
1388 order = streamer_read_hwi (ib) + file_data->order_base;
1389 var_decl = lto_input_var_decl_ref (ib, file_data);
1391 /* Declarations can already be merged with a declaration
1392 from another input file. We keep the symbol table unmerged until after
1393 streaming of IPA passes is done. Always forcibly create a fresh node. */
1394 node = varpool_node::create_empty ();
1395 node->decl = var_decl;
1396 node->register_symbol ();
1398 node->order = order;
1399 if (order >= symtab->order)
1400 symtab->order = order + 1;
1401 node->lto_file_data = file_data;
1403 bp = streamer_read_bitpack (ib);
1404 node->externally_visible = bp_unpack_value (&bp, 1);
1405 node->no_reorder = bp_unpack_value (&bp, 1);
1406 node->force_output = bp_unpack_value (&bp, 1);
1407 node->forced_by_abi = bp_unpack_value (&bp, 1);
1408 node->unique_name = bp_unpack_value (&bp, 1);
1409 node->body_removed = bp_unpack_value (&bp, 1);
1410 node->semantic_interposition = bp_unpack_value (&bp, 1);
1411 node->implicit_section = bp_unpack_value (&bp, 1);
1412 node->writeonly = bp_unpack_value (&bp, 1);
1413 node->definition = bp_unpack_value (&bp, 1);
1414 node->alias = bp_unpack_value (&bp, 1);
1415 node->transparent_alias = bp_unpack_value (&bp, 1);
1416 node->weakref = bp_unpack_value (&bp, 1);
1417 node->symver = bp_unpack_value (&bp, 1);
1418 node->analyzed = bp_unpack_value (&bp, 1);
1419 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1420 node->in_other_partition = bp_unpack_value (&bp, 1);
1421 if (node->in_other_partition)
1423 DECL_EXTERNAL (node->decl) = 1;
1424 TREE_STATIC (node->decl) = 0;
1426 if (node->alias && !node->analyzed && node->weakref)
1427 node->alias_target = get_alias_symbol (node->decl);
1428 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1429 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1430 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1431 group = read_identifier (ib);
1432 if (group)
1434 node->set_comdat_group (group);
1435 ref = streamer_read_hwi (ib);
1436 /* Store a reference for now, and fix up later to be a pointer. */
1437 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1439 else
1440 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1441 section = read_string (ib);
1442 if (section)
1443 node->set_section_for_node (section);
1444 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1445 LDPR_NUM_KNOWN);
1446 verify_node_partition (node);
1447 return node;
1450 /* Read a reference for REFERRING_NODE from input_block IB. NODES is the
1451 vector of previously read symtab nodes used to resolve the referred node. */
1453 static void
1454 input_ref (class lto_input_block *ib,
1455 symtab_node *referring_node,
1456 vec<symtab_node *> nodes)
1458 symtab_node *node = NULL;
1459 struct bitpack_d bp;
1460 enum ipa_ref_use use;
1461 bool speculative;
1462 struct ipa_ref *ref;
1464 bp = streamer_read_bitpack (ib);
1465 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1466 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1467 node = nodes[streamer_read_hwi (ib)];
1468 ref = referring_node->create_reference (node, use);
1469 ref->speculative = speculative;
1470 if (is_a <cgraph_node *> (referring_node))
1472 ref->lto_stmt_uid = streamer_read_hwi (ib);
1473 bp = streamer_read_bitpack (ib);
1474 ref->speculative_id = bp_unpack_value (&bp, 16);
1478 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1479 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1480 edge being read is indirect (in the sense that it has
1481 indirect_unknown_callee set). */
1483 static void
1484 input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
1485 bool indirect)
1487 struct cgraph_node *caller, *callee;
1488 struct cgraph_edge *edge;
1489 unsigned int stmt_id, speculative_id;
1490 profile_count count;
1491 cgraph_inline_failed_t inline_failed;
1492 struct bitpack_d bp;
1493 int ecf_flags = 0;
1495 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1496 if (caller == NULL || caller->decl == NULL_TREE)
1497 internal_error ("bytecode stream: no caller found while reading edge");
1499 if (!indirect)
1501 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1502 if (callee == NULL || callee->decl == NULL_TREE)
1503 internal_error ("bytecode stream: no callee found while reading edge");
1505 else
1506 callee = NULL;
1508 count = profile_count::stream_in (ib);
1510 bp = streamer_read_bitpack (ib);
1511 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1512 stmt_id = bp_unpack_var_len_unsigned (&bp);
1513 speculative_id = bp_unpack_value (&bp, 16);
1515 if (indirect)
1516 edge = caller->create_indirect_edge (NULL, 0, count);
1517 else
1518 edge = caller->create_edge (callee, NULL, count);
1520 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1521 edge->speculative = bp_unpack_value (&bp, 1);
1522 edge->lto_stmt_uid = stmt_id;
1523 edge->speculative_id = speculative_id;
1524 edge->inline_failed = inline_failed;
1525 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1526 edge->can_throw_external = bp_unpack_value (&bp, 1);
1527 edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
1528 if (indirect)
1530 if (bp_unpack_value (&bp, 1))
1531 ecf_flags |= ECF_CONST;
1532 if (bp_unpack_value (&bp, 1))
1533 ecf_flags |= ECF_PURE;
1534 if (bp_unpack_value (&bp, 1))
1535 ecf_flags |= ECF_NORETURN;
1536 if (bp_unpack_value (&bp, 1))
1537 ecf_flags |= ECF_MALLOC;
1538 if (bp_unpack_value (&bp, 1))
1539 ecf_flags |= ECF_NOTHROW;
1540 if (bp_unpack_value (&bp, 1))
1541 ecf_flags |= ECF_RETURNS_TWICE;
1542 edge->indirect_info->ecf_flags = ecf_flags;
1544 edge->indirect_info->num_speculative_call_targets
1545 = bp_unpack_value (&bp, 16);
1550 /* Read a cgraph from IB using the info in FILE_DATA. */
1552 static vec<symtab_node *>
1553 input_cgraph_1 (struct lto_file_decl_data *file_data,
1554 class lto_input_block *ib)
1556 enum LTO_symtab_tags tag;
1557 vec<symtab_node *> nodes = vNULL;
1558 symtab_node *node;
1559 unsigned i;
1561 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1562 file_data->order_base = symtab->order;
1563 file_data->unit_base = symtab->max_unit + 1;
1564 while (tag)
1566 if (tag == LTO_symtab_edge)
1567 input_edge (ib, nodes, false);
1568 else if (tag == LTO_symtab_indirect_edge)
1569 input_edge (ib, nodes, true);
1570 else if (tag == LTO_symtab_variable)
1572 node = input_varpool_node (file_data, ib);
1573 nodes.safe_push (node);
1574 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1576 else
1578 node = input_node (file_data, ib, tag, nodes);
1579 if (node == NULL || node->decl == NULL_TREE)
1580 internal_error ("bytecode stream: found empty cgraph node");
1581 nodes.safe_push (node);
1582 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1585 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1588 lto_input_toplevel_asms (file_data, file_data->order_base);
1590 /* AUX pointers should all be non-zero for function nodes read from the stream. */
1591 if (flag_checking)
1593 FOR_EACH_VEC_ELT (nodes, i, node)
1594 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1596 FOR_EACH_VEC_ELT (nodes, i, node)
1598 int ref;
1599 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1601 ref = (int) (intptr_t) cnode->inlined_to;
1603 /* We share declarations of builtins, so we may read the same node twice. */
1604 if (!node->aux)
1605 continue;
1606 node->aux = NULL;
1608 /* Fixup inlined_to from reference to pointer. */
1609 if (ref != LCC_NOT_FOUND)
1610 dyn_cast<cgraph_node *> (node)->inlined_to
1611 = dyn_cast<cgraph_node *> (nodes[ref]);
1612 else
1613 cnode->inlined_to = NULL;
1616 ref = (int) (intptr_t) node->same_comdat_group;
1618 /* Fixup same_comdat_group from reference to pointer. */
1619 if (ref != LCC_NOT_FOUND)
1620 node->same_comdat_group = nodes[ref];
1621 else
1622 node->same_comdat_group = NULL;
1624 FOR_EACH_VEC_ELT (nodes, i, node)
1625 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1626 return nodes;
1629 /* Input ipa_refs. */
1631 static void
1632 input_refs (class lto_input_block *ib,
1633 vec<symtab_node *> nodes)
1635 int count;
1636 int idx;
1637 while (true)
1639 symtab_node *node;
1640 count = streamer_read_uhwi (ib);
1641 if (!count)
1642 break;
1643 idx = streamer_read_uhwi (ib);
1644 node = nodes[idx];
1645 while (count)
1647 input_ref (ib, node, nodes);
1648 count--;
1650 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1651 if (cnode->declare_variant_alt)
1652 omp_lto_input_declare_variant_alt (ib, cnode, nodes);
1656 /* Input profile_info from IB. */
1657 static void
1658 input_profile_summary (class lto_input_block *ib,
1659 struct lto_file_decl_data *file_data)
1661 unsigned int runs = streamer_read_uhwi (ib);
1662 if (runs)
1664 file_data->profile_info.runs = runs;
1666 /* IPA-profile computes the hot bb threshold based on the cumulated
1667 whole program profile. We need to stream it down to ltrans. */
1668 if (flag_ltrans)
1669 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1674 /* Rescale profile summaries to the same number of runs in the whole unit. */
1676 static void
1677 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1679 struct lto_file_decl_data *file_data;
1680 unsigned int j;
1681 gcov_unsigned_t max_runs = 0;
1682 struct cgraph_node *node;
1683 struct cgraph_edge *edge;
1685 /* Find the unit with the maximal number of runs. If we ever get serious about
1686 roundoff errors, we might also consider computing the least common
1687 multiple. */
1688 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1689 if (max_runs < file_data->profile_info.runs)
1690 max_runs = file_data->profile_info.runs;
1692 if (!max_runs)
1693 return;
1695 /* Simple overflow check. We probably don't need to support that many train
1696 runs. Such a large value probably implies data corruption anyway. */
1697 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1699 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1700 INT_MAX / REG_BR_PROB_BASE);
1701 return;
1704 profile_info = XCNEW (gcov_summary);
1705 profile_info->runs = max_runs;
1707 /* If merging already happened at WPA time, we are done. */
1708 if (flag_ltrans)
1709 return;
1711 /* Now compute count_materialization_scale of each node.
1712 During LTRANS we already have values of count_materialization_scale
1713 computed, so just update them. */
1714 FOR_EACH_FUNCTION (node)
1715 if (node->lto_file_data
1716 && node->lto_file_data->profile_info.runs)
1718 int scale;
1720 scale = RDIV (node->count_materialization_scale * max_runs,
1721 node->lto_file_data->profile_info.runs);
1722 node->count_materialization_scale = scale;
1723 if (scale < 0)
1724 fatal_error (input_location, "Profile information in %s corrupted",
1725 file_data->file_name);
1727 if (scale == REG_BR_PROB_BASE)
1728 continue;
1729 for (edge = node->callees; edge; edge = edge->next_callee)
1730 if (edge->count.ipa ().nonzero_p ())
1731 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1732 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1733 if (edge->count.ipa ().nonzero_p ())
1734 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1735 if (node->count.ipa ().nonzero_p ())
1736 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
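/* Illustrative example, not part of the original file and assuming the node's
   count_materialization_scale still has its default value REG_BR_PROB_BASE:
   a unit trained with profile_info.runs == 2 merged into a program whose
   max_runs == 6 gets

     scale = RDIV (REG_BR_PROB_BASE * 6, 2)  (== 3 * REG_BR_PROB_BASE)

   and the edge and node counts above are then multiplied by
   scale / REG_BR_PROB_BASE, i.e. by 3.  */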
1740 /* Input and merge the symtab from each of the .o files passed to
1741 lto1. */
1743 void
1744 input_symtab (void)
1746 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1747 struct lto_file_decl_data *file_data;
1748 unsigned int j = 0;
1749 struct cgraph_node *node;
1751 while ((file_data = file_data_vec[j++]))
1753 const char *data;
1754 size_t len;
1755 class lto_input_block *ib;
1756 vec<symtab_node *> nodes;
1758 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1759 &data, &len);
1760 if (!ib)
1761 fatal_error (input_location,
1762 "cannot find LTO cgraph in %s", file_data->file_name);
1763 input_profile_summary (ib, file_data);
1764 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1765 nodes = input_cgraph_1 (file_data, ib);
1766 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1767 ib, data, len);
1769 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1770 &data, &len);
1771 if (!ib)
1772 fatal_error (input_location, "cannot find LTO section refs in %s",
1773 file_data->file_name);
1774 input_refs (ib, nodes);
1775 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1776 ib, data, len);
1777 if (flag_ltrans)
1778 input_cgraph_opt_summary (nodes);
1779 nodes.release ();
1782 merge_profile_summaries (file_data_vec);
1784 /* Clear out the aux field that was used to store enough state to
1785 tell which nodes should be overwritten. */
1786 FOR_EACH_FUNCTION (node)
1788 /* Some nodes may have been created by cgraph_node. This
1789 happens when the callgraph contains nested functions. If the
1790 node for the parent function was never emitted to the gimple
1791 file, cgraph_node will create a node for it when setting the
1792 context of the nested function. */
1793 if (node->lto_file_data)
1794 node->aux = NULL;
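/* Roughly, each LTO file processed by the loop above contributes, in order:

     LTO_section_symtab_nodes     profile summary followed by the symtab nodes
     LTO_section_refs             inter-symbol references
     LTO_section_cgraph_opt_sum   clone info, read only when compiling an
                                  LTRANS unit

   after which the per-file profile summaries are merged into one. */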
1798 static void
1799 omp_requires_to_name (char *buf, size_t size, HOST_WIDE_INT requires_mask)
1801 char *end = buf + size, *p = buf;
1802 if (requires_mask & GOMP_REQUIRES_UNIFIED_ADDRESS)
1803 p += snprintf (p, end - p, "unified_address");
1804 if (requires_mask & GOMP_REQUIRES_UNIFIED_SHARED_MEMORY)
1805 p += snprintf (p, end - p, "%sunified_shared_memory",
1806 (p == buf ? "" : ", "));
1807 if (requires_mask & GOMP_REQUIRES_REVERSE_OFFLOAD)
1808 p += snprintf (p, end - p, "%sreverse_offload",
1809 (p == buf ? "" : ", "));
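/* A minimal usage sketch of the helper above; with the two bits below set,
   BUF ends up holding "unified_address, reverse_offload":

     char buf[sizeof ("unified_address, unified_shared_memory, "
                      "reverse_offload")];
     omp_requires_to_name (buf, sizeof (buf),
                           GOMP_REQUIRES_UNIFIED_ADDRESS
                           | GOMP_REQUIRES_REVERSE_OFFLOAD); */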
1812 /* Input function/variable tables that will allow libgomp to look up offload
1813 target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */
1815 void
1816 input_offload_tables (bool do_force_output)
1818 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1819 struct lto_file_decl_data *file_data;
1820 unsigned int j = 0;
1821 const char *requires_fn = NULL;
1822 tree requires_decl = NULL_TREE;
1824 omp_requires_mask = (omp_requires) 0;
1826 while ((file_data = file_data_vec[j++]))
1828 const char *data;
1829 size_t len;
1830 class lto_input_block *ib
1831 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1832 &data, &len);
1833 if (!ib)
1834 continue;
1836 tree tmp_decl = NULL_TREE;
1837 enum LTO_symtab_tags tag
1838 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1839 while (tag)
1841 if (tag == LTO_symtab_unavail_node)
1843 tree fn_decl
1844 = lto_input_fn_decl_ref (ib, file_data);
1845 vec_safe_push (offload_funcs, fn_decl);
1847 /* Prevent IPA from removing fn_decl as unreachable, since there
1848 may be no refs from the parent function to child_fn in offload
1849 LTO mode. */
1850 if (do_force_output)
1851 cgraph_node::get (fn_decl)->mark_force_output ();
1852 tmp_decl = fn_decl;
1854 else if (tag == LTO_symtab_variable)
1856 tree var_decl
1857 = lto_input_var_decl_ref (ib, file_data);
1858 vec_safe_push (offload_vars, var_decl);
1860 /* Prevent IPA from removing var_decl as unused, since there
1861 may be no refs to var_decl in offload LTO mode. */
1862 if (do_force_output)
1863 varpool_node::get (var_decl)->force_output = 1;
1864 tmp_decl = var_decl;
1866 else if (tag == LTO_symtab_edge)
1868 static bool error_emitted = false;
1869 HOST_WIDE_INT val = streamer_read_hwi (ib);
1871 if (omp_requires_mask == 0)
1873 omp_requires_mask = (omp_requires) val;
1874 requires_decl = tmp_decl;
1875 requires_fn = file_data->file_name;
1877 else if (omp_requires_mask != val && !error_emitted)
1879 const char *fn1 = requires_fn;
1880 if (requires_decl != NULL_TREE)
1882 while (DECL_CONTEXT (requires_decl) != NULL_TREE
1883 && TREE_CODE (requires_decl) != TRANSLATION_UNIT_DECL)
1884 requires_decl = DECL_CONTEXT (requires_decl);
1885 if (requires_decl != NULL_TREE)
1886 fn1 = IDENTIFIER_POINTER (DECL_NAME (requires_decl));
1889 const char *fn2 = file_data->file_name;
1890 if (tmp_decl != NULL_TREE)
1892 while (DECL_CONTEXT (tmp_decl) != NULL_TREE
1893 && TREE_CODE (tmp_decl) != TRANSLATION_UNIT_DECL)
1894 tmp_decl = DECL_CONTEXT (tmp_decl);
1895 if (tmp_decl != NULL_TREE)
1896 fn2 = IDENTIFIER_POINTER (DECL_NAME (tmp_decl));
1898 if (fn1 == fn2)
1900 fn1 = requires_fn;
1901 fn2 = file_data->file_name;
1904 char buf1[sizeof ("unified_address, unified_shared_memory, "
1905 "reverse_offload")];
1906 char buf2[sizeof ("unified_address, unified_shared_memory, "
1907 "reverse_offload")];
1908 omp_requires_to_name (buf2, sizeof (buf2),
1909 val != OMP_REQUIRES_TARGET_USED
1910 ? val
1911 : (HOST_WIDE_INT) omp_requires_mask);
1912 if (val != OMP_REQUIRES_TARGET_USED
1913 && omp_requires_mask != OMP_REQUIRES_TARGET_USED)
1915 omp_requires_to_name (buf1, sizeof (buf1),
1916 omp_requires_mask);
1917 error ("OpenMP %<requires%> directive with non-identical "
1918 "clauses in multiple compilation units: %qs vs. "
1919 "%qs", buf1, buf2);
1920 inform (UNKNOWN_LOCATION, "%qs has %qs", fn1, buf1);
1921 inform (UNKNOWN_LOCATION, "%qs has %qs", fn2, buf2);
1923 else
1925 error ("OpenMP %<requires%> directive with %qs specified "
1926 "only in some compilation units", buf2);
1927 inform (UNKNOWN_LOCATION, "%qs has %qs",
1928 val != OMP_REQUIRES_TARGET_USED ? fn2 : fn1,
1929 buf2);
1930 inform (UNKNOWN_LOCATION, "but %qs has not",
1931 val != OMP_REQUIRES_TARGET_USED ? fn1 : fn2);
1933 error_emitted = true;
1936 else
1937 fatal_error (input_location,
1938 "invalid offload table in %s", file_data->file_name);
1940 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1943 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1944 ib, data, len);
1946 #ifdef ACCEL_COMPILER
1947 char *omp_requires_file = getenv ("GCC_OFFLOAD_OMP_REQUIRES_FILE");
1948 if (omp_requires_file == NULL || omp_requires_file[0] == '\0')
1949 fatal_error (input_location, "GCC_OFFLOAD_OMP_REQUIRES_FILE unset");
1950 FILE *f = fopen (omp_requires_file, "wb");
1951 if (!f)
1952 fatal_error (input_location, "cannot open omp_requires file %qs",
1953 omp_requires_file);
1954 uint32_t req_mask = omp_requires_mask;
1955 fwrite (&req_mask, sizeof (req_mask), 1, f);
1956 fclose (f);
1957 #endif
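/* A minimal sketch, assuming the consumer of the file written above
   (presumably the mkoffload tools) reads the mask back as the same
   uint32_t:

     const char *file = getenv ("GCC_OFFLOAD_OMP_REQUIRES_FILE");
     uint32_t mask = 0;
     FILE *in = file == NULL ? NULL : fopen (file, "rb");
     if (in == NULL || fread (&mask, sizeof (mask), 1, in) != 1)
       fatal_error (input_location, "missing or truncated omp_requires file");
     fclose (in); */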
1960 /* Return true when we need an optimization summary for NODE. */
1962 static bool
1963 output_cgraph_opt_summary_p (struct cgraph_node *node)
1965 if (node->clone_of || node->former_clone_of)
1966 return true;
1967 clone_info *info = clone_info::get (node);
1968 return info && (info->tree_map || info->param_adjustments);
1971 /* Output optimization summary for EDGE to OB. */
1972 static void
1973 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1974 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1978 /* Output optimization summary for NODE to OB. */
1980 static void
1981 output_node_opt_summary (struct output_block *ob,
1982 struct cgraph_node *node,
1983 lto_symtab_encoder_t encoder)
1985 struct ipa_replace_map *map;
1986 int i;
1987 struct cgraph_edge *e;
1989 /* TODO: Should this code be moved to ipa-param-manipulation? */
1990 struct bitpack_d bp;
1991 bp = bitpack_create (ob->main_stream);
1992 clone_info *info = clone_info::get (node);
1994 bp_pack_value (&bp, (info && info->param_adjustments != NULL), 1);
1995 streamer_write_bitpack (&bp);
1996 if (ipa_param_adjustments *adjustments
1997 = info ? info->param_adjustments : NULL)
1999 streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
2000 ipa_adjusted_param *adj;
2001 FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
2003 bp = bitpack_create (ob->main_stream);
2004 bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
2005 bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
2006 bp_pack_value (&bp, adj->op, 2);
2007 bp_pack_value (&bp, adj->param_prefix_index, 2);
2008 bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
2009 bp_pack_value (&bp, adj->reverse, 1);
2010 bp_pack_value (&bp, adj->user_flag, 1);
2011 streamer_write_bitpack (&bp);
2012 if (adj->op == IPA_PARAM_OP_SPLIT
2013 || adj->op == IPA_PARAM_OP_NEW)
2015 stream_write_tree (ob, adj->type, true);
2016 if (adj->op == IPA_PARAM_OP_SPLIT)
2018 stream_write_tree (ob, adj->alias_ptr_type, true);
2019 streamer_write_uhwi (ob, adj->unit_offset);
2023 streamer_write_hwi (ob, adjustments->m_always_copy_start);
2024 bp = bitpack_create (ob->main_stream);
2025 bp_pack_value (&bp, info->param_adjustments->m_skip_return, 1);
2026 streamer_write_bitpack (&bp);
2029 streamer_write_uhwi (ob, info ? vec_safe_length (info->tree_map) : 0);
2030 if (info)
2031 FOR_EACH_VEC_SAFE_ELT (info->tree_map, i, map)
2033 streamer_write_uhwi (ob, map->parm_num);
2034 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
2035 stream_write_tree (ob, map->new_tree, true);
2038 if (lto_symtab_encoder_in_partition_p (encoder, node))
2040 for (e = node->callees; e; e = e->next_callee)
2041 output_edge_opt_summary (ob, e);
2042 for (e = node->indirect_calls; e; e = e->next_callee)
2043 output_edge_opt_summary (ob, e);
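/* A sketch of the per-node record streamed above (and read back by
   input_node_opt_summary below):

     bitpack  1 bit    whether param adjustments follow
       uhwi            number of adjusted parameters
       per parameter:
         bitpack       base_index, prev_clone_index, op, param_prefix_index,
                       prev_clone_adjustment, reverse, user_flag
         tree type                               for SPLIT and NEW ops
         tree alias_ptr_type, uhwi unit_offset   for SPLIT ops
       hwi             m_always_copy_start
       bitpack 1 bit   m_skip_return
     uhwi              length of the tree_map vector
     per entry:        uhwi parm_num, tree new_tree
     for nodes in the partition, an (empty for now) edge summary per callee
     and indirect call edge. */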
2047 /* Output optimization summaries stored in callgraph.
2048 At the moment this is just the clone info structure. */
2050 static void
2051 output_cgraph_opt_summary (void)
2053 int i, n_nodes;
2054 lto_symtab_encoder_t encoder;
2055 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
2056 unsigned count = 0;
2058 ob->symbol = NULL;
2059 encoder = ob->decl_state->symtab_node_encoder;
2060 n_nodes = lto_symtab_encoder_size (encoder);
2061 for (i = 0; i < n_nodes; i++)
2063 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2064 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2065 if (cnode && output_cgraph_opt_summary_p (cnode))
2066 count++;
2068 streamer_write_uhwi (ob, count);
2069 for (i = 0; i < n_nodes; i++)
2071 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2072 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2073 if (cnode && output_cgraph_opt_summary_p (cnode))
2075 streamer_write_uhwi (ob, i);
2076 output_node_opt_summary (ob, cnode, encoder);
2079 produce_asm (ob, NULL);
2080 destroy_output_block (ob);
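/* The payload of LTO_section_cgraph_opt_sum produced above is therefore:

     uhwi  count       number of nodes with a summary
     COUNT times:
       uhwi  i         index of the node in the symtab encoder
       node summary    see output_node_opt_summary

   produce_asm prepends the header that input_cgraph_opt_section parses
   when reading the section back. */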
2083 /* Input optimization summary of EDGE. */
2085 static void
2086 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
2087 class lto_input_block *ib_main ATTRIBUTE_UNUSED)
2091 /* Input optimization summary of NODE. */
2093 static void
2094 input_node_opt_summary (struct cgraph_node *node,
2095 class lto_input_block *ib_main,
2096 class data_in *data_in)
2098 int i;
2099 int count;
2100 struct cgraph_edge *e;
2102 /* TODO: Should this code be moved to ipa-param-manipulation? */
2103 struct bitpack_d bp;
2104 bp = streamer_read_bitpack (ib_main);
2105 bool have_adjustments = bp_unpack_value (&bp, 1);
2106 clone_info *info = clone_info::get_create (node);
2108 if (have_adjustments)
2110 count = streamer_read_uhwi (ib_main);
2111 vec<ipa_adjusted_param, va_gc> *new_params = NULL;
2112 for (i = 0; i < count; i++)
2114 ipa_adjusted_param adj;
2115 memset (&adj, 0, sizeof (adj));
2116 bp = streamer_read_bitpack (ib_main);
2117 adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
2118 adj.prev_clone_index
2119 = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
2120 adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
2121 adj.param_prefix_index = bp_unpack_value (&bp, 2);
2122 adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
2123 adj.reverse = bp_unpack_value (&bp, 1);
2124 adj.user_flag = bp_unpack_value (&bp, 1);
2125 if (adj.op == IPA_PARAM_OP_SPLIT
2126 || adj.op == IPA_PARAM_OP_NEW)
2128 adj.type = stream_read_tree (ib_main, data_in);
2129 if (adj.op == IPA_PARAM_OP_SPLIT)
2131 adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
2132 adj.unit_offset = streamer_read_uhwi (ib_main);
2135 vec_safe_push (new_params, adj);
2137 int always_copy_start = streamer_read_hwi (ib_main);
2138 bp = streamer_read_bitpack (ib_main);
2139 bool skip_return = bp_unpack_value (&bp, 1);
2140 info->param_adjustments
2141 = (new (ggc_alloc <ipa_param_adjustments> ())
2142 ipa_param_adjustments (new_params, always_copy_start, skip_return));
2145 count = streamer_read_uhwi (ib_main);
2146 for (i = 0; i < count; i++)
2148 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2150 vec_safe_push (info->tree_map, map);
2151 map->parm_num = streamer_read_uhwi (ib_main);
2152 map->new_tree = stream_read_tree (ib_main, data_in);
2154 for (e = node->callees; e; e = e->next_callee)
2155 input_edge_opt_summary (e, ib_main);
2156 for (e = node->indirect_calls; e; e = e->next_callee)
2157 input_edge_opt_summary (e, ib_main);
2160 /* Read the optimization summary section DATA of length LEN from FILE_DATA. */
2162 static void
2163 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2164 const char *data, size_t len,
2165 vec<symtab_node *> nodes)
2167 const struct lto_function_header *header =
2168 (const struct lto_function_header *) data;
2169 const int cfg_offset = sizeof (struct lto_function_header);
2170 const int main_offset = cfg_offset + header->cfg_size;
2171 const int string_offset = main_offset + header->main_size;
2172 class data_in *data_in;
2173 unsigned int i;
2174 unsigned int count;
2176 lto_input_block ib_main ((const char *) data + main_offset,
2177 header->main_size, file_data);
2179 data_in =
2180 lto_data_in_create (file_data, (const char *) data + string_offset,
2181 header->string_size, vNULL);
2182 count = streamer_read_uhwi (&ib_main);
2184 for (i = 0; i < count; i++)
2186 int ref = streamer_read_uhwi (&ib_main);
2187 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2188 &ib_main, data_in);
2190 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2191 len);
2192 lto_data_in_delete (data_in);
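/* The raw bytes of the section are laid out as the offset computations
   above assume:

     lto_function_header                       fixed-size header
     cfg area       header->cfg_size bytes     skipped by this reader
     main stream    header->main_size bytes    the summaries themselves
     string table   header->string_size bytes

   the main stream is what streamer_read_uhwi and friends consume, while
   the string table backs the DATA_IN object created above. */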
2195 /* Input optimization summary of cgraph. */
2197 static void
2198 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2200 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2201 struct lto_file_decl_data *file_data;
2202 unsigned int j = 0;
2204 while ((file_data = file_data_vec[j++]))
2206 size_t len;
2207 const char *data
2208 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2209 &len);
2210 if (data)
2211 input_cgraph_opt_section (file_data, data, len, nodes);