gcc/lto-cgraph.cc
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
4 Copyright (C) 2009-2024 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "omp-general.h"
41 #include "stringpool.h"
42 #include "attribs.h"
43 #include "alloc-pool.h"
44 #include "symbol-summary.h"
45 #include "symtab-thunks.h"
46 #include "symtab-clones.h"
48 /* True when asm nodes have been output. */
49 bool asm_nodes_output = false;
51 static void output_cgraph_opt_summary (void);
52 static void input_cgraph_opt_summary (vec<symtab_node *> nodes);
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
57 /* Cgraph streaming is organized as a set of records whose type
58 is indicated by a tag. */
59 enum LTO_symtab_tags
61 /* Must leave 0 for the stopper. */
63 /* Cgraph node without body available. */
64 LTO_symtab_unavail_node = 1,
65 /* Cgraph node with function body. */
66 LTO_symtab_analyzed_node,
67 /* Cgraph edges. */
68 LTO_symtab_edge,
69 LTO_symtab_indirect_edge,
70 LTO_symtab_variable,
71 LTO_symtab_indirect_function,
72 LTO_symtab_last_tag
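/* Note added for exposition (not part of the original source): the symtab
   section is a sequence of such tagged records terminated by a zero tag.
   output_symtab () below writes node records first, then edge records, then
   the terminating zero; references are streamed separately into the
   LTO_section_refs section.  input_cgraph_1 () mirrors this by looping until
   it reads a zero tag.  */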
75 /* Create a new symtab encoder.
76 If FOR_INPUT, the encoder allocates only the data structures needed
77 to read the symtab. */
79 lto_symtab_encoder_t
80 lto_symtab_encoder_new (bool for_input)
82 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
84 if (!for_input)
85 encoder->map = new hash_map<symtab_node *, size_t>;
86 encoder->nodes.create (0);
87 return encoder;
91 /* Delete ENCODER and its components. */
93 void
94 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
96 encoder->nodes.release ();
97 if (encoder->map)
98 delete encoder->map;
99 free (encoder);
103 /* Return the existing reference number of NODE in the symtab ENCODER.
104 Assign a new reference if this is the first time
105 NODE is encoded. */
108 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
109 symtab_node *node)
111 int ref;
113 if (!encoder->map)
115 lto_encoder_entry entry = {node, false, false, false};
117 ref = encoder->nodes.length ();
118 encoder->nodes.safe_push (entry);
119 return ref;
122 size_t *slot = encoder->map->get (node);
123 if (!slot || !*slot)
125 lto_encoder_entry entry = {node, false, false, false};
126 ref = encoder->nodes.length ();
127 if (!slot)
128 encoder->map->put (node, ref + 1);
129 encoder->nodes.safe_push (entry);
131 else
132 ref = *slot - 1;
134 return ref;
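/* Note added for exposition (not part of the original source): the hash map
   stores the node's index biased by one, so a zero value can double as
   "not present"; lto_symtab_encoder_encode stores REF + 1 above and every
   lookup subtracts one again (see also lto_symtab_encoder_delete_node).  */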
137 /* Remove NODE from encoder. */
139 bool
140 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
141 symtab_node *node)
143 int index;
144 lto_encoder_entry last_node;
146 size_t *slot = encoder->map->get (node);
147 if (slot == NULL || !*slot)
148 return false;
150 index = *slot - 1;
151 gcc_checking_assert (encoder->nodes[index].node == node);
153 /* Remove from vector. We do this by swapping node with the last element
154 of the vector. */
155 last_node = encoder->nodes.pop ();
156 if (last_node.node != node)
158 gcc_assert (encoder->map->put (last_node.node, index + 1));
160 /* Move the last element to the original spot of NODE. */
161 encoder->nodes[index] = last_node;
164 /* Remove element from hash table. */
165 encoder->map->remove (node);
166 return true;
170 /* Return TRUE if we should encode the body of NODE (if any). */
172 bool
173 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
174 struct cgraph_node *node)
176 int index = lto_symtab_encoder_lookup (encoder, node);
177 return encoder->nodes[index].body;
180 /* Specify that we encode the body of NODE in this partition. */
182 static void
183 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
184 struct cgraph_node *node)
186 int index = lto_symtab_encoder_encode (encoder, node);
187 gcc_checking_assert (encoder->nodes[index].node == node);
188 encoder->nodes[index].body = true;
191 /* Return TRUE if we should encode initializer of NODE (if any). */
193 bool
194 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
195 varpool_node *node)
197 int index = lto_symtab_encoder_lookup (encoder, node);
198 if (index == LCC_NOT_FOUND)
199 return false;
200 return encoder->nodes[index].initializer;
203 /* Specify that we should encode initializer of NODE (if any). */
205 static void
206 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
207 varpool_node *node)
209 int index = lto_symtab_encoder_lookup (encoder, node);
210 encoder->nodes[index].initializer = true;
213 /* Return TRUE if NODE is in this partition. */
215 bool
216 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
217 symtab_node *node)
219 int index = lto_symtab_encoder_lookup (encoder, node);
220 if (index == LCC_NOT_FOUND)
221 return false;
222 return encoder->nodes[index].in_partition;
225 /* Specify that NODE is in this partition. */
227 void
228 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
229 symtab_node *node)
231 int index = lto_symtab_encoder_encode (encoder, node);
232 encoder->nodes[index].in_partition = true;
235 /* Output the cgraph EDGE to OB using ENCODER. */
237 static void
238 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
239 lto_symtab_encoder_t encoder)
241 unsigned int uid;
242 intptr_t ref;
243 struct bitpack_d bp;
245 if (edge->indirect_unknown_callee)
246 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
247 LTO_symtab_indirect_edge);
248 else
249 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
250 LTO_symtab_edge);
252 ref = lto_symtab_encoder_lookup (encoder, edge->caller);
253 gcc_assert (ref != LCC_NOT_FOUND);
254 streamer_write_hwi_stream (ob->main_stream, ref);
256 if (!edge->indirect_unknown_callee)
258 ref = lto_symtab_encoder_lookup (encoder, edge->callee);
259 gcc_assert (ref != LCC_NOT_FOUND);
260 streamer_write_hwi_stream (ob->main_stream, ref);
263 edge->count.stream_out (ob->main_stream);
265 bp = bitpack_create (ob->main_stream);
266 uid = !edge->call_stmt ? edge->lto_stmt_uid
267 : gimple_uid (edge->call_stmt) + 1;
268 bp_pack_enum (&bp, cgraph_inline_failed_t,
269 CIF_N_REASONS, edge->inline_failed);
270 gcc_checking_assert (uid || edge->caller->thunk);
271 bp_pack_var_len_unsigned (&bp, uid);
272 bp_pack_value (&bp, edge->speculative_id, 16);
273 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
274 bp_pack_value (&bp, edge->speculative, 1);
275 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
276 gcc_assert (!edge->call_stmt_cannot_inline_p
277 || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
278 bp_pack_value (&bp, edge->can_throw_external, 1);
279 bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
280 if (edge->indirect_unknown_callee)
282 int flags = edge->indirect_info->ecf_flags;
283 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
284 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
285 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
286 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
287 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
288 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
289 /* Flags that should not appear on indirect calls. */
290 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
291 | ECF_MAY_BE_ALLOCA
292 | ECF_SIBCALL
293 | ECF_LEAF
294 | ECF_NOVOPS)));
296 bp_pack_value (&bp, edge->indirect_info->num_speculative_call_targets,
297 16);
299 streamer_write_bitpack (&bp);
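/* Note added for exposition (not part of the original source): the sequence
   of bp_pack_value calls above defines the bit layout of an edge record, so
   input_edge () must unpack exactly the same fields in exactly the same
   order.  */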
302 /* Return true if NODE contains references from other partitions. */
304 bool
305 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
307 int i;
308 struct ipa_ref *ref = NULL;
310 for (i = 0; node->iterate_referring (i, ref); i++)
312 /* Ignore references from non-offloadable nodes while streaming NODE into
313 offload LTO section. */
314 if (!ref->referring->need_lto_streaming)
315 continue;
317 if (ref->referring->in_other_partition
318 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
319 return true;
321 return false;
324 /* Return true when NODE is reachable from another partition. */
326 bool
327 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
329 struct cgraph_edge *e;
330 if (!node->definition)
331 return false;
332 if (node->inlined_to)
333 return false;
334 for (e = node->callers; e; e = e->next_caller)
336 /* Ignore references from non-offloadable nodes while streaming NODE into
337 offload LTO section. */
338 if (!e->caller->need_lto_streaming)
339 continue;
341 if (e->caller->in_other_partition
342 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
343 return true;
345 return false;
348 /* Return true if NODE is referenced from this partition. */
350 bool
351 referenced_from_this_partition_p (symtab_node *node,
352 lto_symtab_encoder_t encoder)
354 int i;
355 struct ipa_ref *ref = NULL;
357 for (i = 0; node->iterate_referring (i, ref); i++)
358 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
359 return true;
360 return false;
363 /* Return true when NODE is reachable from this partition. */
365 bool
366 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
368 struct cgraph_edge *e;
369 for (e = node->callers; e; e = e->next_caller)
370 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
371 return true;
372 return false;
375 /* Output the cgraph NODE to OB. ENCODER is used to find the
376 reference number of NODE->inlined_to and to determine the set of
377 nodes we are writing to the current file. If NODE is not in that
378 set, then NODE is a boundary node and we pretend it just has a
379 decl and no callees. The clone-of reference is only streamed when
380 the body of the clone's origin is also written, so the reader can
381 tell whether NODE is a clone of a previously written node. */
383 static void
384 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
385 lto_symtab_encoder_t encoder)
387 unsigned int tag;
388 struct bitpack_d bp;
389 bool boundary_p;
390 intptr_t ref;
391 bool in_other_partition = false;
392 struct cgraph_node *clone_of, *ultimate_clone_of;
393 ipa_opt_pass_d *pass;
394 int i;
395 const char *comdat;
396 const char *section;
397 tree group;
399 boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
401 if (node->analyzed && (!boundary_p || node->alias
402 || (node->thunk && !node->inlined_to)))
403 tag = LTO_symtab_analyzed_node;
404 else
405 tag = LTO_symtab_unavail_node;
407 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
408 tag);
409 streamer_write_hwi_stream (ob->main_stream, node->order);
411 /* In WPA mode, we only output part of the call-graph. Also, we
412 fake cgraph node attributes. There are two cases that we care about.
414 Boundary nodes: There are nodes that are not part of SET but are
415 called from within SET. We artificially make them look like
416 externally visible nodes with no function body.
418 Cherry-picked nodes: These are nodes we pulled from other
419 translation units into SET during IPA-inlining. We turn them into
420 local static nodes to prevent clashes with other local statics. */
421 if (boundary_p && node->analyzed
422 && node->get_partitioning_class () == SYMBOL_PARTITION)
424 /* Inline clones cannot be part of the boundary.
425 gcc_assert (!node->inlined_to);
427 FIXME: At the moment they can be, when the partition contains an inline
428 clone that is a clone of an inline clone from outside the partition. We can
429 reshape the clone tree and make another node the root, but it
430 needs a bit of extra work and will be promptly done by cgraph_remove_node
431 after reading back. */
432 in_other_partition = 1;
434 else if (UNLIKELY (lto_stream_offload_p
435 && lookup_attribute ("omp target device_ancestor_host",
436 DECL_ATTRIBUTES (node->decl))))
437 /* This symbol is only used as an argument to IFN_GOMP_TARGET_REV; this IFN
438 is ignored on ACCEL_COMPILER. Thus, mark it as in_other_partition to silence
439 the verify_node_partition diagnostic. */
440 in_other_partition = 1;
442 clone_of = node->clone_of;
443 while (clone_of
444 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
445 if (clone_of->prev_sibling_clone)
446 clone_of = clone_of->prev_sibling_clone;
447 else
448 clone_of = clone_of->clone_of;
450 /* See if the body of the master function is output. If not, we are seeing only
451 a declaration and we do not need to pass down the clone tree. */
452 ultimate_clone_of = clone_of;
453 while (ultimate_clone_of && ultimate_clone_of->clone_of)
454 ultimate_clone_of = ultimate_clone_of->clone_of;
456 if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
457 clone_of = NULL;
459 if (tag == LTO_symtab_analyzed_node)
460 gcc_assert (clone_of || !node->clone_of);
461 if (!clone_of)
462 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
463 else
464 streamer_write_hwi_stream (ob->main_stream, ref);
467 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream, node->decl);
468 node->count.stream_out (ob->main_stream);
469 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
471 streamer_write_hwi_stream (ob->main_stream,
472 node->ipa_transforms_to_apply.length ());
473 FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
474 streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
476 if (tag == LTO_symtab_analyzed_node)
478 if (node->inlined_to)
480 ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
481 gcc_assert (ref != LCC_NOT_FOUND);
483 else
484 ref = LCC_NOT_FOUND;
486 streamer_write_hwi_stream (ob->main_stream, ref);
489 group = node->get_comdat_group ();
490 if (group)
491 comdat = IDENTIFIER_POINTER (group);
492 else
493 comdat = "";
494 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
496 if (group)
498 if (node->same_comdat_group)
500 ref = LCC_NOT_FOUND;
501 for (struct symtab_node *n = node->same_comdat_group;
502 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
503 ref = lto_symtab_encoder_lookup (encoder, n);
505 else
506 ref = LCC_NOT_FOUND;
507 streamer_write_hwi_stream (ob->main_stream, ref);
510 section = node->get_section ();
511 if (!section)
512 section = "";
514 streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
516 bp = bitpack_create (ob->main_stream);
517 bp_pack_value (&bp, node->local, 1);
518 bp_pack_value (&bp, node->externally_visible, 1);
519 bp_pack_value (&bp, node->no_reorder, 1);
520 bp_pack_value (&bp, node->definition, 1);
521 bp_pack_value (&bp, node->versionable, 1);
522 bp_pack_value (&bp, node->can_change_signature, 1);
523 bp_pack_value (&bp, node->redefined_extern_inline, 1);
524 bp_pack_value (&bp, node->force_output, 1);
525 bp_pack_value (&bp, node->forced_by_abi, 1);
526 bp_pack_value (&bp, node->unique_name, 1);
527 bp_pack_value (&bp, node->body_removed, 1);
528 bp_pack_value (&bp, node->semantic_interposition, 1);
529 bp_pack_value (&bp, node->implicit_section, 1);
530 bp_pack_value (&bp, node->address_taken, 1);
531 bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
532 && node->get_partitioning_class () == SYMBOL_PARTITION
533 && (reachable_from_other_partition_p (node, encoder)
534 || referenced_from_other_partition_p (node, encoder)), 1);
535 bp_pack_value (&bp, node->lowered, 1);
536 bp_pack_value (&bp, in_other_partition, 1);
537 bp_pack_value (&bp, node->alias, 1);
538 bp_pack_value (&bp, node->transparent_alias, 1);
539 bp_pack_value (&bp, node->weakref, 1);
540 bp_pack_value (&bp, node->symver, 1);
541 bp_pack_value (&bp, node->frequency, 2);
542 bp_pack_value (&bp, node->only_called_at_startup, 1);
543 bp_pack_value (&bp, node->only_called_at_exit, 1);
544 bp_pack_value (&bp, node->tm_clone, 1);
545 bp_pack_value (&bp, node->calls_comdat_local, 1);
546 bp_pack_value (&bp, node->icf_merged, 1);
547 bp_pack_value (&bp, node->nonfreeing_fn, 1);
548 bp_pack_value (&bp, node->merged_comdat, 1);
549 bp_pack_value (&bp, node->merged_extern_inline, 1);
550 bp_pack_value (&bp, node->thunk, 1);
551 bp_pack_value (&bp, node->parallelized_function, 1);
552 bp_pack_value (&bp, node->declare_variant_alt, 1);
553 bp_pack_value (&bp, node->calls_declare_variant_alt, 1);
555 /* Always stream thunk info because we use it in
556 ipa_polymorphic_call_context::ipa_polymorphic_call_context
557 to properly interpret THIS pointers for thunks that have been converted
558 to GIMPLE. */
559 struct thunk_info *thunk = node->definition ? thunk_info::get (node) : NULL;
561 bp_pack_value (&bp, thunk != NULL, 1);
563 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
564 LDPR_NUM_KNOWN,
565 /* When doing incremental link, we will get new resolution
566 info next time we process the file. */
567 flag_incremental_link == INCREMENTAL_LINK_LTO
568 ? LDPR_UNKNOWN : node->resolution);
569 bp_pack_value (&bp, node->split_part, 1);
570 streamer_write_bitpack (&bp);
571 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
573 streamer_write_hwi_stream (ob->main_stream, node->profile_id);
574 streamer_write_hwi_stream (ob->main_stream, node->unit_id);
575 if (DECL_STATIC_CONSTRUCTOR (node->decl))
576 streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
577 if (DECL_STATIC_DESTRUCTOR (node->decl))
578 streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
580 if (thunk)
581 thunk_info::get (node)->stream_out (ob);
584 /* Output the varpool NODE to OB.
585 If NODE is not in SET, then NODE is a boundary. */
587 static void
588 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
589 lto_symtab_encoder_t encoder)
591 bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
592 bool encode_initializer_p
593 = (node->definition
594 && lto_symtab_encoder_encode_initializer_p (encoder, node));
595 struct bitpack_d bp;
596 int ref;
597 const char *comdat;
598 const char *section;
599 tree group;
601 gcc_assert (!encode_initializer_p || node->definition);
602 gcc_assert (boundary_p || encode_initializer_p);
604 streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
605 LTO_symtab_variable);
606 streamer_write_hwi_stream (ob->main_stream, node->order);
607 lto_output_var_decl_ref (ob->decl_state, ob->main_stream, node->decl);
608 bp = bitpack_create (ob->main_stream);
609 bp_pack_value (&bp, node->externally_visible, 1);
610 bp_pack_value (&bp, node->no_reorder, 1);
611 bp_pack_value (&bp, node->force_output, 1);
612 bp_pack_value (&bp, node->forced_by_abi, 1);
613 bp_pack_value (&bp, node->unique_name, 1);
614 bp_pack_value (&bp,
615 node->body_removed
616 || (!encode_initializer_p && !node->alias && node->definition),
617 1);
618 bp_pack_value (&bp, node->semantic_interposition, 1);
619 bp_pack_value (&bp, node->implicit_section, 1);
620 bp_pack_value (&bp, node->writeonly, 1);
621 bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
622 1);
623 bp_pack_value (&bp, node->alias, 1);
624 bp_pack_value (&bp, node->transparent_alias, 1);
625 bp_pack_value (&bp, node->weakref, 1);
626 bp_pack_value (&bp, node->symver, 1);
627 bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
628 gcc_assert (node->definition || !node->analyzed);
629 /* Constant pool initializers can be de-unified into individual LTRANS units.
630 FIXME: Alternatively, at -Os we may want to avoid generating the local
631 labels for them and share them across LTRANS partitions. */
632 if (node->get_partitioning_class () != SYMBOL_PARTITION)
634 bp_pack_value (&bp, 0, 1); /* used_from_other_partition. */
635 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
637 else
639 bp_pack_value (&bp, node->definition
640 && referenced_from_other_partition_p (node, encoder), 1);
641 bp_pack_value (&bp, node->analyzed
642 && boundary_p && !DECL_EXTERNAL (node->decl), 1);
643 /* in_other_partition. */
645 bp_pack_value (&bp, node->tls_model, 3);
646 bp_pack_value (&bp, node->used_by_single_function, 1);
647 bp_pack_value (&bp, node->dynamically_initialized, 1);
648 streamer_write_bitpack (&bp);
650 group = node->get_comdat_group ();
651 if (group)
652 comdat = IDENTIFIER_POINTER (group);
653 else
654 comdat = "";
655 streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
657 if (group)
659 if (node->same_comdat_group)
661 ref = LCC_NOT_FOUND;
662 for (struct symtab_node *n = node->same_comdat_group;
663 ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
664 ref = lto_symtab_encoder_lookup (encoder, n);
666 else
667 ref = LCC_NOT_FOUND;
668 streamer_write_hwi_stream (ob->main_stream, ref);
671 section = node->get_section ();
672 if (!section)
673 section = "";
674 streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
676 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
677 LDPR_NUM_KNOWN, node->resolution);
680 /* Output the IPA reference REF to OB using ENCODER. */
683 static void
684 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
685 lto_symtab_encoder_t encoder)
687 struct bitpack_d bp;
688 int nref;
689 int uid = !ref->stmt ? ref->lto_stmt_uid : gimple_uid (ref->stmt) + 1;
690 struct cgraph_node *node;
692 bp = bitpack_create (ob->main_stream);
693 bp_pack_value (&bp, ref->use, 3);
694 bp_pack_value (&bp, ref->speculative, 1);
695 streamer_write_bitpack (&bp);
696 nref = lto_symtab_encoder_lookup (encoder, ref->referred);
697 gcc_assert (nref != LCC_NOT_FOUND);
698 streamer_write_hwi_stream (ob->main_stream, nref);
700 node = dyn_cast <cgraph_node *> (ref->referring);
701 if (node)
703 if (ref->stmt)
704 uid = gimple_uid (ref->stmt) + 1;
705 streamer_write_hwi_stream (ob->main_stream, uid);
706 bp_pack_value (&bp, ref->speculative_id, 16);
707 streamer_write_bitpack (&bp);
711 /* Stream out profile_summary to OB. */
713 static void
714 output_profile_summary (struct lto_simple_output_block *ob)
716 if (profile_info)
718 /* We do not output num and run_max, they are not used by
719 GCC profile feedback and they are difficult to merge from multiple
720 units. */
721 unsigned runs = (profile_info->runs);
722 streamer_write_uhwi_stream (ob->main_stream, runs);
724 /* IPA-profile computes the hot bb threshold based on the cumulative
725 whole-program profile. We need to stream it down to ltrans. */
726 if (flag_wpa)
727 streamer_write_gcov_count_stream (ob->main_stream,
728 get_hot_bb_threshold ());
730 else
731 streamer_write_uhwi_stream (ob->main_stream, 0);
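/* Note added for exposition (not part of the original source):
   input_profile_summary () reads this back symmetrically; a zero run count
   means "no profile", and during LTRANS the hot basic-block threshold
   streamed above is restored via set_hot_bb_threshold ().  */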
734 /* Output all callees or indirect outgoing edges. EDGE must be the first such
735 edge. */
737 static void
738 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
739 struct lto_simple_output_block *ob,
740 lto_symtab_encoder_t encoder)
742 if (!edge)
743 return;
745 /* Output edges in backward direction, so the reconstructed callgraph matches
746 and it is easy to associate call sites with the IPA pass summaries. */
747 while (edge->next_callee)
748 edge = edge->next_callee;
749 for (; edge; edge = edge->prev_callee)
750 lto_output_edge (ob, edge, encoder);
753 /* Output the references of the nodes in ENCODER. */
755 static void
756 output_refs (lto_symtab_encoder_t encoder)
758 struct lto_simple_output_block *ob;
759 int count;
760 struct ipa_ref *ref;
762 ob = lto_create_simple_output_block (LTO_section_refs);
764 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
766 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
768 /* IPA_REF_ALIAS references are always preserved
769 in the boundary. An alias node can't have other references and
770 can always be handled as if it were not in the boundary. */
771 if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
772 continue;
774 count = node->ref_list.nreferences ();
775 if (count)
777 streamer_write_gcov_count_stream (ob->main_stream, count);
778 streamer_write_uhwi_stream (ob->main_stream,
779 lto_symtab_encoder_lookup (encoder, node));
780 for (int i = 0; node->iterate_reference (i, ref); i++)
781 lto_output_ref (ob, ref, encoder);
783 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
784 if (cnode->declare_variant_alt)
785 omp_lto_output_declare_variant_alt (ob, cnode, encoder);
788 streamer_write_uhwi_stream (ob->main_stream, 0);
790 lto_destroy_simple_output_block (ob);
793 /* Add NODE into encoder as well as nodes it is cloned from.
794 Do it in a way so that the nodes it was cloned from appear first. */
796 static void
797 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
798 bool include_body)
800 if (node->clone_of)
801 add_node_to (encoder, node->clone_of, include_body);
802 if (include_body)
803 lto_set_symtab_encoder_encode_body (encoder, node);
804 lto_symtab_encoder_encode (encoder, node);
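/* Note added for exposition (not part of the original source): the recursion
   above encodes a clone's origin before the clone itself, which is the
   ordering compute_ltrans_boundary () relies on when it rebuilds the
   encoder.  */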
807 /* Add all references in NODE to encoders. */
809 static void
810 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
812 int i;
813 struct ipa_ref *ref = NULL;
814 for (i = 0; node->iterate_reference (i, ref); i++)
815 if (is_a <cgraph_node *> (ref->referred))
816 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
817 else
818 lto_symtab_encoder_encode (encoder, ref->referred);
821 /* Select what needs to be streamed out. In regular LTO mode stream everything;
822 in offload LTO mode stream only nodes marked as offloadable. */
823 void
824 select_what_to_stream (void)
826 struct symtab_node *snode;
827 FOR_EACH_SYMBOL (snode)
828 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
831 /* Find all symbols we want to stream into given partition and insert them
832 to encoders.
834 The function actually replaces IN_ENCODER with a new one. The reason is that
835 the streaming code needs a clone's origin to be streamed before the clone. This
836 means that we need to insert the nodes in a specific order; that order is
837 ignored by the partitioning logic earlier. */
839 lto_symtab_encoder_t
840 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
842 struct cgraph_edge *edge;
843 int i;
844 lto_symtab_encoder_t encoder;
845 lto_symtab_encoder_iterator lsei;
846 hash_set<void *> reachable_call_targets;
848 encoder = lto_symtab_encoder_new (false);
850 /* Go over all entries in the IN_ENCODER and duplicate them to
851 ENCODER. At the same time insert masters of clones so
852 every master appears before clone. */
853 for (lsei = lsei_start_function_in_partition (in_encoder);
854 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
856 struct cgraph_node *node = lsei_cgraph_node (lsei);
857 if (!node->need_lto_streaming)
858 continue;
859 add_node_to (encoder, node, true);
860 lto_set_symtab_encoder_in_partition (encoder, node);
861 create_references (encoder, node);
863 for (lsei = lsei_start_variable_in_partition (in_encoder);
864 !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
866 varpool_node *vnode = lsei_varpool_node (lsei);
868 if (!vnode->need_lto_streaming)
869 continue;
870 lto_set_symtab_encoder_in_partition (encoder, vnode);
871 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
872 create_references (encoder, vnode);
874 /* Also pickle in the initializers of all referenced read-only variables
875 to help folding. Constant pool variables are not shared, so we must
876 pickle those too. */
877 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
879 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
880 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
882 if (!lto_symtab_encoder_encode_initializer_p (encoder,
883 vnode)
884 && (((vnode->ctor_useable_for_folding_p ()
885 && (!DECL_VIRTUAL_P (vnode->decl)
886 || !flag_wpa
887 || flag_ltrans_devirtualize)))))
889 lto_set_symtab_encoder_encode_initializer (encoder, vnode);
890 create_references (encoder, vnode);
895 /* Go over all the nodes again to include callees that are not in
896 SET. */
897 for (lsei = lsei_start_function_in_partition (encoder);
898 !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
900 struct cgraph_node *node = lsei_cgraph_node (lsei);
901 for (edge = node->callees; edge; edge = edge->next_callee)
903 struct cgraph_node *callee = edge->callee;
904 if (!lto_symtab_encoder_in_partition_p (encoder, callee))
906 /* We should have moved all the inlines. */
907 gcc_assert (!callee->inlined_to);
908 add_node_to (encoder, callee, false);
911 /* Add all possible targets for late devirtualization. */
912 if (flag_ltrans_devirtualize || !flag_wpa)
913 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
914 if (edge->indirect_info->polymorphic)
916 unsigned int i;
917 void *cache_token;
918 bool final;
919 vec <cgraph_node *>targets
920 = possible_polymorphic_call_targets
921 (edge, &final, &cache_token);
922 if (cache_token != NULL
923 && !reachable_call_targets.add (cache_token))
925 for (i = 0; i < targets.length (); i++)
927 struct cgraph_node *callee = targets[i];
929 /* Adding external declarations into the unit serves
930 no purpose and just increases its boundary. */
931 if (callee->definition
932 && !lto_symtab_encoder_in_partition_p
933 (encoder, callee))
935 gcc_assert (!callee->inlined_to);
936 add_node_to (encoder, callee, false);
942 /* Be sure to also insert alias targets and thunk callees. These need
943 to stay to aid local calling conventions. */
944 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
946 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
947 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
949 if (node->alias && node->analyzed)
950 create_references (encoder, node);
951 if (cnode
952 && cnode->thunk && !cnode->inlined_to)
953 add_node_to (encoder, cnode->callees->callee, false);
954 while (node->transparent_alias && node->analyzed)
956 node = node->get_alias_target ();
957 if (is_a <cgraph_node *> (node))
958 add_node_to (encoder, dyn_cast <cgraph_node *> (node),
959 false);
960 else
961 lto_symtab_encoder_encode (encoder, node);
964 lto_symtab_encoder_delete (in_encoder);
965 return encoder;
968 /* Output the part of the symtab in the current partition. */
970 void
971 output_symtab (void)
973 struct cgraph_node *node;
974 struct lto_simple_output_block *ob;
975 int i, n_nodes;
976 lto_symtab_encoder_t encoder;
978 if (flag_wpa)
979 output_cgraph_opt_summary ();
981 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
983 output_profile_summary (ob);
985 /* An encoder for cgraph nodes should have been created by
986 ipa_write_summaries_1. */
987 gcc_assert (ob->decl_state->symtab_node_encoder);
988 encoder = ob->decl_state->symtab_node_encoder;
990 /* Write out the nodes. We must first output a node and then its clones,
991 otherwise at the time of reading back a clone there would be nothing to
992 clone from. */
993 n_nodes = lto_symtab_encoder_size (encoder);
994 for (i = 0; i < n_nodes; i++)
996 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
997 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
998 lto_output_node (ob, cnode, encoder);
999 else
1000 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
1003 /* Go over the nodes again to write edges. */
1004 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
1006 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
1007 if (node
1008 && ((node->thunk && !node->inlined_to)
1009 || lto_symtab_encoder_in_partition_p (encoder, node)))
1011 output_outgoing_cgraph_edges (node->callees, ob, encoder);
1012 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
1016 streamer_write_uhwi_stream (ob->main_stream, 0);
1018 lto_destroy_simple_output_block (ob);
1020 /* Emit toplevel asms.
1021 When doing WPA we must output every asm just once. Since we do not partition asm
1022 nodes at all, output them to the first output. This is kind of a hack, but should
1023 work well. */
1024 if (!asm_nodes_output && !lto_stream_offload_p)
1026 asm_nodes_output = true;
1027 lto_output_toplevel_asms ();
1030 output_refs (encoder);
1033 /* Return identifier encoded in IB as a plain string. */
1035 static tree
1036 read_identifier (class lto_input_block *ib)
1038 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1039 tree id;
1041 if (ib->data[ib->p + len])
1042 lto_section_overrun (ib);
1043 if (!len)
1045 ib->p++;
1046 return NULL;
1048 id = get_identifier (ib->data + ib->p);
1049 ib->p += len + 1;
1050 return id;
1053 /* Return string encoded in IB, NULL if string is empty. */
1055 static const char *
1056 read_string (class lto_input_block *ib)
1058 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1059 const char *str;
1061 if (ib->data[ib->p + len])
1062 lto_section_overrun (ib);
1063 if (!len)
1065 ib->p++;
1066 return NULL;
1068 str = ib->data + ib->p;
1069 ib->p += len + 1;
1070 return str;
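/* Note added for exposition (not part of the original source): both helpers
   above parse NUL-terminated data written on the output side with
   streamer_write_data_stream (..., strlen (...) + 1); an empty string is
   encoded as a single NUL byte and is returned here as NULL.  */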
1073 /* Output function/variable tables that will allow libgomp to look up offload
1074 target code.
1075 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1076 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1077 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1079 void
1080 output_offload_tables (void)
1082 bool output_requires = (flag_openmp
1083 && (omp_requires_mask & OMP_REQUIRES_TARGET_USED) != 0);
1084 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars)
1085 && !output_requires)
1086 return;
1088 struct lto_simple_output_block *ob
1089 = lto_create_simple_output_block (LTO_section_offload_table);
1091 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1093 symtab_node *node = symtab_node::get ((*offload_funcs)[i]);
1094 if (!node)
1095 continue;
1096 node->force_output = true;
1097 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1098 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1099 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream,
1100 (*offload_funcs)[i]);
1103 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1105 symtab_node *node = symtab_node::get ((*offload_vars)[i]);
1106 if (!node)
1107 continue;
1108 node->force_output = true;
1109 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1110 LTO_symtab_last_tag, LTO_symtab_variable);
1111 lto_output_var_decl_ref (ob->decl_state, ob->main_stream,
1112 (*offload_vars)[i]);
1115 for (unsigned i = 0; i < vec_safe_length (offload_ind_funcs); i++)
1117 symtab_node *node = symtab_node::get ((*offload_ind_funcs)[i]);
1118 if (!node)
1119 continue;
1120 node->force_output = true;
1121 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1122 LTO_symtab_last_tag, LTO_symtab_indirect_function);
1123 lto_output_fn_decl_ref (ob->decl_state, ob->main_stream,
1124 (*offload_ind_funcs)[i]);
1127 if (output_requires)
1129 HOST_WIDE_INT val = ((HOST_WIDE_INT) omp_requires_mask
1130 & (OMP_REQUIRES_UNIFIED_ADDRESS
1131 | OMP_REQUIRES_UNIFIED_SHARED_MEMORY
1132 | OMP_REQUIRES_REVERSE_OFFLOAD
1133 | OMP_REQUIRES_TARGET_USED));
1134 /* (Mis)use LTO_symtab_edge for this variable. */
1135 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1136 LTO_symtab_last_tag, LTO_symtab_edge);
1137 streamer_write_hwi_stream (ob->main_stream, val);
1140 streamer_write_uhwi_stream (ob->main_stream, 0);
1141 lto_destroy_simple_output_block (ob);
1143 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1144 streamed to one partition only. That's why we free offload_funcs and
1145 offload_vars after the first call of output_offload_tables. */
1146 if (flag_wpa)
1148 vec_free (offload_funcs);
1149 vec_free (offload_vars);
1150 vec_free (offload_ind_funcs);
1154 /* Verify the partitioning of NODE. */
1156 static inline void
1157 verify_node_partition (symtab_node *node)
1159 if (flag_ltrans)
1160 return;
1162 #ifdef ACCEL_COMPILER
1163 if (node->in_other_partition)
1165 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1167 if (lookup_attribute ("omp target device_ancestor_host",
1168 DECL_ATTRIBUTES (node->decl)) != NULL)
1169 return;
1170 error_at (DECL_SOURCE_LOCATION (node->decl),
1171 "function %qs has been referenced in offloaded code but"
1172 " hasn%'t been marked to be included in the offloaded code",
1173 node->name ());
1175 else if (VAR_P (node->decl))
1176 error_at (DECL_SOURCE_LOCATION (node->decl),
1177 "variable %qs has been referenced in offloaded code but"
1178 " hasn%'t been marked to be included in the offloaded code",
1179 node->name ());
1180 else
1181 gcc_unreachable ();
1183 #else
1184 gcc_assert (!node->in_other_partition
1185 && !node->used_from_other_partition);
1186 #endif
1189 /* Overwrite the information in NODE based on FILE_DATA, TAG and the
1190 flag bits in BP. This is called either to initialize
1191 NODE or to replace the values in it, for instance because the first
1192 time we saw it, the function body was not available but now it
1193 is. BP is a bitpack with all the bitflags for NODE read from the
1194 stream. Initialize HAS_THUNK_INFO to indicate if thunk info should
1195 be streamed in. */
1197 static void
1198 input_overwrite_node (struct lto_file_decl_data *file_data,
1199 struct cgraph_node *node,
1200 enum LTO_symtab_tags tag,
1201 struct bitpack_d *bp, bool *has_thunk_info)
1203 node->aux = (void *) tag;
1204 node->lto_file_data = file_data;
1206 node->local = bp_unpack_value (bp, 1);
1207 node->externally_visible = bp_unpack_value (bp, 1);
1208 node->no_reorder = bp_unpack_value (bp, 1);
1209 node->definition = bp_unpack_value (bp, 1);
1210 node->versionable = bp_unpack_value (bp, 1);
1211 node->can_change_signature = bp_unpack_value (bp, 1);
1212 node->redefined_extern_inline = bp_unpack_value (bp, 1);
1213 node->force_output = bp_unpack_value (bp, 1);
1214 node->forced_by_abi = bp_unpack_value (bp, 1);
1215 node->unique_name = bp_unpack_value (bp, 1);
1216 node->body_removed = bp_unpack_value (bp, 1);
1217 node->semantic_interposition = bp_unpack_value (bp, 1);
1218 node->implicit_section = bp_unpack_value (bp, 1);
1219 node->address_taken = bp_unpack_value (bp, 1);
1220 node->used_from_other_partition = bp_unpack_value (bp, 1);
1221 node->lowered = bp_unpack_value (bp, 1);
1222 node->analyzed = tag == LTO_symtab_analyzed_node;
1223 node->in_other_partition = bp_unpack_value (bp, 1);
1224 if (node->in_other_partition
1225 /* Avoid updating the decl when we are seeing just an inline clone.
1226 When inlining a function that has functions already inlined into it,
1227 we produce clones of inline clones.
1229 WPA partitioning might put each clone into a different unit and
1230 we might end up streaming an inline clone from another partition
1231 to support the clone we are interested in. */
1232 && (!node->clone_of
1233 || node->clone_of->decl != node->decl))
1235 DECL_EXTERNAL (node->decl) = 1;
1236 TREE_STATIC (node->decl) = 0;
1238 node->alias = bp_unpack_value (bp, 1);
1239 node->transparent_alias = bp_unpack_value (bp, 1);
1240 node->weakref = bp_unpack_value (bp, 1);
1241 node->symver = bp_unpack_value (bp, 1);
1242 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1243 node->only_called_at_startup = bp_unpack_value (bp, 1);
1244 node->only_called_at_exit = bp_unpack_value (bp, 1);
1245 node->tm_clone = bp_unpack_value (bp, 1);
1246 node->calls_comdat_local = bp_unpack_value (bp, 1);
1247 node->icf_merged = bp_unpack_value (bp, 1);
1248 node->nonfreeing_fn = bp_unpack_value (bp, 1);
1249 node->merged_comdat = bp_unpack_value (bp, 1);
1250 node->merged_extern_inline = bp_unpack_value (bp, 1);
1251 node->thunk = bp_unpack_value (bp, 1);
1252 node->parallelized_function = bp_unpack_value (bp, 1);
1253 node->declare_variant_alt = bp_unpack_value (bp, 1);
1254 node->calls_declare_variant_alt = bp_unpack_value (bp, 1);
1255 *has_thunk_info = bp_unpack_value (bp, 1);
1256 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1257 LDPR_NUM_KNOWN);
1258 node->split_part = bp_unpack_value (bp, 1);
1259 verify_node_partition (node);
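/* Note added for exposition (not part of the original source): the
   bp_unpack_value calls above must mirror, field by field and in the same
   order, the bp_pack_value calls in lto_output_node (); a change on one side
   has to be made on the other as well.  */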
1262 /* Return the identifier naming the symbol DECL is an alias of. */
1264 static tree
1265 get_alias_symbol (tree decl)
1267 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1268 return get_identifier (TREE_STRING_POINTER
1269 (TREE_VALUE (TREE_VALUE (alias))));
1272 /* Read a node from input_block IB. TAG is the node's tag just read.
1273 Return the node read or overwritten. */
1275 static struct cgraph_node *
1276 input_node (struct lto_file_decl_data *file_data,
1277 class lto_input_block *ib,
1278 enum LTO_symtab_tags tag,
1279 vec<symtab_node *> nodes)
1281 gcc::pass_manager *passes = g->get_passes ();
1282 tree fn_decl;
1283 struct cgraph_node *node;
1284 struct bitpack_d bp;
1285 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1286 int clone_ref;
1287 int order;
1288 int i, count;
1289 tree group;
1290 const char *section;
1291 order = streamer_read_hwi (ib) + file_data->order_base;
1292 clone_ref = streamer_read_hwi (ib);
1293 bool has_thunk_info;
1295 fn_decl = lto_input_fn_decl_ref (ib, file_data);
1297 if (clone_ref != LCC_NOT_FOUND)
1299 node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1300 profile_count::uninitialized (), false,
1301 vNULL, false, NULL, NULL);
1303 else
1305 /* Declarations of functions can already be merged with a declaration
1306 from another input file. We keep the cgraph unmerged until after streaming
1307 of IPA passes is done. Always forcibly create a fresh node. */
1308 node = symtab->create_empty ();
1309 node->decl = fn_decl;
1310 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
1311 node->ifunc_resolver = 1;
1312 node->register_symbol ();
1315 node->order = order;
1316 if (order >= symtab->order)
1317 symtab->order = order + 1;
1319 node->count = profile_count::stream_in (ib);
1320 node->count_materialization_scale = streamer_read_hwi (ib);
1322 count = streamer_read_hwi (ib);
1323 node->ipa_transforms_to_apply = vNULL;
1324 for (i = 0; i < count; i++)
1326 opt_pass *pass;
1327 int pid = streamer_read_hwi (ib);
1329 gcc_assert (pid < passes->passes_by_id_size);
1330 pass = passes->passes_by_id[pid];
1331 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1334 if (tag == LTO_symtab_analyzed_node)
1335 ref = streamer_read_hwi (ib);
1337 group = read_identifier (ib);
1338 if (group)
1339 ref2 = streamer_read_hwi (ib);
1341 /* Make sure that we have not read this node before. Nodes that
1342 have already been read will have their tag stored in the 'aux'
1343 field. Since built-in functions can be referenced in multiple
1344 functions, they are expected to be read more than once. */
1345 if (node->aux && !fndecl_built_in_p (node->decl))
1346 internal_error ("bytecode stream: found multiple instances of cgraph "
1347 "node with uid %d", node->get_uid ());
1349 node->tp_first_run = streamer_read_uhwi (ib);
1351 bp = streamer_read_bitpack (ib);
1353 input_overwrite_node (file_data, node, tag, &bp, &has_thunk_info);
1355 /* Store a reference for now, and fix up later to be a pointer. */
1356 node->inlined_to = (cgraph_node *) (intptr_t) ref;
1358 if (group)
1360 node->set_comdat_group (group);
1361 /* Store a reference for now, and fix up later to be a pointer. */
1362 node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1364 else
1365 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1366 section = read_string (ib);
1367 if (section)
1368 node->set_section_for_node (section);
1370 if (node->alias && !node->analyzed && node->weakref)
1371 node->alias_target = get_alias_symbol (node->decl);
1372 node->profile_id = streamer_read_hwi (ib);
1373 node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
1374 if (symtab->max_unit < node->unit_id)
1375 symtab->max_unit = node->unit_id;
1376 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1377 node->set_init_priority (streamer_read_hwi (ib));
1378 if (DECL_STATIC_DESTRUCTOR (node->decl))
1379 node->set_fini_priority (streamer_read_hwi (ib));
1381 if (has_thunk_info)
1382 thunk_info::get_create (node)->stream_in (ib);
1384 return node;
1387 /* Read a varpool node from input_block IB.
1388 Return the node read or overwritten. */
1390 static varpool_node *
1391 input_varpool_node (struct lto_file_decl_data *file_data,
1392 class lto_input_block *ib)
1394 tree var_decl;
1395 varpool_node *node;
1396 struct bitpack_d bp;
1397 int ref = LCC_NOT_FOUND;
1398 int order;
1399 tree group;
1400 const char *section;
1402 order = streamer_read_hwi (ib) + file_data->order_base;
1403 var_decl = lto_input_var_decl_ref (ib, file_data);
1405 /* Declarations of variables can already be merged with a declaration
1406 from another input file. We keep the symbol table unmerged until after
1407 streaming of IPA passes is done. Always forcibly create a fresh node. */
1408 node = varpool_node::create_empty ();
1409 node->decl = var_decl;
1410 node->register_symbol ();
1412 node->order = order;
1413 if (order >= symtab->order)
1414 symtab->order = order + 1;
1415 node->lto_file_data = file_data;
1417 bp = streamer_read_bitpack (ib);
1418 node->externally_visible = bp_unpack_value (&bp, 1);
1419 node->no_reorder = bp_unpack_value (&bp, 1);
1420 node->force_output = bp_unpack_value (&bp, 1);
1421 node->forced_by_abi = bp_unpack_value (&bp, 1);
1422 node->unique_name = bp_unpack_value (&bp, 1);
1423 node->body_removed = bp_unpack_value (&bp, 1);
1424 node->semantic_interposition = bp_unpack_value (&bp, 1);
1425 node->implicit_section = bp_unpack_value (&bp, 1);
1426 node->writeonly = bp_unpack_value (&bp, 1);
1427 node->definition = bp_unpack_value (&bp, 1);
1428 node->alias = bp_unpack_value (&bp, 1);
1429 node->transparent_alias = bp_unpack_value (&bp, 1);
1430 node->weakref = bp_unpack_value (&bp, 1);
1431 node->symver = bp_unpack_value (&bp, 1);
1432 node->analyzed = bp_unpack_value (&bp, 1);
1433 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1434 node->in_other_partition = bp_unpack_value (&bp, 1);
1435 if (node->in_other_partition)
1437 DECL_EXTERNAL (node->decl) = 1;
1438 TREE_STATIC (node->decl) = 0;
1440 if (node->alias && !node->analyzed && node->weakref)
1441 node->alias_target = get_alias_symbol (node->decl);
1442 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1443 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1444 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1445 group = read_identifier (ib);
1446 if (group)
1448 node->set_comdat_group (group);
1449 ref = streamer_read_hwi (ib);
1450 /* Store a reference for now, and fix up later to be a pointer. */
1451 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1453 else
1454 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1455 section = read_string (ib);
1456 if (section)
1457 node->set_section_for_node (section);
1458 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1459 LDPR_NUM_KNOWN);
1460 verify_node_partition (node);
1461 return node;
1464 /* Read a reference from input_block IB and attach it to REFERRING_NODE.
1465 NODES is the vector of previously read nodes, used to look up the referred symbol. */
1467 static void
1468 input_ref (class lto_input_block *ib,
1469 symtab_node *referring_node,
1470 vec<symtab_node *> nodes)
1472 symtab_node *node = NULL;
1473 struct bitpack_d bp;
1474 enum ipa_ref_use use;
1475 bool speculative;
1476 struct ipa_ref *ref;
1478 bp = streamer_read_bitpack (ib);
1479 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1480 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1481 node = nodes[streamer_read_hwi (ib)];
1482 ref = referring_node->create_reference (node, use);
1483 ref->speculative = speculative;
1484 if (is_a <cgraph_node *> (referring_node))
1486 ref->lto_stmt_uid = streamer_read_hwi (ib);
1487 bp = streamer_read_bitpack (ib);
1488 ref->speculative_id = bp_unpack_value (&bp, 16);
1492 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1493 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1494 edge being read is indirect (in the sense that it has
1495 indirect_unknown_callee set). */
1497 static void
1498 input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
1499 bool indirect)
1501 struct cgraph_node *caller, *callee;
1502 struct cgraph_edge *edge;
1503 unsigned int stmt_id, speculative_id;
1504 profile_count count;
1505 cgraph_inline_failed_t inline_failed;
1506 struct bitpack_d bp;
1507 int ecf_flags = 0;
1509 caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1510 if (caller == NULL || caller->decl == NULL_TREE)
1511 internal_error ("bytecode stream: no caller found while reading edge");
1513 if (!indirect)
1515 callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1516 if (callee == NULL || callee->decl == NULL_TREE)
1517 internal_error ("bytecode stream: no callee found while reading edge");
1519 else
1520 callee = NULL;
1522 count = profile_count::stream_in (ib);
1524 bp = streamer_read_bitpack (ib);
1525 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1526 stmt_id = bp_unpack_var_len_unsigned (&bp);
1527 speculative_id = bp_unpack_value (&bp, 16);
1529 if (indirect)
1530 edge = caller->create_indirect_edge (NULL, 0, count);
1531 else
1532 edge = caller->create_edge (callee, NULL, count);
1534 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1535 edge->speculative = bp_unpack_value (&bp, 1);
1536 edge->lto_stmt_uid = stmt_id;
1537 edge->speculative_id = speculative_id;
1538 edge->inline_failed = inline_failed;
1539 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1540 edge->can_throw_external = bp_unpack_value (&bp, 1);
1541 edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
1542 if (indirect)
1544 if (bp_unpack_value (&bp, 1))
1545 ecf_flags |= ECF_CONST;
1546 if (bp_unpack_value (&bp, 1))
1547 ecf_flags |= ECF_PURE;
1548 if (bp_unpack_value (&bp, 1))
1549 ecf_flags |= ECF_NORETURN;
1550 if (bp_unpack_value (&bp, 1))
1551 ecf_flags |= ECF_MALLOC;
1552 if (bp_unpack_value (&bp, 1))
1553 ecf_flags |= ECF_NOTHROW;
1554 if (bp_unpack_value (&bp, 1))
1555 ecf_flags |= ECF_RETURNS_TWICE;
1556 edge->indirect_info->ecf_flags = ecf_flags;
1558 edge->indirect_info->num_speculative_call_targets
1559 = bp_unpack_value (&bp, 16);
1564 /* Read a cgraph from IB using the info in FILE_DATA. */
1566 static vec<symtab_node *>
1567 input_cgraph_1 (struct lto_file_decl_data *file_data,
1568 class lto_input_block *ib)
1570 enum LTO_symtab_tags tag;
1571 vec<symtab_node *> nodes = vNULL;
1572 symtab_node *node;
1573 unsigned i;
1575 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1576 file_data->order_base = symtab->order;
1577 file_data->unit_base = symtab->max_unit + 1;
1578 while (tag)
1580 if (tag == LTO_symtab_edge)
1581 input_edge (ib, nodes, false);
1582 else if (tag == LTO_symtab_indirect_edge)
1583 input_edge (ib, nodes, true);
1584 else if (tag == LTO_symtab_variable)
1586 node = input_varpool_node (file_data, ib);
1587 nodes.safe_push (node);
1588 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1590 else
1592 node = input_node (file_data, ib, tag, nodes);
1593 if (node == NULL || node->decl == NULL_TREE)
1594 internal_error ("bytecode stream: found empty cgraph node");
1595 nodes.safe_push (node);
1596 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1599 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1602 lto_input_toplevel_asms (file_data, file_data->order_base);
1604 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1605 if (flag_checking)
1607 FOR_EACH_VEC_ELT (nodes, i, node)
1608 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1610 FOR_EACH_VEC_ELT (nodes, i, node)
1612 int ref;
1613 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1615 ref = (int) (intptr_t) cnode->inlined_to;
1617 /* We share declarations of builtins, so we may read the same node twice. */
1618 if (!node->aux)
1619 continue;
1620 node->aux = NULL;
1622 /* Fixup inlined_to from reference to pointer. */
1623 if (ref != LCC_NOT_FOUND)
1624 dyn_cast<cgraph_node *> (node)->inlined_to
1625 = dyn_cast<cgraph_node *> (nodes[ref]);
1626 else
1627 cnode->inlined_to = NULL;
1630 ref = (int) (intptr_t) node->same_comdat_group;
1632 /* Fixup same_comdat_group from reference to pointer. */
1633 if (ref != LCC_NOT_FOUND)
1634 node->same_comdat_group = nodes[ref];
1635 else
1636 node->same_comdat_group = NULL;
1638 FOR_EACH_VEC_ELT (nodes, i, node)
1639 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1640 return nodes;
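/* Note added for exposition (not part of the original source): while the
   stream is being read, inlined_to and same_comdat_group temporarily hold
   integer indices into NODES cast to pointers; the loop above converts them
   to real pointers once every node is available.  */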
1643 /* Input ipa_refs. */
1645 static void
1646 input_refs (class lto_input_block *ib,
1647 vec<symtab_node *> nodes)
1649 int count;
1650 int idx;
1651 while (true)
1653 symtab_node *node;
1654 count = streamer_read_uhwi (ib);
1655 if (!count)
1656 break;
1657 idx = streamer_read_uhwi (ib);
1658 node = nodes[idx];
1659 while (count)
1661 input_ref (ib, node, nodes);
1662 count--;
1664 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1665 if (cnode->declare_variant_alt)
1666 omp_lto_input_declare_variant_alt (ib, cnode, nodes);
1670 /* Input profile_info from IB. */
1671 static void
1672 input_profile_summary (class lto_input_block *ib,
1673 struct lto_file_decl_data *file_data)
1675 unsigned int runs = streamer_read_uhwi (ib);
1676 if (runs)
1678 file_data->profile_info.runs = runs;
1680 /* IPA-profile computes the hot bb threshold based on the cumulative
1681 whole-program profile. We need to stream it down to ltrans. */
1682 if (flag_ltrans)
1683 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1688 /* Rescale profile summaries to the same number of runs in the whole unit. */
1690 static void
1691 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1693 struct lto_file_decl_data *file_data;
1694 unsigned int j;
1695 gcov_unsigned_t max_runs = 0;
1696 struct cgraph_node *node;
1697 struct cgraph_edge *edge;
1699 /* Find the unit with the maximal number of runs. If we ever get serious about
1700 roundoff errors, we might also consider computing the smallest common
1701 multiple. */
1702 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1703 if (max_runs < file_data->profile_info.runs)
1704 max_runs = file_data->profile_info.runs;
1706 if (!max_runs)
1707 return;
1709 /* Simple overflow check. We probably don't need to support that many train
1710 runs. Such a large value probably implies data corruption anyway. */
1711 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1713 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1714 INT_MAX / REG_BR_PROB_BASE);
1715 return;
1718 profile_info = XCNEW (gcov_summary);
1719 profile_info->runs = max_runs;
1721 /* If merging already happened at WPA time, we are done. */
1722 if (flag_ltrans)
1723 return;
1725 /* Now compute count_materialization_scale of each node.
1726 During LTRANS we already have values of count_materialization_scale
1727 computed, so just update them. */
1728 FOR_EACH_FUNCTION (node)
1729 if (node->lto_file_data
1730 && node->lto_file_data->profile_info.runs)
1732 int scale;
1734 scale = RDIV (node->count_materialization_scale * max_runs,
1735 node->lto_file_data->profile_info.runs);
1736 node->count_materialization_scale = scale;
1737 if (scale < 0)
1738 fatal_error (input_location, "Profile information in %s corrupted",
1739 file_data->file_name);
1741 if (scale == REG_BR_PROB_BASE)
1742 continue;
1743 for (edge = node->callees; edge; edge = edge->next_callee)
1744 if (edge->count.ipa ().nonzero_p ())
1745 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1746 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1747 if (edge->count.ipa ().nonzero_p ())
1748 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1749 if (node->count.ipa ().nonzero_p ())
1750 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1754 /* Input and merge the symtab from each of the .o files passed to
1755 lto1. */
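/* For each input file this reads the profile summary, the symtab node
   table and the IPA reference table, and, when running as LTRANS, the
   optimization summary; the profile summaries of all files are merged
   afterwards and the temporary aux markers are cleared. */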
1757 void
1758 input_symtab (void)
1760 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1761 struct lto_file_decl_data *file_data;
1762 unsigned int j = 0;
1763 struct cgraph_node *node;
1765 while ((file_data = file_data_vec[j++]))
1767 const char *data;
1768 size_t len;
1769 class lto_input_block *ib;
1770 vec<symtab_node *> nodes;
1772 ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1773 &data, &len);
1774 if (!ib)
1775 fatal_error (input_location,
1776 "cannot find LTO cgraph in %s", file_data->file_name);
1777 input_profile_summary (ib, file_data);
1778 file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1779 nodes = input_cgraph_1 (file_data, ib);
1780 lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1781 ib, data, len);
1783 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1784 &data, &len);
1785 if (!ib)
1786 fatal_error (input_location, "cannot find LTO section refs in %s",
1787 file_data->file_name);
1788 input_refs (ib, nodes);
1789 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1790 ib, data, len);
1791 if (flag_ltrans)
1792 input_cgraph_opt_summary (nodes);
1793 nodes.release ();
1796 merge_profile_summaries (file_data_vec);
1798 /* Clear out the aux field that was used to store enough state to
1799 tell which nodes should be overwritten. */
1800 FOR_EACH_FUNCTION (node)
1802 /* Some nodes may have been created by cgraph_node. This
1803 happens when the callgraph contains nested functions. If the
1804 node for the parent function was never emitted to the gimple
1805 file, cgraph_node will create a node for it when setting the
1806 context of the nested function. */
1807 if (node->lto_file_data)
1808 node->aux = NULL;
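/* Write the names of the OpenMP requires clauses set in REQUIRES_MASK
   into BUF of SIZE bytes as a comma-separated list, e.g.
   "unified_address, reverse_offload". */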
1812 static void
1813 omp_requires_to_name (char *buf, size_t size, HOST_WIDE_INT requires_mask)
1815 char *end = buf + size, *p = buf;
1816 if (requires_mask & GOMP_REQUIRES_UNIFIED_ADDRESS)
1817 p += snprintf (p, end - p, "unified_address");
1818 if (requires_mask & GOMP_REQUIRES_UNIFIED_SHARED_MEMORY)
1819 p += snprintf (p, end - p, "%sunified_shared_memory",
1820 (p == buf ? "" : ", "));
1821 if (requires_mask & GOMP_REQUIRES_REVERSE_OFFLOAD)
1822 p += snprintf (p, end - p, "%sreverse_offload",
1823 (p == buf ? "" : ", "));
1826 /* Input function/variable tables that will allow libgomp to look up offload
1827 target code, and store them into OFFLOAD_FUNCS, OFFLOAD_IND_FUNCS and OFFLOAD_VARS. */
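/* The offload table reuses the symtab tags: LTO_symtab_unavail_node
   marks an offloaded function, LTO_symtab_variable an offloaded
   variable, LTO_symtab_indirect_function an indirect target function
   and LTO_symtab_edge carries the OpenMP requires mask of the unit;
   a zero tag terminates the table. */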
1829 void
1830 input_offload_tables (bool do_force_output)
1832 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1833 struct lto_file_decl_data *file_data;
1834 unsigned int j = 0;
1835 const char *requires_fn = NULL;
1836 tree requires_decl = NULL_TREE;
1838 omp_requires_mask = (omp_requires) 0;
1840 while ((file_data = file_data_vec[j++]))
1842 const char *data;
1843 size_t len;
1844 class lto_input_block *ib
1845 = lto_create_simple_input_block (file_data, LTO_section_offload_table,
1846 &data, &len);
1847 if (!ib)
1848 continue;
1850 tree tmp_decl = NULL_TREE;
1851 enum LTO_symtab_tags tag
1852 = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1853 while (tag)
1855 if (tag == LTO_symtab_unavail_node)
1857 tree fn_decl
1858 = lto_input_fn_decl_ref (ib, file_data);
1859 vec_safe_push (offload_funcs, fn_decl);
1861 /* Prevent IPA from removing fn_decl as unreachable, since there
1862 may be no refs from the parent function to child_fn in offload
1863 LTO mode. */
1864 if (do_force_output)
1865 cgraph_node::get (fn_decl)->mark_force_output ();
1866 tmp_decl = fn_decl;
1868 else if (tag == LTO_symtab_variable)
1870 tree var_decl
1871 = lto_input_var_decl_ref (ib, file_data);
1872 vec_safe_push (offload_vars, var_decl);
1874 /* Prevent IPA from removing var_decl as unused, since there
1875 may be no refs to var_decl in offload LTO mode. */
1876 if (do_force_output)
1877 varpool_node::get (var_decl)->force_output = 1;
1878 tmp_decl = var_decl;
1880 else if (tag == LTO_symtab_indirect_function)
1882 tree fn_decl
1883 = lto_input_fn_decl_ref (ib, file_data);
1884 vec_safe_push (offload_ind_funcs, fn_decl);
1886 /* Prevent IPA from removing fn_decl as unreachable, since there
1887 may be no refs from the parent function to child_fn in offload
1888 LTO mode. */
1889 if (do_force_output)
1890 cgraph_node::get (fn_decl)->mark_force_output ();
1891 tmp_decl = fn_decl;
1893 else if (tag == LTO_symtab_edge)
1895 static bool error_emitted = false;
1896 HOST_WIDE_INT val = streamer_read_hwi (ib);
1898 if (omp_requires_mask == 0)
1900 omp_requires_mask = (omp_requires) val;
1901 requires_decl = tmp_decl;
1902 requires_fn = file_data->file_name;
1904 else if (omp_requires_mask != val && !error_emitted)
1906 const char *fn1 = requires_fn;
1907 if (requires_decl != NULL_TREE)
1909 while (DECL_CONTEXT (requires_decl) != NULL_TREE
1910 && TREE_CODE (requires_decl) != TRANSLATION_UNIT_DECL)
1911 requires_decl = DECL_CONTEXT (requires_decl);
1912 if (requires_decl != NULL_TREE)
1913 fn1 = IDENTIFIER_POINTER (DECL_NAME (requires_decl));
1916 const char *fn2 = file_data->file_name;
1917 if (tmp_decl != NULL_TREE)
1919 while (DECL_CONTEXT (tmp_decl) != NULL_TREE
1920 && TREE_CODE (tmp_decl) != TRANSLATION_UNIT_DECL)
1921 tmp_decl = DECL_CONTEXT (tmp_decl);
1922 if (tmp_decl != NULL_TREE)
1923 fn2 = IDENTIFIER_POINTER (DECL_NAME (tmp_decl));
1925 if (fn1 == fn2)
1927 fn1 = requires_fn;
1928 fn2 = file_data->file_name;
1931 char buf1[sizeof ("unified_address, unified_shared_memory, "
1932 "reverse_offload")];
1933 char buf2[sizeof ("unified_address, unified_shared_memory, "
1934 "reverse_offload")];
1935 omp_requires_to_name (buf2, sizeof (buf2),
1936 val != OMP_REQUIRES_TARGET_USED
1937 ? val
1938 : (HOST_WIDE_INT) omp_requires_mask);
1939 if (val != OMP_REQUIRES_TARGET_USED
1940 && omp_requires_mask != OMP_REQUIRES_TARGET_USED)
1942 omp_requires_to_name (buf1, sizeof (buf1),
1943 omp_requires_mask);
1944 error ("OpenMP %<requires%> directive with non-identical "
1945 "clauses in multiple compilation units: %qs vs. "
1946 "%qs", buf1, buf2);
1947 inform (UNKNOWN_LOCATION, "%qs has %qs", fn1, buf1);
1948 inform (UNKNOWN_LOCATION, "%qs has %qs", fn2, buf2);
1950 else
1952 error ("OpenMP %<requires%> directive with %qs specified "
1953 "only in some compilation units", buf2);
1954 inform (UNKNOWN_LOCATION, "%qs has %qs",
1955 val != OMP_REQUIRES_TARGET_USED ? fn2 : fn1,
1956 buf2);
1957 inform (UNKNOWN_LOCATION, "but %qs has not",
1958 val != OMP_REQUIRES_TARGET_USED ? fn1 : fn2);
1960 error_emitted = true;
1963 else
1964 fatal_error (input_location,
1965 "invalid offload table in %s", file_data->file_name);
1967 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1970 lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1971 ib, data, len);
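/* In the offload compiler, hand the merged OpenMP requires mask on
   through the file named by the GCC_OFFLOAD_OMP_REQUIRES_FILE
   environment variable. */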
1973 #ifdef ACCEL_COMPILER
1974 char *omp_requires_file = getenv ("GCC_OFFLOAD_OMP_REQUIRES_FILE");
1975 if (omp_requires_file == NULL || omp_requires_file[0] == '\0')
1976 fatal_error (input_location, "GCC_OFFLOAD_OMP_REQUIRES_FILE unset");
1977 FILE *f = fopen (omp_requires_file, "wb");
1978 if (!f)
1979 fatal_error (input_location, "Cannot open omp_requires file %qs",
1980 omp_requires_file);
1981 uint32_t req_mask = omp_requires_mask;
1982 fwrite (&req_mask, sizeof (req_mask), 1, f);
1983 fclose (f);
1984 #endif
1987 /* True when we need optimization summary for NODE. */
1989 static bool
1990 output_cgraph_opt_summary_p (struct cgraph_node *node)
1992 if (node->clone_of || node->former_clone_of)
1993 return true;
1994 clone_info *info = clone_info::get (node);
1995 return info && (info->tree_map || info->param_adjustments);
1998 /* Output optimization summary for EDGE to OB. */
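/* Currently nothing is streamed per edge; the matching reader is
   input_edge_opt_summary. */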
1999 static void
2000 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
2001 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
2005 /* Output optimization summary for NODE to OB. */
2007 static void
2008 output_node_opt_summary (struct output_block *ob,
2009 struct cgraph_node *node,
2010 lto_symtab_encoder_t encoder)
2012 struct ipa_replace_map *map;
2013 int i;
2014 struct cgraph_edge *e;
2016 /* TODO: Should this code be moved to ipa-param-manipulation? */
2017 struct bitpack_d bp;
2018 bp = bitpack_create (ob->main_stream);
2019 clone_info *info = clone_info::get (node);
2021 bp_pack_value (&bp, (info && info->param_adjustments != NULL), 1);
2022 streamer_write_bitpack (&bp);
2023 if (ipa_param_adjustments *adjustments
2024 = info ? info->param_adjustments : NULL)
2026 streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
2027 ipa_adjusted_param *adj;
2028 FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
2030 bp = bitpack_create (ob->main_stream);
2031 bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
2032 bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
2033 bp_pack_value (&bp, adj->op, 2);
2034 bp_pack_value (&bp, adj->param_prefix_index, 2);
2035 bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
2036 bp_pack_value (&bp, adj->reverse, 1);
2037 bp_pack_value (&bp, adj->user_flag, 1);
2038 streamer_write_bitpack (&bp);
2039 if (adj->op == IPA_PARAM_OP_SPLIT
2040 || adj->op == IPA_PARAM_OP_NEW)
2042 stream_write_tree (ob, adj->type, true);
2043 if (adj->op == IPA_PARAM_OP_SPLIT)
2045 stream_write_tree (ob, adj->alias_ptr_type, true);
2046 streamer_write_uhwi (ob, adj->unit_offset);
2050 streamer_write_hwi (ob, adjustments->m_always_copy_start);
2051 bp = bitpack_create (ob->main_stream);
2052 bp_pack_value (&bp, info->param_adjustments->m_skip_return, 1);
2053 streamer_write_bitpack (&bp);
2056 streamer_write_uhwi (ob, info ? vec_safe_length (info->tree_map) : 0);
2057 if (info)
2058 FOR_EACH_VEC_SAFE_ELT (info->tree_map, i, map)
2060 streamer_write_uhwi (ob, map->parm_num);
2061 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
2062 stream_write_tree (ob, map->new_tree, true);
2065 if (lto_symtab_encoder_in_partition_p (encoder, node))
2067 for (e = node->callees; e; e = e->next_callee)
2068 output_edge_opt_summary (ob, e);
2069 for (e = node->indirect_calls; e; e = e->next_callee)
2070 output_edge_opt_summary (ob, e);
2074 /* Output optimization summaries stored in the callgraph.
2075 At the moment this is only the clone info structure. */
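/* The section records the number of nodes that have a summary, followed
   by, for each such node, its index in the symtab encoder and the node
   summary itself. */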
2077 static void
2078 output_cgraph_opt_summary (void)
2080 int i, n_nodes;
2081 lto_symtab_encoder_t encoder;
2082 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
2083 unsigned count = 0;
2085 ob->symbol = NULL;
2086 encoder = ob->decl_state->symtab_node_encoder;
2087 n_nodes = lto_symtab_encoder_size (encoder);
2088 for (i = 0; i < n_nodes; i++)
2090 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2091 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2092 if (cnode && output_cgraph_opt_summary_p (cnode))
2093 count++;
2095 streamer_write_uhwi (ob, count);
2096 for (i = 0; i < n_nodes; i++)
2098 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
2099 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2100 if (cnode && output_cgraph_opt_summary_p (cnode))
2102 streamer_write_uhwi (ob, i);
2103 output_node_opt_summary (ob, cnode, encoder);
2106 produce_asm (ob, NULL);
2107 destroy_output_block (ob);
2110 /* Input optimization summary of EDGE. */
2112 static void
2113 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
2114 class lto_input_block *ib_main ATTRIBUTE_UNUSED)
2118 /* Input optimization summary of NODE. */
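/* The format mirrors output_node_opt_summary: an optional bit-packed
   ipa_param_adjustments block followed by the tree replacement map and
   the per-edge summaries. */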
2120 static void
2121 input_node_opt_summary (struct cgraph_node *node,
2122 class lto_input_block *ib_main,
2123 class data_in *data_in)
2125 int i;
2126 int count;
2127 struct cgraph_edge *e;
2129 /* TODO: Should this code be moved to ipa-param-manipulation? */
2130 struct bitpack_d bp;
2131 bp = streamer_read_bitpack (ib_main);
2132 bool have_adjustments = bp_unpack_value (&bp, 1);
2133 clone_info *info = clone_info::get_create (node);
2135 if (have_adjustments)
2137 count = streamer_read_uhwi (ib_main);
2138 vec<ipa_adjusted_param, va_gc> *new_params = NULL;
2139 for (i = 0; i < count; i++)
2141 ipa_adjusted_param adj;
2142 memset (&adj, 0, sizeof (adj));
2143 bp = streamer_read_bitpack (ib_main);
2144 adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
2145 adj.prev_clone_index
2146 = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
2147 adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
2148 adj.param_prefix_index = bp_unpack_value (&bp, 2);
2149 adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
2150 adj.reverse = bp_unpack_value (&bp, 1);
2151 adj.user_flag = bp_unpack_value (&bp, 1);
2152 if (adj.op == IPA_PARAM_OP_SPLIT
2153 || adj.op == IPA_PARAM_OP_NEW)
2155 adj.type = stream_read_tree (ib_main, data_in);
2156 if (adj.op == IPA_PARAM_OP_SPLIT)
2158 adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
2159 adj.unit_offset = streamer_read_uhwi (ib_main);
2162 vec_safe_push (new_params, adj);
2164 int always_copy_start = streamer_read_hwi (ib_main);
2165 bp = streamer_read_bitpack (ib_main);
2166 bool skip_return = bp_unpack_value (&bp, 1);
2167 info->param_adjustments
2168 = (new (ggc_alloc <ipa_param_adjustments> ())
2169 ipa_param_adjustments (new_params, always_copy_start, skip_return));
2172 count = streamer_read_uhwi (ib_main);
2173 for (i = 0; i < count; i++)
2175 struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
2177 vec_safe_push (info->tree_map, map);
2178 map->parm_num = streamer_read_uhwi (ib_main);
2179 map->new_tree = stream_read_tree (ib_main, data_in);
2181 for (e = node->callees; e; e = e->next_callee)
2182 input_edge_opt_summary (e, ib_main);
2183 for (e = node->indirect_calls; e; e = e->next_callee)
2184 input_edge_opt_summary (e, ib_main);
2187 /* Read the cgraph optimization summary section of FILE_DATA with data DATA of length LEN and apply it to the nodes in NODES. */
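/* The section body is an lto_function_header followed by the main
   stream and the string table; only the main stream and the strings
   are consumed here. */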
2189 static void
2190 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2191 const char *data, size_t len,
2192 vec<symtab_node *> nodes)
2194 const struct lto_function_header *header =
2195 (const struct lto_function_header *) data;
2196 const int cfg_offset = sizeof (struct lto_function_header);
2197 const int main_offset = cfg_offset + header->cfg_size;
2198 const int string_offset = main_offset + header->main_size;
2199 class data_in *data_in;
2200 unsigned int i;
2201 unsigned int count;
2203 lto_input_block ib_main ((const char *) data + main_offset,
2204 header->main_size, file_data);
2206 data_in =
2207 lto_data_in_create (file_data, (const char *) data + string_offset,
2208 header->string_size, vNULL);
2209 count = streamer_read_uhwi (&ib_main);
2211 for (i = 0; i < count; i++)
2213 int ref = streamer_read_uhwi (&ib_main);
2214 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2215 &ib_main, data_in);
2217 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2218 len);
2219 lto_data_in_delete (data_in);
2222 /* Input optimization summary of cgraph. */
2224 static void
2225 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2227 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2228 struct lto_file_decl_data *file_data;
2229 unsigned int j = 0;
2231 while ((file_data = file_data_vec[j++]))
2233 size_t len;
2234 const char *data
2235 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2236 &len);
2237 if (data)
2238 input_cgraph_opt_section (file_data, data, len, nodes);