1 /* Top-level LTO routines.
2 Copyright (C) 2009-2013 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "ggc.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
35 #include "vec.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "ipa-prop.h"
39 #include "common.h"
40 #include "debug.h"
41 #include "gimple.h"
42 #include "lto.h"
43 #include "lto-tree.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
49 static GTY(()) tree first_personality_decl;
51 /* Returns a hash code for P. */
53 static hashval_t
54 hash_name (const void *p)
56 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
57 return (hashval_t) htab_hash_string (ds->name);
61 /* Returns nonzero if P1 and P2 are equal. */
63 static int
64 eq_name (const void *p1, const void *p2)
66 const struct lto_section_slot *s1 =
67 (const struct lto_section_slot *) p1;
68 const struct lto_section_slot *s2 =
69 (const struct lto_section_slot *) p2;
71 return strcmp (s1->name, s2->name) == 0;
74 /* Free an lto_section_slot. */
76 static void
77 free_with_string (void *arg)
79 struct lto_section_slot *s = (struct lto_section_slot *)arg;
81 free (CONST_CAST (char *, s->name));
82 free (arg);
85 /* Create the section hash table. */
87 htab_t
88 lto_obj_create_section_hash_table (void)
90 return htab_create (37, hash_name, eq_name, free_with_string);
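/* A minimal sketch, not part of the LTO reader itself, of how the table
   created above is meant to be queried: lookups key off the NAME field
   only, matching hash_name and eq_name.  sketch_find_section is a
   hypothetical helper name.  */

static ATTRIBUTE_UNUSED struct lto_section_slot *
sketch_find_section (htab_t section_hash_table, const char *name)
{
  struct lto_section_slot s_slot;
  /* Only the name matters for hashing and equality.  */
  s_slot.name = name;
  return (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
}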
93 /* Delete an allocated integer KEY in the splay tree. */
95 static void
96 lto_splay_tree_delete_id (splay_tree_key key)
98 free ((void *) key);
101 /* Compare splay tree node ids A and B. */
103 static int
104 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
106 unsigned HOST_WIDE_INT ai;
107 unsigned HOST_WIDE_INT bi;
109 ai = *(unsigned HOST_WIDE_INT *) a;
110 bi = *(unsigned HOST_WIDE_INT *) b;
112 if (ai < bi)
113 return -1;
114 else if (ai > bi)
115 return 1;
116 return 0;
119 /* Look up splay tree node by ID in splay tree T. */
121 static splay_tree_node
122 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
124 return splay_tree_lookup (t, (splay_tree_key) &id);
127 /* Check if KEY has ID. */
129 static bool
130 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
132 return *(unsigned HOST_WIDE_INT *) key == id;
135 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
136 The ID is allocated separately because we need HOST_WIDE_INTs which may
137 be wider than a splay_tree_key. */
139 static void
140 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
141 struct lto_file_decl_data *file_data)
143 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
144 *idp = id;
145 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
148 /* Create a splay tree. */
150 static splay_tree
151 lto_splay_tree_new (void)
153 return splay_tree_new (lto_splay_tree_compare_ids,
154 lto_splay_tree_delete_id,
155 NULL);
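/* A minimal sketch of how the splay-tree wrappers above fit together.
   FILE_IDS and the 64-bit id are stand-ins for this example; the real
   ids come from the linker resolution file.  sketch_find_file_data is a
   hypothetical helper name.  */

static ATTRIBUTE_UNUSED struct lto_file_decl_data *
sketch_find_file_data (splay_tree file_ids, unsigned HOST_WIDE_INT id)
{
  /* The key is a pointer to a separately allocated HOST_WIDE_INT, so the
     lookup goes through the wrapper that takes the id's address.  */
  splay_tree_node n = lto_splay_tree_lookup (file_ids, id);
  return n ? (struct lto_file_decl_data *) n->value : NULL;
}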
158 /* Return true when NODE has a clone that is analyzed (i.e. we need
159 to load its body even if the node itself is not needed). */
161 static bool
162 has_analyzed_clone_p (struct cgraph_node *node)
164 struct cgraph_node *orig = node;
165 node = node->clones;
166 if (node)
167 while (node != orig)
169 if (node->symbol.analyzed)
170 return true;
171 if (node->clones)
172 node = node->clones;
173 else if (node->next_sibling_clone)
174 node = node->next_sibling_clone;
175 else
177 while (node != orig && !node->next_sibling_clone)
178 node = node->clone_of;
179 if (node != orig)
180 node = node->next_sibling_clone;
183 return false;
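/* A minimal sketch of the same recursion-free clone-tree walk used by
   has_analyzed_clone_p, here merely counting clones: descend into
   ->clones first, advance over ->next_sibling_clone, and climb back up
   via ->clone_of when a subtree is exhausted.  sketch_count_clones is a
   hypothetical helper and is not used by the LTO front end.  */

static ATTRIBUTE_UNUSED int
sketch_count_clones (struct cgraph_node *orig)
{
  int n = 0;
  struct cgraph_node *node = orig->clones;
  if (node)
    while (node != orig)
      {
        n++;
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != orig && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != orig)
              node = node->next_sibling_clone;
          }
      }
  return n;
}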
186 /* Read the function body for the function associated with NODE. */
188 static void
189 lto_materialize_function (struct cgraph_node *node)
191 tree decl;
192 struct lto_file_decl_data *file_data;
193 const char *data, *name;
194 size_t len;
196 decl = node->symbol.decl;
197 /* Read in functions with body (analyzed nodes)
198 and also functions that are needed to produce virtual clones. */
199 if ((cgraph_function_with_gimple_body_p (node) && node->symbol.analyzed)
200 || has_analyzed_clone_p (node))
202 /* Clones don't need to be read. */
203 if (node->clone_of)
204 return;
206 /* Load the function body only if not operating in WPA mode. In
207 WPA mode, the body of the function is not needed. */
208 if (!flag_wpa)
210 file_data = node->symbol.lto_file_data;
211 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
213 /* We may have renamed the declaration, e.g., a static function. */
214 name = lto_get_decl_name_mapping (file_data, name);
216 data = lto_get_section_data (file_data, LTO_section_function_body,
217 name, &len);
218 if (!data)
219 fatal_error ("%s: section %s is missing",
220 file_data->file_name,
221 name);
223 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
225 push_struct_function (decl);
226 announce_function (decl);
227 lto_input_function_body (file_data, decl, data);
228 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
229 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
230 lto_stats.num_function_bodies++;
231 lto_free_section_data (file_data, LTO_section_function_body, name,
232 data, len);
233 pop_cfun ();
234 ggc_collect ();
238 /* Let the middle end know about the function. */
239 rest_of_decl_compilation (decl, 1, 0);
243 /* Decode the content of memory pointed to by DATA in the in decl
244 state object STATE. DATA_IN points to a data_in structure for
245 decoding. Return the address after the decoded object in the
246 input. */
248 static const uint32_t *
249 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
250 struct lto_in_decl_state *state)
252 uint32_t ix;
253 tree decl;
254 uint32_t i, j;
256 ix = *data++;
257 decl = streamer_tree_cache_get (data_in->reader_cache, ix);
258 if (TREE_CODE (decl) != FUNCTION_DECL)
260 gcc_assert (decl == void_type_node);
261 decl = NULL_TREE;
263 state->fn_decl = decl;
265 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
267 uint32_t size = *data++;
268 tree *decls = ggc_alloc_vec_tree (size);
270 for (j = 0; j < size; j++)
271 decls[j] = streamer_tree_cache_get (data_in->reader_cache, data[j]);
273 state->streams[i].size = size;
274 state->streams[i].trees = decls;
275 data += size;
278 return data;
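/* A minimal sketch of the uint32_t layout decoded above:

     [ fn-decl cache index ]
     [ size of stream 0 ] [ that many cache indices ... ]
     ...
     [ size of stream LTO_N_DECL_STREAMS - 1 ] [ ... ]

   so the number of words one in-decl state occupies can be computed
   without materializing any trees.  sketch_in_decl_state_words is a
   hypothetical helper, not part of the reader.  */

static ATTRIBUTE_UNUSED size_t
sketch_in_decl_state_words (const uint32_t *data)
{
  size_t words = 1;   /* The function-decl reference.  */
  int i;
  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    /* One word for the stream size plus that many index words.  */
    words += 1 + data[words];
  return words;
}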
283 /* Global type table. FIXME, it should be possible to re-use some
284 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
285 etc), but those assume that types were built with the various
286 build_*_type routines which is not the case with the streamer. */
287 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
288 htab_t gimple_types;
289 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
290 htab_t type_hash_cache;
292 static hashval_t gimple_type_hash (const void *);
294 /* Structure used to maintain a cache of some type pairs compared by
295 gimple_types_compatible_p when comparing aggregate types. There are
296 three possible values for SAME_P:
298 -2: The pair (T1, T2) has just been inserted in the table.
299 0: T1 and T2 are different types.
300 1: T1 and T2 are the same type. */
302 struct type_pair_d
304 unsigned int uid1;
305 unsigned int uid2;
306 signed char same_p;
308 typedef struct type_pair_d *type_pair_t;
310 #define GIMPLE_TYPE_PAIR_SIZE 16381
311 struct type_pair_d *type_pair_cache;
314 /* Look up the pair of types T1 and T2 in the direct-mapped pair cache.
315 Insert a new entry if none existed. */
317 static inline type_pair_t
318 lookup_type_pair (tree t1, tree t2)
320 unsigned int index;
321 unsigned int uid1, uid2;
323 if (TYPE_UID (t1) < TYPE_UID (t2))
325 uid1 = TYPE_UID (t1);
326 uid2 = TYPE_UID (t2);
328 else
330 uid1 = TYPE_UID (t2);
331 uid2 = TYPE_UID (t1);
333 gcc_checking_assert (uid1 != uid2);
335 /* iterative_hash_hashval_t implies function calls.
336 We know that UIDs are in a limited range. */
337 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
338 % GIMPLE_TYPE_PAIR_SIZE);
339 if (type_pair_cache [index].uid1 == uid1
340 && type_pair_cache [index].uid2 == uid2)
341 return &type_pair_cache[index];
343 type_pair_cache [index].uid1 = uid1;
344 type_pair_cache [index].uid2 = uid2;
345 type_pair_cache [index].same_p = -2;
347 return &type_pair_cache[index];
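/* A minimal sketch of how the direct-mapped cache above is consulted
   (the same pattern gtc_visit and gimple_types_compatible_p use below).
   A collision simply evicts the previous pair, so SAME_P == -2 always
   means "no decision recorded for this particular pair yet".
   sketch_cached_same_p is a hypothetical helper name.  */

static ATTRIBUTE_UNUSED int
sketch_cached_same_p (tree t1, tree t2)
{
  type_pair_t p = lookup_type_pair (t1, t2);
  /* 0 or 1 for a cached decision, -2 for a freshly (re-)inserted entry.  */
  return p->same_p;
}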
350 /* Per pointer state for the SCC finding. The on_sccstack flag
351 is not strictly required; it is true when there is no hash value
352 recorded for the type and false otherwise. But querying that
353 is slower. */
355 struct sccs
357 unsigned int dfsnum;
358 unsigned int low;
359 bool on_sccstack;
360 union {
361 hashval_t hash;
362 signed char same_p;
363 } u;
366 static unsigned int next_dfs_num;
367 static unsigned int gtc_next_dfs_num;
369 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
371 typedef struct GTY(()) gimple_type_leader_entry_s {
372 tree type;
373 tree leader;
374 } gimple_type_leader_entry;
376 #define GIMPLE_TYPE_LEADER_SIZE 16381
377 static GTY((length("GIMPLE_TYPE_LEADER_SIZE")))
378 gimple_type_leader_entry *gimple_type_leader;
380 /* Lookup an existing leader for T and return it or NULL_TREE, if
381 there is none in the cache. */
383 static inline tree
384 gimple_lookup_type_leader (tree t)
386 gimple_type_leader_entry *leader;
388 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
389 if (leader->type != t)
390 return NULL_TREE;
392 return leader->leader;
396 /* Return true if T1 and T2 have the same name.  Two unnamed types
397 compare equal, while a named and an unnamed type, or two types with
398 different names, do not. */
400 static bool
401 compare_type_names_p (tree t1, tree t2)
403 tree name1 = TYPE_NAME (t1);
404 tree name2 = TYPE_NAME (t2);
406 if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
407 return false;
409 if (name1 == NULL_TREE)
410 return true;
412 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
413 if (TREE_CODE (name1) != TREE_CODE (name2))
414 return false;
416 if (TREE_CODE (name1) == TYPE_DECL)
417 name1 = DECL_NAME (name1);
418 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
420 if (TREE_CODE (name2) == TYPE_DECL)
421 name2 = DECL_NAME (name2);
422 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
424 /* Identifiers can be compared with pointer equality rather
425 than a string comparison. */
426 if (name1 == name2)
427 return true;
429 return false;
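/* A minimal sketch of why the pointer comparison above suffices:
   IDENTIFIER_NODEs are interned by get_identifier, so equal spellings
   share a single node.  sketch_same_identifier_p is a hypothetical
   helper operating on plain strings.  */

static ATTRIBUTE_UNUSED bool
sketch_same_identifier_p (const char *n1, const char *n2)
{
  /* Two get_identifier calls with equal strings return the same tree.  */
  return get_identifier (n1) == get_identifier (n2);
}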
432 static bool
433 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
434 vec<type_pair_t> *,
435 struct pointer_map_t *, struct obstack *);
437 /* DFS visit the edge from the caller's type pair with state *STATE to
438 the pair T1, T2.
439 Update the merging status if it is not part of the SCC containing the
440 caller's pair and return it.
441 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
443 static bool
444 gtc_visit (tree t1, tree t2,
445 struct sccs *state,
446 vec<type_pair_t> *sccstack,
447 struct pointer_map_t *sccstate,
448 struct obstack *sccstate_obstack)
450 struct sccs *cstate = NULL;
451 type_pair_t p;
452 void **slot;
453 tree leader1, leader2;
455 /* Check first for the obvious case of pointer identity. */
456 if (t1 == t2)
457 return true;
459 /* Check that we have two types to compare. */
460 if (t1 == NULL_TREE || t2 == NULL_TREE)
461 return false;
463 /* Can't be the same type if the types don't have the same code. */
464 if (TREE_CODE (t1) != TREE_CODE (t2))
465 return false;
467 /* Can't be the same type if they have different CV qualifiers. */
468 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
469 return false;
471 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
472 return false;
474 /* Void types and nullptr types are always the same. */
475 if (TREE_CODE (t1) == VOID_TYPE
476 || TREE_CODE (t1) == NULLPTR_TYPE)
477 return true;
479 /* Can't be the same type if they have different alignment or mode. */
480 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
481 || TYPE_MODE (t1) != TYPE_MODE (t2))
482 return false;
484 /* Do some simple checks before doing three hashtable queries. */
485 if (INTEGRAL_TYPE_P (t1)
486 || SCALAR_FLOAT_TYPE_P (t1)
487 || FIXED_POINT_TYPE_P (t1)
488 || TREE_CODE (t1) == VECTOR_TYPE
489 || TREE_CODE (t1) == COMPLEX_TYPE
490 || TREE_CODE (t1) == OFFSET_TYPE
491 || POINTER_TYPE_P (t1))
493 /* Can't be the same type if they have different sign or precision. */
494 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
495 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
496 return false;
498 if (TREE_CODE (t1) == INTEGER_TYPE
499 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
500 return false;
502 /* That's all we need to check for float and fixed-point types. */
503 if (SCALAR_FLOAT_TYPE_P (t1)
504 || FIXED_POINT_TYPE_P (t1))
505 return true;
507 /* For other types fall through to more complex checks. */
510 /* If the types have been previously registered and found equal
511 they still are. */
512 leader1 = gimple_lookup_type_leader (t1);
513 leader2 = gimple_lookup_type_leader (t2);
514 if (leader1 == t2
515 || t1 == leader2
516 || (leader1 && leader1 == leader2))
517 return true;
519 /* If the hash values of t1 and t2 are different the types can't
520 possibly be the same.  This helps keep the type-pair hashtable
521 small, only tracking comparisons for hash collisions. */
522 if (gimple_type_hash (t1) != gimple_type_hash (t2))
523 return false;
525 /* Allocate a new cache entry for this comparison. */
526 p = lookup_type_pair (t1, t2);
527 if (p->same_p == 0 || p->same_p == 1)
529 /* We have already decided whether T1 and T2 are the
530 same, return the cached result. */
531 return p->same_p == 1;
534 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
535 cstate = (struct sccs *)*slot;
536 /* Not yet visited. DFS recurse. */
537 if (!cstate)
539 gimple_types_compatible_p_1 (t1, t2, p,
540 sccstack, sccstate, sccstate_obstack);
541 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
542 state->low = MIN (state->low, cstate->low);
544 /* If the type is still on the SCC stack adjust the parent's low. */
545 if (cstate->dfsnum < state->dfsnum
546 && cstate->on_sccstack)
547 state->low = MIN (cstate->dfsnum, state->low);
549 /* Return the current lattice value. We start with an equality
550 assumption so types part of a SCC will be optimistically
551 treated equal unless proven otherwise. */
552 return cstate->u.same_p;
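/* A minimal sketch of the Tarjan-style low-link bookkeeping gtc_visit
   performs above (and that visit repeats for hashing below): a freshly
   recursed child contributes its LOW value, a child still on the SCC
   stack contributes its DFS number.  The parameters are stand-ins for
   the parent's struct sccs fields; sketch_update_low is a hypothetical
   helper.  */

static ATTRIBUTE_UNUSED unsigned int
sketch_update_low (unsigned int parent_low, unsigned int parent_dfsnum,
                   struct sccs *child, bool child_was_recursed)
{
  if (child_was_recursed)
    parent_low = MIN (parent_low, child->low);
  if (child->on_sccstack && child->dfsnum < parent_dfsnum)
    parent_low = MIN (child->dfsnum, parent_low);
  return parent_low;
}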
555 /* Worker for gimple_types_compatible_p.
556 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
558 static bool
559 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
560 vec<type_pair_t> *sccstack,
561 struct pointer_map_t *sccstate,
562 struct obstack *sccstate_obstack)
564 struct sccs *state;
566 gcc_assert (p->same_p == -2);
568 state = XOBNEW (sccstate_obstack, struct sccs);
569 *pointer_map_insert (sccstate, p) = state;
571 sccstack->safe_push (p);
572 state->dfsnum = gtc_next_dfs_num++;
573 state->low = state->dfsnum;
574 state->on_sccstack = true;
575 /* Start with an equality assumption. As we DFS recurse into child
576 SCCs this assumption may get revisited. */
577 state->u.same_p = 1;
579 /* The struct tags shall compare equal. */
580 if (!compare_type_names_p (t1, t2))
581 goto different_types;
583 /* The main variant of both types should compare equal. */
584 if (TYPE_MAIN_VARIANT (t1) != t1
585 || TYPE_MAIN_VARIANT (t2) != t2)
587 if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
588 state, sccstack, sccstate, sccstate_obstack))
589 goto different_types;
592 /* We may not merge typedef types to the same type in different
593 contexts. */
594 if (TYPE_NAME (t1)
595 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
596 && DECL_CONTEXT (TYPE_NAME (t1))
597 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
599 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
600 DECL_CONTEXT (TYPE_NAME (t2)),
601 state, sccstack, sccstate, sccstate_obstack))
602 goto different_types;
605 /* If their attributes are not the same they can't be the same type. */
606 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
607 goto different_types;
609 /* Do type-specific comparisons. */
610 switch (TREE_CODE (t1))
612 case VECTOR_TYPE:
613 case COMPLEX_TYPE:
614 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
615 state, sccstack, sccstate, sccstate_obstack))
616 goto different_types;
617 goto same_types;
619 case ARRAY_TYPE:
620 /* Array types are the same if the element types are the same and
621 the number of elements is the same. */
622 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
623 state, sccstack, sccstate, sccstate_obstack)
624 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
625 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
626 goto different_types;
627 else
629 tree i1 = TYPE_DOMAIN (t1);
630 tree i2 = TYPE_DOMAIN (t2);
632 /* For an incomplete external array, the type domain can be
633 NULL_TREE. Check this condition also. */
634 if (i1 == NULL_TREE && i2 == NULL_TREE)
635 goto same_types;
636 else if (i1 == NULL_TREE || i2 == NULL_TREE)
637 goto different_types;
638 else
640 tree min1 = TYPE_MIN_VALUE (i1);
641 tree min2 = TYPE_MIN_VALUE (i2);
642 tree max1 = TYPE_MAX_VALUE (i1);
643 tree max2 = TYPE_MAX_VALUE (i2);
645 /* The minimum/maximum values have to be the same. */
646 if ((min1 == min2
647 || (min1 && min2
648 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
649 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
650 || operand_equal_p (min1, min2, 0))))
651 && (max1 == max2
652 || (max1 && max2
653 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
654 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
655 || operand_equal_p (max1, max2, 0)))))
656 goto same_types;
657 else
658 goto different_types;
662 case METHOD_TYPE:
663 /* Method types should belong to the same class. */
664 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
665 state, sccstack, sccstate, sccstate_obstack))
666 goto different_types;
668 /* Fallthru */
670 case FUNCTION_TYPE:
671 /* Function types are the same if the return type and argument types
672 are the same. */
673 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
674 state, sccstack, sccstate, sccstate_obstack))
675 goto different_types;
677 if (!comp_type_attributes (t1, t2))
678 goto different_types;
680 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
681 goto same_types;
682 else
684 tree parms1, parms2;
686 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
687 parms1 && parms2;
688 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
690 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
691 state, sccstack, sccstate, sccstate_obstack))
692 goto different_types;
695 if (parms1 || parms2)
696 goto different_types;
698 goto same_types;
701 case OFFSET_TYPE:
703 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
704 state, sccstack, sccstate, sccstate_obstack)
705 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
706 TYPE_OFFSET_BASETYPE (t2),
707 state, sccstack, sccstate, sccstate_obstack))
708 goto different_types;
710 goto same_types;
713 case POINTER_TYPE:
714 case REFERENCE_TYPE:
716 /* If the two pointers have different ref-all attributes,
717 they can't be the same type. */
718 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
719 goto different_types;
721 /* Otherwise, pointer and reference types are the same if the
722 pointed-to types are the same. */
723 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
724 state, sccstack, sccstate, sccstate_obstack))
725 goto same_types;
727 goto different_types;
730 case INTEGER_TYPE:
731 case BOOLEAN_TYPE:
733 tree min1 = TYPE_MIN_VALUE (t1);
734 tree max1 = TYPE_MAX_VALUE (t1);
735 tree min2 = TYPE_MIN_VALUE (t2);
736 tree max2 = TYPE_MAX_VALUE (t2);
737 bool min_equal_p = false;
738 bool max_equal_p = false;
740 /* If either type has a minimum value, the other type must
741 have the same. */
742 if (min1 == NULL_TREE && min2 == NULL_TREE)
743 min_equal_p = true;
744 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
745 min_equal_p = true;
747 /* Likewise, if either type has a maximum value, the other
748 type must have the same. */
749 if (max1 == NULL_TREE && max2 == NULL_TREE)
750 max_equal_p = true;
751 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
752 max_equal_p = true;
754 if (!min_equal_p || !max_equal_p)
755 goto different_types;
757 goto same_types;
760 case ENUMERAL_TYPE:
762 /* FIXME lto, we cannot check bounds on enumeral types because
763 different front ends will produce different values.
764 In C, enumeral types are integers, while in C++ each element
765 will have its own symbolic value. We should decide how enums
766 are to be represented in GIMPLE and have each front end lower
767 to that. */
768 tree v1, v2;
770 /* For enumeral types, all the values must be the same. */
771 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
772 goto same_types;
774 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
775 v1 && v2;
776 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
778 tree c1 = TREE_VALUE (v1);
779 tree c2 = TREE_VALUE (v2);
781 if (TREE_CODE (c1) == CONST_DECL)
782 c1 = DECL_INITIAL (c1);
784 if (TREE_CODE (c2) == CONST_DECL)
785 c2 = DECL_INITIAL (c2);
787 if (tree_int_cst_equal (c1, c2) != 1)
788 goto different_types;
790 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
791 goto different_types;
794 /* If one enumeration has more values than the other, they
795 are not the same. */
796 if (v1 || v2)
797 goto different_types;
799 goto same_types;
802 case RECORD_TYPE:
803 case UNION_TYPE:
804 case QUAL_UNION_TYPE:
806 tree f1, f2;
808 /* For aggregate types, all the fields must be the same. */
809 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
810 f1 && f2;
811 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
813 /* Different field kinds are not compatible. */
814 if (TREE_CODE (f1) != TREE_CODE (f2))
815 goto different_types;
816 /* Field decls must have the same name and offset. */
817 if (TREE_CODE (f1) == FIELD_DECL
818 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
819 || !gimple_compare_field_offset (f1, f2)))
820 goto different_types;
821 /* All entities should have the same name and type. */
822 if (DECL_NAME (f1) != DECL_NAME (f2)
823 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
824 state, sccstack, sccstate, sccstate_obstack))
825 goto different_types;
828 /* If one aggregate has more fields than the other, they
829 are not the same. */
830 if (f1 || f2)
831 goto different_types;
833 goto same_types;
836 default:
837 gcc_unreachable ();
840 /* Common exit path for types that are not compatible. */
841 different_types:
842 state->u.same_p = 0;
843 goto pop;
845 /* Common exit path for types that are compatible. */
846 same_types:
847 gcc_assert (state->u.same_p == 1);
849 pop:
850 if (state->low == state->dfsnum)
852 type_pair_t x;
854 /* Pop off the SCC and set its cache values to the final
855 comparison result. */
858 struct sccs *cstate;
859 x = sccstack->pop ();
860 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
861 cstate->on_sccstack = false;
862 x->same_p = state->u.same_p;
864 while (x != p);
867 return state->u.same_p;
870 /* Return true iff T1 and T2 are structurally identical, using the
871 SCC-based comparison machinery above to handle recursive types
872 optimistically. */
874 static bool
875 gimple_types_compatible_p (tree t1, tree t2)
877 vec<type_pair_t> sccstack = vNULL;
878 struct pointer_map_t *sccstate;
879 struct obstack sccstate_obstack;
880 type_pair_t p = NULL;
881 bool res;
882 tree leader1, leader2;
884 /* Before starting to set up the SCC machinery handle simple cases. */
886 /* Check first for the obvious case of pointer identity. */
887 if (t1 == t2)
888 return true;
890 /* Check that we have two types to compare. */
891 if (t1 == NULL_TREE || t2 == NULL_TREE)
892 return false;
894 /* Can't be the same type if the types don't have the same code. */
895 if (TREE_CODE (t1) != TREE_CODE (t2))
896 return false;
898 /* Can't be the same type if they have different CV qualifiers. */
899 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
900 return false;
902 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
903 return false;
905 /* Void types and nullptr types are always the same. */
906 if (TREE_CODE (t1) == VOID_TYPE
907 || TREE_CODE (t1) == NULLPTR_TYPE)
908 return true;
910 /* Can't be the same type if they have different alignment or mode. */
911 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
912 || TYPE_MODE (t1) != TYPE_MODE (t2))
913 return false;
915 /* Do some simple checks before doing three hashtable queries. */
916 if (INTEGRAL_TYPE_P (t1)
917 || SCALAR_FLOAT_TYPE_P (t1)
918 || FIXED_POINT_TYPE_P (t1)
919 || TREE_CODE (t1) == VECTOR_TYPE
920 || TREE_CODE (t1) == COMPLEX_TYPE
921 || TREE_CODE (t1) == OFFSET_TYPE
922 || POINTER_TYPE_P (t1))
924 /* Can't be the same type if they have different sign or precision. */
925 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
926 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
927 return false;
929 if (TREE_CODE (t1) == INTEGER_TYPE
930 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
931 return false;
933 /* That's all we need to check for float and fixed-point types. */
934 if (SCALAR_FLOAT_TYPE_P (t1)
935 || FIXED_POINT_TYPE_P (t1))
936 return true;
938 /* For other types fall through to more complex checks. */
941 /* If the types have been previously registered and found equal
942 they still are. */
943 leader1 = gimple_lookup_type_leader (t1);
944 leader2 = gimple_lookup_type_leader (t2);
945 if (leader1 == t2
946 || t1 == leader2
947 || (leader1 && leader1 == leader2))
948 return true;
950 /* If the hash values of t1 and t2 are different the types can't
951 possibly be the same.  This helps keep the type-pair hashtable
952 small, only tracking comparisons for hash collisions. */
953 if (gimple_type_hash (t1) != gimple_type_hash (t2))
954 return false;
956 /* If we've visited this type pair before (in the case of aggregates
957 with self-referential types), and we made a decision, return it. */
958 p = lookup_type_pair (t1, t2);
959 if (p->same_p == 0 || p->same_p == 1)
961 /* We have already decided whether T1 and T2 are the
962 same, return the cached result. */
963 return p->same_p == 1;
966 /* Now set up the SCC machinery for the comparison. */
967 gtc_next_dfs_num = 1;
968 sccstate = pointer_map_create ();
969 gcc_obstack_init (&sccstate_obstack);
970 res = gimple_types_compatible_p_1 (t1, t2, p,
971 &sccstack, sccstate, &sccstate_obstack);
972 sccstack.release ();
973 pointer_map_destroy (sccstate);
974 obstack_free (&sccstate_obstack, NULL);
976 return res;
979 static hashval_t
980 iterative_hash_gimple_type (tree, hashval_t, vec<tree> *,
981 struct pointer_map_t *, struct obstack *);
983 /* DFS visit the edge from the caller's type with state *STATE to T.
984 Update the caller's type hash V with the hash for T if it is not part
985 of the SCC containing the caller's type and return it.
986 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
988 static hashval_t
989 visit (tree t, struct sccs *state, hashval_t v,
990 vec<tree> *sccstack,
991 struct pointer_map_t *sccstate,
992 struct obstack *sccstate_obstack)
994 struct sccs *cstate = NULL;
995 struct tree_int_map m;
996 void **slot;
998 /* If there is a hash value recorded for this type then it can't
999 possibly be part of our parent SCC. Simply mix in its hash. */
1000 m.base.from = t;
1001 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1002 && *slot)
1003 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
1005 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
1006 cstate = (struct sccs *)*slot;
1007 if (!cstate)
1009 hashval_t tem;
1010 /* Not yet visited. DFS recurse. */
1011 tem = iterative_hash_gimple_type (t, v,
1012 sccstack, sccstate, sccstate_obstack);
1013 if (!cstate)
1014 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
1015 state->low = MIN (state->low, cstate->low);
1016 /* If the type is no longer on the SCC stack and thus is not part
1017 of the parent's SCC, mix in its hash value.  Otherwise we will
1018 ignore the type for hashing purposes and return the unaltered
1019 hash value. */
1020 if (!cstate->on_sccstack)
1021 return tem;
1023 if (cstate->dfsnum < state->dfsnum
1024 && cstate->on_sccstack)
1025 state->low = MIN (cstate->dfsnum, state->low);
1027 /* We are part of our parent's SCC, skip this type during hashing
1028 and return the unaltered hash value. */
1029 return v;
1032 /* Hash NAME with the previous hash value V and return it. */
1034 static hashval_t
1035 iterative_hash_name (tree name, hashval_t v)
1037 if (!name)
1038 return v;
1039 v = iterative_hash_hashval_t (TREE_CODE (name), v);
1040 if (TREE_CODE (name) == TYPE_DECL)
1041 name = DECL_NAME (name);
1042 if (!name)
1043 return v;
1044 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
1045 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
1048 /* A type, hashvalue pair for sorting SCC members. */
1050 struct type_hash_pair {
1051 tree type;
1052 hashval_t hash;
1055 /* Compare two type, hashvalue pairs. */
1057 static int
1058 type_hash_pair_compare (const void *p1_, const void *p2_)
1060 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
1061 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
1062 if (p1->hash < p2->hash)
1063 return -1;
1064 else if (p1->hash > p2->hash)
1065 return 1;
1066 return 0;
1069 /* Return a hash value for the gimple type TYPE combined with VAL.
1070 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
1072 To hash a type we end up hashing in types that are reachable.
1073 Through pointers we can end up with cycles which mess up the
1074 required property that we need to compute the same hash value
1075 for structurally equivalent types. To avoid this we have to
1076 hash all types in a cycle (the SCC) in a commutative way. The
1077 easiest way is to not mix in the hashes of the SCC members at
1078 all. To make this work we have to delay setting the hash
1079 values of the SCC until it is complete. */
1081 static hashval_t
1082 iterative_hash_gimple_type (tree type, hashval_t val,
1083 vec<tree> *sccstack,
1084 struct pointer_map_t *sccstate,
1085 struct obstack *sccstate_obstack)
1087 hashval_t v;
1088 void **slot;
1089 struct sccs *state;
1091 /* Not visited during this DFS walk. */
1092 gcc_checking_assert (!pointer_map_contains (sccstate, type));
1093 state = XOBNEW (sccstate_obstack, struct sccs);
1094 *pointer_map_insert (sccstate, type) = state;
1096 sccstack->safe_push (type);
1097 state->dfsnum = next_dfs_num++;
1098 state->low = state->dfsnum;
1099 state->on_sccstack = true;
1101 /* Combine a few common features of types so that types are grouped into
1102 smaller sets; when searching for existing matching types to merge,
1103 only existing types having the same features as the new type will be
1104 checked. */
1105 v = iterative_hash_name (TYPE_NAME (type), 0);
1106 if (TYPE_NAME (type)
1107 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1108 && DECL_CONTEXT (TYPE_NAME (type))
1109 && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
1110 v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
1111 sccstack, sccstate, sccstate_obstack);
1113 /* Factor in the variant structure. */
1114 if (TYPE_MAIN_VARIANT (type) != type)
1115 v = visit (TYPE_MAIN_VARIANT (type), state, v,
1116 sccstack, sccstate, sccstate_obstack);
1118 v = iterative_hash_hashval_t (TREE_CODE (type), v);
1119 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
1120 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
1122 /* Do not hash the type's size as this will cause differences in
1123 hash values for the complete vs. the incomplete type variant. */
1125 /* Incorporate common features of numerical types. */
1126 if (INTEGRAL_TYPE_P (type)
1127 || SCALAR_FLOAT_TYPE_P (type)
1128 || FIXED_POINT_TYPE_P (type))
1130 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
1131 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
1132 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
1135 /* For pointer and reference types, fold in information about the type
1136 pointed to. */
1137 if (POINTER_TYPE_P (type))
1138 v = visit (TREE_TYPE (type), state, v,
1139 sccstack, sccstate, sccstate_obstack);
1141 /* For integer types hash the type's min/max values and the string flag. */
1142 if (TREE_CODE (type) == INTEGER_TYPE)
1144 /* OMP lowering can introduce error_mark_node in place of
1145 random local decls in types. */
1146 if (TYPE_MIN_VALUE (type) != error_mark_node)
1147 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
1148 if (TYPE_MAX_VALUE (type) != error_mark_node)
1149 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
1150 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
1153 /* For array types hash the domain and the string flag. */
1154 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
1156 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
1157 v = visit (TYPE_DOMAIN (type), state, v,
1158 sccstack, sccstate, sccstate_obstack);
1161 /* Recurse for aggregates with a single element type. */
1162 if (TREE_CODE (type) == ARRAY_TYPE
1163 || TREE_CODE (type) == COMPLEX_TYPE
1164 || TREE_CODE (type) == VECTOR_TYPE)
1165 v = visit (TREE_TYPE (type), state, v,
1166 sccstack, sccstate, sccstate_obstack);
1168 /* Incorporate function return and argument types. */
1169 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
1171 unsigned na;
1172 tree p;
1174 /* For method types also incorporate their parent class. */
1175 if (TREE_CODE (type) == METHOD_TYPE)
1176 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
1177 sccstack, sccstate, sccstate_obstack);
1179 /* Check result and argument types. */
1180 v = visit (TREE_TYPE (type), state, v,
1181 sccstack, sccstate, sccstate_obstack);
1182 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
1184 v = visit (TREE_VALUE (p), state, v,
1185 sccstack, sccstate, sccstate_obstack);
1186 na++;
1189 v = iterative_hash_hashval_t (na, v);
1192 if (RECORD_OR_UNION_TYPE_P (type))
1194 unsigned nf;
1195 tree f;
1197 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
1199 v = iterative_hash_name (DECL_NAME (f), v);
1200 v = visit (TREE_TYPE (f), state, v,
1201 sccstack, sccstate, sccstate_obstack);
1202 nf++;
1205 v = iterative_hash_hashval_t (nf, v);
1208 /* Record hash for us. */
1209 state->u.hash = v;
1211 /* See if we found an SCC. */
1212 if (state->low == state->dfsnum)
1214 tree x;
1215 struct tree_int_map *m;
1217 /* Pop off the SCC and set its hash values. */
1218 x = sccstack->pop ();
1219 /* Optimize SCC size one. */
1220 if (x == type)
1222 state->on_sccstack = false;
1223 m = ggc_alloc_cleared_tree_int_map ();
1224 m->base.from = x;
1225 m->to = v;
1226 slot = htab_find_slot (type_hash_cache, m, INSERT);
1227 gcc_assert (!*slot);
1228 *slot = (void *) m;
1230 else
1232 struct sccs *cstate;
1233 unsigned first, i, size, j;
1234 struct type_hash_pair *pairs;
1235 /* Pop off the SCC and build an array of type, hash pairs. */
1236 first = sccstack->length () - 1;
1237 while ((*sccstack)[first] != type)
1238 --first;
1239 size = sccstack->length () - first + 1;
1240 pairs = XALLOCAVEC (struct type_hash_pair, size);
1241 i = 0;
1242 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
1243 cstate->on_sccstack = false;
1244 pairs[i].type = x;
1245 pairs[i].hash = cstate->u.hash;
1248 x = sccstack->pop ();
1249 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
1250 cstate->on_sccstack = false;
1251 ++i;
1252 pairs[i].type = x;
1253 pairs[i].hash = cstate->u.hash;
1255 while (x != type);
1256 gcc_assert (i + 1 == size);
1257 /* Sort the arrays of type, hash pairs so that when we mix in
1258 all members of the SCC the hash value becomes independent of
1259 the order in which we visited the SCC.  Disregard hashes equal to
1260 the hash of the type we mix into because we cannot guarantee
1261 a stable sort for those across different TUs. */
1262 qsort (pairs, size, sizeof (struct type_hash_pair),
1263 type_hash_pair_compare);
1264 for (i = 0; i < size; ++i)
1266 hashval_t hash;
1267 m = ggc_alloc_cleared_tree_int_map ();
1268 m->base.from = pairs[i].type;
1269 hash = pairs[i].hash;
1270 /* Skip same hashes. */
1271 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
1273 for (; j < size; ++j)
1274 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
1275 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
1276 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
1277 m->to = hash;
1278 if (pairs[i].type == type)
1279 v = hash;
1280 slot = htab_find_slot (type_hash_cache, m, INSERT);
1281 gcc_assert (!*slot);
1282 *slot = (void *) m;
1287 return iterative_hash_hashval_t (v, val);
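/* A minimal sketch of the mixing step above: iterative_hash_hashval_t
   is not commutative, so the member hashes of an SCC are mixed in a
   canonical (sorted) order, which is what makes the final value
   independent of the DFS visit order.  sketch_mix_scc_hashes is a
   hypothetical helper and assumes HASHES is already sorted as by
   type_hash_pair_compare.  */

static ATTRIBUTE_UNUSED hashval_t
sketch_mix_scc_hashes (const hashval_t *hashes, size_t n, hashval_t seed)
{
  size_t i;
  hashval_t v = seed;
  for (i = 0; i < n; ++i)
    v = iterative_hash_hashval_t (hashes[i], v);
  return v;
}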
1290 /* Returns a hash value for P (assumed to be a type). The hash value
1291 is computed using some distinguishing features of the type. Note
1292 that we cannot use pointer hashing here as we may be dealing with
1293 two distinct instances of the same type.
1295 This function should produce the same hash value for two compatible
1296 types according to gimple_types_compatible_p. */
1298 static hashval_t
1299 gimple_type_hash (const void *p)
1301 const_tree t = (const_tree) p;
1302 vec<tree> sccstack = vNULL;
1303 struct pointer_map_t *sccstate;
1304 struct obstack sccstate_obstack;
1305 hashval_t val;
1306 void **slot;
1307 struct tree_int_map m;
1309 m.base.from = CONST_CAST_TREE (t);
1310 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1311 && *slot)
1312 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
1314 /* Perform a DFS walk and pre-hash all reachable types. */
1315 next_dfs_num = 1;
1316 sccstate = pointer_map_create ();
1317 gcc_obstack_init (&sccstate_obstack);
1318 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
1319 &sccstack, sccstate, &sccstate_obstack);
1320 sccstack.release ();
1321 pointer_map_destroy (sccstate);
1322 obstack_free (&sccstate_obstack, NULL);
1324 return val;
1327 /* Returns nonzero if P1 and P2 are equal. */
1329 static int
1330 gimple_type_eq (const void *p1, const void *p2)
1332 const_tree t1 = (const_tree) p1;
1333 const_tree t2 = (const_tree) p2;
1334 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
1335 CONST_CAST_TREE (t2));
1339 /* Worker for gimple_register_type.
1340 Register type T in the global type table gimple_types.
1341 When REGISTERING_MV is false first recurse for the main variant of T. */
1343 static tree
1344 gimple_register_type_1 (tree t, bool registering_mv)
1346 void **slot;
1347 gimple_type_leader_entry *leader;
1349 /* If we registered this type before return the cached result. */
1350 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
1351 if (leader->type == t)
1352 return leader->leader;
1354 /* Always register the main variant first. This is important so we
1355 pick up the non-typedef variants as canonical, otherwise we'll end
1356 up taking typedef ids for structure tags during comparison.
1357 It also makes sure that main variants will be merged to main variants.
1358 As we are operating on a possibly partially fixed up type graph
1359 do not bother to recurse more than once, otherwise we may end up
1360 walking in circles.
1361 If we are registering a main variant it will either remain its
1362 own main variant or it will be merged to something else in which
1363 case we do not care for the main variant leader. */
1364 if (!registering_mv
1365 && TYPE_MAIN_VARIANT (t) != t)
1366 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
1368 /* See if we already have an equivalent type registered. */
1369 slot = htab_find_slot (gimple_types, t, INSERT);
1370 if (*slot
1371 && *(tree *)slot != t)
1373 tree new_type = (tree) *((tree *) slot);
1374 leader->type = t;
1375 leader->leader = new_type;
1376 return new_type;
1379 /* If not, insert it to the cache and the hash. */
1380 leader->type = t;
1381 leader->leader = t;
1382 *slot = (void *) t;
1383 return t;
1386 /* Register type T in the global type table gimple_types.
1387 If another type T', compatible with T, already existed in
1388 gimple_types then return T', otherwise return T. This is used by
1389 LTO to merge identical types read from different TUs. */
1391 static tree
1392 gimple_register_type (tree t)
1394 gcc_assert (TYPE_P (t));
1395 return gimple_register_type_1 (t, false);
1398 #define GIMPLE_REGISTER_TYPE(tt) \
1399 (TREE_VISITED (tt) ? gimple_register_type (tt) : tt)
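/* A minimal sketch of the intended use during stream-in, mirroring what
   uniquify_nodes does below: register a freshly read type and, if an
   equivalent type from another TU prevails, mark the local copy so that
   GIMPLE_REGISTER_TYPE and the fixup machinery replace references to it.
   sketch_merge_streamed_type is a hypothetical helper name.  */

static ATTRIBUTE_UNUSED tree
sketch_merge_streamed_type (tree t)
{
  tree prevailing = gimple_register_type (t);
  if (prevailing != t)
    /* Non-prevailing types are flagged for later fixup.  */
    TREE_VISITED (t) = 1;
  return prevailing;
}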
1403 /* A hashtable of trees that potentially refer to variables or functions
1404 that must be replaced with their prevailing variant. */
1405 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node))) htab_t
1406 tree_with_vars;
1408 /* Remember that T is a tree that (potentially) refers to a variable
1409 or function decl that may be replaced with its prevailing variant. */
1410 static void
1411 remember_with_vars (tree t)
1413 *(tree *) htab_find_slot (tree_with_vars, t, INSERT) = t;
1416 #define LTO_FIXUP_TREE(tt) \
1417 do \
1419 if (tt) \
1421 if (TYPE_P (tt)) \
1422 (tt) = GIMPLE_REGISTER_TYPE (tt); \
1423 if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
1424 remember_with_vars (t); \
1425 if (TREE_CODE (tt) == INTEGER_CST) \
1426 (tt) = fixup_integer_cst (tt); \
1428 } while (0)
1430 static void lto_fixup_types (tree);
1432 /* Return integer_cst T with updated type. */
1434 static tree
1435 fixup_integer_cst (tree t)
1437 tree type = GIMPLE_REGISTER_TYPE (TREE_TYPE (t));
1439 if (type == TREE_TYPE (t))
1440 return t;
1442 /* If overflow was set, streamer_read_integer_cst
1443 produced a local copy of T. */
1444 if (TREE_OVERFLOW (t))
1446 TREE_TYPE (t) = type;
1447 return t;
1449 else
1450 /* Otherwise produce new shared node for the new type. */
1451 return build_int_cst_wide (type, TREE_INT_CST_LOW (t),
1452 TREE_INT_CST_HIGH (t));
1455 /* Fix up fields of a tree_typed T. */
1457 static void
1458 lto_ft_typed (tree t)
1460 LTO_FIXUP_TREE (TREE_TYPE (t));
1463 /* Fix up fields of a tree_common T. */
1465 static void
1466 lto_ft_common (tree t)
1468 lto_ft_typed (t);
1469 LTO_FIXUP_TREE (TREE_CHAIN (t));
1472 /* Fix up fields of a decl_minimal T. */
1474 static void
1475 lto_ft_decl_minimal (tree t)
1477 lto_ft_common (t);
1478 LTO_FIXUP_TREE (DECL_NAME (t));
1479 LTO_FIXUP_TREE (DECL_CONTEXT (t));
1482 /* Fix up fields of a decl_common T. */
1484 static void
1485 lto_ft_decl_common (tree t)
1487 lto_ft_decl_minimal (t);
1488 LTO_FIXUP_TREE (DECL_SIZE (t));
1489 LTO_FIXUP_TREE (DECL_SIZE_UNIT (t));
1490 LTO_FIXUP_TREE (DECL_INITIAL (t));
1491 LTO_FIXUP_TREE (DECL_ATTRIBUTES (t));
1492 LTO_FIXUP_TREE (DECL_ABSTRACT_ORIGIN (t));
1495 /* Fix up fields of a decl_with_vis T. */
1497 static void
1498 lto_ft_decl_with_vis (tree t)
1500 lto_ft_decl_common (t);
1502 /* Accessor macro has side-effects, use field-name here. */
1503 LTO_FIXUP_TREE (t->decl_with_vis.assembler_name);
1504 LTO_FIXUP_TREE (DECL_SECTION_NAME (t));
1507 /* Fix up fields of a decl_non_common T. */
1509 static void
1510 lto_ft_decl_non_common (tree t)
1512 lto_ft_decl_with_vis (t);
1513 LTO_FIXUP_TREE (DECL_ARGUMENT_FLD (t));
1514 LTO_FIXUP_TREE (DECL_RESULT_FLD (t));
1515 LTO_FIXUP_TREE (DECL_VINDEX (t));
1516 /* The C frontends may create exact duplicates for DECL_ORIGINAL_TYPE
1517 like for 'typedef enum foo foo'.  We have no way of avoiding
1518 merging them, and dwarf2out.c cannot deal with this,
1519 so fix this up by clearing DECL_ORIGINAL_TYPE in this case. */
1520 if (TREE_CODE (t) == TYPE_DECL
1521 && DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
1522 DECL_ORIGINAL_TYPE (t) = NULL_TREE;
1525 /* Fix up fields of a FUNCTION_DECL T. */
1527 static void
1528 lto_ft_function (tree t)
1530 lto_ft_decl_non_common (t);
1531 LTO_FIXUP_TREE (DECL_FUNCTION_PERSONALITY (t));
1534 /* Fix up fields of a field_decl T. */
1536 static void
1537 lto_ft_field_decl (tree t)
1539 lto_ft_decl_common (t);
1540 LTO_FIXUP_TREE (DECL_FIELD_OFFSET (t));
1541 LTO_FIXUP_TREE (DECL_BIT_FIELD_TYPE (t));
1542 LTO_FIXUP_TREE (DECL_QUALIFIER (t));
1543 LTO_FIXUP_TREE (DECL_FIELD_BIT_OFFSET (t));
1544 LTO_FIXUP_TREE (DECL_FCONTEXT (t));
1547 /* Fix up fields of a type T. */
1549 static void
1550 lto_ft_type (tree t)
1552 lto_ft_common (t);
1553 LTO_FIXUP_TREE (TYPE_CACHED_VALUES (t));
1554 LTO_FIXUP_TREE (TYPE_SIZE (t));
1555 LTO_FIXUP_TREE (TYPE_SIZE_UNIT (t));
1556 LTO_FIXUP_TREE (TYPE_ATTRIBUTES (t));
1557 LTO_FIXUP_TREE (TYPE_NAME (t));
1559 /* Accessors are for derived node types only. */
1560 if (!POINTER_TYPE_P (t))
1561 LTO_FIXUP_TREE (TYPE_MINVAL (t));
1562 LTO_FIXUP_TREE (TYPE_MAXVAL (t));
1564 /* Accessor is for derived node types only. */
1565 LTO_FIXUP_TREE (t->type_non_common.binfo);
1567 LTO_FIXUP_TREE (TYPE_CONTEXT (t));
1570 /* Fix up fields of a BINFO T. */
1572 static void
1573 lto_ft_binfo (tree t)
1575 unsigned HOST_WIDE_INT i, n;
1576 tree base, saved_base;
1578 lto_ft_common (t);
1579 LTO_FIXUP_TREE (BINFO_VTABLE (t));
1580 LTO_FIXUP_TREE (BINFO_OFFSET (t));
1581 LTO_FIXUP_TREE (BINFO_VIRTUALS (t));
1582 LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t));
1583 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
1584 for (i = 0; i < n; i++)
1586 saved_base = base = BINFO_BASE_ACCESS (t, i);
1587 LTO_FIXUP_TREE (base);
1588 if (base != saved_base)
1589 (*BINFO_BASE_ACCESSES (t))[i] = base;
1591 LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t));
1592 LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t));
1593 LTO_FIXUP_TREE (BINFO_VPTR_INDEX (t));
1594 n = BINFO_N_BASE_BINFOS (t);
1595 for (i = 0; i < n; i++)
1597 saved_base = base = BINFO_BASE_BINFO (t, i);
1598 LTO_FIXUP_TREE (base);
1599 if (base != saved_base)
1600 (*BINFO_BASE_BINFOS (t))[i] = base;
1604 /* Fix up fields of a CONSTRUCTOR T. */
1606 static void
1607 lto_ft_constructor (tree t)
1609 unsigned HOST_WIDE_INT idx;
1610 constructor_elt *ce;
1612 lto_ft_typed (t);
1614 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
1616 LTO_FIXUP_TREE (ce->index);
1617 LTO_FIXUP_TREE (ce->value);
1621 /* Fix up fields of an expression tree T. */
1623 static void
1624 lto_ft_expr (tree t)
1626 int i;
1627 lto_ft_typed (t);
1628 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
1629 LTO_FIXUP_TREE (TREE_OPERAND (t, i));
1632 /* Given a tree T, fix up its fields by replacing types with their merged
1633 variant and other entities by an equal entity from an earlier compilation
1634 unit, or an entity being canonical in a different way. This includes
1635 for instance integer or string constants. */
1637 static void
1638 lto_fixup_types (tree t)
1640 switch (TREE_CODE (t))
1642 case IDENTIFIER_NODE:
1643 break;
1645 case TREE_LIST:
1646 LTO_FIXUP_TREE (TREE_VALUE (t));
1647 LTO_FIXUP_TREE (TREE_PURPOSE (t));
1648 LTO_FIXUP_TREE (TREE_CHAIN (t));
1649 break;
1651 case FIELD_DECL:
1652 lto_ft_field_decl (t);
1653 break;
1655 case LABEL_DECL:
1656 case CONST_DECL:
1657 case PARM_DECL:
1658 case RESULT_DECL:
1659 case IMPORTED_DECL:
1660 lto_ft_decl_common (t);
1661 break;
1663 case VAR_DECL:
1664 lto_ft_decl_with_vis (t);
1665 break;
1667 case TYPE_DECL:
1668 lto_ft_decl_non_common (t);
1669 break;
1671 case FUNCTION_DECL:
1672 lto_ft_function (t);
1673 break;
1675 case TREE_BINFO:
1676 lto_ft_binfo (t);
1677 break;
1679 case PLACEHOLDER_EXPR:
1680 lto_ft_common (t);
1681 break;
1683 case BLOCK:
1684 case TRANSLATION_UNIT_DECL:
1685 case OPTIMIZATION_NODE:
1686 case TARGET_OPTION_NODE:
1687 break;
1689 default:
1690 if (TYPE_P (t))
1691 lto_ft_type (t);
1692 else if (TREE_CODE (t) == CONSTRUCTOR)
1693 lto_ft_constructor (t);
1694 else if (CONSTANT_CLASS_P (t))
1695 LTO_FIXUP_TREE (TREE_TYPE (t));
1696 else if (EXPR_P (t))
1698 lto_ft_expr (t);
1700 else
1702 remember_with_vars (t);
1708 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1710 static enum ld_plugin_symbol_resolution
1711 get_resolution (struct data_in *data_in, unsigned index)
1713 if (data_in->globals_resolution.exists ())
1715 ld_plugin_symbol_resolution_t ret;
1716 /* We can have references to functions that were never emitted, in
1717 DECL_FUNCTION_PERSONALITY at least.  So we can and indeed have
1718 to return LDPR_UNKNOWN in some cases. */
1719 if (data_in->globals_resolution.length () <= index)
1720 return LDPR_UNKNOWN;
1721 ret = data_in->globals_resolution[index];
1722 return ret;
1724 else
1725 /* Delay resolution finding until decl merging. */
1726 return LDPR_UNKNOWN;
1729 /* Map assigning declarations their resolutions. */
1730 static pointer_map_t *resolution_map;
1733 /* We need to record resolutions until the symbol table is read. */
1733 static void
1734 register_resolution (tree decl, enum ld_plugin_symbol_resolution resolution)
1736 if (resolution == LDPR_UNKNOWN)
1737 return;
1738 if (!resolution_map)
1739 resolution_map = pointer_map_create ();
1740 *pointer_map_insert (resolution_map, decl) = (void *)(size_t)resolution;
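/* A minimal sketch of reading a recorded resolution back once the symbol
   table is available; the cast mirrors the (void *)(size_t) encoding used
   just above.  sketch_recorded_resolution is a hypothetical helper name.  */

static ATTRIBUTE_UNUSED enum ld_plugin_symbol_resolution
sketch_recorded_resolution (tree decl)
{
  void **slot;
  if (!resolution_map
      || !(slot = pointer_map_contains (resolution_map, decl)))
    return LDPR_UNKNOWN;
  return (enum ld_plugin_symbol_resolution) (size_t) *slot;
}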
1743 /* Register DECL with the global symbol table and change its
1744 name if necessary to avoid name clashes for static globals across
1745 different files. */
1747 static void
1748 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl)
1750 tree context;
1752 /* Variable has file scope, not local. */
1753 if (!TREE_PUBLIC (decl)
1754 && !((context = decl_function_context (decl))
1755 && auto_var_in_fn_p (decl, context)))
1757 rest_of_decl_compilation (decl, 1, 0);
1760 /* If this variable has already been declared, queue the
1761 declaration for merging. */
1762 if (TREE_PUBLIC (decl))
1764 unsigned ix;
1765 if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
1766 gcc_unreachable ();
1767 register_resolution (decl, get_resolution (data_in, ix));
1772 /* Register DECL with the global symbol table and change its
1773 name if necessary to avoid name clashes for static globals across
1774 different files. DATA_IN contains descriptors and tables for the
1775 file being read. */
1777 static void
1778 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl)
1780 /* If this function has already been declared, queue the
1781 declaration for merging. */
1782 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1784 unsigned ix;
1785 if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
1786 gcc_unreachable ();
1787 register_resolution (decl, get_resolution (data_in, ix));
1791 static unsigned long num_merged_types = 0;
1793 /* Given a streamer cache structure DATA_IN (holding a sequence of trees
1794 for one compilation unit) go over all trees starting at index FROM until the
1795 end of the sequence and replace fields of those trees, and the trees
1796 themselves, with their canonical variants as per gimple_register_type. */
1798 static void
1799 uniquify_nodes (struct data_in *data_in, unsigned from)
1801 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1802 unsigned len = cache->nodes.length ();
1803 unsigned i;
1805 /* Go backwards because children streamed for the first time come
1806 as part of their parents, and hence are created after them. */
1808 /* First register all the types in the cache. This makes sure to
1809 have the original structure in the type cycles when registering
1810 them and computing hashes. */
1811 for (i = len; i-- > from;)
1813 tree t = cache->nodes[i];
1814 if (t && TYPE_P (t))
1816 tree newt = gimple_register_type (t);
1817 /* Mark non-prevailing types so we fix them up. No need
1818 to reset that flag afterwards - nothing that refers
1819 to those types is left and they are collected. */
1820 if (newt != t)
1822 num_merged_types++;
1823 TREE_VISITED (t) = 1;
1828 /* Second fixup all trees in the new cache entries. */
1829 for (i = len; i-- > from;)
1831 tree t = cache->nodes[i];
1832 tree oldt = t;
1833 if (!t)
1834 continue;
1836 /* First fixup the fields of T. */
1837 lto_fixup_types (t);
1839 if (!TYPE_P (t))
1840 continue;
1842 /* Now try to find a canonical variant of T itself. */
1843 t = GIMPLE_REGISTER_TYPE (t);
1845 if (t == oldt)
1847 /* The following re-creates proper variant lists while fixing up
1848 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1849 variant list state before fixup is broken. */
1850 tree tem, mv;
1852 #ifdef ENABLE_CHECKING
1853 /* Remove us from our main variant list if we are not the
1854 variant leader. */
1855 if (TYPE_MAIN_VARIANT (t) != t)
1857 tem = TYPE_MAIN_VARIANT (t);
1858 while (tem && TYPE_NEXT_VARIANT (tem) != t)
1859 tem = TYPE_NEXT_VARIANT (tem);
1860 gcc_assert (!tem && !TYPE_NEXT_VARIANT (t));
1862 #endif
1864 /* Query our new main variant. */
1865 mv = GIMPLE_REGISTER_TYPE (TYPE_MAIN_VARIANT (t));
1867 /* If we were the variant leader and we get replaced ourselves drop
1868 all variants from our list. */
1869 if (TYPE_MAIN_VARIANT (t) == t
1870 && mv != t)
1872 tem = t;
1873 while (tem)
1875 tree tem2 = TYPE_NEXT_VARIANT (tem);
1876 TYPE_NEXT_VARIANT (tem) = NULL_TREE;
1877 tem = tem2;
1881 /* If we are not our own variant leader, link us into our new leader's
1882 variant list. */
1883 if (mv != t)
1885 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1886 TYPE_NEXT_VARIANT (mv) = t;
1887 if (RECORD_OR_UNION_TYPE_P (t))
1888 TYPE_BINFO (t) = TYPE_BINFO (mv);
1889 /* Preserve the invariant that type variants share their
1890 TYPE_FIELDS. */
1891 if (RECORD_OR_UNION_TYPE_P (t)
1892 && TYPE_FIELDS (mv) != TYPE_FIELDS (t))
1894 tree f1, f2;
1895 for (f1 = TYPE_FIELDS (mv), f2 = TYPE_FIELDS (t);
1896 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1898 unsigned ix;
1899 gcc_assert (f1 != f2
1900 && DECL_NAME (f1) == DECL_NAME (f2));
1901 if (!streamer_tree_cache_lookup (cache, f2, &ix))
1902 gcc_unreachable ();
1903 /* If we're going to replace an element which we'd
1904 still visit in the next iterations, we wouldn't
1905 handle it, so do it here. We do have to handle it
1906 even though the field_decl itself will be removed,
1907 as it could refer to e.g. integer_cst which we
1908 wouldn't reach via any other way, hence they
1909 (and their type) would stay uncollected. */
1910 /* ??? We should rather make sure to replace all
1911 references to f2 with f1. That means handling
1912 COMPONENT_REFs and CONSTRUCTOR elements in
1913 lto_fixup_types and special-case the field-decl
1914 operand handling. */
1915 /* ??? Not sure the above is all relevant in this
1916 path canonicalizing TYPE_FIELDS to that of the
1917 main variant. */
1918 if (ix < i)
1919 lto_fixup_types (f2);
1920 streamer_tree_cache_insert_at (cache, f1, ix);
1922 TYPE_FIELDS (t) = TYPE_FIELDS (mv);
1926 /* Finally adjust our main variant and fix it up. */
1927 TYPE_MAIN_VARIANT (t) = mv;
1929 /* The following reconstructs the pointer chains
1930 of the new pointed-to type if we are a main variant. We do
1931 not stream those so they are broken before fixup. */
1932 if (TREE_CODE (t) == POINTER_TYPE
1933 && TYPE_MAIN_VARIANT (t) == t)
1935 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1936 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1938 else if (TREE_CODE (t) == REFERENCE_TYPE
1939 && TYPE_MAIN_VARIANT (t) == t)
1941 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1942 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1946 else
1948 if (RECORD_OR_UNION_TYPE_P (t))
1950 tree f1, f2;
1951 if (TYPE_FIELDS (t) != TYPE_FIELDS (oldt))
1952 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (oldt);
1953 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1955 unsigned ix;
1956 gcc_assert (f1 != f2 && DECL_NAME (f1) == DECL_NAME (f2));
1957 if (!streamer_tree_cache_lookup (cache, f2, &ix))
1958 gcc_unreachable ();
1959 /* If we're going to replace an element which we'd
1960 still visit in the next iterations, we wouldn't
1961 handle it, so do it here. We do have to handle it
1962 even though the field_decl itself will be removed,
1963 as it could refer to e.g. integer_cst which we
1964 wouldn't reach via any other way, hence they
1965 (and their type) would stay uncollected. */
1966 /* ??? We should rather make sure to replace all
1967 references to f2 with f1. That means handling
1968 COMPONENT_REFs and CONSTRUCTOR elements in
1969 lto_fixup_types and special-case the field-decl
1970 operand handling. */
1971 if (ix < i)
1972 lto_fixup_types (f2);
1973 streamer_tree_cache_insert_at (cache, f1, ix);
1977 /* If we found a tree that is equal to oldt, replace it in the
1978 cache, so that further users (in the various LTO sections)
1979 make use of it. */
1980 streamer_tree_cache_insert_at (cache, t, i);
1984 /* Finally compute the canonical type of all TREE_TYPEs and register
1985 VAR_DECL and FUNCTION_DECL nodes in the symbol table.
1986 From this point there are no longer any types with
1987 TYPE_STRUCTURAL_EQUALITY_P and its type-based alias problems.
1988 This step requires the TYPE_POINTER_TO lists being present, so
1989 make sure it is done last. */
1990 for (i = len; i-- > from;)
1992 tree t = cache->nodes[i];
1993 if (t == NULL_TREE)
1994 continue;
1996 if (TREE_CODE (t) == VAR_DECL)
1997 lto_register_var_decl_in_symtab (data_in, t);
1998 else if (TREE_CODE (t) == FUNCTION_DECL && !DECL_BUILT_IN (t))
1999 lto_register_function_decl_in_symtab (data_in, t);
2000 else if (!flag_wpa
2001 && TREE_CODE (t) == TYPE_DECL)
2002 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
2003 else if (TYPE_P (t) && !TYPE_CANONICAL (t))
2004 TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
2009 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
2010 RESOLUTIONS is the set of symbols picked by the linker (read from the
2011 resolution file when the linker plugin is being used). */
2013 static void
2014 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
2015 vec<ld_plugin_symbol_resolution_t> resolutions)
2017 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
2018 const int decl_offset = sizeof (struct lto_decl_header);
2019 const int main_offset = decl_offset + header->decl_state_size;
2020 const int string_offset = main_offset + header->main_size;
2021 struct lto_input_block ib_main;
2022 struct data_in *data_in;
2023 unsigned int i;
2024 const uint32_t *data_ptr, *data_end;
2025 uint32_t num_decl_states;
2027 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2028 header->main_size);
2030 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
2031 header->string_size, resolutions);
2033 /* We do not uniquify the pre-loaded cache entries; those are middle-end
2034 internal types that should not be merged. */
2036 /* Read the global declarations and types. */
2037 while (ib_main.p < ib_main.len)
2039 tree t;
2040 unsigned from = data_in->reader_cache->nodes.length ();
2041 t = stream_read_tree (&ib_main, data_in);
2042 gcc_assert (t && ib_main.p <= ib_main.len);
2043 uniquify_nodes (data_in, from);
2046 /* Read in lto_in_decl_state objects. */
2047 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
2048 data_end =
2049 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
2050 num_decl_states = *data_ptr++;
2052 gcc_assert (num_decl_states > 0);
2053 decl_data->global_decl_state = lto_new_in_decl_state ();
2054 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
2055 decl_data->global_decl_state);
2057 /* Read in per-function decl states and enter them in hash table. */
2058 decl_data->function_decl_states =
2059 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
2061 for (i = 1; i < num_decl_states; i++)
2063 struct lto_in_decl_state *state = lto_new_in_decl_state ();
2064 void **slot;
2066 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
2067 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
2068 gcc_assert (*slot == NULL);
2069 *slot = state;
2072 if (data_ptr != data_end)
2073 internal_error ("bytecode stream: garbage at the end of symbols section");
2075 /* Set the current decl state to be the global state. */
2076 decl_data->current_decl_state = decl_data->global_decl_state;
2078 lto_data_in_delete (data_in);
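/* A minimal standalone sketch (not part of lto.c) of the offset arithmetic
   used by lto_read_decls above: a section starts with a header recording
   only sub-block sizes, and the decl-state, main-stream and string-table
   offsets are derived by accumulation.  The toy_* names and the header
   layout here are invented for illustration.  */

#include <stddef.h>
#include <stdio.h>

struct toy_decl_header
{
  size_t decl_state_size;   /* Bytes of decl-state records.  */
  size_t main_size;         /* Bytes of the main tree stream.  */
  size_t string_size;       /* Bytes of the string table.  */
};

static void
toy_slice_section (const char *data)
{
  const struct toy_decl_header *header = (const struct toy_decl_header *) data;
  const size_t decl_offset = sizeof (struct toy_decl_header);
  const size_t main_offset = decl_offset + header->decl_state_size;
  const size_t string_offset = main_offset + header->main_size;

  printf ("decl states at %zu, main stream at %zu, strings at %zu..%zu\n",
          decl_offset, main_offset, string_offset,
          string_offset + header->string_size);
}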
2081 /* Custom hex parser, used because strtoll is not portable. */
2083 static HOST_WIDEST_INT
2084 lto_parse_hex (const char *p)
2086 HOST_WIDEST_INT ret = 0;
2088 for (; *p != '\0'; ++p)
2090 char c = *p;
2091 unsigned char part;
2092 ret <<= 4;
2093 if (c >= '0' && c <= '9')
2094 part = c - '0';
2095 else if (c >= 'a' && c <= 'f')
2096 part = c - 'a' + 10;
2097 else if (c >= 'A' && c <= 'F')
2098 part = c - 'A' + 10;
2099 else
2100 internal_error ("could not parse hex number");
2101 ret |= part;
2104 return ret;
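/* A minimal standalone sketch (not part of lto.c) of the same
   shift-and-accumulate hex parsing as lto_parse_hex above, written against
   standard C types so it can be compiled and tested in isolation; the
   toy_* name is invented for illustration.  */

#include <stdio.h>
#include <stdlib.h>

static unsigned long long
toy_parse_hex (const char *p)
{
  unsigned long long ret = 0;

  for (; *p != '\0'; ++p)
    {
      char c = *p;
      unsigned part;

      if (c >= '0' && c <= '9')
        part = c - '0';
      else if (c >= 'a' && c <= 'f')
        part = c - 'a' + 10;
      else if (c >= 'A' && c <= 'F')
        part = c - 'A' + 10;
      else
        {
          fprintf (stderr, "could not parse hex number\n");
          exit (1);
        }
      ret = (ret << 4) | part;
    }
  return ret;
}

/* Where strtoull is available, toy_parse_hex ("1a2B") matches
   strtoull ("1a2B", NULL, 16) == 0x1a2b.  */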
2107 /* Read the resolution for FILE. The resolution is read from
2108 RESOLUTION and recorded against the sub-file ids in FILE_IDS. */
2110 static void
2111 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2113 /* We require that objects in the resolution file are in the same
2114 order as the lto1 command line. */
2115 unsigned int name_len;
2116 char *obj_name;
2117 unsigned int num_symbols;
2118 unsigned int i;
2119 struct lto_file_decl_data *file_data;
2120 splay_tree_node nd = NULL;
2122 if (!resolution)
2123 return;
2125 name_len = strlen (file->filename);
2126 obj_name = XNEWVEC (char, name_len + 1);
2127 fscanf (resolution, " "); /* Read white space. */
2129 fread (obj_name, sizeof (char), name_len, resolution);
2130 obj_name[name_len] = '\0';
2131 if (filename_cmp (obj_name, file->filename) != 0)
2132 internal_error ("unexpected file name %s in linker resolution file. "
2133 "Expected %s", obj_name, file->filename);
2134 if (file->offset != 0)
2136 int t;
2137 char offset_p[17];
2138 HOST_WIDEST_INT offset;
2139 t = fscanf (resolution, "@0x%16s", offset_p);
2140 if (t != 1)
2141 internal_error ("could not parse file offset");
2142 offset = lto_parse_hex (offset_p);
2143 if (offset != file->offset)
2144 internal_error ("unexpected offset");
2147 free (obj_name);
2149 fscanf (resolution, "%u", &num_symbols);
2151 for (i = 0; i < num_symbols; i++)
2153 int t;
2154 unsigned index;
2155 unsigned HOST_WIDE_INT id;
2156 char r_str[27];
2157 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2158 unsigned int j;
2159 unsigned int lto_resolution_str_len =
2160 sizeof (lto_resolution_str) / sizeof (char *);
2161 res_pair rp;
2163 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2164 &index, &id, r_str);
2165 if (t != 3)
2166 internal_error ("invalid line in the resolution file");
2168 for (j = 0; j < lto_resolution_str_len; j++)
2170 if (strcmp (lto_resolution_str[j], r_str) == 0)
2172 r = (enum ld_plugin_symbol_resolution) j;
2173 break;
2176 if (j == lto_resolution_str_len)
2177 internal_error ("invalid resolution in the resolution file");
2179 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2181 nd = lto_splay_tree_lookup (file_ids, id);
2182 if (nd == NULL)
2183 internal_error ("resolution sub id %wx not in object file", id);
2186 file_data = (struct lto_file_decl_data *)nd->value;
2187 /* The indexes are very sparse. To save memory, save them in a compact
2188 format that is only unpacked later when the subfile is processed. */
2189 rp.res = r;
2190 rp.index = index;
2191 file_data->respairs.safe_push (rp);
2192 if (file_data->max_index < index)
2193 file_data->max_index = index;
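/* A minimal standalone sketch (not part of lto.c) of parsing one
   "<index> <hex id> <resolution>" line of a linker resolution file and
   mapping the resolution keyword to its position in a keyword table by
   linear search, mirroring the loop in lto_resolution_read above.  The
   toy_* names are invented and the keyword list is abbreviated for
   illustration.  */

#include <stdio.h>
#include <string.h>

static const char *const toy_resolution_str[] =
{
  "UNKNOWN", "UNDEF", "PREVAILING_DEF", "PREVAILING_DEF_IRONLY", "PREEMPTED_REG"
};

static int
toy_parse_resolution_line (const char *line, unsigned *index,
                           unsigned long long *id, unsigned *res)
{
  char r_str[27];
  unsigned j;
  unsigned n = sizeof (toy_resolution_str) / sizeof (toy_resolution_str[0]);

  if (sscanf (line, "%u %llx %26s", index, id, r_str) != 3)
    return 0;                   /* Malformed line.  */
  for (j = 0; j < n; j++)
    if (strcmp (toy_resolution_str[j], r_str) == 0)
      {
        *res = j;
        return 1;
      }
  return 0;                     /* Unknown resolution keyword.  */
}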
2197 /* List of file_decl_datas */
2198 struct file_data_list
2200 struct lto_file_decl_data *first, *last;
2203 /* Is the name for an id'ed LTO section? */
2205 static int
2206 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2208 const char *s;
2210 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2211 return 0;
2212 s = strrchr (name, '.');
2213 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
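/* A minimal standalone sketch (not part of lto.c) of recovering a trailing
   hexadecimal sub-module id from a section name of the form
   "<prefix><name>.<hex-id>", using the same strncmp/strrchr/sscanf pattern
   as lto_section_with_id above.  The TOY_SECTION_PREFIX string and the
   toy_* name are invented for illustration.  */

#include <stdio.h>
#include <string.h>

#define TOY_SECTION_PREFIX ".gnu.toylto_"

static int
toy_section_with_id (const char *name, unsigned long long *id)
{
  const char *s;

  if (strncmp (name, TOY_SECTION_PREFIX, strlen (TOY_SECTION_PREFIX)) != 0)
    return 0;                   /* Not one of our sections.  */
  s = strrchr (name, '.');
  return s != NULL && sscanf (s, ".%llx", id) == 1;
}

/* toy_section_with_id (".gnu.toylto_foo.1a2b", &id) returns 1 and sets
   id to 0x1a2b.  */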
2216 /* Create the file_data for each sub file id. */
2218 static int
2219 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2220 struct file_data_list *list)
2222 struct lto_section_slot s_slot, *new_slot;
2223 unsigned HOST_WIDE_INT id;
2224 splay_tree_node nd;
2225 void **hash_slot;
2226 char *new_name;
2227 struct lto_file_decl_data *file_data;
2229 if (!lto_section_with_id (ls->name, &id))
2230 return 1;
2232 /* Find hash table of sub module id */
2233 nd = lto_splay_tree_lookup (file_ids, id);
2234 if (nd != NULL)
2236 file_data = (struct lto_file_decl_data *)nd->value;
2238 else
2240 file_data = ggc_alloc_lto_file_decl_data ();
2241 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2242 file_data->id = id;
2243 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2244 lto_splay_tree_insert (file_ids, id, file_data);
2246 /* Maintain list in linker order */
2247 if (!list->first)
2248 list->first = file_data;
2249 if (list->last)
2250 list->last->next = file_data;
2251 list->last = file_data;
2254 /* Copy section into sub module hash table */
2255 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2256 s_slot.name = new_name;
2257 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2258 gcc_assert (*hash_slot == NULL);
2260 new_slot = XDUP (struct lto_section_slot, ls);
2261 new_slot->name = new_name;
2262 *hash_slot = new_slot;
2263 return 1;
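/* A minimal standalone sketch (not part of lto.c) of keeping a singly
   linked list in insertion (linker) order with a first/last pointer pair,
   as create_subid_section_table does for file_data_list above.  The toy_*
   types are invented for illustration.  */

#include <stddef.h>

struct toy_node
{
  int id;
  struct toy_node *next;
};

struct toy_list
{
  struct toy_node *first, *last;
};

static void
toy_list_append (struct toy_list *list, struct toy_node *n)
{
  n->next = NULL;
  if (!list->first)
    list->first = n;            /* Very first element.  */
  if (list->last)
    list->last->next = n;       /* Chain after the previous tail.  */
  list->last = n;               /* New tail; O(1) append.  */
}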
2266 /* Read declarations and other initializations for a FILE_DATA. */
2268 static void
2269 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2271 const char *data;
2272 size_t len;
2273 vec<ld_plugin_symbol_resolution_t>
2274 resolutions = vNULL;
2275 int i;
2276 res_pair *rp;
2278 /* Create a vector for fast access to resolutions. We do this lazily
2279 to save memory. */
2280 resolutions.safe_grow_cleared (file_data->max_index + 1);
2281 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2282 resolutions[rp->index] = rp->res;
2283 file_data->respairs.release ();
2285 file_data->renaming_hash_table = lto_create_renaming_table ();
2286 file_data->file_name = file->filename;
2287 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2288 if (data == NULL)
2290 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2291 return;
2293 /* Frees resolutions */
2294 lto_read_decls (file_data, data, resolutions);
2295 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
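/* A minimal standalone sketch (not part of lto.c) of expanding sparse
   (index, value) pairs into a dense zero-initialized array of size
   max_index + 1, the same lazy scheme lto_file_finalize uses for the
   resolution vector above.  The toy_* names are invented for
   illustration.  */

#include <stdlib.h>

struct toy_pair
{
  unsigned index;
  int value;
};

static int *
toy_expand_pairs (const struct toy_pair *pairs, size_t n, unsigned max_index)
{
  int *dense = (int *) calloc (max_index + 1, sizeof (int));
  size_t i;

  if (!dense)
    return NULL;
  for (i = 0; i < n; i++)
    dense[pairs[i].index] = pairs[i].value;  /* Scatter the sparse pairs.  */
  return dense;                              /* Caller frees.  */
}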
2298 /* Finalize FILE_DATA in FILE and increase COUNT. */
2300 static int
2301 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2302 int *count)
2304 lto_file_finalize (file_data, file);
2305 if (cgraph_dump_file)
2306 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2307 file_data->file_name, file_data->id);
2308 (*count)++;
2309 return 0;
2312 /* Generate a TREE representation for all types and external decl
2313 entities in FILE.
2315 Read all of the globals out of the file. Then read the cgraph
2316 and process the .o index into the cgraph nodes so that it can open
2317 the .o file to load the functions and ipa information. */
2319 static struct lto_file_decl_data *
2320 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2322 struct lto_file_decl_data *file_data = NULL;
2323 splay_tree file_ids;
2324 htab_t section_hash_table;
2325 struct lto_section_slot *section;
2326 struct file_data_list file_list;
2327 struct lto_section_list section_list;
2329 memset (&section_list, 0, sizeof (struct lto_section_list));
2330 section_hash_table = lto_obj_build_section_table (file, &section_list);
2332 /* Find all sub modules in the object and put their sections into new hash
2333 tables in a splay tree. */
2334 file_ids = lto_splay_tree_new ();
2335 memset (&file_list, 0, sizeof (struct file_data_list));
2336 for (section = section_list.first; section != NULL; section = section->next)
2337 create_subid_section_table (section, file_ids, &file_list);
2339 /* Add resolutions to file ids */
2340 lto_resolution_read (file_ids, resolution_file, file);
2342 /* Finalize each lto file for each submodule in the merged object */
2343 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2344 lto_create_files_from_ids (file, file_data, count);
2346 splay_tree_delete (file_ids);
2347 htab_delete (section_hash_table);
2349 return file_list.first;
2352 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2353 #define LTO_MMAP_IO 1
2354 #endif
2356 #if LTO_MMAP_IO
2357 /* Page size of machine is used for mmap and munmap calls. */
2358 static size_t page_mask;
2359 #endif
2361 /* Get the section data of length LEN from FILE_DATA's file starting at
2362 OFFSET. The data segment must be freed by the caller when the
2363 caller is finished. Returns NULL if all was not well. */
2365 static char *
2366 lto_read_section_data (struct lto_file_decl_data *file_data,
2367 intptr_t offset, size_t len)
2369 char *result;
2370 static int fd = -1;
2371 static char *fd_name;
2372 #if LTO_MMAP_IO
2373 intptr_t computed_len;
2374 intptr_t computed_offset;
2375 intptr_t diff;
2376 #endif
2378 /* Keep a single-entry file-descriptor cache. The last file we
2379 touched will get closed at exit.
2380 ??? Eventually we want to add a more sophisticated larger cache
2381 or rather fix function body streaming to not stream them in
2382 practically random order. */
2383 if (fd != -1
2384 && filename_cmp (fd_name, file_data->file_name) != 0)
2386 free (fd_name);
2387 close (fd);
2388 fd = -1;
2390 if (fd == -1)
2392 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2393 if (fd == -1)
2395 fatal_error ("Cannot open %s", file_data->file_name);
2396 return NULL;
2398 fd_name = xstrdup (file_data->file_name);
2401 #if LTO_MMAP_IO
2402 if (!page_mask)
2404 size_t page_size = sysconf (_SC_PAGE_SIZE);
2405 page_mask = ~(page_size - 1);
2408 computed_offset = offset & page_mask;
2409 diff = offset - computed_offset;
2410 computed_len = len + diff;
2412 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2413 fd, computed_offset);
2414 if (result == MAP_FAILED)
2416 fatal_error ("Cannot map %s", file_data->file_name);
2417 return NULL;
2420 return result + diff;
2421 #else
2422 result = (char *) xmalloc (len);
2423 if (lseek (fd, offset, SEEK_SET) != offset
2424 || read (fd, result, len) != (ssize_t) len)
2426 free (result);
2427 fatal_error ("Cannot read %s", file_data->file_name);
2428 result = NULL;
2430 #ifdef __MINGW32__
2431 /* Native Windows doesn't support delayed unlink of an opened file. So
2432 we close the file here again. This produces higher I/O load, but at least
2433 it prevents dangling file handles from blocking unlink. */
2434 free (fd_name);
2435 fd_name = NULL;
2436 close (fd);
2437 fd = -1;
2438 #endif
2439 return result;
2440 #endif
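/* A minimal standalone sketch (not part of lto.c) of the page-alignment
   arithmetic used on the mmap path above: mmap requires a page-aligned
   file offset, so the requested offset is rounded down to a page boundary,
   the mapping is enlarged by the difference, and callers read at
   result + diff.  POSIX-only; the toy_* name is invented for
   illustration.  */

#include <stddef.h>
#include <stdint.h>
#include <sys/mman.h>
#include <unistd.h>

static char *
toy_map_range (int fd, intptr_t offset, size_t len)
{
  size_t page_size = (size_t) sysconf (_SC_PAGE_SIZE);
  size_t page_mask = ~(page_size - 1);
  intptr_t aligned_offset = offset & (intptr_t) page_mask;
  intptr_t diff = offset - aligned_offset;
  size_t mapped_len = len + (size_t) diff;
  char *result = (char *) mmap (NULL, mapped_len, PROT_READ, MAP_PRIVATE,
                                fd, aligned_offset);

  if (result == MAP_FAILED)
    return NULL;
  return result + diff;         /* munmap later using the aligned values.  */
}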
2444 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2445 NAME will be NULL unless the section type is for a function
2446 body. */
2448 static const char *
2449 get_section_data (struct lto_file_decl_data *file_data,
2450 enum lto_section_type section_type,
2451 const char *name,
2452 size_t *len)
2454 htab_t section_hash_table = file_data->section_hash_table;
2455 struct lto_section_slot *f_slot;
2456 struct lto_section_slot s_slot;
2457 const char *section_name = lto_get_section_name (section_type, name, file_data);
2458 char *data = NULL;
2460 *len = 0;
2461 s_slot.name = section_name;
2462 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2463 if (f_slot)
2465 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2466 *len = f_slot->len;
2469 free (CONST_CAST (char *, section_name));
2470 return data;
2474 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2475 starts at OFFSET and has LEN bytes. */
2477 static void
2478 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2479 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2480 const char *name ATTRIBUTE_UNUSED,
2481 const char *offset, size_t len ATTRIBUTE_UNUSED)
2483 #if LTO_MMAP_IO
2484 intptr_t computed_len;
2485 intptr_t computed_offset;
2486 intptr_t diff;
2487 #endif
2489 #if LTO_MMAP_IO
2490 computed_offset = ((intptr_t) offset) & page_mask;
2491 diff = (intptr_t) offset - computed_offset;
2492 computed_len = len + diff;
2494 munmap ((caddr_t) computed_offset, computed_len);
2495 #else
2496 free (CONST_CAST(char *, offset));
2497 #endif
2500 static lto_file *current_lto_file;
2502 /* Helper for qsort; order partitions by size, largest first.
2503 We sort from greatest to smallest so a parallel build doesn't stall on the
2504 longest compilation being executed too late. */
2506 static int
2507 cmp_partitions_size (const void *a, const void *b)
2509 const struct ltrans_partition_def *pa
2510 = *(struct ltrans_partition_def *const *)a;
2511 const struct ltrans_partition_def *pb
2512 = *(struct ltrans_partition_def *const *)b;
2513 return pb->insns - pa->insns;
2518 /* Helper for qsort; order partitions by the symbol order of their first symbol. */
2518 static int
2519 cmp_partitions_order (const void *a, const void *b)
2521 const struct ltrans_partition_def *pa
2522 = *(struct ltrans_partition_def *const *)a;
2523 const struct ltrans_partition_def *pb
2524 = *(struct ltrans_partition_def *const *)b;
2525 int ordera = -1, orderb = -1;
2527 if (lto_symtab_encoder_size (pa->encoder))
2528 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
2529 if (lto_symtab_encoder_size (pb->encoder))
2530 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
2531 return orderb - ordera;
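/* A minimal standalone sketch (not part of lto.c) of a qsort comparator
   that orders elements largest-first by an instruction count, mirroring
   cmp_partitions_size above: returning "b - a" instead of "a - b" flips
   the usual ascending order.  The toy_* types are invented for
   illustration and assume modest insn counts (no overflow).  */

#include <stdlib.h>

struct toy_partition
{
  const char *name;
  int insns;
};

static int
toy_cmp_partitions_size (const void *a, const void *b)
{
  const struct toy_partition *pa = *(const struct toy_partition *const *) a;
  const struct toy_partition *pb = *(const struct toy_partition *const *) b;
  return pb->insns - pa->insns;  /* Descending: biggest partition first.  */
}

/* Usage, given an array of pointers "struct toy_partition *parts[n]":
   qsort (parts, n, sizeof (struct toy_partition *), toy_cmp_partitions_size);  */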
2534 /* Write all output files in WPA mode and the file with the list of
2535 LTRANS units. */
2537 static void
2538 lto_wpa_write_files (void)
2540 unsigned i, n_sets;
2541 lto_file *file;
2542 ltrans_partition part;
2543 FILE *ltrans_output_list_stream;
2544 char *temp_filename;
2545 size_t blen;
2547 /* Open the LTRANS output list. */
2548 if (!ltrans_output_list)
2549 fatal_error ("no LTRANS output list filename provided");
2550 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2551 if (ltrans_output_list_stream == NULL)
2552 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2554 timevar_push (TV_WHOPR_WPA);
2556 FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
2557 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2559 /* Find out statics that need to be promoted
2560 to globals with hidden visibility because they are accessed from multiple
2561 partitions. */
2562 lto_promote_cross_file_statics ();
2564 timevar_pop (TV_WHOPR_WPA);
2566 timevar_push (TV_WHOPR_WPA_IO);
2568 /* Generate a prefix for the LTRANS unit files. */
2569 blen = strlen (ltrans_output_list);
2570 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2571 strcpy (temp_filename, ltrans_output_list);
2572 if (blen > sizeof (".out")
2573 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2574 ".out") == 0)
2575 temp_filename[blen - sizeof (".out") + 1] = '\0';
2576 blen = strlen (temp_filename);
2578 n_sets = ltrans_partitions.length ();
2580 /* Sort partitions by size so small ones are compiled last.
2581 FIXME: Even when not reordering we may want to output one list for parallel make
2582 and another for the final link command. */
2583 ltrans_partitions.qsort (flag_toplevel_reorder
2584 ? cmp_partitions_size
2585 : cmp_partitions_order);
2586 for (i = 0; i < n_sets; i++)
2588 size_t len;
2589 ltrans_partition part = ltrans_partitions[i];
2591 /* Write all the nodes in SET. */
2592 sprintf (temp_filename + blen, "%u.o", i);
2593 file = lto_obj_file_open (temp_filename, true);
2594 if (!file)
2595 fatal_error ("lto_obj_file_open() failed");
2597 if (!quiet_flag)
2598 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2599 if (cgraph_dump_file)
2601 lto_symtab_encoder_iterator lsei;
2603 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2604 part->name, temp_filename, part->insns);
2605 fprintf (cgraph_dump_file, " Symbols in partition: ");
2606 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2607 lsei_next_in_partition (&lsei))
2609 symtab_node node = lsei_node (lsei);
2610 fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
2612 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2613 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2614 lsei_next (&lsei))
2616 symtab_node node = lsei_node (lsei);
2617 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2619 fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
2620 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
2621 if (cnode
2622 && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
2623 fprintf (cgraph_dump_file, "(body included)");
2624 else
2626 varpool_node *vnode = dyn_cast <varpool_node> (node);
2627 if (vnode
2628 && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
2629 fprintf (cgraph_dump_file, "(initializer included)");
2633 fprintf (cgraph_dump_file, "\n");
2635 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2637 lto_set_current_out_file (file);
2639 ipa_write_optimization_summaries (part->encoder);
2641 lto_set_current_out_file (NULL);
2642 lto_obj_file_close (file);
2643 free (file);
2644 part->encoder = NULL;
2646 len = strlen (temp_filename);
2647 if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
2648 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2649 fatal_error ("writing to LTRANS output list %s: %m",
2650 ltrans_output_list);
2653 lto_stats.num_output_files += n_sets;
2655 /* Close the LTRANS output list. */
2656 if (fclose (ltrans_output_list_stream))
2657 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2659 free_ltrans_partitions();
2660 free (temp_filename);
2662 timevar_pop (TV_WHOPR_WPA_IO);
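/* A minimal standalone sketch (not part of lto.c) of how lto_wpa_write_files
   above derives per-partition LTRANS file names from the output-list name:
   strip a trailing ".out" and append "<n>.o".  The buffer is sized for the
   longest "%u" plus ".o", as in the original; the toy_* name is invented
   for illustration.  */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *
toy_ltrans_name (const char *output_list, unsigned n)
{
  size_t blen = strlen (output_list);
  char *name = (char *) malloc (blen + sizeof ("2147483648.o"));

  if (!name)
    return NULL;
  strcpy (name, output_list);
  if (blen > strlen (".out")
      && strcmp (name + blen - strlen (".out"), ".out") == 0)
    name[blen - strlen (".out")] = '\0';    /* Drop the ".out" suffix.  */
  sprintf (name + strlen (name), "%u.o", n);
  return name;    /* E.g. "foo.ltrans.out", 3 -> "foo.ltrans3.o".  */
}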
2666 /* If TT is a variable or function decl, replace it with its
2667 prevailing variant. */
2668 #define LTO_SET_PREVAIL(tt) \
2669 do {\
2670 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
2671 tt = lto_symtab_prevailing_decl (tt); \
2672 } while (0)
2674 /* Ensure that TT isn't a replaceable var or function decl. */
2675 #define LTO_NO_PREVAIL(tt) \
2676 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
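/* A minimal standalone sketch (not part of lto.c) of why statement-like
   macros such as LTO_SET_PREVAIL above are wrapped in "do { ... } while (0)":
   the expansion behaves as a single statement and therefore composes safely
   with an unbraced if/else.  The TOY_* macro and toy_* function are
   invented for illustration.  */

#define TOY_CLAMP_NEGATIVE(x) \
  do { \
    if ((x) < 0) \
      (x) = 0; \
  } while (0)

static int
toy_clamp (int v, int enabled)
{
  if (enabled)
    TOY_CLAMP_NEGATIVE (v);     /* Safe even without braces around the if.  */
  else
    v = -1;
  return v;
}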
2678 /* Given a tree T, replace all fields referring to variables or functions
2679 with their prevailing variant. */
2680 static void
2681 lto_fixup_prevailing_decls (tree t)
2683 enum tree_code code = TREE_CODE (t);
2684 LTO_NO_PREVAIL (TREE_TYPE (t));
2685 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2686 LTO_NO_PREVAIL (TREE_CHAIN (t));
2687 if (DECL_P (t))
2689 LTO_NO_PREVAIL (DECL_NAME (t));
2690 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2691 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2693 LTO_SET_PREVAIL (DECL_SIZE (t));
2694 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2695 LTO_SET_PREVAIL (DECL_INITIAL (t));
2696 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2697 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2699 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2701 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2702 LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
2704 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2706 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2707 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2708 LTO_NO_PREVAIL (DECL_VINDEX (t));
2710 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2711 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2712 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2714 LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t));
2715 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2716 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2717 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2718 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2721 else if (TYPE_P (t))
2723 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2724 LTO_SET_PREVAIL (TYPE_SIZE (t));
2725 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2726 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2727 LTO_NO_PREVAIL (TYPE_NAME (t));
2729 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2730 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2731 LTO_SET_PREVAIL (t->type_non_common.binfo);
2733 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2735 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2736 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2737 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2739 else if (EXPR_P (t))
2741 int i;
2742 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2743 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2745 else
2747 switch (code)
2749 case TREE_LIST:
2750 LTO_SET_PREVAIL (TREE_VALUE (t));
2751 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2752 break;
2753 default:
2754 gcc_unreachable ();
2758 #undef LTO_SET_PREVAIL
2759 #undef LTO_NO_PREVAIL
2761 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2762 replacing var and function decls with the corresponding prevailing def. */
2764 static void
2765 lto_fixup_state (struct lto_in_decl_state *state)
2767 unsigned i, si;
2768 struct lto_tree_ref_table *table;
2770 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2771 we still need to walk from all DECLs to find the reachable
2772 FUNCTION_DECLs and VAR_DECLs. */
2773 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2775 table = &state->streams[si];
2776 for (i = 0; i < table->size; i++)
2778 tree *tp = table->trees + i;
2779 if (VAR_OR_FUNCTION_DECL_P (*tp))
2780 *tp = lto_symtab_prevailing_decl (*tp);
2785 /* A callback of htab_traverse. Just extracts a state from SLOT
2786 and calls lto_fixup_state. */
2788 static int
2789 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2791 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2792 lto_fixup_state (state);
2793 return 1;
2796 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2797 prevailing one. */
2799 static void
2800 lto_fixup_decls (struct lto_file_decl_data **files)
2802 unsigned int i;
2803 htab_iterator hi;
2804 tree t;
2806 FOR_EACH_HTAB_ELEMENT (tree_with_vars, t, tree, hi)
2807 lto_fixup_prevailing_decls (t);
2809 for (i = 0; files[i]; i++)
2811 struct lto_file_decl_data *file = files[i];
2812 struct lto_in_decl_state *state = file->global_decl_state;
2813 lto_fixup_state (state);
2815 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2819 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2821 /* Turn the file data for sub files into a single array, so that they look
2822 like separate files for further passes. */
2824 static void
2825 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2827 struct lto_file_decl_data *n, *next;
2828 int i, k;
2830 lto_stats.num_input_files = count;
2831 all_file_decl_data
2832 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2833 /* Set the hooks so that all of the ipa passes can read in their data. */
2834 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2835 for (i = 0, k = 0; i < last_file_ix; i++)
2837 for (n = orig[i]; n != NULL; n = next)
2839 all_file_decl_data[k++] = n;
2840 next = n->next;
2841 n->next = NULL;
2844 all_file_decl_data[k] = NULL;
2845 gcc_assert (k == count);
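/* A minimal standalone sketch (not part of lto.c) of flattening an array of
   singly linked lists into one NULL-terminated array while clearing the
   next pointers, as lto_flatten_files does for sub-file data above.  The
   toy_* types are invented for illustration; COUNT must be the total
   number of chained elements.  */

#include <stdlib.h>

struct toy_file
{
  struct toy_file *next;
};

static struct toy_file **
toy_flatten (struct toy_file **orig, int count, int last_ix)
{
  struct toy_file **flat
    = (struct toy_file **) calloc (count + 1, sizeof (struct toy_file *));
  int i, k = 0;

  if (!flat)
    return NULL;
  for (i = 0; i < last_ix; i++)
    {
      struct toy_file *n, *next;

      for (n = orig[i]; n != NULL; n = next)
        {
          next = n->next;
          n->next = NULL;       /* Detach so each entry looks standalone.  */
          flat[k++] = n;
        }
    }
  flat[k] = NULL;               /* NULL-terminate like the original array.  */
  return flat;
}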
2848 /* Input file data before flattening (i.e. splitting them into subfiles to support
2849 incremental linking). */
2850 static int real_file_count;
2851 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2853 static void print_lto_report_1 (void);
2855 /* Read all the symbols from the input files FNAMES. NFILES is the
2856 number of files requested in the command line. Instantiate a
2857 global call graph by aggregating all the sub-graphs found in each
2858 file. */
2860 static void
2861 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2863 unsigned int i, last_file_ix;
2864 FILE *resolution;
2865 struct cgraph_node *node;
2866 int count = 0;
2867 struct lto_file_decl_data **decl_data;
2869 init_cgraph ();
2871 timevar_push (TV_IPA_LTO_DECL_IN);
2873 real_file_decl_data
2874 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2875 real_file_count = nfiles;
2877 /* Read the resolution file. */
2878 resolution = NULL;
2879 if (resolution_file_name)
2881 int t;
2882 unsigned num_objects;
2884 resolution = fopen (resolution_file_name, "r");
2885 if (resolution == NULL)
2886 fatal_error ("could not open symbol resolution file: %m");
2888 t = fscanf (resolution, "%u", &num_objects);
2889 gcc_assert (t == 1);
2891 /* True, since the plugin splits the archives. */
2892 gcc_assert (num_objects == nfiles);
2895 tree_with_vars = htab_create_ggc (101, htab_hash_pointer, htab_eq_pointer,
2896 NULL);
2897 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
2898 tree_int_map_eq, NULL);
2899 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
2900 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
2901 (GIMPLE_TYPE_LEADER_SIZE);
2902 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
2904 if (!quiet_flag)
2905 fprintf (stderr, "Reading object files:");
2907 /* Read all of the object files specified on the command line. */
2908 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2910 struct lto_file_decl_data *file_data = NULL;
2911 if (!quiet_flag)
2913 fprintf (stderr, " %s", fnames[i]);
2914 fflush (stderr);
2917 current_lto_file = lto_obj_file_open (fnames[i], false);
2918 if (!current_lto_file)
2919 break;
2921 file_data = lto_file_read (current_lto_file, resolution, &count);
2922 if (!file_data)
2924 lto_obj_file_close (current_lto_file);
2925 free (current_lto_file);
2926 current_lto_file = NULL;
2927 break;
2930 decl_data[last_file_ix++] = file_data;
2932 lto_obj_file_close (current_lto_file);
2933 free (current_lto_file);
2934 current_lto_file = NULL;
2935 ggc_collect ();
2938 lto_flatten_files (decl_data, count, last_file_ix);
2939 lto_stats.num_input_files = count;
2940 ggc_free(decl_data);
2941 real_file_decl_data = NULL;
2943 if (resolution_file_name)
2944 fclose (resolution);
2946 /* Show the LTO report before launching LTRANS. */
2947 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2948 print_lto_report_1 ();
2950 /* Free gimple type merging datastructures. */
2951 htab_delete (gimple_types);
2952 gimple_types = NULL;
2953 htab_delete (type_hash_cache);
2954 type_hash_cache = NULL;
2955 free (type_pair_cache);
2956 type_pair_cache = NULL;
2957 gimple_type_leader = NULL;
2958 free_gimple_type_tables ();
2959 ggc_collect ();
2961 /* Set the hooks so that all of the ipa passes can read in their data. */
2962 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2964 timevar_pop (TV_IPA_LTO_DECL_IN);
2966 if (!quiet_flag)
2967 fprintf (stderr, "\nReading the callgraph\n");
2969 timevar_push (TV_IPA_LTO_CGRAPH_IO);
2970 /* Read the symtab. */
2971 input_symtab ();
2973 /* Store resolutions into the symbol table. */
2974 if (resolution_map)
2976 void **res;
2977 symtab_node snode;
2979 FOR_EACH_SYMBOL (snode)
2980 if (symtab_real_symbol_p (snode)
2981 && (res = pointer_map_contains (resolution_map,
2982 snode->symbol.decl)))
2983 snode->symbol.resolution
2984 = (enum ld_plugin_symbol_resolution)(size_t)*res;
2986 pointer_map_destroy (resolution_map);
2987 resolution_map = NULL;
2990 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
2992 if (!quiet_flag)
2993 fprintf (stderr, "Merging declarations\n");
2995 timevar_push (TV_IPA_LTO_DECL_MERGE);
2996 /* Merge global decls. In ltrans mode we read the merged cgraph, so we do
2997 not need to care about resolving symbols again; we only need to replace
2998 duplicated declarations read from the callgraph and from function
2999 sections. */
3000 if (!flag_ltrans)
3002 lto_symtab_merge_decls ();
3004 /* If there were errors during symbol merging bail out, we have no
3005 good way to recover here. */
3006 if (seen_error ())
3007 fatal_error ("errors during merging of translation units");
3009 /* Fixup all decls. */
3010 lto_fixup_decls (all_file_decl_data);
3012 htab_delete (tree_with_vars);
3013 tree_with_vars = NULL;
3014 ggc_collect ();
3016 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3017 /* Each pass will set the appropriate timer. */
3019 if (!quiet_flag)
3020 fprintf (stderr, "Reading summaries\n");
3022 /* Read the IPA summary data. */
3023 if (flag_ltrans)
3024 ipa_read_optimization_summaries ();
3025 else
3026 ipa_read_summaries ();
3028 for (i = 0; all_file_decl_data[i]; i++)
3030 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3031 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3032 all_file_decl_data[i]->symtab_node_encoder = NULL;
3035 /* Finally merge the cgraph according to the decl merging decisions. */
3036 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3037 if (cgraph_dump_file)
3039 fprintf (cgraph_dump_file, "Before merging:\n");
3040 dump_symtab (cgraph_dump_file);
3042 lto_symtab_merge_symbols ();
3043 ggc_collect ();
3045 /* FIXME: ipa_transforms_to_apply holds the list of passes that have optimization
3046 summaries computed and need to apply changes. At the moment WHOPR only
3047 supports inlining, so we can push it here by hand. In the future we need to stream
3048 this field into the ltrans compilation. */
3049 if (flag_ltrans)
3050 FOR_EACH_DEFINED_FUNCTION (node)
3051 node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass)&pass_ipa_inline);
3053 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3055 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3057 /* Indicate that the cgraph is built and ready. */
3058 cgraph_function_flags_ready = true;
3060 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3061 ggc_free (all_file_decl_data);
3062 all_file_decl_data = NULL;
3066 /* Materialize all the bodies for all the nodes in the callgraph. */
3068 static void
3069 materialize_cgraph (void)
3071 tree decl;
3072 struct cgraph_node *node;
3073 unsigned i;
3074 timevar_id_t lto_timer;
3076 if (!quiet_flag)
3077 fprintf (stderr,
3078 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3080 /* Now that we have input the cgraph, we need to clear all of the aux
3081 nodes and read the functions if we are not running in WPA mode. */
3082 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3084 FOR_EACH_FUNCTION (node)
3086 if (node->symbol.lto_file_data)
3088 lto_materialize_function (node);
3089 lto_stats.num_input_cgraph_nodes++;
3093 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3095 /* Start the appropriate timer depending on the mode that we are
3096 operating in. */
3097 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3098 : (flag_ltrans) ? TV_WHOPR_LTRANS
3099 : TV_LTO;
3100 timevar_push (lto_timer);
3102 current_function_decl = NULL;
3103 set_cfun (NULL);
3105 /* Inform the middle end about the global variables we have seen. */
3106 FOR_EACH_VEC_ELT (*lto_global_var_decls, i, decl)
3107 rest_of_decl_compilation (decl, 1, 0);
3109 if (!quiet_flag)
3110 fprintf (stderr, "\n");
3112 timevar_pop (lto_timer);
3116 /* Show various memory usage statistics related to LTO. */
3117 static void
3118 print_lto_report_1 (void)
3120 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3121 fprintf (stderr, "%s statistics\n", pfx);
3123 if (gimple_types)
3124 fprintf (stderr, "[%s] GIMPLE type table: size %ld, %ld elements, "
3125 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3126 (long) htab_size (gimple_types),
3127 (long) htab_elements (gimple_types),
3128 (long) gimple_types->searches,
3129 (long) gimple_types->collisions,
3130 htab_collisions (gimple_types));
3131 else
3132 fprintf (stderr, "[%s] GIMPLE type table is empty\n", pfx);
3133 if (type_hash_cache)
3134 fprintf (stderr, "[%s] GIMPLE type hash cache table: size %ld, %ld elements, "
3135 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3136 (long) htab_size (type_hash_cache),
3137 (long) htab_elements (type_hash_cache),
3138 (long) type_hash_cache->searches,
3139 (long) type_hash_cache->collisions,
3140 htab_collisions (type_hash_cache));
3141 else
3142 fprintf (stderr, "[%s] GIMPLE type hash cache table is empty\n", pfx);
3143 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3145 print_gimple_types_stats (pfx);
3146 print_lto_report (pfx);
3149 /* Perform whole program analysis (WPA) on the callgraph and write out the
3150 optimization plan. */
3152 static void
3153 do_whole_program_analysis (void)
3155 symtab_node node;
3157 timevar_start (TV_PHASE_OPT_GEN);
3159 /* Note that since we are in WPA mode, materialize_cgraph will not
3160 actually read in all the function bodies. It only materializes
3161 the decls and cgraph nodes so that analysis can be performed. */
3162 materialize_cgraph ();
3164 /* Reading in the cgraph uses different timers, start timing WPA now. */
3165 timevar_push (TV_WHOPR_WPA);
3167 if (pre_ipa_mem_report)
3169 fprintf (stderr, "Memory consumption before IPA\n");
3170 dump_memory_report (false);
3173 cgraph_function_flags_ready = true;
3175 if (cgraph_dump_file)
3176 dump_symtab (cgraph_dump_file);
3177 bitmap_obstack_initialize (NULL);
3178 cgraph_state = CGRAPH_STATE_IPA_SSA;
3180 execute_ipa_pass_list (all_regular_ipa_passes);
3181 symtab_remove_unreachable_nodes (false, dump_file);
3183 if (cgraph_dump_file)
3185 fprintf (cgraph_dump_file, "Optimized ");
3186 dump_symtab (cgraph_dump_file);
3188 #ifdef ENABLE_CHECKING
3189 verify_cgraph ();
3190 #endif
3191 bitmap_obstack_release (NULL);
3193 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3194 timevar_pop (TV_WHOPR_WPA);
3196 timevar_push (TV_WHOPR_PARTITIONING);
3197 if (flag_lto_partition_1to1)
3198 lto_1_to_1_map ();
3199 else if (flag_lto_partition_max)
3200 lto_max_map ();
3201 else
3202 lto_balanced_map ();
3204 /* AUX pointers are used by the partitioning code to keep track of the number
3205 of partitions a symbol is in. This is no longer needed. */
3206 FOR_EACH_SYMBOL (node)
3207 node->symbol.aux = NULL;
3209 lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
3210 timevar_pop (TV_WHOPR_PARTITIONING);
3212 timevar_stop (TV_PHASE_OPT_GEN);
3213 timevar_start (TV_PHASE_STREAM_OUT);
3215 if (!quiet_flag)
3217 fprintf (stderr, "\nStreaming out");
3218 fflush (stderr);
3220 lto_wpa_write_files ();
3221 if (!quiet_flag)
3222 fprintf (stderr, "\n");
3224 timevar_stop (TV_PHASE_STREAM_OUT);
3226 ggc_collect ();
3227 if (post_ipa_mem_report)
3229 fprintf (stderr, "Memory consumption after IPA\n");
3230 dump_memory_report (false);
3233 /* Show the LTO report before launching LTRANS. */
3234 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3235 print_lto_report_1 ();
3236 if (mem_report_wpa)
3237 dump_memory_report (true);
3241 static GTY(()) tree lto_eh_personality_decl;
3243 /* Return the LTO personality function decl. */
3245 tree
3246 lto_eh_personality (void)
3248 if (!lto_eh_personality_decl)
3250 /* Use the first personality DECL for our personality if we don't
3251 support multiple ones. This ensures that we don't artificially
3252 create the need for them in a single-language program. */
3253 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3254 lto_eh_personality_decl = first_personality_decl;
3255 else
3256 lto_eh_personality_decl = lhd_gcc_personality ();
3259 return lto_eh_personality_decl;
3262 /* Set the process name based on the LTO mode. */
3264 static void
3265 lto_process_name (void)
3267 if (flag_lto)
3268 setproctitle ("lto1-lto");
3269 if (flag_wpa)
3270 setproctitle ("lto1-wpa");
3271 if (flag_ltrans)
3272 setproctitle ("lto1-ltrans");
3276 /* Initialize the LTO front end. */
3278 static void
3279 lto_init (void)
3281 lto_process_name ();
3282 lto_streamer_hooks_init ();
3283 lto_reader_init ();
3284 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3285 memset (&lto_stats, 0, sizeof (lto_stats));
3286 bitmap_obstack_initialize (NULL);
3287 gimple_register_cfg_hooks ();
3291 /* Main entry point for the GIMPLE front end. This front end has
3292 three main personalities:
3294 - LTO (-flto). All the object files on the command line are
3295 loaded in memory and processed as a single translation unit.
3296 This is the traditional link-time optimization behavior.
3298 - WPA (-fwpa). Only the callgraph and summary information for
3299 files in the command file are loaded. A single callgraph
3300 (without function bodies) is instantiated for the whole set of
3301 files. IPA passes are only allowed to analyze the call graph
3302 and make transformation decisions. The callgraph is
3303 partitioned, each partition is written to a new object file
3304 together with the transformation decisions.
3306 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
3307 summary passes from running again. Since WPA computed summary
3308 information and decided what transformations to apply, LTRANS
3309 simply applies them. */
3311 void
3312 lto_main (void)
3314 /* LTO is called as a front end, even though it is not a front end.
3315 Because it is called as a front end, TV_PHASE_PARSING and
3316 TV_PARSE_GLOBAL are active, and we need to turn them off while
3317 doing LTO. Later we turn them back on so they are active up in
3318 toplev.c. */
3319 timevar_pop (TV_PARSE_GLOBAL);
3320 timevar_stop (TV_PHASE_PARSING);
3322 timevar_start (TV_PHASE_SETUP);
3324 /* Initialize the LTO front end. */
3325 lto_init ();
3327 timevar_stop (TV_PHASE_SETUP);
3328 timevar_start (TV_PHASE_STREAM_IN);
3330 /* Read all the symbols and call graph from all the files in the
3331 command line. */
3332 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3334 timevar_stop (TV_PHASE_STREAM_IN);
3336 if (!seen_error ())
3338 /* If WPA is enabled analyze the whole call graph and create an
3339 optimization plan. Otherwise, read in all the function
3340 bodies and continue with optimization. */
3341 if (flag_wpa)
3342 do_whole_program_analysis ();
3343 else
3345 struct varpool_node *vnode;
3347 timevar_start (TV_PHASE_OPT_GEN);
3349 materialize_cgraph ();
3350 if (!flag_ltrans)
3351 lto_promote_statics_nonwpa ();
3353 /* Let the middle end know that we have read and merged all of
3354 the input files. */
3355 compile ();
3357 timevar_stop (TV_PHASE_OPT_GEN);
3359 /* FIXME lto, if the processes spawned by WPA fail, we miss
3360 the chance to print WPA's report, so WPA will call
3361 print_lto_report before launching LTRANS. If LTRANS was
3362 launched directly by the driver we would not need to do
3363 this. */
3364 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3365 print_lto_report_1 ();
3367 /* Record the global variables. */
3368 FOR_EACH_DEFINED_VARIABLE (vnode)
3369 vec_safe_push (lto_global_var_decls, vnode->symbol.decl);
3373 /* Here we make LTO pretend to be a parser. */
3374 timevar_start (TV_PHASE_PARSING);
3375 timevar_push (TV_PARSE_GLOBAL);
3378 #include "gt-lto-lto.h"