/* Top-level LTO routines.
   Copyright (C) 2009-2013 Free Software Foundation, Inc.
   Contributed by CodeSourcery, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-flow.h"
#include "diagnostic-core.h"
#include "tree-ssa-operands.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "lto-streamer.h"
#include "tree-streamer.h"
#include "splay-tree.h"
#include "lto-partition.h"
static GTY(()) tree first_personality_decl;
/* Returns a hash code for P.  */

static hashval_t
hash_name (const void *p)
{
  const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
  return (hashval_t) htab_hash_string (ds->name);
}
/* Returns nonzero if P1 and P2 are equal.  */

static int
eq_name (const void *p1, const void *p2)
{
  const struct lto_section_slot *s1 =
    (const struct lto_section_slot *) p1;
  const struct lto_section_slot *s2 =
    (const struct lto_section_slot *) p2;

  return strcmp (s1->name, s2->name) == 0;
}
/* Free an lto_section_slot.  */

static void
free_with_string (void *arg)
{
  struct lto_section_slot *s = (struct lto_section_slot *) arg;

  free (CONST_CAST (char *, s->name));
  free (arg);
}
/* Create a section hash table.  */

htab_t
lto_obj_create_section_hash_table (void)
{
  return htab_create (37, hash_name, eq_name, free_with_string);
}
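/* The table created above maps section names to lto_section_slot entries;
   it starts with 37 buckets and owns the duplicated name strings, which is
   why free_with_string is installed as the element deallocator.  */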
/* Delete an allocated integer KEY in the splay tree.  */

static void
lto_splay_tree_delete_id (splay_tree_key key)
{
  free ((void *) key);
}
/* Compare splay tree node ids A and B.  */

static int
lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
{
  unsigned HOST_WIDE_INT ai;
  unsigned HOST_WIDE_INT bi;

  ai = *(unsigned HOST_WIDE_INT *) a;
  bi = *(unsigned HOST_WIDE_INT *) b;

  if (ai < bi)
    return -1;
  else if (ai > bi)
    return 1;
  return 0;
}
/* Look up splay tree node by ID in splay tree T.  */

static splay_tree_node
lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
{
  return splay_tree_lookup (t, (splay_tree_key) &id);
}
/* Check if KEY has ID.  */

static bool
lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
{
  return *(unsigned HOST_WIDE_INT *) key == id;
}
/* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
   The ID is allocated separately because we need HOST_WIDE_INTs which may
   be wider than a splay_tree_key.  */

static void
lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
                       struct lto_file_decl_data *file_data)
{
  unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
  *idp = id;
  splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
}
/* Create a splay tree.  */

static splay_tree
lto_splay_tree_new (void)
{
  return splay_tree_new (lto_splay_tree_compare_ids,
                         lto_splay_tree_delete_id,
                         NULL);
}
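/* This splay tree maps the numeric ids found in LTO section names to the
   lto_file_decl_data of the sub-file they belong to; lto_resolution_read
   below uses lto_splay_tree_lookup to attach linker resolutions to the
   right sub-file.  The keys are heap-allocated HOST_WIDE_INTs, freed by
   lto_splay_tree_delete_id, because a splay_tree_key may be narrower.  */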
/* Return true when NODE has a clone that is analyzed (i.e. we need
   to load its body even if the node itself is not needed).  */

static bool
has_analyzed_clone_p (struct cgraph_node *node)
{
  struct cgraph_node *orig = node;
  node = node->clones;
  if (node)
    while (node != orig)
      {
        if (node->analyzed)
          return true;
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != orig && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != orig)
              node = node->next_sibling_clone;
          }
      }
  return false;
}
/* Read the function body for the function associated with NODE.  */

static void
lto_materialize_function (struct cgraph_node *node)
{
  tree decl;
  struct lto_file_decl_data *file_data;
  const char *data, *name;
  size_t len;

  decl = node->symbol.decl;
  /* Read in functions with body (analyzed nodes)
     and also functions that are needed to produce virtual clones.  */
  if (cgraph_function_with_gimple_body_p (node) || has_analyzed_clone_p (node))
    {
      /* Clones don't need to be read.  */
      if (node->clone_of)
        return;

      /* Load the function body only if not operating in WPA mode.  In
         WPA mode, the body of the function is not needed.  */
      if (!flag_wpa)
        {
          file_data = node->symbol.lto_file_data;
          name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

          /* We may have renamed the declaration, e.g., a static function.  */
          name = lto_get_decl_name_mapping (file_data, name);

          data = lto_get_section_data (file_data, LTO_section_function_body,
                                       name, &len);
          if (!data)
            fatal_error ("%s: section %s is missing",
                         file_data->file_name,
                         name);

          gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);

          push_struct_function (decl);
          announce_function (decl);
          lto_input_function_body (file_data, decl, data);
          if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
            first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
          lto_stats.num_function_bodies++;
          lto_free_section_data (file_data, LTO_section_function_body, name,
                                 data, len);
          pop_cfun ();
        }
    }

  /* Let the middle end know about the function.  */
  rest_of_decl_compilation (decl, 1, 0);
}
/* Decode the content of memory pointed to by DATA in the in decl
   state object STATE.  DATA_IN points to a data_in structure for
   decoding.  Return the address after the decoded object in the
   input.  */

static const uint32_t *
lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
                        struct lto_in_decl_state *state)
{
  uint32_t ix;
  tree decl;
  uint32_t i, j;

  ix = *data++;
  decl = streamer_tree_cache_get (data_in->reader_cache, ix);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      gcc_assert (decl == void_type_node);
      decl = NULL_TREE;
    }
  state->fn_decl = decl;

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      uint32_t size = *data++;
      tree *decls = ggc_alloc_vec_tree (size);

      for (j = 0; j < size; j++)
        decls[j] = streamer_tree_cache_get (data_in->reader_cache, data[j]);

      state->streams[i].size = size;
      state->streams[i].trees = decls;
      data += size;
    }

  return data;
}
/* Global type table.  FIXME, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;

static hashval_t gimple_type_hash (const void *);
/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

        -2: The pair (T1, T2) has just been inserted in the table.
         0: T1 and T2 are different types.
         1: T1 and T2 are the same type.  */

struct type_pair_d
{
  unsigned int uid1;
  unsigned int uid2;
  signed char same_p;
};
typedef struct type_pair_d *type_pair_t;

#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;
/* Lookup the pair of types T1 and T2 in the type-pair cache.  Insert a new
   entry if none existed.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* Going through iterative_hash_hashval_t would imply function calls.
     We know that UIDs are in a limited range.  */
  index = ((((unsigned HOST_WIDE_INT) uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
           % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache[index].uid1 == uid1
      && type_pair_cache[index].uid2 == uid2)
    return &type_pair_cache[index];

  type_pair_cache[index].uid1 = uid1;
  type_pair_cache[index].uid2 = uid2;
  type_pair_cache[index].same_p = -2;

  return &type_pair_cache[index];
}
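/* Illustration of the direct-mapped cache above (hypothetical UIDs): for
   TYPE_UIDs 7 and 12 on a 64-bit host the slot is
   ((7 << 32) + 12) % GIMPLE_TYPE_PAIR_SIZE, and a later lookup for the
   same ordered pair hits that slot; any other pair that maps to the same
   slot simply overwrites it, so a collision only costs a redundant
   comparison.  */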
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
  bool on_sccstack;
  union {
    hashval_t hash;
    signed char same_p;
  } u;
};

static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;
/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  tree type;
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;
/* Lookup an existing leader for T and return it or NULL_TREE, if
   there is none in the cache.  */

static tree
gimple_lookup_type_leader (tree t)
{
  gimple_type_leader_entry *leader;

  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type != t)
    return NULL_TREE;

  return leader->leader;
}
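/* Like the type-pair cache, gimple_type_leader is direct-mapped on
   TYPE_UID modulo GIMPLE_TYPE_LEADER_SIZE, so a colliding registration
   silently evicts the previous entry; a NULL_TREE result therefore only
   means "not cached", not "never registered".  */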
/* Return true if T1 and T2 have the same name.  If exactly one of them
   has no name return false; if neither has a name return true.  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  return name1 == name2;
}
static bool
gimple_types_compatible_p_1 (tree, tree, type_pair_t,
                             vec<type_pair_t> *,
                             struct pointer_map_t *, struct obstack *);
/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2.  Update the comparison status if it is not part of
   the SCC containing the callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gtc_visit (tree t1, tree t2,
           struct sccs *state,
           vec<type_pair_t> *sccstack,
           struct pointer_map_t *sccstate,
           struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;
  tree leader1, leader2;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *) *slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
                                   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *) *pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
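/* Taken together, gtc_visit and gimple_types_compatible_p_1 implement a
   Tarjan-style depth-first walk over pairs of types: same_p starts out as
   -2 ("being decided"), each SCC is entered optimistically assuming
   equality (1), and any structural mismatch demotes the whole SCC to 0
   when it is popped.  Cached values of 0 or 1 are final and short-circuit
   future comparisons.  */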
/* Worker for gimple_types_compatible_p.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
                             vec<type_pair_t> *sccstack,
                             struct pointer_map_t *sccstate,
                             struct obstack *sccstate_obstack)
{
  struct sccs *state;

  gcc_assert (p->same_p == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  sccstack->safe_push (p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* The main variant of both types should compare equal.  */
  if (TYPE_MAIN_VARIANT (t1) != t1
      || TYPE_MAIN_VARIANT (t2) != t2)
    {
      if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
    }

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
                      DECL_CONTEXT (TYPE_NAME (t2)),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
         the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack)
          || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
          || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
        goto different_types;
      else
        {
          tree i1 = TYPE_DOMAIN (t1);
          tree i2 = TYPE_DOMAIN (t2);

          /* For an incomplete external array, the type domain can be
             NULL_TREE.  Check this condition also.  */
          if (i1 == NULL_TREE && i2 == NULL_TREE)
            goto same_types;
          else if (i1 == NULL_TREE || i2 == NULL_TREE)
            goto different_types;
          else
            {
              tree min1 = TYPE_MIN_VALUE (i1);
              tree min2 = TYPE_MIN_VALUE (i2);
              tree max1 = TYPE_MAX_VALUE (i1);
              tree max2 = TYPE_MAX_VALUE (i2);

              /* The minimum/maximum values have to be the same.  */
              if ((min1 == min2
                   || (min1 && min2
                       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
                            && TREE_CODE (min2) == PLACEHOLDER_EXPR)
                           || operand_equal_p (min1, min2, 0))))
                  && (max1 == max2
                      || (max1 && max2
                          && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
                               && TREE_CODE (max2) == PLACEHOLDER_EXPR)
                              || operand_equal_p (max1, max2, 0)))))
                goto same_types;
              else
                goto different_types;
            }
        }

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      /* Fallthru.  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
         are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      if (!comp_type_attributes (t1, t2))
        goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        goto same_types;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
                              state, sccstack, sccstate, sccstate_obstack))
                goto different_types;
            }

          if (parms1 || parms2)
            goto different_types;

          goto same_types;
        }

    case OFFSET_TYPE:
      {
        if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                        state, sccstack, sccstate, sccstate_obstack)
            || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
                           TYPE_OFFSET_BASETYPE (t2),
                           state, sccstack, sccstate, sccstate_obstack))
          goto different_types;

        goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* If the two pointers have different ref-all attributes,
           they can't be the same type.  */
        if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
          goto different_types;

        /* Otherwise, pointer and reference types are the same if the
           pointed-to types are the same.  */
        if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                       state, sccstack, sccstate, sccstate_obstack))
          goto same_types;

        goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
        tree min1 = TYPE_MIN_VALUE (t1);
        tree max1 = TYPE_MAX_VALUE (t1);
        tree min2 = TYPE_MIN_VALUE (t2);
        tree max2 = TYPE_MAX_VALUE (t2);
        bool min_equal_p = false;
        bool max_equal_p = false;

        /* If either type has a minimum value, the other type must
           have the same.  */
        if (min1 == NULL_TREE && min2 == NULL_TREE)
          min_equal_p = true;
        else if (min1 && min2 && operand_equal_p (min1, min2, 0))
          min_equal_p = true;

        /* Likewise, if either type has a maximum value, the other
           type must have the same.  */
        if (max1 == NULL_TREE && max2 == NULL_TREE)
          max_equal_p = true;
        else if (max1 && max2 && operand_equal_p (max1, max2, 0))
          max_equal_p = true;

        if (!min_equal_p || !max_equal_p)
          goto different_types;

        goto same_types;
      }

    case ENUMERAL_TYPE:
      {
        /* FIXME lto, we cannot check bounds on enumeral types because
           different front ends will produce different values.
           In C, enumeral types are integers, while in C++ each element
           will have its own symbolic value.  We should decide how enums
           are to be represented in GIMPLE and have each front end lower
           to that.  */
        tree v1, v2;

        /* For enumeral types, all the values must be the same.  */
        if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
          goto same_types;

        for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
             v1 && v2;
             v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
          {
            tree c1 = TREE_VALUE (v1);
            tree c2 = TREE_VALUE (v2);

            if (TREE_CODE (c1) == CONST_DECL)
              c1 = DECL_INITIAL (c1);

            if (TREE_CODE (c2) == CONST_DECL)
              c2 = DECL_INITIAL (c2);

            if (tree_int_cst_equal (c1, c2) != 1)
              goto different_types;

            if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
              goto different_types;
          }

        /* If one enumeration has more values than the other, they
           are not the same.  */
        if (v1 || v2)
          goto different_types;

        goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* For aggregate types, all the fields must be the same.  */
        for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
             f1 && f2;
             f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
          {
            /* Different field kinds are not compatible.  */
            if (TREE_CODE (f1) != TREE_CODE (f2))
              goto different_types;
            /* Field decls must have the same name and offset.  */
            if (TREE_CODE (f1) == FIELD_DECL
                && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
                    || !gimple_compare_field_offset (f1, f2)))
              goto different_types;
            /* All entities should have the same name and type.  */
            if (DECL_NAME (f1) != DECL_NAME (f2)
                || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
                               state, sccstack, sccstate, sccstate_obstack))
              goto different_types;
          }

        /* If one aggregate has more fields than the other, they
           are not the same.  */
        if (f1 || f2)
          goto different_types;

        goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
         comparison result.  */
      do
        {
          struct sccs *cstate;
          x = sccstack->pop ();
          cstate = (struct sccs *) *pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          x->same_p = state->u.same_p;
        }
      while (x != p);
    }

  return state->u.same_p;
}
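/* Note the pop loop above: once an SCC is complete, every type pair in it
   receives the same final same_p value, so a single structural mismatch
   anywhere in a cycle of mutually referential types marks the whole cycle
   as different in the cache.  */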
/* Return true iff T1 and T2 are structurally identical.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  vec<type_pair_t> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
                                     &sccstack, sccstate, &sccstate_obstack);
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
static hashval_t
iterative_hash_gimple_type (tree, hashval_t, vec<tree> *,
                            struct pointer_map_t *, struct obstack *);
/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       vec<tree> *sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *) *slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
                                        sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *) *pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
         of the parents SCC mix in its hash value.  Otherwise we will
         ignore the type for hashing purposes and return the unaltered
         hash value.  */
      if (!cstate->on_sccstack)
        return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}
/* Hash NAME with the previous hash value V and return it.  */

static hashval_t
iterative_hash_name (tree name, hashval_t v)
{
  if (!name)
    return v;
  v = iterative_hash_hashval_t (TREE_CODE (name), v);
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  if (!name)
    return v;
  gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
  return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
}
/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  tree type;
  hashval_t hash;
};

/* Compare two type, hashvalue pairs.  */

static int
type_hash_pair_compare (const void *p1_, const void *p2_)
{
  const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
  const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}
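/* Sorting SCC members with this comparator before mixing their hashes is
   what makes the final hash of an SCC independent of the DFS visit order;
   only the hash values themselves participate, so the sort needs no
   tie-breaking beyond skipping duplicate hashes (see below).  */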
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
                            vec<tree> *sccstack,
                            struct pointer_map_t *sccstate,
                            struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  sccstack->safe_push (type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
               sccstack, sccstate, sccstate_obstack);

  /* Factor in the variant structure.  */
  if (TYPE_MAIN_VARIANT (type) != type)
    v = visit (TYPE_MAIN_VARIANT (type), state, v,
               sccstack, sccstate, sccstate_obstack);

  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
               sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
         random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
        v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
        v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
                 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
               sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
        v = visit (TYPE_METHOD_BASETYPE (type), state, v,
                   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
                 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
        {
          v = visit (TREE_VALUE (p), state, v,
                     sccstack, sccstate, sccstate_obstack);
          na++;
        }

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
        {
          v = iterative_hash_name (DECL_NAME (f), v);
          v = visit (TREE_TYPE (f), state, v,
                     sccstack, sccstate, sccstate_obstack);
          nf++;
        }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = sccstack->pop ();
      /* Optimize SCC size one.  */
      if (x == type)
        {
          state->on_sccstack = false;
          m = ggc_alloc_cleared_tree_int_map ();
          m->base.from = x;
          m->to = v;
          slot = htab_find_slot (type_hash_cache, m, INSERT);
          gcc_assert (!*slot);
          *slot = (void *) m;
        }
      else
        {
          struct sccs *cstate;
          unsigned first, i, size, j;
          struct type_hash_pair *pairs;
          /* Pop off the SCC and build an array of type, hash pairs.  */
          first = sccstack->length () - 1;
          while ((*sccstack)[first] != type)
            --first;
          size = sccstack->length () - first + 1;
          pairs = XALLOCAVEC (struct type_hash_pair, size);
          i = 0;
          cstate = (struct sccs *) *pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          pairs[i].type = x;
          pairs[i].hash = cstate->u.hash;
          do
            {
              x = sccstack->pop ();
              cstate = (struct sccs *) *pointer_map_contains (sccstate, x);
              cstate->on_sccstack = false;
              ++i;
              pairs[i].type = x;
              pairs[i].hash = cstate->u.hash;
            }
          while (x != type);
          gcc_assert (i + 1 == size);
          /* Sort the arrays of type, hash pairs so that when we mix in
             all members of the SCC the hash value becomes independent on
             the order we visited the SCC.  Disregard hashes equal to
             the hash of the type we mix into because we cannot guarantee
             a stable sort for those across different TUs.  */
          qsort (pairs, size, sizeof (struct type_hash_pair),
                 type_hash_pair_compare);
          for (i = 0; i < size; ++i)
            {
              hashval_t hash;
              m = ggc_alloc_cleared_tree_int_map ();
              m->base.from = pairs[i].type;
              hash = pairs[i].hash;
              /* Skip same hashes.  */
              for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
                ;
              for (; j < size; ++j)
                hash = iterative_hash_hashval_t (pairs[j].hash, hash);
              for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
                hash = iterative_hash_hashval_t (pairs[j].hash, hash);
              m->to = hash;
              if (pairs[i].type == type)
                v = hash;
              slot = htab_find_slot (type_hash_cache, m, INSERT);
              gcc_assert (!*slot);
              *slot = (void *) m;
            }
        }
    }

  return iterative_hash_hashval_t (v, val);
}
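/* Hashing sketch for the SCC case above: a singleton SCC stores its hash
   directly; for a larger SCC each member's recorded hash is its own
   feature hash mixed with the sorted hashes of the other members whose
   hash values differ from its own, which keeps the result identical no
   matter which member the DFS entered first.  */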
/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  vec<tree> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
                                    &sccstack, sccstate, &sccstate_obstack);
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}
/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
                                    CONST_CAST_TREE (t2));
}
/* Worker for gimple_register_type.
   Register type T in the global type table gimple_types.
   When REGISTERING_MV is false first recurse for the main variant of T.  */

static tree
gimple_register_type_1 (tree t, bool registering_mv)
{
  void **slot;
  gimple_type_leader_entry *leader;

  /* If we registered this type before return the cached result.  */
  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type == t)
    return leader->leader;

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.
     It also makes sure that main variants will be merged to main variants.
     As we are operating on a possibly partially fixed up type graph
     do not bother to recurse more than once, otherwise we may end up
     walking in circles.
     If we are registering a main variant it will either remain its
     own main variant or it will be merged to something else in which
     case we do not care for the main variant leader.  */
  if (!registering_mv
      && TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);

  /* See if we already have an equivalent type registered.  */
  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *) slot != t)
    {
      tree new_type = (tree) *((tree *) slot);
      leader->type = t;
      leader->leader = new_type;
      return new_type;
    }

  /* If not, insert it to the cache and the hash.  */
  leader->type = t;
  leader->leader = t;
  *slot = (void *) t;
  return t;
}
/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

static tree
gimple_register_type (tree t)
{
  gcc_assert (TYPE_P (t));
  return gimple_register_type_1 (t, false);
}

#define GIMPLE_REGISTER_TYPE(tt) \
  (TREE_VISITED (tt) ? gimple_register_type (tt) : tt)
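/* GIMPLE_REGISTER_TYPE only replaces a type when TREE_VISITED is set on
   it; uniquify_nodes sets that flag on types that lost to a previously
   registered equivalent, so already-prevailing types are passed through
   without another hash table lookup.  */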
/* A hashtable of trees that potentially refer to variables or functions
   that must be replaced with their prevailing variant.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node))) htab_t
  tree_with_vars;

/* Remember that T is a tree that (potentially) refers to a variable
   or function decl that may be replaced with its prevailing variant.  */

static void
remember_with_vars (tree t)
{
  *(tree *) htab_find_slot (tree_with_vars, t, INSERT) = t;
}
#define LTO_FIXUP_TREE(tt) \
  do \
    { \
      if (tt) \
        { \
          if (TYPE_P (tt)) \
            (tt) = GIMPLE_REGISTER_TYPE (tt); \
          if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
            remember_with_vars (t); \
          if (TREE_CODE (tt) == INTEGER_CST) \
            (tt) = fixup_integer_cst (tt); \
        } \
    } while (0)

static void lto_fixup_types (tree);
/* Return integer_cst T with updated type.  */

static tree
fixup_integer_cst (tree t)
{
  tree type = GIMPLE_REGISTER_TYPE (TREE_TYPE (t));

  if (type == TREE_TYPE (t))
    return t;

  /* If overflow was set, streamer_read_integer_cst
     produced a local copy of T.  */
  if (TREE_OVERFLOW (t))
    {
      TREE_TYPE (t) = type;
      return t;
    }
  else
    /* Otherwise produce a new shared node for the new type.  */
    return build_int_cst_wide (type, TREE_INT_CST_LOW (t),
                               TREE_INT_CST_HIGH (t));
}
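/* The distinction above matters because non-overflowed INTEGER_CSTs are
   shared nodes: changing TREE_TYPE in place would corrupt other users, so
   a fresh constant is built for the prevailing type instead, while the
   streamer's private overflowed copy can be retyped directly.  */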
/* Fix up fields of a tree_typed T.  */

static void
lto_ft_typed (tree t)
{
  LTO_FIXUP_TREE (TREE_TYPE (t));
}

/* Fix up fields of a tree_common T.  */

static void
lto_ft_common (tree t)
{
  lto_ft_typed (t);
  LTO_FIXUP_TREE (TREE_CHAIN (t));
}
/* Fix up fields of a decl_minimal T.  */

static void
lto_ft_decl_minimal (tree t)
{
  lto_ft_common (t);
  LTO_FIXUP_TREE (DECL_NAME (t));
  LTO_FIXUP_TREE (DECL_CONTEXT (t));
}

/* Fix up fields of a decl_common T.  */

static void
lto_ft_decl_common (tree t)
{
  lto_ft_decl_minimal (t);
  LTO_FIXUP_TREE (DECL_SIZE (t));
  LTO_FIXUP_TREE (DECL_SIZE_UNIT (t));
  LTO_FIXUP_TREE (DECL_INITIAL (t));
  LTO_FIXUP_TREE (DECL_ATTRIBUTES (t));
  LTO_FIXUP_TREE (DECL_ABSTRACT_ORIGIN (t));
}
/* Fix up fields of a decl_with_vis T.  */

static void
lto_ft_decl_with_vis (tree t)
{
  lto_ft_decl_common (t);

  /* Accessor macro has side-effects, use field-name here.  */
  LTO_FIXUP_TREE (t->decl_with_vis.assembler_name);
  LTO_FIXUP_TREE (DECL_SECTION_NAME (t));
}

/* Fix up fields of a decl_non_common T.  */

static void
lto_ft_decl_non_common (tree t)
{
  lto_ft_decl_with_vis (t);
  LTO_FIXUP_TREE (DECL_ARGUMENT_FLD (t));
  LTO_FIXUP_TREE (DECL_RESULT_FLD (t));
  LTO_FIXUP_TREE (DECL_VINDEX (t));
  /* The C frontends may create exact duplicates for DECL_ORIGINAL_TYPE
     like for 'typedef enum foo foo'.  We have no way of avoiding to
     merge them and dwarf2out.c cannot deal with this,
     so fix this up by clearing DECL_ORIGINAL_TYPE in this case.  */
  if (TREE_CODE (t) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
    DECL_ORIGINAL_TYPE (t) = NULL_TREE;
}
/* Fix up fields of a FUNCTION_DECL T.  */

static void
lto_ft_function (tree t)
{
  lto_ft_decl_non_common (t);
  LTO_FIXUP_TREE (DECL_FUNCTION_PERSONALITY (t));
}

/* Fix up fields of a FIELD_DECL T.  */

static void
lto_ft_field_decl (tree t)
{
  lto_ft_decl_common (t);
  LTO_FIXUP_TREE (DECL_FIELD_OFFSET (t));
  LTO_FIXUP_TREE (DECL_BIT_FIELD_TYPE (t));
  LTO_FIXUP_TREE (DECL_QUALIFIER (t));
  LTO_FIXUP_TREE (DECL_FIELD_BIT_OFFSET (t));
  LTO_FIXUP_TREE (DECL_FCONTEXT (t));
}
/* Fix up fields of a type T.  */

static void
lto_ft_type (tree t)
{
  lto_ft_common (t);
  LTO_FIXUP_TREE (TYPE_CACHED_VALUES (t));
  LTO_FIXUP_TREE (TYPE_SIZE (t));
  LTO_FIXUP_TREE (TYPE_SIZE_UNIT (t));
  LTO_FIXUP_TREE (TYPE_ATTRIBUTES (t));
  LTO_FIXUP_TREE (TYPE_NAME (t));

  /* Accessors are for derived node types only.  */
  if (!POINTER_TYPE_P (t))
    LTO_FIXUP_TREE (TYPE_MINVAL (t));
  LTO_FIXUP_TREE (TYPE_MAXVAL (t));

  /* Accessor is for derived node types only.  */
  LTO_FIXUP_TREE (t->type_non_common.binfo);

  LTO_FIXUP_TREE (TYPE_CONTEXT (t));
}
/* Fix up fields of a BINFO T.  */

static void
lto_ft_binfo (tree t)
{
  unsigned HOST_WIDE_INT i, n;
  tree base, saved_base;

  lto_ft_common (t);
  LTO_FIXUP_TREE (BINFO_VTABLE (t));
  LTO_FIXUP_TREE (BINFO_OFFSET (t));
  LTO_FIXUP_TREE (BINFO_VIRTUALS (t));
  LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t));
  n = vec_safe_length (BINFO_BASE_ACCESSES (t));
  for (i = 0; i < n; i++)
    {
      saved_base = base = BINFO_BASE_ACCESS (t, i);
      LTO_FIXUP_TREE (base);
      if (base != saved_base)
        (*BINFO_BASE_ACCESSES (t))[i] = base;
    }
  LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t));
  LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t));
  LTO_FIXUP_TREE (BINFO_VPTR_INDEX (t));
  n = BINFO_N_BASE_BINFOS (t);
  for (i = 0; i < n; i++)
    {
      saved_base = base = BINFO_BASE_BINFO (t, i);
      LTO_FIXUP_TREE (base);
      if (base != saved_base)
        (*BINFO_BASE_BINFOS (t))[i] = base;
    }
}
/* Fix up fields of a CONSTRUCTOR T.  */

static void
lto_ft_constructor (tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;

  lto_ft_typed (t);

  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    {
      LTO_FIXUP_TREE (ce->index);
      LTO_FIXUP_TREE (ce->value);
    }
}
/* Fix up fields of an expression tree T.  */

static void
lto_ft_expr (tree t)
{
  int i;

  lto_ft_typed (t);
  for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
    LTO_FIXUP_TREE (TREE_OPERAND (t, i));
}
/* Given a tree T fixup fields of T by replacing types with their merged
   variant and other entities by an equal entity from an earlier compilation
   unit, or an entity being canonical in a different way.  This includes
   for instance integer or string constants.  */

static void
lto_fixup_types (tree t)
{
  switch (TREE_CODE (t))
    {
    case IDENTIFIER_NODE:
      break;

    case TREE_LIST:
      LTO_FIXUP_TREE (TREE_VALUE (t));
      LTO_FIXUP_TREE (TREE_PURPOSE (t));
      LTO_FIXUP_TREE (TREE_CHAIN (t));
      break;

    case FIELD_DECL:
      lto_ft_field_decl (t);
      break;

    case LABEL_DECL:
    case CONST_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case IMPORTED_DECL:
      lto_ft_decl_common (t);
      break;

    case VAR_DECL:
      lto_ft_decl_with_vis (t);
      break;

    case TYPE_DECL:
      lto_ft_decl_non_common (t);
      break;

    case FUNCTION_DECL:
      lto_ft_function (t);
      break;

    case TREE_BINFO:
      lto_ft_binfo (t);
      break;

    case PLACEHOLDER_EXPR:
      lto_ft_common (t);
      break;

    case BLOCK:
    case TRANSLATION_UNIT_DECL:
    case OPTIMIZATION_NODE:
    case TARGET_OPTION_NODE:
      break;

    default:
      if (TYPE_P (t))
        lto_ft_type (t);
      else if (TREE_CODE (t) == CONSTRUCTOR)
        lto_ft_constructor (t);
      else if (CONSTANT_CLASS_P (t))
        LTO_FIXUP_TREE (TREE_TYPE (t));
      else if (EXPR_P (t))
        lto_ft_expr (t);
      else
        remember_with_vars (t);
    }
}
/* Return the resolution for the decl with index INDEX from DATA_IN.  */

static enum ld_plugin_symbol_resolution
get_resolution (struct data_in *data_in, unsigned index)
{
  if (data_in->globals_resolution.exists ())
    {
      ld_plugin_symbol_resolution_t ret;
      /* We can have references to not emitted functions in
         DECL_FUNCTION_PERSONALITY at least.  So we can and have
         to indeed return LDPR_UNKNOWN in some cases.  */
      if (data_in->globals_resolution.length () <= index)
        return LDPR_UNKNOWN;
      ret = data_in->globals_resolution[index];
      return ret;
    }
  else
    /* Delay resolution finding until decl merging.  */
    return LDPR_UNKNOWN;
}
/* Map assigning declarations their resolutions.  */
static pointer_map_t *resolution_map;

/* We need to record resolutions until the symbol table is read.  */

static void
register_resolution (tree decl, enum ld_plugin_symbol_resolution resolution)
{
  if (resolution == LDPR_UNKNOWN)
    return;
  if (!resolution_map)
    resolution_map = pointer_map_create ();
  *pointer_map_insert (resolution_map, decl) = (void *)(size_t)resolution;
}
/* Register DECL with the global symbol table and change its
   name if necessary to avoid name clashes for static globals across
   different files.  */

static void
lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl)
{
  tree context;

  /* Variable has file scope, not local.  Need to ensure static variables
     between different files don't clash unexpectedly.  */
  if (!TREE_PUBLIC (decl)
      && !((context = decl_function_context (decl))
           && auto_var_in_fn_p (decl, context)))
    {
      /* ??? We normally pre-mangle names before we serialize them
         out.  Here, in lto1, we do not know the language, and
         thus cannot do the mangling again.  Instead, we just
         append a suffix to the mangled name.  The resulting name,
         however, is not a properly-formed mangled name, and will
         confuse any attempt to unmangle it.  */
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
      SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
      rest_of_decl_compilation (decl, 1, 0);
    }

  /* If this variable has already been declared, queue the
     declaration for merging.  */
  if (TREE_PUBLIC (decl))
    {
      unsigned ix;
      if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
        gcc_unreachable ();
      register_resolution (decl, get_resolution (data_in, ix));
    }
}
/* Register DECL with the global symbol table and change its
   name if necessary to avoid name clashes for static globals across
   different files.  DATA_IN contains descriptors and tables for the
   file being read.  */

static void
lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl)
{
  /* Need to ensure static entities between different files
     don't clash unexpectedly.  */
  if (!TREE_PUBLIC (decl))
    {
      /* We must not use the DECL_ASSEMBLER_NAME macro here, as it
         may set the assembler name where it was previously empty.  */
      tree old_assembler_name = decl->decl_with_vis.assembler_name;

      /* FIXME lto: We normally pre-mangle names before we serialize
         them out.  Here, in lto1, we do not know the language, and
         thus cannot do the mangling again.  Instead, we just append a
         suffix to the mangled name.  The resulting name, however, is
         not a properly-formed mangled name, and will confuse any
         attempt to unmangle it.  */
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
      SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));

      /* We may arrive here with the old assembler name not set
         if the function body is not needed, e.g., it has been
         inlined away and does not appear in the cgraph.  */
      if (old_assembler_name)
        {
          tree new_assembler_name = DECL_ASSEMBLER_NAME (decl);

          /* Make the original assembler name available for later use.
             We may have used it to indicate the section within its
             object file where the function body may be found.
             FIXME lto: Find a better way to maintain the function decl
             to body section mapping so we don't need this hack.  */
          lto_record_renamed_decl (data_in->file_data,
                                   IDENTIFIER_POINTER (old_assembler_name),
                                   IDENTIFIER_POINTER (new_assembler_name));
        }
    }

  /* If this function has already been declared, queue the
     declaration for merging.  */
  if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
    {
      unsigned ix;
      if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
        gcc_unreachable ();
      register_resolution (decl, get_resolution (data_in, ix));
    }
}
/* Given a streamer cache structure DATA_IN (holding a sequence of trees
   for one compilation unit) go over all trees starting at index FROM until the
   end of the sequence and replace fields of those trees, and the trees
   themselves with their canonical variants as per gimple_register_type.  */

static void
uniquify_nodes (struct data_in *data_in, unsigned from)
{
  struct streamer_tree_cache_d *cache = data_in->reader_cache;
  unsigned len = cache->nodes.length ();
  unsigned i;

  /* Go backwards because children streamed for the first time come
     as part of their parents, and hence are created after them.  */

  /* First register all the types in the cache.  This makes sure to
     have the original structure in the type cycles when registering
     them and computing hashes.  */
  for (i = len; i-- > from;)
    {
      tree t = cache->nodes[i];
      if (t && TYPE_P (t))
        {
          tree newt = gimple_register_type (t);
          /* Mark non-prevailing types so we fix them up.  No need
             to reset that flag afterwards - nothing that refers
             to those types is left and they are collected.  */
          if (newt != t)
            TREE_VISITED (t) = 1;
        }
    }

  /* Second fixup all trees in the new cache entries.  */
  for (i = len; i-- > from;)
    {
      tree t = cache->nodes[i];
      tree oldt = t;
      if (!t)
        continue;

      /* First fixup the fields of T.  */
      lto_fixup_types (t);

      if (!TYPE_P (t))
        continue;

      /* Now try to find a canonical variant of T itself.  */
      t = GIMPLE_REGISTER_TYPE (t);

      if (t == oldt)
        {
          /* The following re-creates proper variant lists while fixing up
             the variant leaders.  We do not stream TYPE_NEXT_VARIANT so the
             variant list state before fixup is broken.  */
          tree tem, mv;

#ifdef ENABLE_CHECKING
          /* Remove us from our main variant list if we are not the
             variant leader.  */
          if (TYPE_MAIN_VARIANT (t) != t)
            {
              tem = TYPE_MAIN_VARIANT (t);
              while (tem && TYPE_NEXT_VARIANT (tem) != t)
                tem = TYPE_NEXT_VARIANT (tem);
              gcc_assert (!tem && !TYPE_NEXT_VARIANT (t));
            }
#endif

          /* Query our new main variant.  */
          mv = GIMPLE_REGISTER_TYPE (TYPE_MAIN_VARIANT (t));

          /* If we were the variant leader and we get replaced ourselves drop
             all variants from our list.  */
          if (TYPE_MAIN_VARIANT (t) == t
              && mv != t)
            {
              tem = t;
              while (tem)
                {
                  tree tem2 = TYPE_NEXT_VARIANT (tem);
                  TYPE_NEXT_VARIANT (tem) = NULL_TREE;
                  tem = tem2;
                }
            }

          /* If we are not our own variant leader link us into our new leaders
             variant list.  */
          if (mv != t)
            {
              TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
              TYPE_NEXT_VARIANT (mv) = t;
              if (RECORD_OR_UNION_TYPE_P (t))
                TYPE_BINFO (t) = TYPE_BINFO (mv);
              /* Preserve the invariant that type variants share their
                 TYPE_FIELDS.  */
              if (RECORD_OR_UNION_TYPE_P (t)
                  && TYPE_FIELDS (mv) != TYPE_FIELDS (t))
                {
                  tree f1, f2;
                  for (f1 = TYPE_FIELDS (mv), f2 = TYPE_FIELDS (t);
                       f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
                    {
                      unsigned ix;
                      gcc_assert (f1 != f2
                                  && DECL_NAME (f1) == DECL_NAME (f2));
                      if (!streamer_tree_cache_lookup (cache, f2, &ix))
                        gcc_unreachable ();
                      /* If we're going to replace an element which we'd
                         still visit in the next iterations, we wouldn't
                         handle it, so do it here.  We do have to handle it
                         even though the field_decl itself will be removed,
                         as it could refer to e.g. integer_cst which we
                         wouldn't reach via any other way, hence they
                         (and their type) would stay uncollected.  */
                      /* ???  We should rather make sure to replace all
                         references to f2 with f1.  That means handling
                         COMPONENT_REFs and CONSTRUCTOR elements in
                         lto_fixup_types and special-case the field-decl
                         operand handling.  */
                      /* ???  Not sure the above is all relevant in this
                         path canonicalizing TYPE_FIELDS to that of the
                         main variant.  */
                      if (ix < i)
                        lto_fixup_types (f2);
                      streamer_tree_cache_insert_at (cache, f1, ix);
                    }
                  TYPE_FIELDS (t) = TYPE_FIELDS (mv);
                }
            }

          /* Finally adjust our main variant and fix it up.  */
          TYPE_MAIN_VARIANT (t) = mv;

          /* The following reconstructs the pointer chains
             of the new pointed-to type if we are a main variant.  We do
             not stream those so they are broken before fixup.  */
          if (TREE_CODE (t) == POINTER_TYPE
              && TYPE_MAIN_VARIANT (t) == t)
            {
              TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
              TYPE_POINTER_TO (TREE_TYPE (t)) = t;
            }
          else if (TREE_CODE (t) == REFERENCE_TYPE
                   && TYPE_MAIN_VARIANT (t) == t)
            {
              TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
              TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
            }
        }
      else
        {
          if (RECORD_OR_UNION_TYPE_P (t))
            {
              tree f1, f2;

              if (TYPE_FIELDS (t) != TYPE_FIELDS (oldt))
                for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (oldt);
                     f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
                  {
                    unsigned ix;
                    gcc_assert (f1 != f2 && DECL_NAME (f1) == DECL_NAME (f2));
                    if (!streamer_tree_cache_lookup (cache, f2, &ix))
                      gcc_unreachable ();
                    /* If we're going to replace an element which we'd
                       still visit in the next iterations, we wouldn't
                       handle it, so do it here.  We do have to handle it
                       even though the field_decl itself will be removed,
                       as it could refer to e.g. integer_cst which we
                       wouldn't reach via any other way, hence they
                       (and their type) would stay uncollected.  */
                    /* ???  We should rather make sure to replace all
                       references to f2 with f1.  That means handling
                       COMPONENT_REFs and CONSTRUCTOR elements in
                       lto_fixup_types and special-case the field-decl
                       operand handling.  */
                    if (ix < i)
                      lto_fixup_types (f2);
                    streamer_tree_cache_insert_at (cache, f1, ix);
                  }
            }

          /* If we found a tree that is equal to oldt replace it in the
             cache, so that further users (in the various LTO sections)
             make use of it.  */
          streamer_tree_cache_insert_at (cache, t, i);
        }
    }

  /* Finally compute the canonical type of all TREE_TYPEs and register
     VAR_DECL and FUNCTION_DECL nodes in the symbol table.
     From this point there are no longer any types with
     TYPE_STRUCTURAL_EQUALITY_P and its type-based alias problems.
     This step requires the TYPE_POINTER_TO lists being present, so
     make sure it is done last.  */
  for (i = len; i-- > from;)
    {
      tree t = cache->nodes[i];
      if (t == NULL_TREE)
        continue;

      if (TREE_CODE (t) == VAR_DECL)
        lto_register_var_decl_in_symtab (data_in, t);
      else if (TREE_CODE (t) == FUNCTION_DECL && !DECL_BUILT_IN (t))
        lto_register_function_decl_in_symtab (data_in, t);
      else if (!flag_wpa
               && TREE_CODE (t) == TYPE_DECL)
        debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
      else if (TYPE_P (t) && !TYPE_CANONICAL (t))
        TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
    }
}
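/* uniquify_nodes runs three passes over the freshly streamed cache slice:
   first every type is registered so hashes see the unmodified cycle
   structure, then all trees have their fields redirected to prevailing
   nodes (rebuilding variant and pointer chains along the way), and only
   then are VAR_DECLs/FUNCTION_DECLs entered into the symbol table and
   canonical types computed, since that last step needs the repaired
   TYPE_POINTER_TO lists.  */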
/* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
   RESOLUTIONS is the set of symbols picked by the linker (read from the
   resolution file when the linker plugin is being used).  */

static void
lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
                vec<ld_plugin_symbol_resolution_t> resolutions)
{
  const struct lto_decl_header *header = (const struct lto_decl_header *) data;
  const int decl_offset = sizeof (struct lto_decl_header);
  const int main_offset = decl_offset + header->decl_state_size;
  const int string_offset = main_offset + header->main_size;
  struct lto_input_block ib_main;
  struct data_in *data_in;
  unsigned int i;
  const uint32_t *data_ptr, *data_end;
  uint32_t num_decl_states;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
                        header->main_size);

  data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
                                header->string_size, resolutions);

  /* We do not uniquify the pre-loaded cache entries, those are middle-end
     internal types that should not be merged.  */

  /* Read the global declarations and types.  */
  while (ib_main.p < ib_main.len)
    {
      tree t;
      unsigned from = data_in->reader_cache->nodes.length ();
      t = stream_read_tree (&ib_main, data_in);
      gcc_assert (t && ib_main.p <= ib_main.len);
      uniquify_nodes (data_in, from);
    }

  /* Read in lto_in_decl_state objects.  */
  data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
  data_end =
     (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
  num_decl_states = *data_ptr++;

  gcc_assert (num_decl_states > 0);
  decl_data->global_decl_state = lto_new_in_decl_state ();
  data_ptr = lto_read_in_decl_state (data_in, data_ptr,
                                     decl_data->global_decl_state);

  /* Read in per-function decl states and enter them in hash table.  */
  decl_data->function_decl_states =
    htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);

  for (i = 1; i < num_decl_states; i++)
    {
      struct lto_in_decl_state *state = lto_new_in_decl_state ();
      void **slot;

      data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
      slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
      gcc_assert (*slot == NULL);
      *slot = state;
    }

  if (data_ptr != data_end)
    internal_error ("bytecode stream: garbage at the end of symbols section");

  /* Set the current decl state to be the global state.  */
  decl_data->current_decl_state = decl_data->global_decl_state;

  lto_data_in_delete (data_in);
}
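
/* Illustrative sketch (not part of the build): how the offsets computed in
   lto_read_decls carve up a decls section -- header, then the decl-state
   records, then the main tree stream, then the string table.  The struct used
   here is a simplified stand-in for the real struct lto_decl_header, so treat
   the field names and values as assumptions for the example only.  */
#if 0
#include <stdint.h>
#include <stdio.h>

struct example_decl_header        /* stand-in for struct lto_decl_header */
{
  int32_t decl_state_size;        /* bytes of in-decl-state records */
  int32_t main_size;              /* bytes of the main tree stream */
  int32_t string_size;            /* bytes of the string table */
};

int
main (void)
{
  struct example_decl_header h = { 64, 4096, 512 };
  int decl_offset = sizeof (struct example_decl_header);
  int main_offset = decl_offset + h.decl_state_size;
  int string_offset = main_offset + h.main_size;

  printf ("decl states at %d, main stream at %d, strings at %d\n",
          decl_offset, main_offset, string_offset);
  return 0;
}
#endif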
/* Custom version of strtoll, which is not portable.  */

static HOST_WIDEST_INT
lto_parse_hex (const char *p)
{
  HOST_WIDEST_INT ret = 0;

  for (; *p != '\0'; ++p)
    {
      char c = *p;
      unsigned char part;
      ret <<= 4;
      if (c >= '0' && c <= '9')
        part = c - '0';
      else if (c >= 'a' && c <= 'f')
        part = c - 'a' + 10;
      else if (c >= 'A' && c <= 'F')
        part = c - 'A' + 10;
      else
        internal_error ("could not parse hex number");
      ret |= part;
    }

  return ret;
}
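
/* Illustrative sketch (not part of the build): the shift-and-or scheme used
   by lto_parse_hex above, written as a standalone program so it can be
   checked against the C library.  parse_hex here is a local example, not the
   GCC function, and it returns -1 on bad input instead of calling
   internal_error.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

static long long
parse_hex (const char *p)
{
  long long ret = 0;
  for (; *p != '\0'; ++p)
    {
      char c = *p;
      unsigned char part;
      ret <<= 4;
      if (c >= '0' && c <= '9')
        part = c - '0';
      else if (c >= 'a' && c <= 'f')
        part = c - 'a' + 10;
      else if (c >= 'A' && c <= 'F')
        part = c - 'A' + 10;
      else
        return -1;              /* the real code reports an internal error */
      ret |= part;
    }
  return ret;
}

int
main (void)
{
  const char *s = "1a2B3c";
  printf ("%lld == %lld\n", parse_hex (s), strtoll (s, NULL, 16));
  return 0;
}
#endif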
/* Read resolution for file named FILE_NAME.  The resolution is read from
   RESOLUTION.  */

static void
lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
{
  /* We require that objects in the resolution file are in the same
     order as the lto1 command line.  */
  unsigned int name_len;
  char *obj_name;
  unsigned int num_symbols;
  unsigned int i;
  struct lto_file_decl_data *file_data;
  splay_tree_node nd = NULL;

  if (!resolution)
    return;

  name_len = strlen (file->filename);
  obj_name = XNEWVEC (char, name_len + 1);
  fscanf (resolution, " ");	/* Read white space.  */

  fread (obj_name, sizeof (char), name_len, resolution);
  obj_name[name_len] = '\0';
  if (filename_cmp (obj_name, file->filename) != 0)
    internal_error ("unexpected file name %s in linker resolution file. "
                    "Expected %s", obj_name, file->filename);
  if (file->offset != 0)
    {
      int t;
      char offset_p[17];
      HOST_WIDEST_INT offset;
      t = fscanf (resolution, "@0x%16s", offset_p);
      if (t != 1)
        internal_error ("could not parse file offset");
      offset = lto_parse_hex (offset_p);
      if (offset != file->offset)
        internal_error ("unexpected offset");
    }

  free (obj_name);

  fscanf (resolution, "%u", &num_symbols);

  for (i = 0; i < num_symbols; i++)
    {
      int t;
      unsigned index;
      unsigned HOST_WIDE_INT id;
      char r_str[27];
      enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
      unsigned int j;
      unsigned int lto_resolution_str_len =
        sizeof (lto_resolution_str) / sizeof (char *);
      res_pair rp;

      t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
                  &index, &id, r_str);
      if (t != 3)
        internal_error ("invalid line in the resolution file");

      for (j = 0; j < lto_resolution_str_len; j++)
        {
          if (strcmp (lto_resolution_str[j], r_str) == 0)
            {
              r = (enum ld_plugin_symbol_resolution) j;
              break;
            }
        }
      if (j == lto_resolution_str_len)
        internal_error ("invalid resolution in the resolution file");

      if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
        {
          nd = lto_splay_tree_lookup (file_ids, id);
          if (nd == NULL)
            internal_error ("resolution sub id %wx not in object file", id);
        }

      file_data = (struct lto_file_decl_data *)nd->value;
      /* The indexes are very sparse.  To save memory save them in a compact
         format that is only unpacked later when the subfile is processed.  */
      rp.res = r;
      rp.index = index;
      file_data->respairs.safe_push (rp);
      if (file_data->max_index < index)
        file_data->max_index = index;
    }
}
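
/* Illustrative sketch (not part of the build): the shape of per-symbol
   resolution-file input that lto_resolution_read expects.  The object name,
   sub-ids and resolution strings below are made up for the example; only the
   field order (index, hexadecimal sub id, resolution name, symbol name)
   follows the fscanf format used above, and the leading object-name/offset
   handling is simplified away.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* A hypothetical per-object fragment of a resolution file:
       /tmp/ccExample.o
       2
       5 0 PREVAILING_DEF main
       9 0 RESOLVED_IR helper
     First the object name, then the symbol count, then one line per symbol.  */
  FILE *f = fopen ("/tmp/example.res", "r");
  char name[256], r_str[27];
  unsigned num, index;
  unsigned long long id;

  if (!f || fscanf (f, "%255s %u", name, &num) != 2)
    return 1;
  while (num-- && fscanf (f, "%u %llx %26s %*[^\n]\n", &index, &id, r_str) == 3)
    printf ("symbol %u in sub-object %llx resolved as %s\n", index, id, r_str);
  fclose (f);
  return 0;
}
#endif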
/* List of file_decl_datas.  */
struct file_data_list
{
  struct lto_file_decl_data *first, *last;
};

/* Is the name for an id'ed LTO section?  */

static int
lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
{
  const char *s;

  if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
    return 0;
  s = strrchr (name, '.');
  return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
}
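
/* Illustrative sketch (not part of the build): pulling a trailing hexadecimal
   sub-id out of a section name the way lto_section_with_id does, using a
   plain unsigned long long instead of HOST_WIDE_INT.  The example section
   name is made up; the real prefix check uses LTO_SECTION_NAME_PREFIX.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  const char *name = ".gnu.lto_.decls.1234abcd";   /* hypothetical name */
  const char *s = strrchr (name, '.');
  unsigned long long id;

  if (s && sscanf (s, ".%llx", &id) == 1)
    printf ("sub id is %llx\n", id);
  return 0;
}
#endif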
/* Create file_data of each sub file id.  */

static int
create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
                            struct file_data_list *list)
{
  struct lto_section_slot s_slot, *new_slot;
  unsigned HOST_WIDE_INT id;
  splay_tree_node nd;
  void **hash_slot;
  char *new_name;
  struct lto_file_decl_data *file_data;

  if (!lto_section_with_id (ls->name, &id))
    return 1;

  /* Find hash table of sub module id.  */
  nd = lto_splay_tree_lookup (file_ids, id);
  if (nd != NULL)
    file_data = (struct lto_file_decl_data *)nd->value;
  else
    {
      file_data = ggc_alloc_lto_file_decl_data ();
      memset (file_data, 0, sizeof (struct lto_file_decl_data));
      file_data->id = id;
      file_data->section_hash_table = lto_obj_create_section_hash_table ();
      lto_splay_tree_insert (file_ids, id, file_data);

      /* Maintain list in linker order.  */
      if (!list->first)
        list->first = file_data;
      if (list->last)
        list->last->next = file_data;
      list->last = file_data;
    }

  /* Copy section into sub module hash table.  */
  new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
  s_slot.name = new_name;
  hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
  gcc_assert (*hash_slot == NULL);

  new_slot = XDUP (struct lto_section_slot, ls);
  new_slot->name = new_name;
  *hash_slot = new_slot;
  return 1;
}
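
/* Illustrative sketch (not part of the build): the first/last bookkeeping
   used above to keep sub-files in linker order, shown with a minimal stand-in
   node type instead of lto_file_decl_data.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct node { int id; struct node *next; };
struct node_list { struct node *first, *last; };

static void
append_in_order (struct node_list *list, struct node *n)
{
  if (!list->first)
    list->first = n;
  if (list->last)
    list->last->next = n;
  list->last = n;
}

int
main (void)
{
  struct node_list list = { NULL, NULL };
  struct node *p;
  int i;

  for (i = 0; i < 3; i++)
    {
      struct node *n = calloc (1, sizeof (*n));
      n->id = i;
      append_in_order (&list, n);
    }
  for (p = list.first; p; p = p->next)
    printf ("%d ", p->id);            /* prints 0 1 2, insertion order */
  printf ("\n");
  return 0;
}
#endif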
/* Read declarations and other initializations for a FILE_DATA.  */

static void
lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
{
  const char *data;
  size_t len;
  vec<ld_plugin_symbol_resolution_t> resolutions = vNULL;
  int i;
  res_pair *rp;

  /* Create vector for fast access of resolution.  We do this lazily
     to save memory.  */
  resolutions.safe_grow_cleared (file_data->max_index + 1);
  for (i = 0; file_data->respairs.iterate (i, &rp); i++)
    resolutions[rp->index] = rp->res;
  file_data->respairs.release ();

  file_data->renaming_hash_table = lto_create_renaming_table ();
  file_data->file_name = file->filename;
  data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
  if (data == NULL)
    {
      internal_error ("cannot read LTO decls from %s", file_data->file_name);
      return;
    }
  /* Frees resolutions.  */
  lto_read_decls (file_data, data, resolutions);
  lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
}
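
/* Illustrative sketch (not part of the build): unpacking sparse
   (index, resolution) pairs into a dense array indexed by symbol index, the
   same idea as the respairs loop above, with plain C arrays standing in for
   the GCC vec<> types.  */
#if 0
#include <stdio.h>
#include <string.h>

struct pair { unsigned index; int res; };

int
main (void)
{
  struct pair pairs[] = { { 7, 2 }, { 42, 1 }, { 3, 4 } };
  unsigned max_index = 42;              /* tracked while reading, as above */
  int dense[43];                        /* max_index + 1 entries */
  unsigned i;

  memset (dense, 0, sizeof (dense));
  for (i = 0; i < sizeof (pairs) / sizeof (pairs[0]); i++)
    dense[pairs[i].index] = pairs[i].res;

  printf ("resolution of symbol %u is %d\n", max_index, dense[max_index]);
  return 0;
}
#endif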
/* Finalize FILE_DATA in FILE and increase COUNT.  */

static int
lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
                           int *count)
{
  lto_file_finalize (file_data, file);
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
             file_data->file_name, file_data->id);
  (*count)++;
  return 0;
}
/* Generate a TREE representation for all types and external decls
   in FILE.

   Read all of the globals out of the file.  Then read the cgraph
   and process the .o index into the cgraph nodes so that it can open
   the .o file to load the functions and ipa information.  */

static struct lto_file_decl_data *
lto_file_read (lto_file *file, FILE *resolution_file, int *count)
{
  struct lto_file_decl_data *file_data = NULL;
  splay_tree file_ids;
  htab_t section_hash_table;
  struct lto_section_slot *section;
  struct file_data_list file_list;
  struct lto_section_list section_list;

  memset (&section_list, 0, sizeof (struct lto_section_list));
  section_hash_table = lto_obj_build_section_table (file, &section_list);

  /* Find all sub modules in the object and put their sections into new hash
     tables in a splay tree.  */
  file_ids = lto_splay_tree_new ();
  memset (&file_list, 0, sizeof (struct file_data_list));
  for (section = section_list.first; section != NULL; section = section->next)
    create_subid_section_table (section, file_ids, &file_list);

  /* Add resolutions to file ids.  */
  lto_resolution_read (file_ids, resolution_file, file);

  /* Finalize each lto file for each submodule in the merged object.  */
  for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
    lto_create_files_from_ids (file, file_data, count);

  splay_tree_delete (file_ids);
  htab_delete (section_hash_table);

  return file_list.first;
}
#if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
#define LTO_MMAP_IO 1
#endif

#if LTO_MMAP_IO
/* Page size of machine is used for mmap and munmap calls.  */
static size_t page_mask;
#endif

/* Get the section data of length LEN from FILENAME starting at
   OFFSET.  The data segment must be freed by the caller when the
   caller is finished.  Returns NULL if all was not well.  */

static char *
lto_read_section_data (struct lto_file_decl_data *file_data,
                       intptr_t offset, size_t len)
{
  char *result;
  static char *fd_name;
  static int fd = -1;
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

  /* Keep a single-entry file-descriptor cache.  The last file we
     touched will get closed at exit.
     ??? Eventually we want to add a more sophisticated larger cache
     or rather fix function body streaming to not stream them in
     practically random order.  */
  if (fd != -1
      && filename_cmp (fd_name, file_data->file_name) != 0)
    {
      free (fd_name);
      close (fd);
      fd = -1;
    }
  if (fd == -1)
    {
      fd = open (file_data->file_name, O_RDONLY|O_BINARY);
      if (fd == -1)
        {
          fatal_error ("Cannot open %s", file_data->file_name);
          return NULL;
        }
      fd_name = xstrdup (file_data->file_name);
    }

#if LTO_MMAP_IO
  if (!page_mask)
    {
      size_t page_size = sysconf (_SC_PAGE_SIZE);
      page_mask = ~(page_size - 1);
    }

  computed_offset = offset & page_mask;
  diff = offset - computed_offset;
  computed_len = len + diff;

  result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
                          fd, computed_offset);
  if (result == MAP_FAILED)
    {
      fatal_error ("Cannot map %s", file_data->file_name);
      return NULL;
    }

  return result + diff;
#else
  result = (char *) xmalloc (len);
  if (lseek (fd, offset, SEEK_SET) != offset
      || read (fd, result, len) != (ssize_t) len)
    {
      free (result);
      fatal_error ("Cannot read %s", file_data->file_name);
      result = NULL;
    }
#ifdef __MINGW32__
  /* Native Windows doesn't support delayed unlink of an opened file, so we
     close the file here again.  This produces a higher I/O load, but at
     least it prevents dangling file handles from blocking the unlink.  */
  free (fd_name);
  fd_name = NULL;
  close (fd);
  fd = -1;
#endif
  return result;
#endif
}
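
/* Illustrative sketch (not part of the build): the page rounding done before
   the mmap call above.  The offset, length and page size are example values;
   the real code gets the page size from sysconf and caches ~(page_size - 1)
   in page_mask.  */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  size_t page_size = 4096;                          /* example value */
  size_t page_mask = ~(page_size - 1);
  intptr_t offset = 10000;                          /* requested file offset */
  size_t len = 500;                                 /* requested length */

  intptr_t computed_offset = offset & page_mask;    /* 8192: page aligned */
  intptr_t diff = offset - computed_offset;         /* 1808: slack to skip */
  size_t computed_len = len + diff;                 /* map enough to cover it */

  printf ("map %zu bytes at %ld, then return base + %ld\n",
          computed_len, (long) computed_offset, (long) diff);
  return 0;
}
#endif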
/* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
   NAME will be NULL unless the section type is for a function
   body.  */

static const char *
get_section_data (struct lto_file_decl_data *file_data,
                  enum lto_section_type section_type,
                  const char *name,
                  size_t *len)
{
  htab_t section_hash_table = file_data->section_hash_table;
  struct lto_section_slot *f_slot;
  struct lto_section_slot s_slot;
  const char *section_name = lto_get_section_name (section_type, name, file_data);
  char *data = NULL;

  *len = 0;
  s_slot.name = section_name;
  f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
  if (f_slot)
    {
      data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
      *len = f_slot->len;
    }

  free (CONST_CAST (char *, section_name));
  return data;
}
/* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
   starts at OFFSET and has LEN bytes.  */

static void
free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
                   enum lto_section_type section_type ATTRIBUTE_UNUSED,
                   const char *name ATTRIBUTE_UNUSED,
                   const char *offset, size_t len ATTRIBUTE_UNUSED)
{
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;

  computed_offset = ((intptr_t) offset) & page_mask;
  diff = (intptr_t) offset - computed_offset;
  computed_len = len + diff;

  munmap ((caddr_t) computed_offset, computed_len);
#else
  free (CONST_CAST (char *, offset));
#endif
}
static lto_file *current_lto_file;

/* Helper for qsort; compare partitions and return one with smaller size.
   We sort from greatest to smallest so a parallel build doesn't stall on the
   longest compilation being scheduled too late.  */

static int
cmp_partitions_size (const void *a, const void *b)
{
  const struct ltrans_partition_def *pa
     = *(struct ltrans_partition_def *const *)a;
  const struct ltrans_partition_def *pb
     = *(struct ltrans_partition_def *const *)b;
  return pb->insns - pa->insns;
}

/* Helper for qsort; compare partitions and return one with smaller order.  */

static int
cmp_partitions_order (const void *a, const void *b)
{
  const struct ltrans_partition_def *pa
     = *(struct ltrans_partition_def *const *)a;
  const struct ltrans_partition_def *pb
     = *(struct ltrans_partition_def *const *)b;
  int ordera = -1, orderb = -1;

  if (lto_symtab_encoder_size (pa->encoder))
    ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
  if (lto_symtab_encoder_size (pb->encoder))
    orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
  return orderb - ordera;
}
/* Write all output files in WPA mode and the file with the list of
   LTRANS units.  */

static void
lto_wpa_write_files (void)
{
  unsigned i, n_sets;
  lto_file *file;
  ltrans_partition part;
  FILE *ltrans_output_list_stream;
  char *temp_filename;
  size_t blen;

  /* Open the LTRANS output list.  */
  if (!ltrans_output_list)
    fatal_error ("no LTRANS output list filename provided");
  ltrans_output_list_stream = fopen (ltrans_output_list, "w");
  if (ltrans_output_list_stream == NULL)
    fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);

  timevar_push (TV_WHOPR_WPA);

  FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
    lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);

  /* Find out statics that need to be promoted
     to globals with hidden visibility because they are accessed from multiple
     partitions.  */
  lto_promote_cross_file_statics ();

  timevar_pop (TV_WHOPR_WPA);

  timevar_push (TV_WHOPR_WPA_IO);

  /* Generate a prefix for the LTRANS unit files.  */
  blen = strlen (ltrans_output_list);
  temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
  strcpy (temp_filename, ltrans_output_list);
  if (blen > sizeof (".out")
      && strcmp (temp_filename + blen - sizeof (".out") + 1,
                 ".out") == 0)
    temp_filename[blen - sizeof (".out") + 1] = '\0';
  blen = strlen (temp_filename);

  n_sets = ltrans_partitions.length ();

  /* Sort partitions by size so small ones are compiled last.
     FIXME: Even when not reordering we may want to output one list for
     parallel make and another for the final link command.  */
  ltrans_partitions.qsort (flag_toplevel_reorder
                           ? cmp_partitions_size
                           : cmp_partitions_order);
  for (i = 0; i < n_sets; i++)
    {
      size_t len;
      ltrans_partition part = ltrans_partitions[i];

      /* Write all the nodes in SET.  */
      sprintf (temp_filename + blen, "%u.o", i);
      file = lto_obj_file_open (temp_filename, true);
      if (!file)
        fatal_error ("lto_obj_file_open() failed");

      if (!quiet_flag)
        fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
      if (cgraph_dump_file)
        {
          lto_symtab_encoder_iterator lsei;

          fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
                   part->name, temp_filename, part->insns);
          fprintf (cgraph_dump_file, "  Symbols in partition: ");
          for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
               lsei_next_in_partition (&lsei))
            {
              symtab_node node = lsei_node (lsei);
              fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
            }
          fprintf (cgraph_dump_file, "\n  Symbols in boundary: ");
          for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
               lsei_next (&lsei))
            {
              symtab_node node = lsei_node (lsei);
              if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
                {
                  fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
                  cgraph_node *cnode = dyn_cast <cgraph_node> (node);
                  if (cnode
                      && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
                    fprintf (cgraph_dump_file, "(body included)");
                  else
                    {
                      varpool_node *vnode = dyn_cast <varpool_node> (node);
                      if (vnode
                          && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
                        fprintf (cgraph_dump_file, "(initializer included)");
                    }
                }
            }
          fprintf (cgraph_dump_file, "\n");
        }
      gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);

      lto_set_current_out_file (file);

      ipa_write_optimization_summaries (part->encoder);

      lto_set_current_out_file (NULL);
      lto_obj_file_close (file);
      free (file);
      part->encoder = NULL;

      len = strlen (temp_filename);
      if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
          || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
        fatal_error ("writing to LTRANS output list %s: %m",
                     ltrans_output_list);
    }

  lto_stats.num_output_files += n_sets;

  /* Close the LTRANS output list.  */
  if (fclose (ltrans_output_list_stream))
    fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);

  free_ltrans_partitions ();
  free (temp_filename);

  timevar_pop (TV_WHOPR_WPA_IO);
}
/* If TT is a variable or function decl replace it with its
   prevailing variant.  */
#define LTO_SET_PREVAIL(tt) \
  do { \
    if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
      tt = lto_symtab_prevailing_decl (tt); \
  } while (0)

/* Ensure that TT isn't a replaceable var or function decl.  */
#define LTO_NO_PREVAIL(tt) \
  gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
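
/* Illustrative sketch (not part of the build): what a use such as
   LTO_SET_PREVAIL (DECL_SIZE (t)) amounts to -- an in-place rewrite of one
   field through the prevailing-decl lookup -- using toy types instead of
   trees.  The names here are made up for the example.  */
#if 0
#include <stdio.h>

struct toy_decl { const char *name; struct toy_decl *prevailing; };

/* Stand-in for lto_symtab_prevailing_decl.  */
static struct toy_decl *
prevailing (struct toy_decl *d)
{
  return d->prevailing ? d->prevailing : d;
}

#define TOY_SET_PREVAIL(tt) \
  do { \
    if (tt) \
      (tt) = prevailing (tt); \
  } while (0)

int
main (void)
{
  struct toy_decl keeper = { "keeper", NULL };
  struct toy_decl duplicate = { "duplicate", &keeper };
  struct toy_decl *field = &duplicate;

  TOY_SET_PREVAIL (field);
  printf ("%s\n", field->name);         /* keeper */
  return 0;
}
#endif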
/* Given a tree T replace all fields referring to variables or functions
   with their prevailing variant.  */

static void
lto_fixup_prevailing_decls (tree t)
{
  enum tree_code code = TREE_CODE (t);
  LTO_NO_PREVAIL (TREE_TYPE (t));
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    LTO_NO_PREVAIL (TREE_CHAIN (t));
  if (DECL_P (t))
    {
      LTO_NO_PREVAIL (DECL_NAME (t));
      LTO_SET_PREVAIL (DECL_CONTEXT (t));
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
          LTO_SET_PREVAIL (DECL_SIZE (t));
          LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
          LTO_SET_PREVAIL (DECL_INITIAL (t));
          LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
          LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
        }
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
        {
          LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
          LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
        }
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
        {
          LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
          LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
          LTO_NO_PREVAIL (DECL_VINDEX (t));
        }
      if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
        LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
      if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
        {
          LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t));
          LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
          LTO_NO_PREVAIL (DECL_QUALIFIER (t));
          LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
          LTO_NO_PREVAIL (DECL_FCONTEXT (t));
        }
    }
  else if (TYPE_P (t))
    {
      LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
      LTO_SET_PREVAIL (TYPE_SIZE (t));
      LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
      LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
      LTO_NO_PREVAIL (TYPE_NAME (t));

      LTO_SET_PREVAIL (TYPE_MINVAL (t));
      LTO_SET_PREVAIL (TYPE_MAXVAL (t));
      LTO_SET_PREVAIL (t->type_non_common.binfo);

      LTO_SET_PREVAIL (TYPE_CONTEXT (t));

      LTO_NO_PREVAIL (TYPE_CANONICAL (t));
      LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
      LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
    }
  else if (EXPR_P (t))
    {
      int i;
      for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
        LTO_SET_PREVAIL (TREE_OPERAND (t, i));
    }
  else
    {
      switch (code)
        {
        case TREE_LIST:
          LTO_SET_PREVAIL (TREE_VALUE (t));
          LTO_SET_PREVAIL (TREE_PURPOSE (t));
          break;
        default:
          gcc_unreachable ();
        }
    }
}

#undef LTO_SET_PREVAIL
#undef LTO_NO_PREVAIL
/* Helper function of lto_fixup_decls.  Walks the var and fn streams in STATE,
   replaces var and function decls with the corresponding prevailing def.  */

static void
lto_fixup_state (struct lto_in_decl_state *state)
{
  unsigned i, si;
  struct lto_tree_ref_table *table;

  /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
     we still need to walk from all DECLs to find the reachable
     FUNCTION_DECLs and VAR_DECLs.  */
  for (si = 0; si < LTO_N_DECL_STREAMS; si++)
    {
      table = &state->streams[si];
      for (i = 0; i < table->size; i++)
        {
          tree *tp = table->trees + i;
          if (VAR_OR_FUNCTION_DECL_P (*tp))
            *tp = lto_symtab_prevailing_decl (*tp);
        }
    }
}

/* A callback of htab_traverse.  Just extracts a state from SLOT
   and calls lto_fixup_state.  */

static int
lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
{
  struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
  lto_fixup_state (state);
  return 1;
}

/* Fix the decls from all FILES.  Replaces each decl with the corresponding
   prevailing one.  */

static void
lto_fixup_decls (struct lto_file_decl_data **files)
{
  unsigned int i;
  htab_iterator hi;
  tree t;

  FOR_EACH_HTAB_ELEMENT (tree_with_vars, t, tree, hi)
    lto_fixup_prevailing_decls (t);

  for (i = 0; files[i]; i++)
    {
      struct lto_file_decl_data *file = files[i];
      struct lto_in_decl_state *state = file->global_decl_state;
      lto_fixup_state (state);

      htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
    }
}
static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;

/* Turn file data for sub files into a single array, so that they look
   like separate files for further passes.  */

static void
lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
{
  struct lto_file_decl_data *n, *next;
  int i, k;

  lto_stats.num_input_files = count;
  all_file_decl_data
    = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
  /* Set the hooks so that all of the ipa passes can read in their data.  */
  lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
  for (i = 0, k = 0; i < last_file_ix; i++)
    {
      for (n = orig[i]; n != NULL; n = next)
        {
          all_file_decl_data[k++] = n;
          next = n->next;
          n->next = NULL;
        }
    }
  all_file_decl_data[k] = NULL;
  gcc_assert (k == count);
}
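
/* Illustrative sketch (not part of the build): flattening several linked
   lists of sub-file data into one NULL-terminated array, as done above, with
   a minimal stand-in node type.  */
#if 0
#include <stdio.h>

struct sub { const char *name; struct sub *next; };

int
main (void)
{
  struct sub a2 = { "a2", NULL }, a1 = { "a1", &a2 };
  struct sub b1 = { "b1", NULL };
  struct sub *orig[] = { &a1, &b1 };    /* one chain per input file */
  struct sub *flat[4];
  struct sub *n, *next;
  int i, k = 0;

  for (i = 0; i < 2; i++)
    for (n = orig[i]; n != NULL; n = next)
      {
        flat[k++] = n;
        next = n->next;
        n->next = NULL;                 /* detach, as lto_flatten_files does */
      }
  flat[k] = NULL;

  for (k = 0; flat[k]; k++)
    printf ("%s ", flat[k]->name);      /* a1 a2 b1 */
  printf ("\n");
  return 0;
}
#endif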
/* Input file data before flattening (i.e. splitting them to subfiles to
   support incremental linking).  */
static int real_file_count;
static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
/* Read all the symbols from the input files FNAMES.  NFILES is the
   number of files requested in the command line.  Instantiate a
   global call graph by aggregating all the sub-graphs found in each
   file.  */

static void
read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
{
  unsigned int i, last_file_ix;
  FILE *resolution;
  struct cgraph_node *node;
  int count = 0;
  struct lto_file_decl_data **decl_data;

  timevar_push (TV_IPA_LTO_DECL_IN);

  real_file_decl_data
    = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
  real_file_count = nfiles;

  /* Read the resolution file.  */
  resolution = NULL;
  if (resolution_file_name)
    {
      int t;
      unsigned num_objects;

      resolution = fopen (resolution_file_name, "r");
      if (resolution == NULL)
        fatal_error ("could not open symbol resolution file: %m");

      t = fscanf (resolution, "%u", &num_objects);
      gcc_assert (t == 1);

      /* True, since the plugin splits the archives.  */
      gcc_assert (num_objects == nfiles);
    }

  tree_with_vars = htab_create_ggc (101, htab_hash_pointer, htab_eq_pointer,
                                    NULL);
  type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
                                     tree_int_map_eq, NULL);
  type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
  gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
                                (GIMPLE_TYPE_LEADER_SIZE);
  gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);

  if (!quiet_flag)
    fprintf (stderr, "Reading object files:");

  /* Read all of the object files specified on the command line.  */
  for (i = 0, last_file_ix = 0; i < nfiles; ++i)
    {
      struct lto_file_decl_data *file_data = NULL;
      if (!quiet_flag)
        {
          fprintf (stderr, " %s", fnames[i]);
          fflush (stderr);
        }

      current_lto_file = lto_obj_file_open (fnames[i], false);
      if (!current_lto_file)
        break;

      file_data = lto_file_read (current_lto_file, resolution, &count);
      if (!file_data)
        {
          lto_obj_file_close (current_lto_file);
          free (current_lto_file);
          current_lto_file = NULL;
          break;
        }

      decl_data[last_file_ix++] = file_data;

      lto_obj_file_close (current_lto_file);
      free (current_lto_file);
      current_lto_file = NULL;
    }

  lto_flatten_files (decl_data, count, last_file_ix);
  lto_stats.num_input_files = count;
  ggc_free (decl_data);
  real_file_decl_data = NULL;

  if (resolution_file_name)
    fclose (resolution);

  /* Free gimple type merging datastructures.  */
  htab_delete (gimple_types);
  gimple_types = NULL;
  htab_delete (type_hash_cache);
  type_hash_cache = NULL;
  free (type_pair_cache);
  type_pair_cache = NULL;
  gimple_type_leader = NULL;
  free_gimple_type_tables ();

  /* Set the hooks so that all of the ipa passes can read in their data.  */
  lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);

  timevar_pop (TV_IPA_LTO_DECL_IN);

  if (!quiet_flag)
    fprintf (stderr, "\nReading the callgraph\n");

  timevar_push (TV_IPA_LTO_CGRAPH_IO);
  /* Read the symtab.  */
  input_symtab ();

  /* Store resolutions into the symbol table.  */
  if (resolution_map)
    {
      void **res;
      symtab_node snode;

      FOR_EACH_SYMBOL (snode)
        if (symtab_real_symbol_p (snode)
            && (res = pointer_map_contains (resolution_map,
                                            snode->symbol.decl)))
          snode->symbol.resolution
            = (enum ld_plugin_symbol_resolution)(size_t)*res;

      pointer_map_destroy (resolution_map);
      resolution_map = NULL;
    }

  timevar_pop (TV_IPA_LTO_CGRAPH_IO);

  if (!quiet_flag)
    fprintf (stderr, "Merging declarations\n");

  timevar_push (TV_IPA_LTO_DECL_MERGE);
  /* Merge global decls.  In ltrans mode we read merged cgraph, we do not
     need to care about resolving symbols again, we only need to replace
     duplicated declarations read from the callgraph and from function
     sections.  */
  if (!flag_ltrans)
    {
      lto_symtab_merge_decls ();

      /* If there were errors during symbol merging bail out, we have no
         good way to recover here.  */
      if (seen_error ())
        fatal_error ("errors during merging of translation units");

      /* Fixup all decls.  */
      lto_fixup_decls (all_file_decl_data);
    }
  htab_delete (tree_with_vars);
  tree_with_vars = NULL;

  timevar_pop (TV_IPA_LTO_DECL_MERGE);
  /* Each pass will set the appropriate timer.  */

  if (!quiet_flag)
    fprintf (stderr, "Reading summaries\n");

  /* Read the IPA summary data.  */
  if (flag_ltrans)
    ipa_read_optimization_summaries ();
  else
    ipa_read_summaries ();

  for (i = 0; all_file_decl_data[i]; i++)
    {
      gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
      lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
      all_file_decl_data[i]->symtab_node_encoder = NULL;
    }

  /* Finally merge the cgraph according to the decl merging decisions.  */
  timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Before merging:\n");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  lto_symtab_merge_cgraph_nodes ();

  /* FIXME: ipa_transforms_to_apply holds the list of passes that have
     optimization summaries computed and need to apply changes.  At the moment
     WHOPR only supports inlining, so we can push it here by hand.  In future
     we need to stream this field into ltrans compilation.  */
  if (flag_ltrans)
    FOR_EACH_DEFINED_FUNCTION (node)
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass)&pass_ipa_inline);

  timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);

  timevar_push (TV_IPA_LTO_DECL_INIT_IO);

  /* Indicate that the cgraph is built and ready.  */
  cgraph_function_flags_ready = true;

  timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
  ggc_free (all_file_decl_data);
  all_file_decl_data = NULL;
}
/* Materialize all the bodies for all the nodes in the callgraph.  */

static void
materialize_cgraph (void)
{
  tree decl;
  struct cgraph_node *node;
  unsigned i;
  timevar_id_t lto_timer;

  if (!quiet_flag)
    fprintf (stderr,
             flag_wpa ? "Materializing decls:" : "Reading function bodies:");

  /* Now that we have input the cgraph, we need to clear all of the aux
     nodes and read the functions if we are not running in WPA mode.  */
  timevar_push (TV_IPA_LTO_GIMPLE_IN);

  FOR_EACH_FUNCTION (node)
    {
      if (node->symbol.lto_file_data)
        {
          lto_materialize_function (node);
          lto_stats.num_input_cgraph_nodes++;
        }
    }

  timevar_pop (TV_IPA_LTO_GIMPLE_IN);

  /* Start the appropriate timer depending on the mode that we are
     operating in.  */
  lto_timer = (flag_wpa) ? TV_WHOPR_WPA
              : (flag_ltrans) ? TV_WHOPR_LTRANS
              : TV_LTO;
  timevar_push (lto_timer);

  current_function_decl = NULL;

  /* Inform the middle end about the global variables we have seen.  */
  FOR_EACH_VEC_ELT (*lto_global_var_decls, i, decl)
    rest_of_decl_compilation (decl, 1, 0);

  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_pop (lto_timer);
}
/* Show various memory usage statistics related to LTO.  */
static void
print_lto_report_1 (void)
{
  const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
  fprintf (stderr, "%s statistics\n", pfx);

  if (gimple_types)
    fprintf (stderr, "[%s] GIMPLE type table: size %ld, %ld elements, "
             "%ld searches, %ld collisions (ratio: %f)\n", pfx,
             (long) htab_size (gimple_types),
             (long) htab_elements (gimple_types),
             (long) gimple_types->searches,
             (long) gimple_types->collisions,
             htab_collisions (gimple_types));
  else
    fprintf (stderr, "[%s] GIMPLE type table is empty\n", pfx);
  if (type_hash_cache)
    fprintf (stderr, "[%s] GIMPLE type hash table: size %ld, %ld elements, "
             "%ld searches, %ld collisions (ratio: %f)\n", pfx,
             (long) htab_size (type_hash_cache),
             (long) htab_elements (type_hash_cache),
             (long) type_hash_cache->searches,
             (long) type_hash_cache->collisions,
             htab_collisions (type_hash_cache));
  else
    fprintf (stderr, "[%s] GIMPLE type hash table is empty\n", pfx);

  print_gimple_types_stats (pfx);
  print_lto_report (pfx);
}
/* Perform whole program analysis (WPA) on the callgraph and write out the
   optimization plan.  */

static void
do_whole_program_analysis (void)
{
  symtab_node node;

  timevar_start (TV_PHASE_OPT_GEN);

  /* Note that since we are in WPA mode, materialize_cgraph will not
     actually read in all the function bodies.  It only materializes
     the decls and cgraph nodes so that analysis can be performed.  */
  materialize_cgraph ();

  /* Reading in the cgraph uses different timers, start timing WPA now.  */
  timevar_push (TV_WHOPR_WPA);

  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }

  cgraph_function_flags_ready = true;

  if (cgraph_dump_file)
    {
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  execute_ipa_pass_list (all_regular_ipa_passes);
  symtab_remove_unreachable_nodes (false, dump_file);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  bitmap_obstack_release (NULL);

  /* We are about to launch the final LTRANS phase, stop the WPA timer.  */
  timevar_pop (TV_WHOPR_WPA);

  timevar_push (TV_WHOPR_PARTITIONING);
  if (flag_lto_partition_1to1)
    lto_1_to_1_map ();
  else if (flag_lto_partition_max)
    lto_max_map ();
  else
    lto_balanced_map ();

  /* AUX pointers are used by the partitioning code to bookkeep the number of
     partitions a symbol is in.  This is no longer needed.  */
  FOR_EACH_SYMBOL (node)
    node->symbol.aux = NULL;

  lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
  timevar_pop (TV_WHOPR_PARTITIONING);

  timevar_stop (TV_PHASE_OPT_GEN);
  timevar_start (TV_PHASE_STREAM_OUT);

  if (!quiet_flag)
    {
      fprintf (stderr, "\nStreaming out");
      fflush (stderr);
    }
  lto_wpa_write_files ();
  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_stop (TV_PHASE_STREAM_OUT);

  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report)
    print_lto_report_1 ();
  dump_memory_report (true);
}
static GTY(()) tree lto_eh_personality_decl;

/* Return the LTO personality function decl.  */

tree
lto_eh_personality (void)
{
  if (!lto_eh_personality_decl)
    {
      /* Use the first personality DECL for our personality if we don't
         support multiple ones.  This ensures that we don't artificially
         create the need for them in a single-language program.  */
      if (first_personality_decl && !dwarf2out_do_cfi_asm ())
        lto_eh_personality_decl = first_personality_decl;
      else
        lto_eh_personality_decl = lhd_gcc_personality ();
    }

  return lto_eh_personality_decl;
}
/* Set the process name based on the LTO mode.  */

static void
lto_process_name (void)
{
  if (flag_lto)
    setproctitle ("lto1-lto");
  if (flag_wpa)
    setproctitle ("lto1-wpa");
  if (flag_ltrans)
    setproctitle ("lto1-ltrans");
}
/* Initialize the LTO front end.  */

static void
lto_init (void)
{
  lto_process_name ();
  lto_streamer_hooks_init ();
  lto_set_in_hooks (NULL, get_section_data, free_section_data);
  memset (&lto_stats, 0, sizeof (lto_stats));
  bitmap_obstack_initialize (NULL);
  gimple_register_cfg_hooks ();
}
/* Main entry point for the GIMPLE front end.  This front end has
   three main personalities:

   - LTO (-flto).  All the object files on the command line are
     loaded in memory and processed as a single translation unit.
     This is the traditional link-time optimization behavior.

   - WPA (-fwpa).  Only the callgraph and summary information for
     files in the command file are loaded.  A single callgraph
     (without function bodies) is instantiated for the whole set of
     files.  IPA passes are only allowed to analyze the call graph
     and make transformation decisions.  The callgraph is
     partitioned, each partition is written to a new object file
     together with the transformation decisions.

   - LTRANS (-fltrans).  Similar to -flto but it prevents the IPA
     summary files from running again.  Since WPA computed summary
     information and decided what transformations to apply, LTRANS
     simply applies them.  */

void
lto_main (void)
{
  /* LTO is called as a front end, even though it is not a front end.
     Because it is called as a front end, TV_PHASE_PARSING and
     TV_PARSE_GLOBAL are active, and we need to turn them off while
     doing LTO.  Later we turn them back on so they are active up in
     toplev.c.  */
  timevar_pop (TV_PARSE_GLOBAL);
  timevar_stop (TV_PHASE_PARSING);

  timevar_start (TV_PHASE_SETUP);

  /* Initialize the LTO front end.  */
  lto_init ();

  timevar_stop (TV_PHASE_SETUP);
  timevar_start (TV_PHASE_STREAM_IN);

  /* Read all the symbols and call graph from all the files in the
     command line.  */
  read_cgraph_and_symbols (num_in_fnames, in_fnames);

  timevar_stop (TV_PHASE_STREAM_IN);

  if (!seen_error ())
    {
      /* If WPA is enabled analyze the whole call graph and create an
         optimization plan.  Otherwise, read in all the function
         bodies and continue with optimization.  */
      if (flag_wpa)
        do_whole_program_analysis ();
      else
        {
          struct varpool_node *vnode;

          timevar_start (TV_PHASE_OPT_GEN);

          materialize_cgraph ();

          /* Let the middle end know that we have read and merged all of
             the input files.  */

          timevar_stop (TV_PHASE_OPT_GEN);

          /* FIXME lto, if the processes spawned by WPA fail, we miss
             the chance to print WPA's report, so WPA will call
             print_lto_report before launching LTRANS.  If LTRANS was
             launched directly by the driver we would not need to do
             this.  */
          if (flag_lto_report)
            print_lto_report_1 ();

          /* Record the global variables.  */
          FOR_EACH_DEFINED_VARIABLE (vnode)
            vec_safe_push (lto_global_var_decls, vnode->symbol.decl);
        }
    }

  /* Here we make LTO pretend to be a parser.  */
  timevar_start (TV_PHASE_PARSING);
  timevar_push (TV_PARSE_GLOBAL);
}

#include "gt-lto-lto.h"