1 /* Top-level LTO routines.
2 Copyright 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "ggc.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
35 #include "vec.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "ipa-prop.h"
39 #include "common.h"
40 #include "debug.h"
41 #include "gimple.h"
42 #include "lto.h"
43 #include "lto-tree.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
49 static GTY(()) tree first_personality_decl;
51 /* Returns a hash code for P. */
53 static hashval_t
54 hash_name (const void *p)
56 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
57 return (hashval_t) htab_hash_string (ds->name);
61 /* Returns nonzero if P1 and P2 are equal. */
63 static int
64 eq_name (const void *p1, const void *p2)
66 const struct lto_section_slot *s1 =
67 (const struct lto_section_slot *) p1;
68 const struct lto_section_slot *s2 =
69 (const struct lto_section_slot *) p2;
71 return strcmp (s1->name, s2->name) == 0;
74 /* Free an lto_section_slot, including its name string.  */
76 static void
77 free_with_string (void *arg)
79 struct lto_section_slot *s = (struct lto_section_slot *)arg;
81 free (CONST_CAST (char *, s->name));
82 free (arg);
85 /* Create a section hash table.  */
87 htab_t
88 lto_obj_create_section_hash_table (void)
90 return htab_create (37, hash_name, eq_name, free_with_string);
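/* A minimal usage sketch of the table above (illustrative only;
   SOME_SECTION_NAME stands for a malloc'ed section name owned by the
   caller):

     htab_t sections = lto_obj_create_section_hash_table ();
     struct lto_section_slot *s = XCNEW (struct lto_section_slot);
     s->name = SOME_SECTION_NAME;
     *htab_find_slot (sections, s, INSERT) = s;

   The table owns the slot and its name string; free_with_string
   releases both when the entry or the table is deleted.  */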
93 /* Delete an allocated integer KEY in the splay tree. */
95 static void
96 lto_splay_tree_delete_id (splay_tree_key key)
98 free ((void *) key);
101 /* Compare splay tree node ids A and B. */
103 static int
104 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
106 unsigned HOST_WIDE_INT ai;
107 unsigned HOST_WIDE_INT bi;
109 ai = *(unsigned HOST_WIDE_INT *) a;
110 bi = *(unsigned HOST_WIDE_INT *) b;
112 if (ai < bi)
113 return -1;
114 else if (ai > bi)
115 return 1;
116 return 0;
119 /* Look up splay tree node by ID in splay tree T. */
121 static splay_tree_node
122 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
124 return splay_tree_lookup (t, (splay_tree_key) &id);
127 /* Check if KEY has ID. */
129 static bool
130 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
132 return *(unsigned HOST_WIDE_INT *) key == id;
135 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
136 The ID is allocated separately because we need HOST_WIDE_INTs which may
137 be wider than a splay_tree_key. */
139 static void
140 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
141 struct lto_file_decl_data *file_data)
143 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
144 *idp = id;
145 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
148 /* Create a splay tree. */
150 static splay_tree
151 lto_splay_tree_new (void)
153 return splay_tree_new (lto_splay_tree_compare_ids,
154 lto_splay_tree_delete_id,
155 NULL);
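/* A minimal sketch of the ID-keyed splay tree helpers above
   (illustrative only; ID and FILE_DATA are placeholders):

     splay_tree t = lto_splay_tree_new ();
     lto_splay_tree_insert (t, ID, FILE_DATA);
     splay_tree_node n = lto_splay_tree_lookup (t, ID);
     if (n && lto_splay_tree_id_equal_p (n->key, ID))
       ... use (struct lto_file_decl_data *) n->value ...

   The key passed to splay_tree_lookup is the address of a local ID;
   lto_splay_tree_compare_ids dereferences both keys, which is why the
   inserted keys are separately allocated HOST_WIDE_INTs.  */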
158 /* Return true when NODE has a clone that is analyzed (i.e. we need
159 to load its body even if the node itself is not needed). */
161 static bool
162 has_analyzed_clone_p (struct cgraph_node *node)
164 struct cgraph_node *orig = node;
165 node = node->clones;
166 if (node)
167 while (node != orig)
169 if (node->analyzed)
170 return true;
171 if (node->clones)
172 node = node->clones;
173 else if (node->next_sibling_clone)
174 node = node->next_sibling_clone;
175 else
177 while (node != orig && !node->next_sibling_clone)
178 node = node->clone_of;
179 if (node != orig)
180 node = node->next_sibling_clone;
183 return false;
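/* Illustrative example for the loop above: for a clone tree like

     orig
      +-- c1 --- c1a
      +-- c2

   it performs a stackless preorder walk visiting c1, c1a and c2,
   descending via ->clones, moving sideways via ->next_sibling_clone
   and climbing back up via ->clone_of, returning true as soon as an
   analyzed clone is found.  */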
186 /* Read the function body for the function associated with NODE. */
188 static void
189 lto_materialize_function (struct cgraph_node *node)
191 tree decl;
192 struct lto_file_decl_data *file_data;
193 const char *data, *name;
194 size_t len;
196 decl = node->symbol.decl;
197 /* Read in functions with body (analyzed nodes)
198 and also functions that are needed to produce virtual clones. */
199 if (cgraph_function_with_gimple_body_p (node) || has_analyzed_clone_p (node))
201 /* Clones don't need to be read. */
202 if (node->clone_of)
203 return;
205 /* Load the function body only if not operating in WPA mode. In
206 WPA mode, the body of the function is not needed. */
207 if (!flag_wpa)
209 file_data = node->symbol.lto_file_data;
210 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
212 /* We may have renamed the declaration, e.g., a static function. */
213 name = lto_get_decl_name_mapping (file_data, name);
215 data = lto_get_section_data (file_data, LTO_section_function_body,
216 name, &len);
217 if (!data)
218 fatal_error ("%s: section %s is missing",
219 file_data->file_name,
220 name);
222 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
224 push_struct_function (decl);
225 announce_function (decl);
226 lto_input_function_body (file_data, decl, data);
227 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
228 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
229 lto_stats.num_function_bodies++;
230 lto_free_section_data (file_data, LTO_section_function_body, name,
231 data, len);
232 pop_cfun ();
233 ggc_collect ();
237 /* Let the middle end know about the function. */
238 rest_of_decl_compilation (decl, 1, 0);
242 /* Decode the content of memory pointed to by DATA in the in decl
243 state object STATE. DATA_IN points to a data_in structure for
244 decoding. Return the address after the decoded object in the
245 input. */
247 static const uint32_t *
248 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
249 struct lto_in_decl_state *state)
251 uint32_t ix;
252 tree decl;
253 uint32_t i, j;
255 ix = *data++;
256 decl = streamer_tree_cache_get (data_in->reader_cache, ix);
257 if (TREE_CODE (decl) != FUNCTION_DECL)
259 gcc_assert (decl == void_type_node);
260 decl = NULL_TREE;
262 state->fn_decl = decl;
264 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
266 uint32_t size = *data++;
267 tree *decls = ggc_alloc_vec_tree (size);
269 for (j = 0; j < size; j++)
270 decls[j] = streamer_tree_cache_get (data_in->reader_cache, data[j]);
272 state->streams[i].size = size;
273 state->streams[i].trees = decls;
274 data += size;
277 return data;
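/* Sketch of the uint32_t stream decoded above (derived from the loop,
   not a separate format specification):

     [cache index of the function decl or void_type_node]
     [size of decl stream 0] [index, index, ...]
       ...
     [size of decl stream LTO_N_DECL_STREAMS - 1] [index, index, ...]

   Each index refers into DATA_IN's streamer tree cache.  */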
282 /* Global type table. FIXME, it should be possible to re-use some
283 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
284 etc), but those assume that types were built with the various
285 build_*_type routines which is not the case with the streamer. */
286 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
287 htab_t gimple_types;
288 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
289 htab_t type_hash_cache;
291 static hashval_t gimple_type_hash (const void *);
293 /* Structure used to maintain a cache of some type pairs compared by
294 gimple_types_compatible_p when comparing aggregate types. There are
295 three possible values for SAME_P:
297 -2: The pair (T1, T2) has just been inserted in the table.
298 0: T1 and T2 are different types.
299 1: T1 and T2 are the same type. */
301 struct type_pair_d
303 unsigned int uid1;
304 unsigned int uid2;
305 signed char same_p;
307 typedef struct type_pair_d *type_pair_t;
308 DEF_VEC_P(type_pair_t);
309 DEF_VEC_ALLOC_P(type_pair_t,heap);
311 #define GIMPLE_TYPE_PAIR_SIZE 16381
312 struct type_pair_d *type_pair_cache;
315 /* Look up the pair of types T1 and T2 in the type-pair cache.  Insert a new
316 entry if none existed. */
318 static inline type_pair_t
319 lookup_type_pair (tree t1, tree t2)
321 unsigned int index;
322 unsigned int uid1, uid2;
324 if (TYPE_UID (t1) < TYPE_UID (t2))
326 uid1 = TYPE_UID (t1);
327 uid2 = TYPE_UID (t2);
329 else
331 uid1 = TYPE_UID (t2);
332 uid2 = TYPE_UID (t1);
334 gcc_checking_assert (uid1 != uid2);
336 /* iterative_hash_hashval_t implies a function call.
337 We know that UIDs are in a limited range. */
338 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
339 % GIMPLE_TYPE_PAIR_SIZE);
340 if (type_pair_cache [index].uid1 == uid1
341 && type_pair_cache [index].uid2 == uid2)
342 return &type_pair_cache[index];
344 type_pair_cache [index].uid1 = uid1;
345 type_pair_cache [index].uid2 = uid2;
346 type_pair_cache [index].same_p = -2;
348 return &type_pair_cache[index];
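/* Illustrative note: the cache above is direct-mapped, so a colliding
   pair simply evicts the previous entry, and a caller can tell a fresh
   slot by its sentinel value:

     type_pair_t p = lookup_type_pair (t1, t2);
     if (p->same_p == -2)
       ... this (uid1, uid2) pair has not been decided yet ...
     else
       ... p->same_p is the cached 0/1 verdict ...
*/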
351 /* Per-pointer state for the SCC finding. The on_sccstack flag
352 is not strictly required; it is true when there is no hash value
353 recorded for the type and false otherwise. But querying that
354 is slower. */
356 struct sccs
358 unsigned int dfsnum;
359 unsigned int low;
360 bool on_sccstack;
361 union {
362 hashval_t hash;
363 signed char same_p;
364 } u;
367 static unsigned int next_dfs_num;
368 static unsigned int gtc_next_dfs_num;
370 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
372 typedef struct GTY(()) gimple_type_leader_entry_s {
373 tree type;
374 tree leader;
375 } gimple_type_leader_entry;
377 #define GIMPLE_TYPE_LEADER_SIZE 16381
378 static GTY((length("GIMPLE_TYPE_LEADER_SIZE")))
379 gimple_type_leader_entry *gimple_type_leader;
381 /* Look up an existing leader for T and return it or NULL_TREE, if
382 there is none in the cache. */
384 static inline tree
385 gimple_lookup_type_leader (tree t)
387 gimple_type_leader_entry *leader;
389 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
390 if (leader->type != t)
391 return NULL_TREE;
393 return leader->leader;
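/* Illustrative note: the leader cache is direct-mapped on TYPE_UID, so
   a NULL_TREE result only means the slot was never filled or has been
   evicted; callers fall back to hashing and structural comparison in
   that case:

     tree leader = gimple_lookup_type_leader (t);
     if (leader)
       ... T has already been registered and LEADER (possibly T itself)
           is its canonical type ...
*/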
397 /* Return true if T1 and T2 have the same name. If only one of the
398 two types has a name, return false; if neither has a name, return
399 true. */
401 static bool
402 compare_type_names_p (tree t1, tree t2)
404 tree name1 = TYPE_NAME (t1);
405 tree name2 = TYPE_NAME (t2);
407 if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
408 return false;
410 if (name1 == NULL_TREE)
411 return true;
413 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
414 if (TREE_CODE (name1) != TREE_CODE (name2))
415 return false;
417 if (TREE_CODE (name1) == TYPE_DECL)
418 name1 = DECL_NAME (name1);
419 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
421 if (TREE_CODE (name2) == TYPE_DECL)
422 name2 = DECL_NAME (name2);
423 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
425 /* Identifiers can be compared with pointer equality rather
426 than a string comparison. */
427 if (name1 == name2)
428 return true;
430 return false;
433 static bool
434 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
435 VEC(type_pair_t, heap) **,
436 struct pointer_map_t *, struct obstack *);
438 /* DFS visit the edge from the caller's type pair with state *STATE to
439 the pair T1, T2.
440 Update the merging status if it is not part of the SCC containing the
441 caller's pair and return it.
442 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
444 static bool
445 gtc_visit (tree t1, tree t2,
446 struct sccs *state,
447 VEC(type_pair_t, heap) **sccstack,
448 struct pointer_map_t *sccstate,
449 struct obstack *sccstate_obstack)
451 struct sccs *cstate = NULL;
452 type_pair_t p;
453 void **slot;
454 tree leader1, leader2;
456 /* Check first for the obvious case of pointer identity. */
457 if (t1 == t2)
458 return true;
460 /* Check that we have two types to compare. */
461 if (t1 == NULL_TREE || t2 == NULL_TREE)
462 return false;
464 /* Can't be the same type if the types don't have the same code. */
465 if (TREE_CODE (t1) != TREE_CODE (t2))
466 return false;
468 /* Can't be the same type if they have different CV qualifiers. */
469 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
470 return false;
472 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
473 return false;
475 /* Void types and nullptr types are always the same. */
476 if (TREE_CODE (t1) == VOID_TYPE
477 || TREE_CODE (t1) == NULLPTR_TYPE)
478 return true;
480 /* Can't be the same type if they have different alignment or mode. */
481 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
482 || TYPE_MODE (t1) != TYPE_MODE (t2))
483 return false;
485 /* Do some simple checks before doing three hashtable queries. */
486 if (INTEGRAL_TYPE_P (t1)
487 || SCALAR_FLOAT_TYPE_P (t1)
488 || FIXED_POINT_TYPE_P (t1)
489 || TREE_CODE (t1) == VECTOR_TYPE
490 || TREE_CODE (t1) == COMPLEX_TYPE
491 || TREE_CODE (t1) == OFFSET_TYPE
492 || POINTER_TYPE_P (t1))
494 /* Can't be the same type if they have different sign or precision. */
495 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
496 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
497 return false;
499 if (TREE_CODE (t1) == INTEGER_TYPE
500 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
501 return false;
503 /* That's all we need to check for float and fixed-point types. */
504 if (SCALAR_FLOAT_TYPE_P (t1)
505 || FIXED_POINT_TYPE_P (t1))
506 return true;
508 /* For other types fall through to more complex checks. */
511 /* If the types have been previously registered and found equal
512 they still are. */
513 leader1 = gimple_lookup_type_leader (t1);
514 leader2 = gimple_lookup_type_leader (t2);
515 if (leader1 == t2
516 || t1 == leader2
517 || (leader1 && leader1 == leader2))
518 return true;
520 /* If the hash values of t1 and t2 are different the types can't
521 possibly be the same. This helps keep the type-pair hashtable
522 small, tracking comparisons only for hash collisions. */
523 if (gimple_type_hash (t1) != gimple_type_hash (t2))
524 return false;
526 /* Allocate a new cache entry for this comparison. */
527 p = lookup_type_pair (t1, t2);
528 if (p->same_p == 0 || p->same_p == 1)
530 /* We have already decided whether T1 and T2 are the
531 same, return the cached result. */
532 return p->same_p == 1;
535 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
536 cstate = (struct sccs *)*slot;
537 /* Not yet visited. DFS recurse. */
538 if (!cstate)
540 gimple_types_compatible_p_1 (t1, t2, p,
541 sccstack, sccstate, sccstate_obstack);
542 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
543 state->low = MIN (state->low, cstate->low);
546 /* If the type is still on the SCC stack adjust the parent's low. */
546 if (cstate->dfsnum < state->dfsnum
547 && cstate->on_sccstack)
548 state->low = MIN (cstate->dfsnum, state->low);
550 /* Return the current lattice value. We start with an equality
551 assumption so types that are part of an SCC will be optimistically
552 treated as equal unless proven otherwise. */
553 return cstate->u.same_p;
556 /* Worker for gimple_types_compatible_p.
557 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
559 static bool
560 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
561 VEC(type_pair_t, heap) **sccstack,
562 struct pointer_map_t *sccstate,
563 struct obstack *sccstate_obstack)
565 struct sccs *state;
567 gcc_assert (p->same_p == -2);
569 state = XOBNEW (sccstate_obstack, struct sccs);
570 *pointer_map_insert (sccstate, p) = state;
572 VEC_safe_push (type_pair_t, heap, *sccstack, p);
573 state->dfsnum = gtc_next_dfs_num++;
574 state->low = state->dfsnum;
575 state->on_sccstack = true;
576 /* Start with an equality assumption. As we DFS recurse into child
577 SCCs this assumption may get revisited. */
578 state->u.same_p = 1;
580 /* The struct tags shall compare equal. */
581 if (!compare_type_names_p (t1, t2))
582 goto different_types;
584 /* The main variant of both types should compare equal. */
585 if (TYPE_MAIN_VARIANT (t1) != t1
586 || TYPE_MAIN_VARIANT (t2) != t2)
588 if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
589 state, sccstack, sccstate, sccstate_obstack))
590 goto different_types;
593 /* We may not merge typedef types to the same type in different
594 contexts. */
595 if (TYPE_NAME (t1)
596 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
597 && DECL_CONTEXT (TYPE_NAME (t1))
598 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
600 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
601 DECL_CONTEXT (TYPE_NAME (t2)),
602 state, sccstack, sccstate, sccstate_obstack))
603 goto different_types;
606 /* If their attributes are not the same they can't be the same type. */
607 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
608 goto different_types;
610 /* Do type-specific comparisons. */
611 switch (TREE_CODE (t1))
613 case VECTOR_TYPE:
614 case COMPLEX_TYPE:
615 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
616 state, sccstack, sccstate, sccstate_obstack))
617 goto different_types;
618 goto same_types;
620 case ARRAY_TYPE:
621 /* Array types are the same if the element types are the same and
622 the number of elements is the same. */
623 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
624 state, sccstack, sccstate, sccstate_obstack)
625 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
626 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
627 goto different_types;
628 else
630 tree i1 = TYPE_DOMAIN (t1);
631 tree i2 = TYPE_DOMAIN (t2);
633 /* For an incomplete external array, the type domain can be
634 NULL_TREE. Check this condition also. */
635 if (i1 == NULL_TREE && i2 == NULL_TREE)
636 goto same_types;
637 else if (i1 == NULL_TREE || i2 == NULL_TREE)
638 goto different_types;
639 else
641 tree min1 = TYPE_MIN_VALUE (i1);
642 tree min2 = TYPE_MIN_VALUE (i2);
643 tree max1 = TYPE_MAX_VALUE (i1);
644 tree max2 = TYPE_MAX_VALUE (i2);
646 /* The minimum/maximum values have to be the same. */
647 if ((min1 == min2
648 || (min1 && min2
649 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
650 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
651 || operand_equal_p (min1, min2, 0))))
652 && (max1 == max2
653 || (max1 && max2
654 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
655 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
656 || operand_equal_p (max1, max2, 0)))))
657 goto same_types;
658 else
659 goto different_types;
663 case METHOD_TYPE:
664 /* Method types should belong to the same class. */
665 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
666 state, sccstack, sccstate, sccstate_obstack))
667 goto different_types;
669 /* Fallthru */
671 case FUNCTION_TYPE:
672 /* Function types are the same if the return type and arguments types
673 are the same. */
674 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
675 state, sccstack, sccstate, sccstate_obstack))
676 goto different_types;
678 if (!comp_type_attributes (t1, t2))
679 goto different_types;
681 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
682 goto same_types;
683 else
685 tree parms1, parms2;
687 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
688 parms1 && parms2;
689 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
691 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
692 state, sccstack, sccstate, sccstate_obstack))
693 goto different_types;
696 if (parms1 || parms2)
697 goto different_types;
699 goto same_types;
702 case OFFSET_TYPE:
704 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
705 state, sccstack, sccstate, sccstate_obstack)
706 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
707 TYPE_OFFSET_BASETYPE (t2),
708 state, sccstack, sccstate, sccstate_obstack))
709 goto different_types;
711 goto same_types;
714 case POINTER_TYPE:
715 case REFERENCE_TYPE:
717 /* If the two pointers have different ref-all attributes,
718 they can't be the same type. */
719 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
720 goto different_types;
722 /* Otherwise, pointer and reference types are the same if the
723 pointed-to types are the same. */
724 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
725 state, sccstack, sccstate, sccstate_obstack))
726 goto same_types;
728 goto different_types;
731 case INTEGER_TYPE:
732 case BOOLEAN_TYPE:
734 tree min1 = TYPE_MIN_VALUE (t1);
735 tree max1 = TYPE_MAX_VALUE (t1);
736 tree min2 = TYPE_MIN_VALUE (t2);
737 tree max2 = TYPE_MAX_VALUE (t2);
738 bool min_equal_p = false;
739 bool max_equal_p = false;
741 /* If either type has a minimum value, the other type must
742 have the same. */
743 if (min1 == NULL_TREE && min2 == NULL_TREE)
744 min_equal_p = true;
745 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
746 min_equal_p = true;
748 /* Likewise, if either type has a maximum value, the other
749 type must have the same. */
750 if (max1 == NULL_TREE && max2 == NULL_TREE)
751 max_equal_p = true;
752 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
753 max_equal_p = true;
755 if (!min_equal_p || !max_equal_p)
756 goto different_types;
758 goto same_types;
761 case ENUMERAL_TYPE:
763 /* FIXME lto, we cannot check bounds on enumeral types because
764 different front ends will produce different values.
765 In C, enumeral types are integers, while in C++ each element
766 will have its own symbolic value. We should decide how enums
767 are to be represented in GIMPLE and have each front end lower
768 to that. */
769 tree v1, v2;
771 /* For enumeral types, all the values must be the same. */
772 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
773 goto same_types;
775 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
776 v1 && v2;
777 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
779 tree c1 = TREE_VALUE (v1);
780 tree c2 = TREE_VALUE (v2);
782 if (TREE_CODE (c1) == CONST_DECL)
783 c1 = DECL_INITIAL (c1);
785 if (TREE_CODE (c2) == CONST_DECL)
786 c2 = DECL_INITIAL (c2);
788 if (tree_int_cst_equal (c1, c2) != 1)
789 goto different_types;
791 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
792 goto different_types;
795 /* If one enumeration has more values than the other, they
796 are not the same. */
797 if (v1 || v2)
798 goto different_types;
800 goto same_types;
803 case RECORD_TYPE:
804 case UNION_TYPE:
805 case QUAL_UNION_TYPE:
807 tree f1, f2;
809 /* For aggregate types, all the fields must be the same. */
810 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
811 f1 && f2;
812 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
814 /* Different field kinds are not compatible. */
815 if (TREE_CODE (f1) != TREE_CODE (f2))
816 goto different_types;
817 /* Field decls must have the same name and offset. */
818 if (TREE_CODE (f1) == FIELD_DECL
819 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
820 || !gimple_compare_field_offset (f1, f2)))
821 goto different_types;
822 /* All entities should have the same name and type. */
823 if (DECL_NAME (f1) != DECL_NAME (f2)
824 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
825 state, sccstack, sccstate, sccstate_obstack))
826 goto different_types;
829 /* If one aggregate has more fields than the other, they
830 are not the same. */
831 if (f1 || f2)
832 goto different_types;
834 goto same_types;
837 default:
838 gcc_unreachable ();
841 /* Common exit path for types that are not compatible. */
842 different_types:
843 state->u.same_p = 0;
844 goto pop;
846 /* Common exit path for types that are compatible. */
847 same_types:
848 gcc_assert (state->u.same_p == 1);
850 pop:
851 if (state->low == state->dfsnum)
853 type_pair_t x;
855 /* Pop off the SCC and set its cache values to the final
856 comparison result. */
859 struct sccs *cstate;
860 x = VEC_pop (type_pair_t, *sccstack);
861 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
862 cstate->on_sccstack = false;
863 x->same_p = state->u.same_p;
865 while (x != p);
868 return state->u.same_p;
871 /* Return true iff T1 and T2 are structurally identical. Note that
872 an incomplete type and a complete type are considered different
873 here. */
875 static bool
876 gimple_types_compatible_p (tree t1, tree t2)
878 VEC(type_pair_t, heap) *sccstack = NULL;
879 struct pointer_map_t *sccstate;
880 struct obstack sccstate_obstack;
881 type_pair_t p = NULL;
882 bool res;
883 tree leader1, leader2;
885 /* Before starting to set up the SCC machinery handle simple cases. */
887 /* Check first for the obvious case of pointer identity. */
888 if (t1 == t2)
889 return true;
891 /* Check that we have two types to compare. */
892 if (t1 == NULL_TREE || t2 == NULL_TREE)
893 return false;
895 /* Can't be the same type if the types don't have the same code. */
896 if (TREE_CODE (t1) != TREE_CODE (t2))
897 return false;
899 /* Can't be the same type if they have different CV qualifiers. */
900 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
901 return false;
903 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
904 return false;
906 /* Void types and nullptr types are always the same. */
907 if (TREE_CODE (t1) == VOID_TYPE
908 || TREE_CODE (t1) == NULLPTR_TYPE)
909 return true;
911 /* Can't be the same type if they have different alignment or mode. */
912 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
913 || TYPE_MODE (t1) != TYPE_MODE (t2))
914 return false;
916 /* Do some simple checks before doing three hashtable queries. */
917 if (INTEGRAL_TYPE_P (t1)
918 || SCALAR_FLOAT_TYPE_P (t1)
919 || FIXED_POINT_TYPE_P (t1)
920 || TREE_CODE (t1) == VECTOR_TYPE
921 || TREE_CODE (t1) == COMPLEX_TYPE
922 || TREE_CODE (t1) == OFFSET_TYPE
923 || POINTER_TYPE_P (t1))
925 /* Can't be the same type if they have different sign or precision. */
926 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
927 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
928 return false;
930 if (TREE_CODE (t1) == INTEGER_TYPE
931 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
932 return false;
934 /* That's all we need to check for float and fixed-point types. */
935 if (SCALAR_FLOAT_TYPE_P (t1)
936 || FIXED_POINT_TYPE_P (t1))
937 return true;
939 /* For other types fall through to more complex checks. */
942 /* If the types have been previously registered and found equal
943 they still are. */
944 leader1 = gimple_lookup_type_leader (t1);
945 leader2 = gimple_lookup_type_leader (t2);
946 if (leader1 == t2
947 || t1 == leader2
948 || (leader1 && leader1 == leader2))
949 return true;
951 /* If the hash values of t1 and t2 are different the types can't
952 possibly be the same. This helps keep the type-pair hashtable
953 small, tracking comparisons only for hash collisions. */
954 if (gimple_type_hash (t1) != gimple_type_hash (t2))
955 return false;
957 /* If we've visited this type pair before (in the case of aggregates
958 with self-referential types), and we made a decision, return it. */
959 p = lookup_type_pair (t1, t2);
960 if (p->same_p == 0 || p->same_p == 1)
962 /* We have already decided whether T1 and T2 are the
963 same, return the cached result. */
964 return p->same_p == 1;
967 /* Now set up the SCC machinery for the comparison. */
968 gtc_next_dfs_num = 1;
969 sccstate = pointer_map_create ();
970 gcc_obstack_init (&sccstate_obstack);
971 res = gimple_types_compatible_p_1 (t1, t2, p,
972 &sccstack, sccstate, &sccstate_obstack);
973 VEC_free (type_pair_t, heap, sccstack);
974 pointer_map_destroy (sccstate);
975 obstack_free (&sccstate_obstack, NULL);
977 return res;
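/* Illustrative example: two translation units may each stream their own
   copy of a mutually recursive pair such as

     struct A { struct B *b; };
     struct B { struct A *a; };

   Comparing the two copies of A recurses into B and loops back to A.
   The SCC machinery above starts from the optimistic assumption
   same_p == 1 for every pair in the cycle, downgrades it only when a
   real mismatch is found, and records the final verdict for the whole
   SCC in type_pair_cache when the root pair is popped.  */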
980 static hashval_t
981 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
982 struct pointer_map_t *, struct obstack *);
984 /* DFS visit the edge from the caller's type with state *STATE to T.
985 Update the caller's type hash V with the hash for T if it is not part
986 of the SCC containing the caller's type and return it.
987 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
989 static hashval_t
990 visit (tree t, struct sccs *state, hashval_t v,
991 VEC (tree, heap) **sccstack,
992 struct pointer_map_t *sccstate,
993 struct obstack *sccstate_obstack)
995 struct sccs *cstate = NULL;
996 struct tree_int_map m;
997 void **slot;
999 /* If there is a hash value recorded for this type then it can't
1000 possibly be part of our parent SCC. Simply mix in its hash. */
1001 m.base.from = t;
1002 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1003 && *slot)
1004 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
1006 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
1007 cstate = (struct sccs *)*slot;
1008 if (!cstate)
1010 hashval_t tem;
1011 /* Not yet visited. DFS recurse. */
1012 tem = iterative_hash_gimple_type (t, v,
1013 sccstack, sccstate, sccstate_obstack);
1014 if (!cstate)
1015 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
1016 state->low = MIN (state->low, cstate->low);
1017 /* If the type is no longer on the SCC stack and thus is not part
1018 of the parent's SCC, mix in its hash value. Otherwise we will
1019 ignore the type for hashing purposes and return the unaltered
1020 hash value. */
1021 if (!cstate->on_sccstack)
1022 return tem;
1024 if (cstate->dfsnum < state->dfsnum
1025 && cstate->on_sccstack)
1026 state->low = MIN (cstate->dfsnum, state->low);
1028 /* We are part of our parent's SCC, skip this type during hashing
1029 and return the unaltered hash value. */
1030 return v;
1033 /* Hash NAME with the previous hash value V and return it. */
1035 static hashval_t
1036 iterative_hash_name (tree name, hashval_t v)
1038 if (!name)
1039 return v;
1040 v = iterative_hash_hashval_t (TREE_CODE (name), v);
1041 if (TREE_CODE (name) == TYPE_DECL)
1042 name = DECL_NAME (name);
1043 if (!name)
1044 return v;
1045 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
1046 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
1049 /* A type, hashvalue pair for sorting SCC members. */
1051 struct type_hash_pair {
1052 tree type;
1053 hashval_t hash;
1056 /* Compare two type, hashvalue pairs. */
1058 static int
1059 type_hash_pair_compare (const void *p1_, const void *p2_)
1061 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
1062 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
1063 if (p1->hash < p2->hash)
1064 return -1;
1065 else if (p1->hash > p2->hash)
1066 return 1;
1067 return 0;
1070 /* Return a hash value for gimple type TYPE combined with VAL.
1071 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
1073 To hash a type we end up hashing in types that are reachable.
1074 Through pointers we can end up with cycles, which mess up the
1075 required property that we need to compute the same hash value
1076 for structurally equivalent types. To avoid this we have to
1077 hash all types in a cycle (the SCC) in a commutative way. The
1078 easiest way is to not mix in the hashes of the SCC members at
1079 all. To make this work we have to delay setting the hash
1080 values of the SCC until it is complete. */
1082 static hashval_t
1083 iterative_hash_gimple_type (tree type, hashval_t val,
1084 VEC(tree, heap) **sccstack,
1085 struct pointer_map_t *sccstate,
1086 struct obstack *sccstate_obstack)
1088 hashval_t v;
1089 void **slot;
1090 struct sccs *state;
1092 /* Not visited during this DFS walk. */
1093 gcc_checking_assert (!pointer_map_contains (sccstate, type));
1094 state = XOBNEW (sccstate_obstack, struct sccs);
1095 *pointer_map_insert (sccstate, type) = state;
1097 VEC_safe_push (tree, heap, *sccstack, type);
1098 state->dfsnum = next_dfs_num++;
1099 state->low = state->dfsnum;
1100 state->on_sccstack = true;
1102 /* Combine a few common features of types so that types are grouped into
1103 smaller sets; when searching for existing matching types to merge,
1104 only existing types having the same features as the new type will be
1105 checked. */
1106 v = iterative_hash_name (TYPE_NAME (type), 0);
1107 if (TYPE_NAME (type)
1108 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1109 && DECL_CONTEXT (TYPE_NAME (type))
1110 && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
1111 v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
1112 sccstack, sccstate, sccstate_obstack);
1114 /* Factor in the variant structure. */
1115 if (TYPE_MAIN_VARIANT (type) != type)
1116 v = visit (TYPE_MAIN_VARIANT (type), state, v,
1117 sccstack, sccstate, sccstate_obstack);
1119 v = iterative_hash_hashval_t (TREE_CODE (type), v);
1120 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
1121 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
1123 /* Do not hash the type's size as this will cause differences in
1124 hash values for the complete vs. the incomplete type variant. */
1126 /* Incorporate common features of numerical types. */
1127 if (INTEGRAL_TYPE_P (type)
1128 || SCALAR_FLOAT_TYPE_P (type)
1129 || FIXED_POINT_TYPE_P (type))
1131 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
1132 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
1133 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
1136 /* For pointer and reference types, fold in information about the type
1137 pointed to. */
1138 if (POINTER_TYPE_P (type))
1139 v = visit (TREE_TYPE (type), state, v,
1140 sccstack, sccstate, sccstate_obstack);
1142 /* For integer types hash the types min/max values and the string flag. */
1143 if (TREE_CODE (type) == INTEGER_TYPE)
1145 /* OMP lowering can introduce error_mark_node in place of
1146 random local decls in types. */
1147 if (TYPE_MIN_VALUE (type) != error_mark_node)
1148 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
1149 if (TYPE_MAX_VALUE (type) != error_mark_node)
1150 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
1151 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
1154 /* For array types hash the domain and the string flag. */
1155 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
1157 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
1158 v = visit (TYPE_DOMAIN (type), state, v,
1159 sccstack, sccstate, sccstate_obstack);
1162 /* Recurse for aggregates with a single element type. */
1163 if (TREE_CODE (type) == ARRAY_TYPE
1164 || TREE_CODE (type) == COMPLEX_TYPE
1165 || TREE_CODE (type) == VECTOR_TYPE)
1166 v = visit (TREE_TYPE (type), state, v,
1167 sccstack, sccstate, sccstate_obstack);
1169 /* Incorporate function return and argument types. */
1170 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
1172 unsigned na;
1173 tree p;
1175 /* For method types also incorporate their parent class. */
1176 if (TREE_CODE (type) == METHOD_TYPE)
1177 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
1178 sccstack, sccstate, sccstate_obstack);
1180 /* Check result and argument types. */
1181 v = visit (TREE_TYPE (type), state, v,
1182 sccstack, sccstate, sccstate_obstack);
1183 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
1185 v = visit (TREE_VALUE (p), state, v,
1186 sccstack, sccstate, sccstate_obstack);
1187 na++;
1190 v = iterative_hash_hashval_t (na, v);
1193 if (RECORD_OR_UNION_TYPE_P (type))
1195 unsigned nf;
1196 tree f;
1198 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
1200 v = iterative_hash_name (DECL_NAME (f), v);
1201 v = visit (TREE_TYPE (f), state, v,
1202 sccstack, sccstate, sccstate_obstack);
1203 nf++;
1206 v = iterative_hash_hashval_t (nf, v);
1209 /* Record hash for us. */
1210 state->u.hash = v;
1212 /* See if we found an SCC. */
1213 if (state->low == state->dfsnum)
1215 tree x;
1216 struct tree_int_map *m;
1218 /* Pop off the SCC and set its hash values. */
1219 x = VEC_pop (tree, *sccstack);
1220 /* Optimize SCC size one. */
1221 if (x == type)
1223 state->on_sccstack = false;
1224 m = ggc_alloc_cleared_tree_int_map ();
1225 m->base.from = x;
1226 m->to = v;
1227 slot = htab_find_slot (type_hash_cache, m, INSERT);
1228 gcc_assert (!*slot);
1229 *slot = (void *) m;
1231 else
1233 struct sccs *cstate;
1234 unsigned first, i, size, j;
1235 struct type_hash_pair *pairs;
1236 /* Pop off the SCC and build an array of type, hash pairs. */
1237 first = VEC_length (tree, *sccstack) - 1;
1238 while (VEC_index (tree, *sccstack, first) != type)
1239 --first;
1240 size = VEC_length (tree, *sccstack) - first + 1;
1241 pairs = XALLOCAVEC (struct type_hash_pair, size);
1242 i = 0;
1243 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
1244 cstate->on_sccstack = false;
1245 pairs[i].type = x;
1246 pairs[i].hash = cstate->u.hash;
1249 x = VEC_pop (tree, *sccstack);
1250 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
1251 cstate->on_sccstack = false;
1252 ++i;
1253 pairs[i].type = x;
1254 pairs[i].hash = cstate->u.hash;
1256 while (x != type);
1257 gcc_assert (i + 1 == size);
1258 /* Sort the array of type, hash pairs so that when we mix in
1259 all members of the SCC the hash value becomes independent of
1260 the order in which we visited the SCC. Disregard hashes equal to
1261 the hash of the type we mix into because we cannot guarantee
1262 a stable sort for those across different TUs. */
1263 qsort (pairs, size, sizeof (struct type_hash_pair),
1264 type_hash_pair_compare);
1265 for (i = 0; i < size; ++i)
1267 hashval_t hash;
1268 m = ggc_alloc_cleared_tree_int_map ();
1269 m->base.from = pairs[i].type;
1270 hash = pairs[i].hash;
1271 /* Skip same hashes. */
1272 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
1274 for (; j < size; ++j)
1275 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
1276 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
1277 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
1278 m->to = hash;
1279 if (pairs[i].type == type)
1280 v = hash;
1281 slot = htab_find_slot (type_hash_cache, m, INSERT);
1282 gcc_assert (!*slot);
1283 *slot = (void *) m;
1288 return iterative_hash_hashval_t (v, val);
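/* Illustrative note: for an SCC such as the mutually recursive pair

     struct A { struct B *b; };
     struct B { struct A *a; };

   the members' preliminary hashes are computed without mixing in each
   other, then sorted and combined above, so the final hash recorded for
   each member does not depend on whether the DFS entered the SCC at A
   or at B.  */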
1291 /* Returns a hash value for P (assumed to be a type). The hash value
1292 is computed using some distinguishing features of the type. Note
1293 that we cannot use pointer hashing here as we may be dealing with
1294 two distinct instances of the same type.
1296 This function should produce the same hash value for two compatible
1297 types according to gimple_types_compatible_p. */
1299 static hashval_t
1300 gimple_type_hash (const void *p)
1302 const_tree t = (const_tree) p;
1303 VEC(tree, heap) *sccstack = NULL;
1304 struct pointer_map_t *sccstate;
1305 struct obstack sccstate_obstack;
1306 hashval_t val;
1307 void **slot;
1308 struct tree_int_map m;
1310 m.base.from = CONST_CAST_TREE (t);
1311 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1312 && *slot)
1313 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
1315 /* Perform a DFS walk and pre-hash all reachable types. */
1316 next_dfs_num = 1;
1317 sccstate = pointer_map_create ();
1318 gcc_obstack_init (&sccstate_obstack);
1319 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
1320 &sccstack, sccstate, &sccstate_obstack);
1321 VEC_free (tree, heap, sccstack);
1322 pointer_map_destroy (sccstate);
1323 obstack_free (&sccstate_obstack, NULL);
1325 return val;
1328 /* Returns nonzero if P1 and P2 are equal. */
1330 static int
1331 gimple_type_eq (const void *p1, const void *p2)
1333 const_tree t1 = (const_tree) p1;
1334 const_tree t2 = (const_tree) p2;
1335 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
1336 CONST_CAST_TREE (t2));
1340 /* Worker for gimple_register_type.
1341 Register type T in the global type table gimple_types.
1342 When REGISTERING_MV is false first recurse for the main variant of T. */
1344 static tree
1345 gimple_register_type_1 (tree t, bool registering_mv)
1347 void **slot;
1348 gimple_type_leader_entry *leader;
1350 /* If we registered this type before return the cached result. */
1351 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
1352 if (leader->type == t)
1353 return leader->leader;
1355 /* Always register the main variant first. This is important so we
1356 pick up the non-typedef variants as canonical, otherwise we'll end
1357 up taking typedef ids for structure tags during comparison.
1358 It also makes sure that main variants will be merged to main variants.
1359 As we are operating on a possibly partially fixed up type graph
1360 do not bother to recurse more than once, otherwise we may end up
1361 walking in circles.
1362 If we are registering a main variant it will either remain its
1363 own main variant or it will be merged to something else in which
1364 case we do not care for the main variant leader. */
1365 if (!registering_mv
1366 && TYPE_MAIN_VARIANT (t) != t)
1367 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
1369 /* See if we already have an equivalent type registered. */
1370 slot = htab_find_slot (gimple_types, t, INSERT);
1371 if (*slot
1372 && *(tree *)slot != t)
1374 tree new_type = (tree) *((tree *) slot);
1375 leader->type = t;
1376 leader->leader = new_type;
1377 return new_type;
1380 /* If not, insert it to the cache and the hash. */
1381 leader->type = t;
1382 leader->leader = t;
1383 *slot = (void *) t;
1384 return t;
1387 /* Register type T in the global type table gimple_types.
1388 If another type T', compatible with T, already existed in
1389 gimple_types then return T', otherwise return T. This is used by
1390 LTO to merge identical types read from different TUs. */
1392 static tree
1393 gimple_register_type (tree t)
1395 gcc_assert (TYPE_P (t));
1396 return gimple_register_type_1 (t, false);
1399 #define GIMPLE_REGISTER_TYPE(tt) \
1400 (TREE_VISITED (tt) ? gimple_register_type (tt) : tt)
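/* Illustrative note: uniquify_nodes sets TREE_VISITED on types that did
   not prevail, so a fixup such as

     TREE_TYPE (t) = GIMPLE_REGISTER_TYPE (TREE_TYPE (t));

   leaves already-prevailing types untouched and only pays for the
   leader/hash lookup when a replacement may actually exist.  */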
1404 /* A hashtable of trees that potentially refer to variables or functions
1405 that must be replaced with their prevailing variant. */
1406 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node))) htab_t
1407 tree_with_vars;
1409 /* Remember that T is a tree that (potentially) refers to a variable
1410 or function decl that may be replaced with its prevailing variant. */
1411 static void
1412 remember_with_vars (tree t)
1414 *(tree *) htab_find_slot (tree_with_vars, t, INSERT) = t;
1417 #define LTO_FIXUP_TREE(tt) \
1418 do \
1420 if (tt) \
1422 if (TYPE_P (tt)) \
1423 (tt) = GIMPLE_REGISTER_TYPE (tt); \
1424 if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
1425 remember_with_vars (t); \
1426 if (TREE_CODE (tt) == INTEGER_CST) \
1427 (tt) = fixup_integer_cst (tt); \
1429 } while (0)
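/* Illustrative note: LTO_FIXUP_TREE rewrites its argument in place and
   also refers to the enclosing function's local T, so it is only used
   from the lto_ft_* walkers below with an lvalue field of T, e.g.

     LTO_FIXUP_TREE (TREE_TYPE (t));
     LTO_FIXUP_TREE (TREE_CHAIN (t));

   Depending on the field it may substitute a merged type, queue T via
   remember_with_vars for later decl replacement, or rebuild an
   INTEGER_CST for its new type via fixup_integer_cst.  */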
1431 static void lto_fixup_types (tree);
1433 /* Return integer_cst T with updated type. */
1435 static tree
1436 fixup_integer_cst (tree t)
1438 tree type = GIMPLE_REGISTER_TYPE (TREE_TYPE (t));
1440 if (type == TREE_TYPE (t))
1441 return t;
1443 /* If overflow was set, streamer_read_integer_cst
1444 produced a local copy of T. */
1445 if (TREE_OVERFLOW (t))
1447 TREE_TYPE (t) = type;
1448 return t;
1450 else
1451 /* Otherwise produce new shared node for the new type. */
1452 return build_int_cst_wide (type, TREE_INT_CST_LOW (t),
1453 TREE_INT_CST_HIGH (t));
1456 /* Fix up fields of a tree_typed T. */
1458 static void
1459 lto_ft_typed (tree t)
1461 LTO_FIXUP_TREE (TREE_TYPE (t));
1464 /* Fix up fields of a tree_common T. */
1466 static void
1467 lto_ft_common (tree t)
1469 lto_ft_typed (t);
1470 LTO_FIXUP_TREE (TREE_CHAIN (t));
1473 /* Fix up fields of a decl_minimal T. */
1475 static void
1476 lto_ft_decl_minimal (tree t)
1478 lto_ft_common (t);
1479 LTO_FIXUP_TREE (DECL_NAME (t));
1480 LTO_FIXUP_TREE (DECL_CONTEXT (t));
1483 /* Fix up fields of a decl_common T. */
1485 static void
1486 lto_ft_decl_common (tree t)
1488 lto_ft_decl_minimal (t);
1489 LTO_FIXUP_TREE (DECL_SIZE (t));
1490 LTO_FIXUP_TREE (DECL_SIZE_UNIT (t));
1491 LTO_FIXUP_TREE (DECL_INITIAL (t));
1492 LTO_FIXUP_TREE (DECL_ATTRIBUTES (t));
1493 LTO_FIXUP_TREE (DECL_ABSTRACT_ORIGIN (t));
1496 /* Fix up fields of a decl_with_vis T. */
1498 static void
1499 lto_ft_decl_with_vis (tree t)
1501 lto_ft_decl_common (t);
1503 /* Accessor macro has side-effects, use field-name here. */
1504 LTO_FIXUP_TREE (t->decl_with_vis.assembler_name);
1505 LTO_FIXUP_TREE (DECL_SECTION_NAME (t));
1508 /* Fix up fields of a decl_non_common T. */
1510 static void
1511 lto_ft_decl_non_common (tree t)
1513 lto_ft_decl_with_vis (t);
1514 LTO_FIXUP_TREE (DECL_ARGUMENT_FLD (t));
1515 LTO_FIXUP_TREE (DECL_RESULT_FLD (t));
1516 LTO_FIXUP_TREE (DECL_VINDEX (t));
1517 /* The C frontends may create exact duplicates for DECL_ORIGINAL_TYPE
1518 like for 'typedef enum foo foo'. We have no way to avoid
1519 merging them, and dwarf2out.c cannot deal with this,
1520 so fix this up by clearing DECL_ORIGINAL_TYPE in this case. */
1521 if (TREE_CODE (t) == TYPE_DECL
1522 && DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
1523 DECL_ORIGINAL_TYPE (t) = NULL_TREE;
1526 /* Fix up fields of a FUNCTION_DECL T. */
1528 static void
1529 lto_ft_function (tree t)
1531 lto_ft_decl_non_common (t);
1532 LTO_FIXUP_TREE (DECL_FUNCTION_PERSONALITY (t));
1535 /* Fix up fields of a field_decl T. */
1537 static void
1538 lto_ft_field_decl (tree t)
1540 lto_ft_decl_common (t);
1541 LTO_FIXUP_TREE (DECL_FIELD_OFFSET (t));
1542 LTO_FIXUP_TREE (DECL_BIT_FIELD_TYPE (t));
1543 LTO_FIXUP_TREE (DECL_QUALIFIER (t));
1544 LTO_FIXUP_TREE (DECL_FIELD_BIT_OFFSET (t));
1545 LTO_FIXUP_TREE (DECL_FCONTEXT (t));
1548 /* Fix up fields of a type T. */
1550 static void
1551 lto_ft_type (tree t)
1553 lto_ft_common (t);
1554 LTO_FIXUP_TREE (TYPE_CACHED_VALUES (t));
1555 LTO_FIXUP_TREE (TYPE_SIZE (t));
1556 LTO_FIXUP_TREE (TYPE_SIZE_UNIT (t));
1557 LTO_FIXUP_TREE (TYPE_ATTRIBUTES (t));
1558 LTO_FIXUP_TREE (TYPE_NAME (t));
1560 /* Accessors are for derived node types only. */
1561 if (!POINTER_TYPE_P (t))
1562 LTO_FIXUP_TREE (TYPE_MINVAL (t));
1563 LTO_FIXUP_TREE (TYPE_MAXVAL (t));
1565 /* Accessor is for derived node types only. */
1566 LTO_FIXUP_TREE (t->type_non_common.binfo);
1568 LTO_FIXUP_TREE (TYPE_CONTEXT (t));
1571 /* Fix up fields of a BINFO T. */
1573 static void
1574 lto_ft_binfo (tree t)
1576 unsigned HOST_WIDE_INT i, n;
1577 tree base, saved_base;
1579 lto_ft_common (t);
1580 LTO_FIXUP_TREE (BINFO_VTABLE (t));
1581 LTO_FIXUP_TREE (BINFO_OFFSET (t));
1582 LTO_FIXUP_TREE (BINFO_VIRTUALS (t));
1583 LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t));
1584 n = VEC_length (tree, BINFO_BASE_ACCESSES (t));
1585 for (i = 0; i < n; i++)
1587 saved_base = base = BINFO_BASE_ACCESS (t, i);
1588 LTO_FIXUP_TREE (base);
1589 if (base != saved_base)
1590 VEC_replace (tree, BINFO_BASE_ACCESSES (t), i, base);
1592 LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t));
1593 LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t));
1594 LTO_FIXUP_TREE (BINFO_VPTR_INDEX (t));
1595 n = BINFO_N_BASE_BINFOS (t);
1596 for (i = 0; i < n; i++)
1598 saved_base = base = BINFO_BASE_BINFO (t, i);
1599 LTO_FIXUP_TREE (base);
1600 if (base != saved_base)
1601 VEC_replace (tree, BINFO_BASE_BINFOS (t), i, base);
1605 /* Fix up fields of a CONSTRUCTOR T. */
1607 static void
1608 lto_ft_constructor (tree t)
1610 unsigned HOST_WIDE_INT idx;
1611 constructor_elt *ce;
1613 lto_ft_typed (t);
1615 for (idx = 0;
1616 VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (t), idx, ce);
1617 idx++)
1619 LTO_FIXUP_TREE (ce->index);
1620 LTO_FIXUP_TREE (ce->value);
1624 /* Fix up fields of an expression tree T. */
1626 static void
1627 lto_ft_expr (tree t)
1629 int i;
1630 lto_ft_typed (t);
1631 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
1632 LTO_FIXUP_TREE (TREE_OPERAND (t, i));
1635 /* Given a tree T, fix up fields of T by replacing types with their merged
1636 variant and other entities by an equal entity from an earlier compilation
1637 unit, or an entity being canonical in a different way. This includes
1638 for instance integer or string constants. */
1640 static void
1641 lto_fixup_types (tree t)
1643 switch (TREE_CODE (t))
1645 case IDENTIFIER_NODE:
1646 break;
1648 case TREE_LIST:
1649 LTO_FIXUP_TREE (TREE_VALUE (t));
1650 LTO_FIXUP_TREE (TREE_PURPOSE (t));
1651 LTO_FIXUP_TREE (TREE_CHAIN (t));
1652 break;
1654 case FIELD_DECL:
1655 lto_ft_field_decl (t);
1656 break;
1658 case LABEL_DECL:
1659 case CONST_DECL:
1660 case PARM_DECL:
1661 case RESULT_DECL:
1662 case IMPORTED_DECL:
1663 lto_ft_decl_common (t);
1664 break;
1666 case VAR_DECL:
1667 lto_ft_decl_with_vis (t);
1668 break;
1670 case TYPE_DECL:
1671 lto_ft_decl_non_common (t);
1672 break;
1674 case FUNCTION_DECL:
1675 lto_ft_function (t);
1676 break;
1678 case TREE_BINFO:
1679 lto_ft_binfo (t);
1680 break;
1682 case PLACEHOLDER_EXPR:
1683 lto_ft_common (t);
1684 break;
1686 case BLOCK:
1687 case TRANSLATION_UNIT_DECL:
1688 case OPTIMIZATION_NODE:
1689 case TARGET_OPTION_NODE:
1690 break;
1692 default:
1693 if (TYPE_P (t))
1694 lto_ft_type (t);
1695 else if (TREE_CODE (t) == CONSTRUCTOR)
1696 lto_ft_constructor (t);
1697 else if (CONSTANT_CLASS_P (t))
1698 LTO_FIXUP_TREE (TREE_TYPE (t));
1699 else if (EXPR_P (t))
1701 lto_ft_expr (t);
1703 else
1705 remember_with_vars (t);
1711 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1713 static enum ld_plugin_symbol_resolution
1714 get_resolution (struct data_in *data_in, unsigned index)
1716 if (data_in->globals_resolution)
1718 ld_plugin_symbol_resolution_t ret;
1719 /* We can have references to functions that are not emitted, at
1720 least in DECL_FUNCTION_PERSONALITY. So we can, and indeed have
1721 to, return LDPR_UNKNOWN in some cases. */
1722 if (VEC_length (ld_plugin_symbol_resolution_t,
1723 data_in->globals_resolution) <= index)
1724 return LDPR_UNKNOWN;
1725 ret = VEC_index (ld_plugin_symbol_resolution_t,
1726 data_in->globals_resolution,
1727 index);
1728 return ret;
1730 else
1731 /* Delay resolution finding until decl merging. */
1732 return LDPR_UNKNOWN;
1735 /* Map assigning declarations their resolutions. */
1736 static pointer_map_t *resolution_map;
1738 /* We need to record resolutions until the symbol table is read. */
1739 static void
1740 register_resolution (tree decl, enum ld_plugin_symbol_resolution resolution)
1742 if (resolution == LDPR_UNKNOWN)
1743 return;
1744 if (!resolution_map)
1745 resolution_map = pointer_map_create ();
1746 *pointer_map_insert (resolution_map, decl) = (void *)(size_t)resolution;
1749 /* Register DECL with the global symbol table and change its
1750 name if necessary to avoid name clashes for static globals across
1751 different files. */
1753 static void
1754 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl)
1756 tree context;
1758 /* Variable has file scope, not local. Need to ensure static variables
1759 between different files don't clash unexpectedly. */
1760 if (!TREE_PUBLIC (decl)
1761 && !((context = decl_function_context (decl))
1762 && auto_var_in_fn_p (decl, context)))
1764 /* ??? We normally pre-mangle names before we serialize them
1765 out. Here, in lto1, we do not know the language, and
1766 thus cannot do the mangling again. Instead, we just
1767 append a suffix to the mangled name. The resulting name,
1768 however, is not a properly-formed mangled name, and will
1769 confuse any attempt to unmangle it. */
1770 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
1771 char *label;
1773 ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
1774 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
1775 rest_of_decl_compilation (decl, 1, 0);
1776 VEC_safe_push (tree, gc, lto_global_var_decls, decl);
1779 /* If this variable has already been declared, queue the
1780 declaration for merging. */
1781 if (TREE_PUBLIC (decl))
1783 unsigned ix;
1784 if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
1785 gcc_unreachable ();
1786 register_resolution (decl, get_resolution (data_in, ix));
1791 /* Register DECL with the global symbol table and change its
1792 name if necessary to avoid name clashes for static globals across
1793 different files. DATA_IN contains descriptors and tables for the
1794 file being read. */
1796 static void
1797 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl)
1799 /* Need to ensure static entities between different files
1800 don't clash unexpectedly. */
1801 if (!TREE_PUBLIC (decl))
1803 /* We must not use the DECL_ASSEMBLER_NAME macro here, as it
1804 may set the assembler name where it was previously empty. */
1805 tree old_assembler_name = decl->decl_with_vis.assembler_name;
1807 /* FIXME lto: We normally pre-mangle names before we serialize
1808 them out. Here, in lto1, we do not know the language, and
1809 thus cannot do the mangling again. Instead, we just append a
1810 suffix to the mangled name. The resulting name, however, is
1811 not a properly-formed mangled name, and will confuse any
1812 attempt to unmangle it. */
1813 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
1814 char *label;
1816 ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
1817 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
1819 /* We may arrive here with the old assembler name not set
1820 if the function body is not needed, e.g., it has been
1821 inlined away and does not appear in the cgraph. */
1822 if (old_assembler_name)
1824 tree new_assembler_name = DECL_ASSEMBLER_NAME (decl);
1826 /* Make the original assembler name available for later use.
1827 We may have used it to indicate the section within its
1828 object file where the function body may be found.
1829 FIXME lto: Find a better way to maintain the function decl
1830 to body section mapping so we don't need this hack. */
1831 lto_record_renamed_decl (data_in->file_data,
1832 IDENTIFIER_POINTER (old_assembler_name),
1833 IDENTIFIER_POINTER (new_assembler_name));
1837 /* If this variable has already been declared, queue the
1838 declaration for merging. */
1839 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1841 unsigned ix;
1842 if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
1843 gcc_unreachable ();
1844 register_resolution (decl, get_resolution (data_in, ix));
1849 /* Given a streamer cache structure DATA_IN (holding a sequence of trees
1850 for one compilation unit) go over all trees starting at index FROM until the
1851 end of the sequence and replace fields of those trees, and the trees
1852 themselves, with their canonical variants as per gimple_register_type. */
1854 static void
1855 uniquify_nodes (struct data_in *data_in, unsigned from)
1857 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1858 unsigned len = VEC_length (tree, cache->nodes);
1859 unsigned i;
1861 /* Go backwards because children streamed for the first time come
1862 as part of their parents, and hence are created after them. */
1864 /* First register all the types in the cache. This makes sure to
1865 have the original structure in the type cycles when registering
1866 them and computing hashes. */
1867 for (i = len; i-- > from;)
1869 tree t = VEC_index (tree, cache->nodes, i);
1870 if (t && TYPE_P (t))
1872 tree newt = gimple_register_type (t);
1873 /* Mark non-prevailing types so we fix them up. No need
1874 to reset that flag afterwards - nothing that refers
1875 to those types is left and they are collected. */
1876 if (newt != t)
1877 TREE_VISITED (t) = 1;
1881 /* Second fixup all trees in the new cache entries. */
1882 for (i = len; i-- > from;)
1884 tree t = VEC_index (tree, cache->nodes, i);
1885 tree oldt = t;
1886 if (!t)
1887 continue;
1889 /* First fixup the fields of T. */
1890 lto_fixup_types (t);
1892 if (!TYPE_P (t))
1893 continue;
1895 /* Now try to find a canonical variant of T itself. */
1896 t = GIMPLE_REGISTER_TYPE (t);
1898 if (t == oldt)
1900 /* The following re-creates proper variant lists while fixing up
1901 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1902 variant list state before fixup is broken. */
1903 tree tem, mv;
1905 #ifdef ENABLE_CHECKING
1906 /* Remove us from our main variant list if we are not the
1907 variant leader. */
1908 if (TYPE_MAIN_VARIANT (t) != t)
1910 tem = TYPE_MAIN_VARIANT (t);
1911 while (tem && TYPE_NEXT_VARIANT (tem) != t)
1912 tem = TYPE_NEXT_VARIANT (tem);
1913 gcc_assert (!tem && !TYPE_NEXT_VARIANT (t));
1915 #endif
1917 /* Query our new main variant. */
1918 mv = GIMPLE_REGISTER_TYPE (TYPE_MAIN_VARIANT (t));
1920 /* If we were the variant leader and we get replaced ourselves drop
1921 all variants from our list. */
1922 if (TYPE_MAIN_VARIANT (t) == t
1923 && mv != t)
1925 tem = t;
1926 while (tem)
1928 tree tem2 = TYPE_NEXT_VARIANT (tem);
1929 TYPE_NEXT_VARIANT (tem) = NULL_TREE;
1930 tem = tem2;
1934 /* If we are not our own variant leader link us into our new leaders
1935 variant list. */
1936 if (mv != t)
1938 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1939 TYPE_NEXT_VARIANT (mv) = t;
1940 if (RECORD_OR_UNION_TYPE_P (t))
1941 TYPE_BINFO (t) = TYPE_BINFO (mv);
1942 /* Preserve the invariant that type variants share their
1943 TYPE_FIELDS. */
1944 if (RECORD_OR_UNION_TYPE_P (t)
1945 && TYPE_FIELDS (mv) != TYPE_FIELDS (t))
1947 tree f1, f2;
1948 for (f1 = TYPE_FIELDS (mv), f2 = TYPE_FIELDS (t);
1949 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1951 unsigned ix;
1952 gcc_assert (f1 != f2
1953 && DECL_NAME (f1) == DECL_NAME (f2));
1954 if (!streamer_tree_cache_lookup (cache, f2, &ix))
1955 gcc_unreachable ();
1956 /* If we're going to replace an element which we'd
1957 still visit in the next iterations, we wouldn't
1958 handle it, so do it here. We do have to handle it
1959 even though the field_decl itself will be removed,
1960 as it could refer to e.g. integer_cst which we
1961 wouldn't reach via any other way, hence they
1962 (and their type) would stay uncollected. */
1963 /* ??? We should rather make sure to replace all
1964 references to f2 with f1. That means handling
1965 COMPONENT_REFs and CONSTRUCTOR elements in
1966 lto_fixup_types and special-case the field-decl
1967 operand handling. */
1968 /* ??? Not sure the above is all relevant in this
1969 path canonicalizing TYPE_FIELDS to that of the
1970 main variant. */
1971 if (ix < i)
1972 lto_fixup_types (f2);
1973 streamer_tree_cache_insert_at (cache, f1, ix);
1975 TYPE_FIELDS (t) = TYPE_FIELDS (mv);
1979 /* Finally adjust our main variant and fix it up. */
1980 TYPE_MAIN_VARIANT (t) = mv;
1982 /* The following reconstructs the pointer chains
1983 of the new pointed-to type if we are a main variant. We do
1984 not stream those so they are broken before fixup. */
1985 if (TREE_CODE (t) == POINTER_TYPE
1986 && TYPE_MAIN_VARIANT (t) == t)
1988 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1989 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1991 else if (TREE_CODE (t) == REFERENCE_TYPE
1992 && TYPE_MAIN_VARIANT (t) == t)
1994 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1995 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1999 else
2001 if (RECORD_OR_UNION_TYPE_P (t))
2003 tree f1, f2;
2004 if (TYPE_FIELDS (t) != TYPE_FIELDS (oldt))
2005 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (oldt);
2006 f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
2008 unsigned ix;
2009 gcc_assert (f1 != f2 && DECL_NAME (f1) == DECL_NAME (f2));
2010 if (!streamer_tree_cache_lookup (cache, f2, &ix))
2011 gcc_unreachable ();
2012 /* If we're going to replace an element which we'd
2013 still visit in the next iterations, we wouldn't
2014 handle it, so do it here. We do have to handle it
2015 even though the field_decl itself will be removed,
2016 as it could refer to e.g. integer_cst which we
2017 wouldn't reach via any other way, hence they
2018 (and their type) would stay uncollected. */
2019 /* ??? We should rather make sure to replace all
2020 references to f2 with f1. That means handling
2021 COMPONENT_REFs and CONSTRUCTOR elements in
2022 lto_fixup_types and special-case the field-decl
2023 operand handling. */
2024 if (ix < i)
2025 lto_fixup_types (f2);
2026 streamer_tree_cache_insert_at (cache, f1, ix);
2030 /* If we found a tree that is equal to oldt replace it in the
2031 cache, so that further users (in the various LTO sections)
2032 make use of it. */
2033 streamer_tree_cache_insert_at (cache, t, i);
2037 /* Finally compute the canonical type of all TREE_TYPEs and register
2038 VAR_DECL and FUNCTION_DECL nodes in the symbol table.
2039 From this point there are no longer any types with
2040 TYPE_STRUCTURAL_EQUALITY_P and its type-based alias problems.
2041 This step requires the TYPE_POINTER_TO lists being present, so
2042 make sure it is done last. */
2043 for (i = len; i-- > from;)
2045 tree t = VEC_index (tree, cache->nodes, i);
2046 if (t == NULL_TREE)
2047 continue;
2049 if (TREE_CODE (t) == VAR_DECL)
2050 lto_register_var_decl_in_symtab (data_in, t);
2051 else if (TREE_CODE (t) == FUNCTION_DECL && !DECL_BUILT_IN (t))
2052 lto_register_function_decl_in_symtab (data_in, t);
2053 else if (!flag_wpa
2054 && TREE_CODE (t) == TYPE_DECL)
2055 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
2056 else if (TYPE_P (t) && !TYPE_CANONICAL (t))
2057 TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
2062 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
2063 RESOLUTIONS is the set of symbols picked by the linker (read from the
2064 resolution file when the linker plugin is being used). */
2066 static void
2067 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
2068 VEC(ld_plugin_symbol_resolution_t,heap) *resolutions)
2070 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
2071 const int decl_offset = sizeof (struct lto_decl_header);
2072 const int main_offset = decl_offset + header->decl_state_size;
2073 const int string_offset = main_offset + header->main_size;
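  /* Illustrative layout of a decls section, as implied by the offsets
     computed above (all sizes come from the lto_decl_header just read):

        0 ............... struct lto_decl_header
        decl_offset ..... decl state records (num_decl_states, then states)
        main_offset ..... main stream of trees read by stream_read_tree
        string_offset ... string table of header->string_size bytes  */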
2074 struct lto_input_block ib_main;
2075 struct data_in *data_in;
2076 unsigned int i;
2077 const uint32_t *data_ptr, *data_end;
2078 uint32_t num_decl_states;
2080 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2081 header->main_size);
2083 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
2084 header->string_size, resolutions);
2086 /* We do not uniquify the pre-loaded cache entries; those are middle-end
2087 internal types that should not be merged. */
2089 /* Read the global declarations and types. */
2090 while (ib_main.p < ib_main.len)
2092 tree t;
2093 unsigned from = VEC_length (tree, data_in->reader_cache->nodes);
2094 t = stream_read_tree (&ib_main, data_in);
2095 gcc_assert (t && ib_main.p <= ib_main.len);
2096 uniquify_nodes (data_in, from);
2099 /* Read in lto_in_decl_state objects. */
2100 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
2101 data_end =
2102 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
2103 num_decl_states = *data_ptr++;
2105 gcc_assert (num_decl_states > 0);
2106 decl_data->global_decl_state = lto_new_in_decl_state ();
2107 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
2108 decl_data->global_decl_state);
2110 /* Read in per-function decl states and enter them in hash table. */
2111 decl_data->function_decl_states =
2112 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
2114 for (i = 1; i < num_decl_states; i++)
2116 struct lto_in_decl_state *state = lto_new_in_decl_state ();
2117 void **slot;
2119 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
2120 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
2121 gcc_assert (*slot == NULL);
2122 *slot = state;
2125 if (data_ptr != data_end)
2126 internal_error ("bytecode stream: garbage at the end of symbols section");
2128 /* Set the current decl state to be the global state. */
2129 decl_data->current_decl_state = decl_data->global_decl_state;
2131 lto_data_in_delete (data_in);
2134 /* Parse hex number P; a custom version of strtoll, which is not portable. */
2136 static HOST_WIDEST_INT
2137 lto_parse_hex (const char *p)
2139 HOST_WIDEST_INT ret = 0;
2141 for (; *p != '\0'; ++p)
2143 char c = *p;
2144 unsigned char part;
2145 ret <<= 4;
2146 if (c >= '0' && c <= '9')
2147 part = c - '0';
2148 else if (c >= 'a' && c <= 'f')
2149 part = c - 'a' + 10;
2150 else if (c >= 'A' && c <= 'F')
2151 part = c - 'A' + 10;
2152 else
2153 internal_error ("could not parse hex number");
2154 ret |= part;
2157 return ret;
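/* Illustrative sketch, not part of the original sources: lto_parse_hex
   expects bare hexadecimal digits with no "0x" prefix and no sign, which
   is exactly what the "@0x%16s" fscanf in lto_resolution_read produces.  */
#if 0
static void
lto_parse_hex_example (void)
{
  HOST_WIDEST_INT off = lto_parse_hex ("1a2b");
  gcc_assert (off == 0x1a2b);
}
#endif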
2160 /* Read the linker resolution for FILE from RESOLUTION and record it on the
2161 matching sub-file entries in FILE_IDS. */
2163 static void
2164 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2166 /* We require that objects in the resolution file are in the same
2167 order as the lto1 command line. */
2168 unsigned int name_len;
2169 char *obj_name;
2170 unsigned int num_symbols;
2171 unsigned int i;
2172 struct lto_file_decl_data *file_data;
2173 splay_tree_node nd = NULL;
2175 if (!resolution)
2176 return;
2178 name_len = strlen (file->filename);
2179 obj_name = XNEWVEC (char, name_len + 1);
2180 fscanf (resolution, " "); /* Read white space. */
2182 fread (obj_name, sizeof (char), name_len, resolution);
2183 obj_name[name_len] = '\0';
2184 if (filename_cmp (obj_name, file->filename) != 0)
2185 internal_error ("unexpected file name %s in linker resolution file. "
2186 "Expected %s", obj_name, file->filename);
2187 if (file->offset != 0)
2189 int t;
2190 char offset_p[17];
2191 HOST_WIDEST_INT offset;
2192 t = fscanf (resolution, "@0x%16s", offset_p);
2193 if (t != 1)
2194 internal_error ("could not parse file offset");
2195 offset = lto_parse_hex (offset_p);
2196 if (offset != file->offset)
2197 internal_error ("unexpected offset");
2200 free (obj_name);
2202 fscanf (resolution, "%u", &num_symbols);
2204 for (i = 0; i < num_symbols; i++)
2206 int t;
2207 unsigned index;
2208 unsigned HOST_WIDE_INT id;
2209 char r_str[27];
2210 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2211 unsigned int j;
2212 unsigned int lto_resolution_str_len =
2213 sizeof (lto_resolution_str) / sizeof (char *);
2214 res_pair rp;
2216 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2217 &index, &id, r_str);
2218 if (t != 3)
2219 internal_error ("invalid line in the resolution file");
2221 for (j = 0; j < lto_resolution_str_len; j++)
2223 if (strcmp (lto_resolution_str[j], r_str) == 0)
2225 r = (enum ld_plugin_symbol_resolution) j;
2226 break;
2229 if (j == lto_resolution_str_len)
2230 internal_error ("invalid resolution in the resolution file");
2232 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2234 nd = lto_splay_tree_lookup (file_ids, id);
2235 if (nd == NULL)
2236 internal_error ("resolution sub id " HOST_WIDE_INT_PRINT_HEX_PURE
2237 " not in object file", id);
2240 file_data = (struct lto_file_decl_data *)nd->value;
2241 /* The indexes are very sparse. To save memory, store them in a compact
2242 format that is unpacked only when the subfile is processed. */
2243 rp.res = r;
2244 rp.index = index;
2245 VEC_safe_push (res_pair, heap, file_data->respairs, rp);
2246 if (file_data->max_index < index)
2247 file_data->max_index = index;
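/* Illustrative sketch, not part of the original sources: the per-file
   fragment parsed above, as implied by the fscanf formats (object file
   name, optional "@0x<offset>", symbol count, then one
   "index sub-id resolution symbol-name" line per symbol).  The keywords
   PREVAILING_DEF and RESOLVED_IR are assumed entries of lto_resolution_str.

     /tmp/a.o
     2
     5 0 PREVAILING_DEF foo
     9 0 RESOLVED_IR bar
*/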
2251 /* List of file_decl_datas */
2252 struct file_data_list
2254 struct lto_file_decl_data *first, *last;
2257 /* Is NAME the name of an id'ed LTO section? If so, store the id in *ID. */
2259 static int
2260 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2262 const char *s;
2264 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2265 return 0;
2266 s = strrchr (name, '.');
2267 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
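/* Illustrative sketch, not part of the original sources: for a section named
   LTO_SECTION_NAME_PREFIX "foo.1f2e" the component after the last '.' is the
   hexadecimal sub-file id, so lto_section_with_id stores 0x1f2e in *ID and
   returns 1; a name without the prefix makes it return 0.  */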
2270 /* Create the file_data for each sub-file id. */
2272 static int
2273 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2274 struct file_data_list *list)
2276 struct lto_section_slot s_slot, *new_slot;
2277 unsigned HOST_WIDE_INT id;
2278 splay_tree_node nd;
2279 void **hash_slot;
2280 char *new_name;
2281 struct lto_file_decl_data *file_data;
2283 if (!lto_section_with_id (ls->name, &id))
2284 return 1;
2286 /* Find the hash table for this sub-module id. */
2287 nd = lto_splay_tree_lookup (file_ids, id);
2288 if (nd != NULL)
2290 file_data = (struct lto_file_decl_data *)nd->value;
2292 else
2294 file_data = ggc_alloc_lto_file_decl_data ();
2295 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2296 file_data->id = id;
2297 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2298 lto_splay_tree_insert (file_ids, id, file_data);
2300 /* Maintain list in linker order */
2301 if (!list->first)
2302 list->first = file_data;
2303 if (list->last)
2304 list->last->next = file_data;
2305 list->last = file_data;
2308 /* Copy section into sub module hash table */
2309 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2310 s_slot.name = new_name;
2311 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2312 gcc_assert (*hash_slot == NULL);
2314 new_slot = XDUP (struct lto_section_slot, ls);
2315 new_slot->name = new_name;
2316 *hash_slot = new_slot;
2317 return 1;
2320 /* Read declarations and other initializations for a FILE_DATA. */
2322 static void
2323 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2325 const char *data;
2326 size_t len;
2327 VEC(ld_plugin_symbol_resolution_t,heap) *resolutions = NULL;
2328 int i;
2329 res_pair *rp;
2331 /* Create a vector for fast access to the resolutions. We build it lazily
2332 to save memory. */
2333 VEC_safe_grow_cleared (ld_plugin_symbol_resolution_t, heap,
2334 resolutions,
2335 file_data->max_index + 1);
2336 for (i = 0; VEC_iterate (res_pair, file_data->respairs, i, rp); i++)
2337 VEC_replace (ld_plugin_symbol_resolution_t, resolutions, rp->index, rp->res);
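  /* Worked example (illustrative): respairs { (index 5, r1), (index 2, r2) }
     with max_index 5 yield a vector of length 6 whose slots 5 and 2 hold r1
     and r2 while the remaining slots stay at the cleared value.  */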
2338 VEC_free (res_pair, heap, file_data->respairs);
2340 file_data->renaming_hash_table = lto_create_renaming_table ();
2341 file_data->file_name = file->filename;
2342 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2343 if (data == NULL)
2345 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2346 return;
2348 /* Frees resolutions */
2349 lto_read_decls (file_data, data, resolutions);
2350 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2353 /* Finalize FILE_DATA in FILE and increase COUNT. */
2355 static int
2356 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2357 int *count)
2359 lto_file_finalize (file_data, file);
2360 if (cgraph_dump_file)
2361 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2362 file_data->file_name, file_data->id);
2363 (*count)++;
2364 return 0;
2367 /* Generate a TREE representation for all types and external decl
2368 entities in FILE.
2370 Read all of the globals out of the file. Then read the cgraph
2371 and process the .o index into the cgraph nodes so that later passes
2372 can open the .o file to load the function bodies and IPA information. */
2374 static struct lto_file_decl_data *
2375 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2377 struct lto_file_decl_data *file_data = NULL;
2378 splay_tree file_ids;
2379 htab_t section_hash_table;
2380 struct lto_section_slot *section;
2381 struct file_data_list file_list;
2382 struct lto_section_list section_list;
2384 memset (&section_list, 0, sizeof (struct lto_section_list));
2385 section_hash_table = lto_obj_build_section_table (file, &section_list);
2387 /* Find all sub modules in the object and put their sections into new hash
2388 tables in a splay tree. */
2389 file_ids = lto_splay_tree_new ();
2390 memset (&file_list, 0, sizeof (struct file_data_list));
2391 for (section = section_list.first; section != NULL; section = section->next)
2392 create_subid_section_table (section, file_ids, &file_list);
2394 /* Add resolutions to file ids */
2395 lto_resolution_read (file_ids, resolution_file, file);
2397 /* Finalize each lto file for each submodule in the merged object */
2398 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2399 lto_create_files_from_ids (file, file_data, count);
2401 splay_tree_delete (file_ids);
2402 htab_delete (section_hash_table);
2404 return file_list.first;
2407 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2408 #define LTO_MMAP_IO 1
2409 #endif
2411 #if LTO_MMAP_IO
2412 /* Page size of machine is used for mmap and munmap calls. */
2413 static size_t page_mask;
2414 #endif
2416 /* Get the section data of length LEN from FILE_DATA's file, starting at
2417 OFFSET. The data segment must be freed by the caller when the
2418 caller is finished with it. Returns NULL if all was not well. */
2420 static char *
2421 lto_read_section_data (struct lto_file_decl_data *file_data,
2422 intptr_t offset, size_t len)
2424 char *result;
2425 static int fd = -1;
2426 static char *fd_name;
2427 #if LTO_MMAP_IO
2428 intptr_t computed_len;
2429 intptr_t computed_offset;
2430 intptr_t diff;
2431 #endif
2433 /* Keep a single-entry file-descriptor cache. The last file we
2434 touched will get closed at exit.
2435 ??? Eventually we want to add a more sophisticated larger cache
2436 or rather fix function body streaming to not stream them in
2437 practically random order. */
2438 if (fd != -1
2439 && filename_cmp (fd_name, file_data->file_name) != 0)
2441 free (fd_name);
2442 close (fd);
2443 fd = -1;
2445 if (fd == -1)
2447 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2448 if (fd == -1)
2450 fatal_error ("Cannot open %s", file_data->file_name);
2451 return NULL;
2453 fd_name = xstrdup (file_data->file_name);
2456 #if LTO_MMAP_IO
2457 if (!page_mask)
2459 size_t page_size = sysconf (_SC_PAGE_SIZE);
2460 page_mask = ~(page_size - 1);
2463 computed_offset = offset & page_mask;
2464 diff = offset - computed_offset;
2465 computed_len = len + diff;
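  /* Worked example (illustrative, assuming a 4096-byte page):
     offset = 0x1234 and len = 0x100 give
       computed_offset = 0x1234 & ~0xfff = 0x1000
       diff            = 0x1234 - 0x1000 = 0x234
       computed_len    = 0x100 + 0x234   = 0x334
     so the mapping covers [0x1000, 0x1334) and the caller receives
     result + diff, which points at file offset 0x1234 as requested.  */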
2467 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2468 fd, computed_offset);
2469 if (result == MAP_FAILED)
2471 fatal_error ("Cannot map %s", file_data->file_name);
2472 return NULL;
2475 return result + diff;
2476 #else
2477 result = (char *) xmalloc (len);
2478 if (lseek (fd, offset, SEEK_SET) != offset
2479 || read (fd, result, len) != (ssize_t) len)
2481 free (result);
2482 fatal_error ("Cannot read %s", file_data->file_name);
2483 result = NULL;
2485 #ifdef __MINGW32__
2486 /* Native Windows doesn't support delayed unlink of an open file. So
2487 we close the file here again. This produces higher I/O load, but at least
2488 it prevents dangling file handles from blocking the unlink. */
2489 free (fd_name);
2490 fd_name = NULL;
2491 close (fd);
2492 fd = -1;
2493 #endif
2494 return result;
2495 #endif
2499 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2500 NAME will be NULL unless the section type is for a function
2501 body. */
2503 static const char *
2504 get_section_data (struct lto_file_decl_data *file_data,
2505 enum lto_section_type section_type,
2506 const char *name,
2507 size_t *len)
2509 htab_t section_hash_table = file_data->section_hash_table;
2510 struct lto_section_slot *f_slot;
2511 struct lto_section_slot s_slot;
2512 const char *section_name = lto_get_section_name (section_type, name, file_data);
2513 char *data = NULL;
2515 *len = 0;
2516 s_slot.name = section_name;
2517 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2518 if (f_slot)
2520 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2521 *len = f_slot->len;
2524 free (CONST_CAST (char *, section_name));
2525 return data;
2529 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2530 starts at OFFSET and has LEN bytes. */
2532 static void
2533 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2534 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2535 const char *name ATTRIBUTE_UNUSED,
2536 const char *offset, size_t len ATTRIBUTE_UNUSED)
2538 #if LTO_MMAP_IO
2539 intptr_t computed_len;
2540 intptr_t computed_offset;
2541 intptr_t diff;
2542 #endif
2544 #if LTO_MMAP_IO
2545 computed_offset = ((intptr_t) offset) & page_mask;
2546 diff = (intptr_t) offset - computed_offset;
2547 computed_len = len + diff;
2549 munmap ((caddr_t) computed_offset, computed_len);
2550 #else
2551 free (CONST_CAST(char *, offset));
2552 #endif
2555 static lto_file *current_lto_file;
2557 /* Helper for qsort; order partitions by descending size (instruction count).
2558 We sort from greatest to smallest so a parallel build doesn't stall on the
2559 longest compilation being executed too late. */
2561 static int
2562 cmp_partitions_size (const void *a, const void *b)
2564 const struct ltrans_partition_def *pa
2565 = *(struct ltrans_partition_def *const *)a;
2566 const struct ltrans_partition_def *pb
2567 = *(struct ltrans_partition_def *const *)b;
2568 return pb->insns - pa->insns;
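/* Illustrative note: with partition sizes { 3, 10, 7 } the comparator above
   makes VEC_qsort order them as { 10, 7, 3 }, i.e. largest first.  */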
2571 /* Helper for qsort; order partitions by the symbol order of their first node. */
2573 static int
2574 cmp_partitions_order (const void *a, const void *b)
2576 const struct ltrans_partition_def *pa
2577 = *(struct ltrans_partition_def *const *)a;
2578 const struct ltrans_partition_def *pb
2579 = *(struct ltrans_partition_def *const *)b;
2580 int ordera = -1, orderb = -1;
2582 if (lto_symtab_encoder_size (pa->encoder))
2583 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
2584 if (lto_symtab_encoder_size (pb->encoder))
2585 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
2586 return orderb - ordera;
2589 /* Write all output files in WPA mode and the file with the list of
2590 LTRANS units. */
2592 static void
2593 lto_wpa_write_files (void)
2595 unsigned i, n_sets;
2596 lto_file *file;
2597 ltrans_partition part;
2598 FILE *ltrans_output_list_stream;
2599 char *temp_filename;
2600 size_t blen;
2602 /* Open the LTRANS output list. */
2603 if (!ltrans_output_list)
2604 fatal_error ("no LTRANS output list filename provided");
2605 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2606 if (ltrans_output_list_stream == NULL)
2607 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2609 timevar_push (TV_WHOPR_WPA);
2611 FOR_EACH_VEC_ELT (ltrans_partition, ltrans_partitions, i, part)
2612 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2614 /* Find the statics that need to be promoted
2615 to globals with hidden visibility because they are accessed from multiple
2616 partitions. */
2617 lto_promote_cross_file_statics ();
2619 timevar_pop (TV_WHOPR_WPA);
2621 timevar_push (TV_WHOPR_WPA_IO);
2623 /* Generate a prefix for the LTRANS unit files. */
2624 blen = strlen (ltrans_output_list);
2625 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2626 strcpy (temp_filename, ltrans_output_list);
2627 if (blen > sizeof (".out")
2628 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2629 ".out") == 0)
2630 temp_filename[blen - sizeof (".out") + 1] = '\0';
2631 blen = strlen (temp_filename);
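  /* Worked example (illustrative): for an output list named "a.ltrans.out"
     the ".out" suffix is stripped above, so the sprintf of "%u.o" in the
     loop below produces LTRANS units "a.ltrans0.o", "a.ltrans1.o", ...  */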
2633 n_sets = VEC_length (ltrans_partition, ltrans_partitions);
2635 /* Sort partitions by size so small ones are compiled last.
2636 FIXME: Even when not reordering we may want to output one list for parallel make
2637 and another for the final link command. */
2638 VEC_qsort (ltrans_partition, ltrans_partitions,
2639 flag_toplevel_reorder ? cmp_partitions_size : cmp_partitions_order);
2640 for (i = 0; i < n_sets; i++)
2642 size_t len;
2643 ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);
2645 /* Write all the nodes in this partition. */
2646 sprintf (temp_filename + blen, "%u.o", i);
2647 file = lto_obj_file_open (temp_filename, true);
2648 if (!file)
2649 fatal_error ("lto_obj_file_open() failed");
2651 if (!quiet_flag)
2652 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2653 if (cgraph_dump_file)
2655 lto_symtab_encoder_iterator lsei;
2657 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2658 part->name, temp_filename, part->insns);
2659 fprintf (cgraph_dump_file, " Symbols in partition: ");
2660 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2661 lsei_next_in_partition (&lsei))
2663 symtab_node node = lsei_node (lsei);
2664 fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
2666 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2667 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2668 lsei_next (&lsei))
2670 symtab_node node = lsei_node (lsei);
2671 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2673 fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
2674 if (symtab_function_p (node)
2675 && lto_symtab_encoder_encode_body_p (part->encoder, cgraph (node)))
2676 fprintf (cgraph_dump_file, "(body included)");
2677 else if (symtab_variable_p (node)
2678 && lto_symtab_encoder_encode_initializer_p (part->encoder, varpool (node)))
2679 fprintf (cgraph_dump_file, "(initializer included)");
2682 fprintf (cgraph_dump_file, "\n");
2684 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2686 lto_set_current_out_file (file);
2688 ipa_write_optimization_summaries (part->encoder);
2690 lto_set_current_out_file (NULL);
2691 lto_obj_file_close (file);
2692 free (file);
2693 part->encoder = NULL;
2695 len = strlen (temp_filename);
2696 if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
2697 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2698 fatal_error ("writing to LTRANS output list %s: %m",
2699 ltrans_output_list);
2702 lto_stats.num_output_files += n_sets;
2704 /* Close the LTRANS output list. */
2705 if (fclose (ltrans_output_list_stream))
2706 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2708 free_ltrans_partitions();
2709 free (temp_filename);
2711 timevar_pop (TV_WHOPR_WPA_IO);
2715 /* If TT is a variable or function decl replace it with its
2716 prevailing variant. */
2717 #define LTO_SET_PREVAIL(tt) \
2718 do {\
2719 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
2720 tt = lto_symtab_prevailing_decl (tt); \
2721 } while (0)
2723 /* Ensure that TT isn't a replaceable var or function decl. */
2724 #define LTO_NO_PREVAIL(tt) \
2725 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
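/* Illustrative sketch, not part of the original sources: a field that may
   legitimately refer to a merged symbol, e.g. DECL_INITIAL, is handled in
   lto_fixup_prevailing_decls below as

     LTO_SET_PREVAIL (DECL_INITIAL (t));

   which rewrites the operand to lto_symtab_prevailing_decl (DECL_INITIAL (t))
   whenever it is a VAR_DECL or FUNCTION_DECL, while LTO_NO_PREVAIL merely
   asserts that a field such as DECL_NAME can never hold one.  */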
2727 /* Given a tree T replace all fields referring to variables or functions
2728 with their prevailing variant. */
2729 static void
2730 lto_fixup_prevailing_decls (tree t)
2732 enum tree_code code = TREE_CODE (t);
2733 LTO_NO_PREVAIL (TREE_TYPE (t));
2734 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2735 LTO_NO_PREVAIL (TREE_CHAIN (t));
2736 if (DECL_P (t))
2738 LTO_NO_PREVAIL (DECL_NAME (t));
2739 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2740 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2742 LTO_SET_PREVAIL (DECL_SIZE (t));
2743 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2744 LTO_SET_PREVAIL (DECL_INITIAL (t));
2745 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2746 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2748 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2750 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2751 LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
2753 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2755 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2756 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2757 LTO_NO_PREVAIL (DECL_VINDEX (t));
2759 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2760 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2761 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2763 LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t));
2764 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2765 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2766 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2767 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2770 else if (TYPE_P (t))
2772 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2773 LTO_SET_PREVAIL (TYPE_SIZE (t));
2774 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2775 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2776 LTO_NO_PREVAIL (TYPE_NAME (t));
2778 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2779 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2780 LTO_SET_PREVAIL (t->type_non_common.binfo);
2782 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2784 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2785 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2786 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2788 else if (EXPR_P (t))
2790 int i;
2791 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2792 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2794 else
2796 switch (code)
2798 case TREE_LIST:
2799 LTO_SET_PREVAIL (TREE_VALUE (t));
2800 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2801 break;
2802 default:
2803 gcc_unreachable ();
2807 #undef LTO_SET_PREVAIL
2808 #undef LTO_NO_PREVAIL
2810 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2811 replaces var and function decls with the corresponding prevailing def. */
2813 static void
2814 lto_fixup_state (struct lto_in_decl_state *state)
2816 unsigned i, si;
2817 struct lto_tree_ref_table *table;
2819 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2820 we still need to walk from all DECLs to find the reachable
2821 FUNCTION_DECLs and VAR_DECLs. */
2822 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2824 table = &state->streams[si];
2825 for (i = 0; i < table->size; i++)
2827 tree *tp = table->trees + i;
2828 if (VAR_OR_FUNCTION_DECL_P (*tp))
2829 *tp = lto_symtab_prevailing_decl (*tp);
2834 /* A callback of htab_traverse. Just extracts a state from SLOT
2835 and calls lto_fixup_state. */
2837 static int
2838 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2840 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2841 lto_fixup_state (state);
2842 return 1;
2845 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2846 prevailing one. */
2848 static void
2849 lto_fixup_decls (struct lto_file_decl_data **files)
2851 unsigned int i;
2852 htab_iterator hi;
2853 tree t;
2855 FOR_EACH_HTAB_ELEMENT (tree_with_vars, t, tree, hi)
2856 lto_fixup_prevailing_decls (t);
2858 for (i = 0; files[i]; i++)
2860 struct lto_file_decl_data *file = files[i];
2861 struct lto_in_decl_state *state = file->global_decl_state;
2862 lto_fixup_state (state);
2864 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2868 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2870 /* Turn the file data for the sub-files into a single array, so that they
2871 look like separate files to further passes. */
2873 static void
2874 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2876 struct lto_file_decl_data *n, *next;
2877 int i, k;
2879 lto_stats.num_input_files = count;
2880 all_file_decl_data
2881 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2882 /* Set the hooks so that all of the ipa passes can read in their data. */
2883 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2884 for (i = 0, k = 0; i < last_file_ix; i++)
2886 for (n = orig[i]; n != NULL; n = next)
2888 all_file_decl_data[k++] = n;
2889 next = n->next;
2890 n->next = NULL;
2893 all_file_decl_data[k] = NULL;
2894 gcc_assert (k == count);
2897 /* Input file data before flattening (i.e. splitting them into subfiles to
2898 support incremental linking). */
2899 static int real_file_count;
2900 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2902 /* Read all the symbols from the input files FNAMES. NFILES is the
2903 number of files requested in the command line. Instantiate a
2904 global call graph by aggregating all the sub-graphs found in each
2905 file. */
2907 static void
2908 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2910 unsigned int i, last_file_ix;
2911 FILE *resolution;
2912 struct cgraph_node *node;
2913 int count = 0;
2914 struct lto_file_decl_data **decl_data;
2916 init_cgraph ();
2918 timevar_push (TV_IPA_LTO_DECL_IN);
2920 real_file_decl_data
2921 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2922 real_file_count = nfiles;
2924 /* Read the resolution file. */
2925 resolution = NULL;
2926 if (resolution_file_name)
2928 int t;
2929 unsigned num_objects;
2931 resolution = fopen (resolution_file_name, "r");
2932 if (resolution == NULL)
2933 fatal_error ("could not open symbol resolution file: %m");
2935 t = fscanf (resolution, "%u", &num_objects);
2936 gcc_assert (t == 1);
2938 /* True, since the plugin splits the archives. */
2939 gcc_assert (num_objects == nfiles);
2942 tree_with_vars = htab_create_ggc (101, htab_hash_pointer, htab_eq_pointer,
2943 NULL);
2944 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
2945 tree_int_map_eq, NULL);
2946 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
2947 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
2948 (GIMPLE_TYPE_LEADER_SIZE);
2949 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
2951 if (!quiet_flag)
2952 fprintf (stderr, "Reading object files:");
2954 /* Read all of the object files specified on the command line. */
2955 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2957 struct lto_file_decl_data *file_data = NULL;
2958 if (!quiet_flag)
2960 fprintf (stderr, " %s", fnames[i]);
2961 fflush (stderr);
2964 current_lto_file = lto_obj_file_open (fnames[i], false);
2965 if (!current_lto_file)
2966 break;
2968 file_data = lto_file_read (current_lto_file, resolution, &count);
2969 if (!file_data)
2971 lto_obj_file_close (current_lto_file);
2972 free (current_lto_file);
2973 current_lto_file = NULL;
2974 break;
2977 decl_data[last_file_ix++] = file_data;
2979 lto_obj_file_close (current_lto_file);
2980 free (current_lto_file);
2981 current_lto_file = NULL;
2982 ggc_collect ();
2985 lto_flatten_files (decl_data, count, last_file_ix);
2986 lto_stats.num_input_files = count;
2987 ggc_free(decl_data);
2988 real_file_decl_data = NULL;
2990 if (resolution_file_name)
2991 fclose (resolution);
2993 /* Free gimple type merging datastructures. */
2994 htab_delete (gimple_types);
2995 gimple_types = NULL;
2996 htab_delete (type_hash_cache);
2997 type_hash_cache = NULL;
2998 free (type_pair_cache);
2999 type_pair_cache = NULL;
3000 gimple_type_leader = NULL;
3001 free_gimple_type_tables ();
3002 ggc_collect ();
3004 /* Set the hooks so that all of the ipa passes can read in their data. */
3005 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3007 timevar_pop (TV_IPA_LTO_DECL_IN);
3009 if (!quiet_flag)
3010 fprintf (stderr, "\nReading the callgraph\n");
3012 timevar_push (TV_IPA_LTO_CGRAPH_IO);
3013 /* Read the symtab. */
3014 input_symtab ();
3016 /* Store resolutions into the symbol table. */
3017 if (resolution_map)
3019 void **res;
3020 symtab_node snode;
3022 FOR_EACH_SYMBOL (snode)
3023 if (symtab_real_symbol_p (snode)
3024 && (res = pointer_map_contains (resolution_map,
3025 snode->symbol.decl)))
3026 snode->symbol.resolution
3027 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3029 pointer_map_destroy (resolution_map);
3030 resolution_map = NULL;
3033 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
3035 if (!quiet_flag)
3036 fprintf (stderr, "Merging declarations\n");
3038 timevar_push (TV_IPA_LTO_DECL_MERGE);
3039 /* Merge global decls. */
3040 lto_symtab_merge_decls ();
3042 /* If there were errors during symbol merging, bail out; we have no
3043 good way to recover here. */
3044 if (seen_error ())
3045 fatal_error ("errors during merging of translation units");
3047 /* Fixup all decls. */
3048 lto_fixup_decls (all_file_decl_data);
3049 htab_delete (tree_with_vars);
3050 tree_with_vars = NULL;
3051 ggc_collect ();
3053 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3054 /* Each pass will set the appropriate timer. */
3056 if (!quiet_flag)
3057 fprintf (stderr, "Reading summaries\n");
3059 /* Read the IPA summary data. */
3060 if (flag_ltrans)
3061 ipa_read_optimization_summaries ();
3062 else
3063 ipa_read_summaries ();
3065 for (i = 0; all_file_decl_data[i]; i++)
3067 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3068 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3069 all_file_decl_data[i]->symtab_node_encoder = NULL;
3072 /* Finally merge the cgraph according to the decl merging decisions. */
3073 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3074 if (cgraph_dump_file)
3076 fprintf (cgraph_dump_file, "Before merging:\n");
3077 dump_cgraph (cgraph_dump_file);
3078 dump_varpool (cgraph_dump_file);
3080 lto_symtab_merge_cgraph_nodes ();
3081 ggc_collect ();
3083 /* FIXME: ipa_transforms_to_apply holds the list of passes that have
3084 optimization summaries computed and need to apply changes. At the moment
3085 WHOPR only supports inlining, so we can push it here by hand. In the future
3086 we need to stream this field into the ltrans compilation. */
3087 if (flag_ltrans)
3088 FOR_EACH_DEFINED_FUNCTION (node)
3089 VEC_safe_push (ipa_opt_pass, heap,
3090 node->ipa_transforms_to_apply,
3091 (ipa_opt_pass)&pass_ipa_inline);
3093 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3095 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3097 /* Indicate that the cgraph is built and ready. */
3098 cgraph_function_flags_ready = true;
3100 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3101 ggc_free (all_file_decl_data);
3102 all_file_decl_data = NULL;
3106 /* Materialize all the bodies for all the nodes in the callgraph. */
3108 static void
3109 materialize_cgraph (void)
3111 tree decl;
3112 struct cgraph_node *node;
3113 unsigned i;
3114 timevar_id_t lto_timer;
3116 if (!quiet_flag)
3117 fprintf (stderr,
3118 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3120 /* Now that we have input the cgraph, we need to clear all of the aux
3121 nodes and read the functions if we are not running in WPA mode. */
3122 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3124 FOR_EACH_FUNCTION (node)
3126 if (node->symbol.lto_file_data)
3128 lto_materialize_function (node);
3129 lto_stats.num_input_cgraph_nodes++;
3133 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3135 /* Start the appropriate timer depending on the mode that we are
3136 operating in. */
3137 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3138 : (flag_ltrans) ? TV_WHOPR_LTRANS
3139 : TV_LTO;
3140 timevar_push (lto_timer);
3142 current_function_decl = NULL;
3143 set_cfun (NULL);
3145 /* Inform the middle end about the global variables we have seen. */
3146 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
3147 rest_of_decl_compilation (decl, 1, 0);
3149 if (!quiet_flag)
3150 fprintf (stderr, "\n");
3152 timevar_pop (lto_timer);
3156 /* Show various memory usage statistics related to LTO. */
3157 static void
3158 print_lto_report_1 (void)
3160 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3161 fprintf (stderr, "%s statistics\n", pfx);
3163 if (gimple_types)
3164 fprintf (stderr, "[%s] GIMPLE type table: size %ld, %ld elements, "
3165 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3166 (long) htab_size (gimple_types),
3167 (long) htab_elements (gimple_types),
3168 (long) gimple_types->searches,
3169 (long) gimple_types->collisions,
3170 htab_collisions (gimple_types));
3171 else
3172 fprintf (stderr, "[%s] GIMPLE type table is empty\n", pfx);
3173 if (type_hash_cache)
3174 fprintf (stderr, "[%s] GIMPLE type hash table: size %ld, %ld elements, "
3175 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3176 (long) htab_size (type_hash_cache),
3177 (long) htab_elements (type_hash_cache),
3178 (long) type_hash_cache->searches,
3179 (long) type_hash_cache->collisions,
3180 htab_collisions (type_hash_cache));
3181 else
3182 fprintf (stderr, "[%s] GIMPLE type hash table is empty\n", pfx);
3184 print_gimple_types_stats (pfx);
3185 print_lto_report (pfx);
3188 /* Perform whole program analysis (WPA) on the callgraph and write out the
3189 optimization plan. */
3191 static void
3192 do_whole_program_analysis (void)
3194 symtab_node node;
3196 timevar_start (TV_PHASE_OPT_GEN);
3198 /* Note that since we are in WPA mode, materialize_cgraph will not
3199 actually read in all the function bodies. It only materializes
3200 the decls and cgraph nodes so that analysis can be performed. */
3201 materialize_cgraph ();
3203 /* Reading in the cgraph uses different timers, start timing WPA now. */
3204 timevar_push (TV_WHOPR_WPA);
3206 if (pre_ipa_mem_report)
3208 fprintf (stderr, "Memory consumption before IPA\n");
3209 dump_memory_report (false);
3212 cgraph_function_flags_ready = true;
3214 if (cgraph_dump_file)
3216 dump_cgraph (cgraph_dump_file);
3217 dump_varpool (cgraph_dump_file);
3219 bitmap_obstack_initialize (NULL);
3220 cgraph_state = CGRAPH_STATE_IPA_SSA;
3222 execute_ipa_pass_list (all_regular_ipa_passes);
3224 if (cgraph_dump_file)
3226 fprintf (cgraph_dump_file, "Optimized ");
3227 dump_cgraph (cgraph_dump_file);
3228 dump_varpool (cgraph_dump_file);
3230 #ifdef ENABLE_CHECKING
3231 verify_cgraph ();
3232 #endif
3233 bitmap_obstack_release (NULL);
3235 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3236 timevar_pop (TV_WHOPR_WPA);
3238 timevar_push (TV_WHOPR_PARTITIONING);
3239 if (flag_lto_partition_1to1)
3240 lto_1_to_1_map ();
3241 else if (flag_lto_partition_max)
3242 lto_max_map ();
3243 else
3244 lto_balanced_map ();
3246 /* AUX pointers are used by the partitioning code to track the number of
3247 partitions a symbol is in. This is no longer needed. */
3248 FOR_EACH_SYMBOL (node)
3249 node->symbol.aux = NULL;
3251 lto_stats.num_cgraph_partitions += VEC_length (ltrans_partition,
3252 ltrans_partitions);
3253 timevar_pop (TV_WHOPR_PARTITIONING);
3255 timevar_stop (TV_PHASE_OPT_GEN);
3256 timevar_start (TV_PHASE_STREAM_OUT);
3258 if (!quiet_flag)
3260 fprintf (stderr, "\nStreaming out");
3261 fflush (stderr);
3263 lto_wpa_write_files ();
3264 if (!quiet_flag)
3265 fprintf (stderr, "\n");
3267 timevar_stop (TV_PHASE_STREAM_OUT);
3269 ggc_collect ();
3270 if (post_ipa_mem_report)
3272 fprintf (stderr, "Memory consumption after IPA\n");
3273 dump_memory_report (false);
3276 /* Show the LTO report before launching LTRANS. */
3277 if (flag_lto_report)
3278 print_lto_report_1 ();
3279 if (mem_report_wpa)
3280 dump_memory_report (true);
3284 static GTY(()) tree lto_eh_personality_decl;
3286 /* Return the LTO personality function decl. */
3288 tree
3289 lto_eh_personality (void)
3291 if (!lto_eh_personality_decl)
3293 /* Use the first personality DECL for our personality if we don't
3294 support multiple ones. This ensures that we don't artificially
3295 create the need for them in a single-language program. */
3296 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3297 lto_eh_personality_decl = first_personality_decl;
3298 else
3299 lto_eh_personality_decl = lhd_gcc_personality ();
3302 return lto_eh_personality_decl;
3305 /* Set the process name based on the LTO mode. */
3307 static void
3308 lto_process_name (void)
3310 if (flag_lto)
3311 setproctitle ("lto1-lto");
3312 if (flag_wpa)
3313 setproctitle ("lto1-wpa");
3314 if (flag_ltrans)
3315 setproctitle ("lto1-ltrans");
3319 /* Initialize the LTO front end. */
3321 static void
3322 lto_init (void)
3324 lto_process_name ();
3325 lto_streamer_hooks_init ();
3326 lto_reader_init ();
3327 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3328 memset (&lto_stats, 0, sizeof (lto_stats));
3329 bitmap_obstack_initialize (NULL);
3330 gimple_register_cfg_hooks ();
3334 /* Main entry point for the GIMPLE front end. This front end has
3335 three main personalities:
3337 - LTO (-flto). All the object files on the command line are
3338 loaded in memory and processed as a single translation unit.
3339 This is the traditional link-time optimization behavior.
3341 - WPA (-fwpa). Only the callgraph and summary information for
3342 files in the command file are loaded. A single callgraph
3343 (without function bodies) is instantiated for the whole set of
3344 files. IPA passes are only allowed to analyze the call graph
3345 and make transformation decisions. The callgraph is
3346 partitioned, each partition is written to a new object file
3347 together with the transformation decisions.
3349 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
3350 summary passes from running again. Since WPA computed summary
3351 information and decided what transformations to apply, LTRANS
3352 simply applies them. */
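/* Illustrative summary, derived from the code in this file rather than from
   GCC documentation: in -flto mode lto_main materializes the whole cgraph and
   calls compile (); in -fwpa mode it instead runs do_whole_program_analysis,
   which partitions the callgraph and streams each partition out via
   lto_wpa_write_files; the later -fltrans invocations then read the
   WPA-computed optimization summaries via ipa_read_optimization_summaries.  */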
3354 void
3355 lto_main (void)
3357 /* LTO is called as a front end, even though it is not a front end.
3358 Because it is called as a front end, TV_PHASE_PARSING and
3359 TV_PARSE_GLOBAL are active, and we need to turn them off while
3360 doing LTO. Later we turn them back on so they are active up in
3361 toplev.c. */
3362 timevar_pop (TV_PARSE_GLOBAL);
3363 timevar_stop (TV_PHASE_PARSING);
3365 timevar_start (TV_PHASE_SETUP);
3367 /* Initialize the LTO front end. */
3368 lto_init ();
3370 timevar_stop (TV_PHASE_SETUP);
3371 timevar_start (TV_PHASE_STREAM_IN);
3373 /* Read all the symbols and call graph from all the files in the
3374 command line. */
3375 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3377 timevar_stop (TV_PHASE_STREAM_IN);
3379 if (!seen_error ())
3381 /* If WPA is enabled analyze the whole call graph and create an
3382 optimization plan. Otherwise, read in all the function
3383 bodies and continue with optimization. */
3384 if (flag_wpa)
3385 do_whole_program_analysis ();
3386 else
3388 timevar_start (TV_PHASE_OPT_GEN);
3390 materialize_cgraph ();
3392 /* Let the middle end know that we have read and merged all of
3393 the input files. */
3394 compile ();
3396 timevar_stop (TV_PHASE_OPT_GEN);
3398 /* FIXME lto, if the processes spawned by WPA fail, we miss
3399 the chance to print WPA's report, so WPA will call
3400 print_lto_report before launching LTRANS. If LTRANS was
3401 launched directly by the driver we would not need to do
3402 this. */
3403 if (flag_lto_report)
3404 print_lto_report_1 ();
3408 /* Here we make LTO pretend to be a parser. */
3409 timevar_start (TV_PHASE_PARSING);
3410 timevar_push (TV_PARSE_GLOBAL);
3413 #include "gt-lto-lto.h"