/* Top-level LTO routines.
   Copyright 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by CodeSourcery, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "opts.h"
#include "toplev.h"
#include "tree.h"
#include "tree-flow.h"
#include "diagnostic-core.h"
#include "tm.h"
#include "cgraph.h"
#include "ggc.h"
#include "tree-ssa-operands.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "vec.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "common.h"
#include "debug.h"
#include "gimple.h"
#include "lto.h"
#include "lto-tree.h"
#include "lto-streamer.h"
#include "tree-streamer.h"
#include "splay-tree.h"
#include "lto-partition.h"

static GTY(()) tree first_personality_decl;
/* Returns a hash code for P.  */

static hashval_t
hash_name (const void *p)
{
  const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
  return (hashval_t) htab_hash_string (ds->name);
}


/* Returns nonzero if P1 and P2 are equal.  */

static int
eq_name (const void *p1, const void *p2)
{
  const struct lto_section_slot *s1 =
    (const struct lto_section_slot *) p1;
  const struct lto_section_slot *s2 =
    (const struct lto_section_slot *) p2;

  return strcmp (s1->name, s2->name) == 0;
}

/* Free lto_section_slot.  */

static void
free_with_string (void *arg)
{
  struct lto_section_slot *s = (struct lto_section_slot *) arg;

  free (CONST_CAST (char *, s->name));
  free (arg);
}

/* Create section hash table.  */

htab_t
lto_obj_create_section_hash_table (void)
{
  return htab_create (37, hash_name, eq_name, free_with_string);
}
/* Delete an allocated integer KEY in the splay tree.  */

static void
lto_splay_tree_delete_id (splay_tree_key key)
{
  free ((void *) key);
}

/* Compare splay tree node ids A and B.  */

static int
lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
{
  unsigned HOST_WIDE_INT ai;
  unsigned HOST_WIDE_INT bi;

  ai = *(unsigned HOST_WIDE_INT *) a;
  bi = *(unsigned HOST_WIDE_INT *) b;

  if (ai < bi)
    return -1;
  else if (ai > bi)
    return 1;
  return 0;
}

/* Look up splay tree node by ID in splay tree T.  */

static splay_tree_node
lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
{
  return splay_tree_lookup (t, (splay_tree_key) &id);
}

/* Check if KEY has ID.  */

static bool
lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
{
  return *(unsigned HOST_WIDE_INT *) key == id;
}

/* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
   The ID is allocated separately because we need HOST_WIDE_INTs which may
   be wider than a splay_tree_key.  */

static void
lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
                       struct lto_file_decl_data *file_data)
{
  unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
  *idp = id;
  splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
}

/* Create a splay tree.  */

static splay_tree
lto_splay_tree_new (void)
{
  return splay_tree_new (lto_splay_tree_compare_ids,
                         lto_splay_tree_delete_id,
                         NULL);
}
/* Return true when NODE has a clone that is analyzed (i.e. we need
   to load its body even if the node itself is not needed).  */

static bool
has_analyzed_clone_p (struct cgraph_node *node)
{
  struct cgraph_node *orig = node;
  node = node->clones;
  if (node)
    while (node != orig)
      {
        if (node->analyzed)
          return true;
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != orig && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != orig)
              node = node->next_sibling_clone;
          }
      }
  return false;
}
/* Read the function body for the function associated with NODE.  */

static void
lto_materialize_function (struct cgraph_node *node)
{
  tree decl;
  struct lto_file_decl_data *file_data;
  const char *data, *name;
  size_t len;

  decl = node->symbol.decl;
  /* Read in functions with body (analyzed nodes)
     and also functions that are needed to produce virtual clones.  */
  if (cgraph_function_with_gimple_body_p (node) || has_analyzed_clone_p (node))
    {
      /* Clones don't need to be read.  */
      if (node->clone_of)
        return;

      /* Load the function body only if not operating in WPA mode.  In
         WPA mode, the body of the function is not needed.  */
      if (!flag_wpa)
        {
          file_data = node->symbol.lto_file_data;
          name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

          /* We may have renamed the declaration, e.g., a static function.  */
          name = lto_get_decl_name_mapping (file_data, name);

          data = lto_get_section_data (file_data, LTO_section_function_body,
                                       name, &len);
          if (!data)
            fatal_error ("%s: section %s is missing",
                         file_data->file_name,
                         name);

          gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);

          push_struct_function (decl);
          announce_function (decl);
          lto_input_function_body (file_data, decl, data);
          if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
            first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
          lto_stats.num_function_bodies++;
          lto_free_section_data (file_data, LTO_section_function_body, name,
                                 data, len);
          pop_cfun ();
          ggc_collect ();
        }
    }

  /* Let the middle end know about the function.  */
  rest_of_decl_compilation (decl, 1, 0);
}
/* Decode the content of memory pointed to by DATA in the in decl
   state object STATE.  DATA_IN points to a data_in structure for
   decoding.  Return the address after the decoded object in the
   input.  */

static const uint32_t *
lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
                        struct lto_in_decl_state *state)
{
  uint32_t ix;
  tree decl;
  uint32_t i, j;

  ix = *data++;
  decl = streamer_tree_cache_get (data_in->reader_cache, ix);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      gcc_assert (decl == void_type_node);
      decl = NULL_TREE;
    }
  state->fn_decl = decl;

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      uint32_t size = *data++;
      tree *decls = ggc_alloc_vec_tree (size);

      for (j = 0; j < size; j++)
        decls[j] = streamer_tree_cache_get (data_in->reader_cache, data[j]);

      state->streams[i].size = size;
      state->streams[i].trees = decls;
      data += size;
    }

  return data;
}
/* Global type table.  FIXME, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;

static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

        -2: The pair (T1, T2) has just been inserted in the table.
         0: T1 and T2 are different types.
         1: T1 and T2 are the same type.  */

struct type_pair_d
{
  unsigned int uid1;
  unsigned int uid2;
  signed char same_p;
};
typedef struct type_pair_d *type_pair_t;
DEF_VEC_P(type_pair_t);
DEF_VEC_ALLOC_P(type_pair_t,heap);

#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;
/* Lookup the pair of types T1 and T2 in the type-pair cache.  Insert a
   new entry if none existed.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* Using iterative_hash_hashval_t would imply function calls.
     We know that UIDs are in a limited range.  */
  index = ((((unsigned HOST_WIDE_INT) uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
           % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache[index].uid1 == uid1
      && type_pair_cache[index].uid2 == uid2)
    return &type_pair_cache[index];

  type_pair_cache[index].uid1 = uid1;
  type_pair_cache[index].uid2 = uid2;
  type_pair_cache[index].same_p = -2;

  return &type_pair_cache[index];
}
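
/* Note that type_pair_cache is a direct-mapped cache: on an index
   collision lookup_type_pair simply overwrites the previous entry and
   resets same_p to -2 ("just inserted"), so an earlier cached
   comparison result may be dropped and recomputed later.  */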
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
  bool on_sccstack;
  union {
    hashval_t hash;
    signed char same_p;
  } u;
};

static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;

/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  tree type;
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;

/* Lookup an existing leader for T and return it or NULL_TREE, if
   there is none in the cache.  */

static inline tree
gimple_lookup_type_leader (tree t)
{
  gimple_type_leader_entry *leader;

  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type != t)
    return NULL_TREE;

  return leader->leader;
}
/* Return true if T1 and T2 have the same name.  Unnamed types compare
   equal only to other unnamed types.  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}
static bool
gimple_types_compatible_p_1 (tree, tree, type_pair_t,
                             VEC(type_pair_t, heap) **,
                             struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2.  Update the merging status if it is not part of
   the SCC containing the callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gtc_visit (tree t1, tree t2,
           struct sccs *state,
           VEC(type_pair_t, heap) **sccstack,
           struct pointer_map_t *sccstate,
           struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;
  tree leader1, leader2;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
                                   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
/* Worker for gimple_types_compatible.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
                             VEC(type_pair_t, heap) **sccstack,
                             struct pointer_map_t *sccstate,
                             struct obstack *sccstate_obstack)
{
  struct sccs *state;

  gcc_assert (p->same_p == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  VEC_safe_push (type_pair_t, heap, *sccstack, p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
                      DECL_CONTEXT (TYPE_NAME (t2)),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
         the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack)
          || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
          || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
        goto different_types;
      else
        {
          tree i1 = TYPE_DOMAIN (t1);
          tree i2 = TYPE_DOMAIN (t2);

          /* For an incomplete external array, the type domain can be
             NULL_TREE.  Check this condition also.  */
          if (i1 == NULL_TREE && i2 == NULL_TREE)
            goto same_types;
          else if (i1 == NULL_TREE || i2 == NULL_TREE)
            goto different_types;
          else
            {
              tree min1 = TYPE_MIN_VALUE (i1);
              tree min2 = TYPE_MIN_VALUE (i2);
              tree max1 = TYPE_MAX_VALUE (i1);
              tree max2 = TYPE_MAX_VALUE (i2);

              /* The minimum/maximum values have to be the same.  */
              if ((min1 == min2
                   || (min1 && min2
                       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
                            && TREE_CODE (min2) == PLACEHOLDER_EXPR)
                           || operand_equal_p (min1, min2, 0))))
                  && (max1 == max2
                      || (max1 && max2
                          && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
                               && TREE_CODE (max2) == PLACEHOLDER_EXPR)
                              || operand_equal_p (max1, max2, 0)))))
                goto same_types;
              else
                goto different_types;
            }
        }

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
         are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      if (!comp_type_attributes (t1, t2))
        goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        goto same_types;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
                              state, sccstack, sccstate, sccstate_obstack))
                goto different_types;
            }

          if (parms1 || parms2)
            goto different_types;

          goto same_types;
        }

    case OFFSET_TYPE:
      {
        if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                        state, sccstack, sccstate, sccstate_obstack)
            || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
                           TYPE_OFFSET_BASETYPE (t2),
                           state, sccstack, sccstate, sccstate_obstack))
          goto different_types;

        goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* If the two pointers have different ref-all attributes,
           they can't be the same type.  */
        if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
          goto different_types;

        /* Otherwise, pointer and reference types are the same if the
           pointed-to types are the same.  */
        if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                       state, sccstack, sccstate, sccstate_obstack))
          goto same_types;

        goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
        tree min1 = TYPE_MIN_VALUE (t1);
        tree max1 = TYPE_MAX_VALUE (t1);
        tree min2 = TYPE_MIN_VALUE (t2);
        tree max2 = TYPE_MAX_VALUE (t2);
        bool min_equal_p = false;
        bool max_equal_p = false;

        /* If either type has a minimum value, the other type must
           have the same.  */
        if (min1 == NULL_TREE && min2 == NULL_TREE)
          min_equal_p = true;
        else if (min1 && min2 && operand_equal_p (min1, min2, 0))
          min_equal_p = true;

        /* Likewise, if either type has a maximum value, the other
           type must have the same.  */
        if (max1 == NULL_TREE && max2 == NULL_TREE)
          max_equal_p = true;
        else if (max1 && max2 && operand_equal_p (max1, max2, 0))
          max_equal_p = true;

        if (!min_equal_p || !max_equal_p)
          goto different_types;

        goto same_types;
      }

    case ENUMERAL_TYPE:
      {
        /* FIXME lto, we cannot check bounds on enumeral types because
           different front ends will produce different values.
           In C, enumeral types are integers, while in C++ each element
           will have its own symbolic value.  We should decide how enums
           are to be represented in GIMPLE and have each front end lower
           to that.  */
        tree v1, v2;

        /* For enumeral types, all the values must be the same.  */
        if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
          goto same_types;

        for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
             v1 && v2;
             v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
          {
            tree c1 = TREE_VALUE (v1);
            tree c2 = TREE_VALUE (v2);

            if (TREE_CODE (c1) == CONST_DECL)
              c1 = DECL_INITIAL (c1);

            if (TREE_CODE (c2) == CONST_DECL)
              c2 = DECL_INITIAL (c2);

            if (tree_int_cst_equal (c1, c2) != 1)
              goto different_types;

            if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
              goto different_types;
          }

        /* If one enumeration has more values than the other, they
           are not the same.  */
        if (v1 || v2)
          goto different_types;

        goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* For aggregate types, all the fields must be the same.  */
        for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
             f1 && f2;
             f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
          {
            /* Different field kinds are not compatible.  */
            if (TREE_CODE (f1) != TREE_CODE (f2))
              goto different_types;
            /* Field decls must have the same name and offset.  */
            if (TREE_CODE (f1) == FIELD_DECL
                && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
                    || !gimple_compare_field_offset (f1, f2)))
              goto different_types;
            /* All entities should have the same name and type.  */
            if (DECL_NAME (f1) != DECL_NAME (f2)
                || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
                               state, sccstack, sccstate, sccstate_obstack))
              goto different_types;
          }

        /* If one aggregate has more fields than the other, they
           are not the same.  */
        if (f1 || f2)
          goto different_types;

        goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
         comparison result.  */
      do
        {
          struct sccs *cstate;
          x = VEC_pop (type_pair_t, *sccstack);
          cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          x->same_p = state->u.same_p;
        }
      while (x != p);
    }

  return state->u.same_p;
}
/* Return true iff T1 and T2 are structurally identical.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  VEC(type_pair_t, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
                                     &sccstack, sccstate, &sccstate_obstack);
  VEC_free (type_pair_t, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
static hashval_t
iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
                            struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       VEC (tree, heap) **sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
                                        sccstack, sccstate, sccstate_obstack);
      if (!cstate)
        cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
         of the parents SCC mix in its hash value.  Otherwise we will
         ignore the type for hashing purposes and return the unaltered
         hash value.  */
      if (!cstate->on_sccstack)
        return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}

/* Hash NAME with the previous hash value V and return it.  */

static hashval_t
iterative_hash_name (tree name, hashval_t v)
{
  if (!name)
    return v;
  v = iterative_hash_hashval_t (TREE_CODE (name), v);
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  if (!name)
    return v;
  gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
  return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
}
/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  tree type;
  hashval_t hash;
};

/* Compare two type, hashvalue pairs.  */

static int
type_hash_pair_compare (const void *p1_, const void *p2_)
{
  const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
  const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
                            VEC(tree, heap) **sccstack,
                            struct pointer_map_t *sccstate,
                            struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
               sccstack, sccstate, sccstate_obstack);
  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
               sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
         random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
        v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
        v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
                 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
               sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
        v = visit (TYPE_METHOD_BASETYPE (type), state, v,
                   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
                 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
        {
          v = visit (TREE_VALUE (p), state, v,
                     sccstack, sccstate, sccstate_obstack);
          na++;
        }

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
        {
          v = iterative_hash_name (DECL_NAME (f), v);
          v = visit (TREE_TYPE (f), state, v,
                     sccstack, sccstate, sccstate_obstack);
          nf++;
        }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = VEC_pop (tree, *sccstack);
      /* Optimize SCC size one.  */
      if (x == type)
        {
          state->on_sccstack = false;
          m = ggc_alloc_cleared_tree_int_map ();
          m->base.from = x;
          m->to = v;
          slot = htab_find_slot (type_hash_cache, m, INSERT);
          gcc_assert (!*slot);
          *slot = (void *) m;
        }
      else
        {
          struct sccs *cstate;
          unsigned first, i, size, j;
          struct type_hash_pair *pairs;
          /* Pop off the SCC and build an array of type, hash pairs.  */
          first = VEC_length (tree, *sccstack) - 1;
          while (VEC_index (tree, *sccstack, first) != type)
            --first;
          size = VEC_length (tree, *sccstack) - first + 1;
          pairs = XALLOCAVEC (struct type_hash_pair, size);
          i = 0;
          cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          pairs[i].type = x;
          pairs[i].hash = cstate->u.hash;
          do
            {
              x = VEC_pop (tree, *sccstack);
              cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
              cstate->on_sccstack = false;
              ++i;
              pairs[i].type = x;
              pairs[i].hash = cstate->u.hash;
            }
          while (x != type);
          gcc_assert (i + 1 == size);
          /* Sort the arrays of type, hash pairs so that when we mix in
             all members of the SCC the hash value becomes independent on
             the order we visited the SCC.  Disregard hashes equal to
             the hash of the type we mix into because we cannot guarantee
             a stable sort for those across different TUs.  */
          qsort (pairs, size, sizeof (struct type_hash_pair),
                 type_hash_pair_compare);
          for (i = 0; i < size; ++i)
            {
              hashval_t hash;
              m = ggc_alloc_cleared_tree_int_map ();
              m->base.from = pairs[i].type;
              hash = pairs[i].hash;
              /* Skip same hashes.  */
              for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
                ;
              for (; j < size; ++j)
                hash = iterative_hash_hashval_t (pairs[j].hash, hash);
              for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
                hash = iterative_hash_hashval_t (pairs[j].hash, hash);
              m->to = hash;
              if (pairs[i].type == type)
                v = hash;
              slot = htab_find_slot (type_hash_cache, m, INSERT);
              gcc_assert (!*slot);
              *slot = (void *) m;
            }
        }
    }

  return iterative_hash_hashval_t (v, val);
}
/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  VEC(tree, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
                                    &sccstack, sccstate, &sccstate_obstack);
  VEC_free (tree, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}


/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
                                    CONST_CAST_TREE (t2));
}
/* Worker for gimple_register_type.
   Register type T in the global type table gimple_types.
   When REGISTERING_MV is false first recurse for the main variant of T.  */

static tree
gimple_register_type_1 (tree t, bool registering_mv)
{
  void **slot;
  gimple_type_leader_entry *leader;

  /* If we registered this type before return the cached result.  */
  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type == t)
    return leader->leader;

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.
     It also makes sure that main variants will be merged to main variants.
     As we are operating on a possibly partially fixed up type graph
     do not bother to recurse more than once, otherwise we may end up
     walking in circles.
     If we are registering a main variant it will either remain its
     own main variant or it will be merged to something else in which
     case we do not care for the main variant leader.  */
  if (!registering_mv
      && TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);

  /* See if we already have an equivalent type registered.  */
  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *) slot != t)
    {
      tree new_type = (tree) *((tree *) slot);
      leader->type = t;
      leader->leader = new_type;
      return new_type;
    }

  /* If not, insert it to the cache and the hash.  */
  leader->type = t;
  leader->leader = t;
  *slot = (void *) t;
  return t;
}

/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

static tree
gimple_register_type (tree t)
{
  gcc_assert (TYPE_P (t));
  return gimple_register_type_1 (t, false);
}

#define GIMPLE_REGISTER_TYPE(tt) \
  (TREE_VISITED (tt) ? gimple_register_type (tt) : tt)
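
/* TREE_VISITED is set on non-prevailing types by uniquify_nodes below,
   so GIMPLE_REGISTER_TYPE only needs to look up a leader for types that
   are known to require fixing up.  */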
/* A hashtable of trees that potentially refer to variables or functions
   that must be replaced with their prevailing variant.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node))) htab_t
  tree_with_vars;

/* Remember that T is a tree that (potentially) refers to a variable
   or function decl that may be replaced with its prevailing variant.  */
static void
remember_with_vars (tree t)
{
  *(tree *) htab_find_slot (tree_with_vars, t, INSERT) = t;
}

#define LTO_FIXUP_TREE(tt) \
  do \
    { \
      if (tt) \
	{ \
	  if (TYPE_P (tt)) \
	    (tt) = GIMPLE_REGISTER_TYPE (tt); \
	  if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
	    remember_with_vars (t); \
	} \
    } while (0)

static void lto_fixup_types (tree);
/* Fix up fields of a tree_typed T.  */

static void
lto_ft_typed (tree t)
{
  LTO_FIXUP_TREE (TREE_TYPE (t));
}

/* Fix up fields of a tree_common T.  */

static void
lto_ft_common (tree t)
{
  lto_ft_typed (t);
  LTO_FIXUP_TREE (TREE_CHAIN (t));
}

/* Fix up fields of a decl_minimal T.  */

static void
lto_ft_decl_minimal (tree t)
{
  lto_ft_common (t);
  LTO_FIXUP_TREE (DECL_NAME (t));
  LTO_FIXUP_TREE (DECL_CONTEXT (t));
}

/* Fix up fields of a decl_common T.  */

static void
lto_ft_decl_common (tree t)
{
  lto_ft_decl_minimal (t);
  LTO_FIXUP_TREE (DECL_SIZE (t));
  LTO_FIXUP_TREE (DECL_SIZE_UNIT (t));
  LTO_FIXUP_TREE (DECL_INITIAL (t));
  LTO_FIXUP_TREE (DECL_ATTRIBUTES (t));
  LTO_FIXUP_TREE (DECL_ABSTRACT_ORIGIN (t));
}

/* Fix up fields of a decl_with_vis T.  */

static void
lto_ft_decl_with_vis (tree t)
{
  lto_ft_decl_common (t);

  /* Accessor macro has side-effects, use field-name here.  */
  LTO_FIXUP_TREE (t->decl_with_vis.assembler_name);
  LTO_FIXUP_TREE (DECL_SECTION_NAME (t));
}

/* Fix up fields of a decl_non_common T.  */

static void
lto_ft_decl_non_common (tree t)
{
  lto_ft_decl_with_vis (t);
  LTO_FIXUP_TREE (DECL_ARGUMENT_FLD (t));
  LTO_FIXUP_TREE (DECL_RESULT_FLD (t));
  LTO_FIXUP_TREE (DECL_VINDEX (t));
  /* The C frontends may create exact duplicates for DECL_ORIGINAL_TYPE
     like for 'typedef enum foo foo'.  We have no way of avoiding to
     merge them and dwarf2out.c cannot deal with this,
     so fix this up by clearing DECL_ORIGINAL_TYPE in this case.  */
  if (TREE_CODE (t) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
    DECL_ORIGINAL_TYPE (t) = NULL_TREE;
}

/* Fix up fields of a FUNCTION_DECL T.  */

static void
lto_ft_function (tree t)
{
  lto_ft_decl_non_common (t);
  LTO_FIXUP_TREE (DECL_FUNCTION_PERSONALITY (t));
}

/* Fix up fields of a field_decl T.  */

static void
lto_ft_field_decl (tree t)
{
  lto_ft_decl_common (t);
  LTO_FIXUP_TREE (DECL_FIELD_OFFSET (t));
  LTO_FIXUP_TREE (DECL_BIT_FIELD_TYPE (t));
  LTO_FIXUP_TREE (DECL_QUALIFIER (t));
  LTO_FIXUP_TREE (DECL_FIELD_BIT_OFFSET (t));
  LTO_FIXUP_TREE (DECL_FCONTEXT (t));
}

/* Fix up fields of a type T.  */

static void
lto_ft_type (tree t)
{
  lto_ft_common (t);
  LTO_FIXUP_TREE (TYPE_CACHED_VALUES (t));
  LTO_FIXUP_TREE (TYPE_SIZE (t));
  LTO_FIXUP_TREE (TYPE_SIZE_UNIT (t));
  LTO_FIXUP_TREE (TYPE_ATTRIBUTES (t));
  LTO_FIXUP_TREE (TYPE_NAME (t));

  /* Accessors are for derived node types only.  */
  if (!POINTER_TYPE_P (t))
    LTO_FIXUP_TREE (TYPE_MINVAL (t));
  LTO_FIXUP_TREE (TYPE_MAXVAL (t));

  /* Accessor is for derived node types only.  */
  LTO_FIXUP_TREE (t->type_non_common.binfo);

  LTO_FIXUP_TREE (TYPE_CONTEXT (t));
}
/* Fix up fields of a BINFO T.  */

static void
lto_ft_binfo (tree t)
{
  unsigned HOST_WIDE_INT i, n;
  tree base, saved_base;

  lto_ft_common (t);
  LTO_FIXUP_TREE (BINFO_VTABLE (t));
  LTO_FIXUP_TREE (BINFO_OFFSET (t));
  LTO_FIXUP_TREE (BINFO_VIRTUALS (t));
  LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t));
  n = VEC_length (tree, BINFO_BASE_ACCESSES (t));
  for (i = 0; i < n; i++)
    {
      saved_base = base = BINFO_BASE_ACCESS (t, i);
      LTO_FIXUP_TREE (base);
      if (base != saved_base)
        VEC_replace (tree, BINFO_BASE_ACCESSES (t), i, base);
    }
  LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t));
  LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t));
  LTO_FIXUP_TREE (BINFO_VPTR_INDEX (t));
  n = BINFO_N_BASE_BINFOS (t);
  for (i = 0; i < n; i++)
    {
      saved_base = base = BINFO_BASE_BINFO (t, i);
      LTO_FIXUP_TREE (base);
      if (base != saved_base)
        VEC_replace (tree, BINFO_BASE_BINFOS (t), i, base);
    }
}
/* Fix up fields of a CONSTRUCTOR T.  */

static void
lto_ft_constructor (tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;

  lto_ft_typed (t);

  for (idx = 0;
       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (t), idx, ce);
       idx++)
    {
      LTO_FIXUP_TREE (ce->index);
      LTO_FIXUP_TREE (ce->value);
    }
}

/* Fix up fields of an expression tree T.  */

static void
lto_ft_expr (tree t)
{
  int i;
  lto_ft_typed (t);
  for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
    LTO_FIXUP_TREE (TREE_OPERAND (t, i));
}
/* Given a tree T fixup fields of T by replacing types with their merged
   variant and other entities by an equal entity from an earlier compilation
   unit, or an entity being canonical in a different way.  This includes
   for instance integer or string constants.  */

static void
lto_fixup_types (tree t)
{
  switch (TREE_CODE (t))
    {
    case IDENTIFIER_NODE:
      break;

    case TREE_LIST:
      LTO_FIXUP_TREE (TREE_VALUE (t));
      LTO_FIXUP_TREE (TREE_PURPOSE (t));
      LTO_FIXUP_TREE (TREE_CHAIN (t));
      break;

    case FIELD_DECL:
      lto_ft_field_decl (t);
      break;

    case LABEL_DECL:
    case CONST_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case IMPORTED_DECL:
      lto_ft_decl_common (t);
      break;

    case VAR_DECL:
      lto_ft_decl_with_vis (t);
      break;

    case TYPE_DECL:
      lto_ft_decl_non_common (t);
      break;

    case FUNCTION_DECL:
      lto_ft_function (t);
      break;

    case TREE_BINFO:
      lto_ft_binfo (t);
      break;

    case PLACEHOLDER_EXPR:
      lto_ft_common (t);
      break;

    case BLOCK:
    case TRANSLATION_UNIT_DECL:
    case OPTIMIZATION_NODE:
    case TARGET_OPTION_NODE:
      break;

    default:
      if (TYPE_P (t))
        lto_ft_type (t);
      else if (TREE_CODE (t) == CONSTRUCTOR)
        lto_ft_constructor (t);
      else if (CONSTANT_CLASS_P (t))
        LTO_FIXUP_TREE (TREE_TYPE (t));
      else if (EXPR_P (t))
        {
          lto_ft_expr (t);
        }
      else
        {
          remember_with_vars (t);
        }
    }
}
/* Return the resolution for the decl with index INDEX from DATA_IN.  */

static enum ld_plugin_symbol_resolution
get_resolution (struct data_in *data_in, unsigned index)
{
  if (data_in->globals_resolution)
    {
      ld_plugin_symbol_resolution_t ret;
      /* We can have references to not emitted functions in
         DECL_FUNCTION_PERSONALITY at least.  So we can and have
         to indeed return LDPR_UNKNOWN in some cases.  */
      if (VEC_length (ld_plugin_symbol_resolution_t,
                      data_in->globals_resolution) <= index)
        return LDPR_UNKNOWN;
      ret = VEC_index (ld_plugin_symbol_resolution_t,
                       data_in->globals_resolution,
                       index);
      return ret;
    }
  else
    /* Delay resolution finding until decl merging.  */
    return LDPR_UNKNOWN;
}
/* Register DECL with the global symbol table and change its
   name if necessary to avoid name clashes for static globals across
   different files.  */

static void
lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl)
{
  tree context;

  /* Variable has file scope, not local.  Need to ensure static variables
     between different files don't clash unexpectedly.  */
  if (!TREE_PUBLIC (decl)
      && !((context = decl_function_context (decl))
           && auto_var_in_fn_p (decl, context)))
    {
      /* ??? We normally pre-mangle names before we serialize them
         out.  Here, in lto1, we do not know the language, and
         thus cannot do the mangling again.  Instead, we just
         append a suffix to the mangled name.  The resulting name,
         however, is not a properly-formed mangled name, and will
         confuse any attempt to unmangle it.  */
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
      SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
      rest_of_decl_compilation (decl, 1, 0);
      VEC_safe_push (tree, gc, lto_global_var_decls, decl);
    }

  /* If this variable has already been declared, queue the
     declaration for merging.  */
  if (TREE_PUBLIC (decl))
    {
      unsigned ix;
      if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
        gcc_unreachable ();
      lto_symtab_register_decl (decl, get_resolution (data_in, ix),
                                data_in->file_data);
    }
}
/* Register DECL with the global symbol table and change its
   name if necessary to avoid name clashes for static globals across
   different files.  DATA_IN contains descriptors and tables for the
   file being read.  */

static void
lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl)
{
  /* Need to ensure static entities between different files
     don't clash unexpectedly.  */
  if (!TREE_PUBLIC (decl))
    {
      /* We must not use the DECL_ASSEMBLER_NAME macro here, as it
         may set the assembler name where it was previously empty.  */
      tree old_assembler_name = decl->decl_with_vis.assembler_name;

      /* FIXME lto: We normally pre-mangle names before we serialize
         them out.  Here, in lto1, we do not know the language, and
         thus cannot do the mangling again.  Instead, we just append a
         suffix to the mangled name.  The resulting name, however, is
         not a properly-formed mangled name, and will confuse any
         attempt to unmangle it.  */
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
      SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));

      /* We may arrive here with the old assembler name not set
         if the function body is not needed, e.g., it has been
         inlined away and does not appear in the cgraph.  */
      if (old_assembler_name)
        {
          tree new_assembler_name = DECL_ASSEMBLER_NAME (decl);

          /* Make the original assembler name available for later use.
             We may have used it to indicate the section within its
             object file where the function body may be found.
             FIXME lto: Find a better way to maintain the function decl
             to body section mapping so we don't need this hack.  */
          lto_record_renamed_decl (data_in->file_data,
                                   IDENTIFIER_POINTER (old_assembler_name),
                                   IDENTIFIER_POINTER (new_assembler_name));
        }
    }

  /* If this function has already been declared, queue the
     declaration for merging.  */
  if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
    {
      unsigned ix;
      if (!streamer_tree_cache_lookup (data_in->reader_cache, decl, &ix))
        gcc_unreachable ();
      lto_symtab_register_decl (decl, get_resolution (data_in, ix),
                                data_in->file_data);
    }
}
/* Given a streamer cache structure DATA_IN (holding a sequence of trees
   for one compilation unit) go over all trees starting at index FROM until the
   end of the sequence and replace fields of those trees, and the trees
   themselves, with their canonical variants as per gimple_register_type.  */

static void
uniquify_nodes (struct data_in *data_in, unsigned from)
{
  struct streamer_tree_cache_d *cache = data_in->reader_cache;
  unsigned len = VEC_length (tree, cache->nodes);
  unsigned i;

  /* Go backwards because children streamed for the first time come
     as part of their parents, and hence are created after them.  */

  /* First register all the types in the cache.  This makes sure to
     have the original structure in the type cycles when registering
     them and computing hashes.  */
  for (i = len; i-- > from;)
    {
      tree t = VEC_index (tree, cache->nodes, i);
      if (t && TYPE_P (t))
        {
          tree newt = gimple_register_type (t);
          /* Mark non-prevailing types so we fix them up.  No need
             to reset that flag afterwards - nothing that refers
             to those types is left and they are collected.  */
          if (newt != t)
            TREE_VISITED (t) = 1;
        }
    }

  /* Second fixup all trees in the new cache entries.  */
  for (i = len; i-- > from;)
    {
      tree t = VEC_index (tree, cache->nodes, i);
      tree oldt = t;
      if (!t)
        continue;

      /* First fixup the fields of T.  */
      lto_fixup_types (t);

      if (!TYPE_P (t))
        continue;

      /* Now try to find a canonical variant of T itself.  */
      t = GIMPLE_REGISTER_TYPE (t);

      if (t == oldt)
        {
          /* The following re-creates proper variant lists while fixing up
             the variant leaders.  We do not stream TYPE_NEXT_VARIANT so the
             variant list state before fixup is broken.  */
          tree tem, mv;

#ifdef ENABLE_CHECKING
          /* Remove us from our main variant list if we are not the
             variant leader.  */
          if (TYPE_MAIN_VARIANT (t) != t)
            {
              tem = TYPE_MAIN_VARIANT (t);
              while (tem && TYPE_NEXT_VARIANT (tem) != t)
                tem = TYPE_NEXT_VARIANT (tem);
              gcc_assert (!tem && !TYPE_NEXT_VARIANT (t));
            }
#endif

          /* Query our new main variant.  */
          mv = GIMPLE_REGISTER_TYPE (TYPE_MAIN_VARIANT (t));

          /* If we were the variant leader and we get replaced ourselves drop
             all variants from our list.  */
          if (TYPE_MAIN_VARIANT (t) == t
              && mv != t)
            {
              tem = t;
              while (tem)
                {
                  tree tem2 = TYPE_NEXT_VARIANT (tem);
                  TYPE_NEXT_VARIANT (tem) = NULL_TREE;
                  tem = tem2;
                }
            }

          /* If we are not our own variant leader link us into our new leaders
             variant list.  */
          if (mv != t)
            {
              TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
              TYPE_NEXT_VARIANT (mv) = t;
              if (RECORD_OR_UNION_TYPE_P (t))
                TYPE_BINFO (t) = TYPE_BINFO (mv);
              /* Preserve the invariant that type variants share their
                 TYPE_FIELDS.  */
              if (RECORD_OR_UNION_TYPE_P (t)
                  && TYPE_FIELDS (mv) != TYPE_FIELDS (t))
                {
                  tree f1, f2;
                  for (f1 = TYPE_FIELDS (mv), f2 = TYPE_FIELDS (t);
                       f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
                    {
                      unsigned ix;
                      gcc_assert (f1 != f2
                                  && DECL_NAME (f1) == DECL_NAME (f2));
                      if (!streamer_tree_cache_lookup (cache, f2, &ix))
                        gcc_unreachable ();
                      /* If we're going to replace an element which we'd
                         still visit in the next iterations, we wouldn't
                         handle it, so do it here.  We do have to handle it
                         even though the field_decl itself will be removed,
                         as it could refer to e.g. integer_cst which we
                         wouldn't reach via any other way, hence they
                         (and their type) would stay uncollected.  */
                      /* ??? We should rather make sure to replace all
                         references to f2 with f1.  That means handling
                         COMPONENT_REFs and CONSTRUCTOR elements in
                         lto_fixup_types and special-case the field-decl
                         operand handling.  */
                      /* ??? Not sure the above is all relevant in this
                         path canonicalizing TYPE_FIELDS to that of the
                         main variant.  */
                      if (ix < i)
                        lto_fixup_types (f2);
                      streamer_tree_cache_insert_at (cache, f1, ix);
                    }
                  TYPE_FIELDS (t) = TYPE_FIELDS (mv);
                }
            }

          /* Finally adjust our main variant and fix it up.  */
          TYPE_MAIN_VARIANT (t) = mv;

          /* The following reconstructs the pointer chains
             of the new pointed-to type if we are a main variant.  We do
             not stream those so they are broken before fixup.  */
          if (TREE_CODE (t) == POINTER_TYPE
              && TYPE_MAIN_VARIANT (t) == t)
            {
              TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
              TYPE_POINTER_TO (TREE_TYPE (t)) = t;
            }
          else if (TREE_CODE (t) == REFERENCE_TYPE
                   && TYPE_MAIN_VARIANT (t) == t)
            {
              TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
              TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
            }
        }
      else
        {
          if (RECORD_OR_UNION_TYPE_P (t))
            {
              tree f1, f2;
              if (TYPE_FIELDS (t) != TYPE_FIELDS (oldt))
                for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (oldt);
                     f1 && f2; f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
                  {
                    unsigned ix;
                    gcc_assert (f1 != f2 && DECL_NAME (f1) == DECL_NAME (f2));
                    if (!streamer_tree_cache_lookup (cache, f2, &ix))
                      gcc_unreachable ();
                    /* If we're going to replace an element which we'd
                       still visit in the next iterations, we wouldn't
                       handle it, so do it here.  We do have to handle it
                       even though the field_decl itself will be removed,
                       as it could refer to e.g. integer_cst which we
                       wouldn't reach via any other way, hence they
                       (and their type) would stay uncollected.  */
                    /* ??? We should rather make sure to replace all
                       references to f2 with f1.  That means handling
                       COMPONENT_REFs and CONSTRUCTOR elements in
                       lto_fixup_types and special-case the field-decl
                       operand handling.  */
                    if (ix < i)
                      lto_fixup_types (f2);
                    streamer_tree_cache_insert_at (cache, f1, ix);
                  }
            }

          /* If we found a tree that is equal to oldt replace it in the
             cache, so that further users (in the various LTO sections)
             make use of it.  */
          streamer_tree_cache_insert_at (cache, t, i);
        }
    }

  /* Finally compute the canonical type of all TREE_TYPEs and register
     VAR_DECL and FUNCTION_DECL nodes in the symbol table.
     From this point there are no longer any types with
     TYPE_STRUCTURAL_EQUALITY_P and its type-based alias problems.
     This step requires the TYPE_POINTER_TO lists being present, so
     make sure it is done last.  */
  for (i = len; i-- > from;)
    {
      tree t = VEC_index (tree, cache->nodes, i);
      if (t == NULL_TREE)
        continue;

      if (TREE_CODE (t) == VAR_DECL)
        lto_register_var_decl_in_symtab (data_in, t);
      else if (TREE_CODE (t) == FUNCTION_DECL && !DECL_BUILT_IN (t))
        lto_register_function_decl_in_symtab (data_in, t);
      else if (!flag_wpa
               && TREE_CODE (t) == TYPE_DECL)
        debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
      else if (TYPE_P (t) && !TYPE_CANONICAL (t))
        TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
    }
}
2011 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
2012 RESOLUTIONS is the set of symbols picked by the linker (read from the
2013 resolution file when the linker plugin is being used). */
2015 static void
2016 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
2017 VEC(ld_plugin_symbol_resolution_t,heap) *resolutions)
2019 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
2020 const int decl_offset = sizeof (struct lto_decl_header);
2021 const int main_offset = decl_offset + header->decl_state_size;
2022 const int string_offset = main_offset + header->main_size;
2023 struct lto_input_block ib_main;
2024 struct data_in *data_in;
2025 unsigned int i;
2026 const uint32_t *data_ptr, *data_end;
2027 uint32_t num_decl_states;
2029 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
2030 header->main_size);
2032 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
2033 header->string_size, resolutions);
2035 /* We do not uniquify the pre-loaded cache entries, those are middle-end
2036 internal types that should not be merged. */
2038 /* Read the global declarations and types. */
2039 while (ib_main.p < ib_main.len)
2041 tree t;
2042 unsigned from = VEC_length (tree, data_in->reader_cache->nodes);
2043 t = stream_read_tree (&ib_main, data_in);
2044 gcc_assert (t && ib_main.p <= ib_main.len);
2045 uniquify_nodes (data_in, from);
2048 /* Read in lto_in_decl_state objects. */
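   /* Layout of the decl-state block consumed below: a leading uint32 count
      NUM_DECL_STATES, followed by the per-file global state (entry 0) and
      then NUM_DECL_STATES - 1 per-function states, which are entered into
      the FUNCTION_DECL_STATES hash table.  */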
2049 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
2050 data_end =
2051 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
2052 num_decl_states = *data_ptr++;
2054 gcc_assert (num_decl_states > 0);
2055 decl_data->global_decl_state = lto_new_in_decl_state ();
2056 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
2057 decl_data->global_decl_state);
2059 /* Read in per-function decl states and enter them in hash table. */
2060 decl_data->function_decl_states =
2061 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
2063 for (i = 1; i < num_decl_states; i++)
2065 struct lto_in_decl_state *state = lto_new_in_decl_state ();
2066 void **slot;
2068 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
2069 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
2070 gcc_assert (*slot == NULL);
2071 *slot = state;
2074 if (data_ptr != data_end)
2075 internal_error ("bytecode stream: garbage at the end of symbols section");
2077 /* Set the current decl state to be the global state. */
2078 decl_data->current_decl_state = decl_data->global_decl_state;
2080 lto_data_in_delete (data_in);
2083 /* Parse the hex number P.  Custom replacement for strtoll, which is not portable. */
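   /* For example (illustrative input), "1aF" is parsed as
      (((0x1 << 4) | 0xa) << 4) | 0xf == 0x1af (431); any character in the
      string that is not a hex digit triggers an internal error.  */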
2085 static HOST_WIDEST_INT
2086 lto_parse_hex (const char *p)
2088 HOST_WIDEST_INT ret = 0;
2090 for (; *p != '\0'; ++p)
2092 char c = *p;
2093 unsigned char part;
2094 ret <<= 4;
2095 if (c >= '0' && c <= '9')
2096 part = c - '0';
2097 else if (c >= 'a' && c <= 'f')
2098 part = c - 'a' + 10;
2099 else if (c >= 'A' && c <= 'F')
2100 part = c - 'A' + 10;
2101 else
2102 internal_error ("could not parse hex number");
2103 ret |= part;
2106 return ret;
2109 /* Read the linker resolution for FILE.  The resolution is read from the
2110 stream RESOLUTION and recorded on the sub-file data looked up in FILE_IDS. */
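   /* The format consumed by the fscanf calls below is, per object file
      (values illustrative):
        <file name>[@0x<offset>]
        <number of symbols>
        <symbol index> <sub-file id in hex> <resolution> ...
      e.g. "3 a1b PREVAILING_DEF ...".  */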
2112 static void
2113 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2115 /* We require that objects in the resolution file are in the same
2116 order as the lto1 command line. */
2117 unsigned int name_len;
2118 char *obj_name;
2119 unsigned int num_symbols;
2120 unsigned int i;
2121 struct lto_file_decl_data *file_data;
2122 splay_tree_node nd = NULL;
2124 if (!resolution)
2125 return;
2127 name_len = strlen (file->filename);
2128 obj_name = XNEWVEC (char, name_len + 1);
2129 fscanf (resolution, " "); /* Read white space. */
2131 fread (obj_name, sizeof (char), name_len, resolution);
2132 obj_name[name_len] = '\0';
2133 if (filename_cmp (obj_name, file->filename) != 0)
2134 internal_error ("unexpected file name %s in linker resolution file. "
2135 "Expected %s", obj_name, file->filename);
2136 if (file->offset != 0)
2138 int t;
2139 char offset_p[17];
2140 HOST_WIDEST_INT offset;
2141 t = fscanf (resolution, "@0x%16s", offset_p);
2142 if (t != 1)
2143 internal_error ("could not parse file offset");
2144 offset = lto_parse_hex (offset_p);
2145 if (offset != file->offset)
2146 internal_error ("unexpected offset");
2149 free (obj_name);
2151 fscanf (resolution, "%u", &num_symbols);
2153 for (i = 0; i < num_symbols; i++)
2155 int t;
2156 unsigned index;
2157 unsigned HOST_WIDE_INT id;
2158 char r_str[27];
2159 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2160 unsigned int j;
2161 unsigned int lto_resolution_str_len =
2162 sizeof (lto_resolution_str) / sizeof (char *);
2163 res_pair rp;
2165 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2166 &index, &id, r_str);
2167 if (t != 3)
2168 internal_error ("invalid line in the resolution file");
2170 for (j = 0; j < lto_resolution_str_len; j++)
2172 if (strcmp (lto_resolution_str[j], r_str) == 0)
2174 r = (enum ld_plugin_symbol_resolution) j;
2175 break;
2178 if (j == lto_resolution_str_len)
2179 internal_error ("invalid resolution in the resolution file");
2181 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2183 nd = lto_splay_tree_lookup (file_ids, id);
2184 if (nd == NULL)
2185 internal_error ("resolution sub id " HOST_WIDE_INT_PRINT_HEX_PURE
2186 " not in object file", id);
2189 file_data = (struct lto_file_decl_data *)nd->value;
2190 /* The indexes are very sparse. To save memory, store them in a compact
2191 format that is only unpacked later, when the subfile is processed. */
2192 rp.res = r;
2193 rp.index = index;
2194 VEC_safe_push (res_pair, heap, file_data->respairs, rp);
2195 if (file_data->max_index < index)
2196 file_data->max_index = index;
2200 /* List of file_decl_datas */
2201 struct file_data_list
2203 struct lto_file_decl_data *first, *last;
2206 /* Is NAME the name of an id'ed LTO section?  If so, store the id in *ID. */
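   /* For example (illustrative name), a section called
      "<LTO_SECTION_NAME_PREFIX>something.1a2b" yields id 0x1a2b, taken from
      the hex digits following the last '.'.  */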
2208 static int
2209 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2211 const char *s;
2213 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2214 return 0;
2215 s = strrchr (name, '.');
2216 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2219 /* Create a file_data for the sub-file id of section LS and add LS to it. */
2221 static int
2222 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2223 struct file_data_list *list)
2225 struct lto_section_slot s_slot, *new_slot;
2226 unsigned HOST_WIDE_INT id;
2227 splay_tree_node nd;
2228 void **hash_slot;
2229 char *new_name;
2230 struct lto_file_decl_data *file_data;
2232 if (!lto_section_with_id (ls->name, &id))
2233 return 1;
2235 /* Find hash table of sub module id */
2236 nd = lto_splay_tree_lookup (file_ids, id);
2237 if (nd != NULL)
2239 file_data = (struct lto_file_decl_data *)nd->value;
2241 else
2243 file_data = ggc_alloc_lto_file_decl_data ();
2244 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2245 file_data->id = id;
2246 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2247 lto_splay_tree_insert (file_ids, id, file_data);
2249 /* Maintain list in linker order */
2250 if (!list->first)
2251 list->first = file_data;
2252 if (list->last)
2253 list->last->next = file_data;
2254 list->last = file_data;
2257 /* Copy section into sub module hash table */
2258 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2259 s_slot.name = new_name;
2260 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2261 gcc_assert (*hash_slot == NULL);
2263 new_slot = XDUP (struct lto_section_slot, ls);
2264 new_slot->name = new_name;
2265 *hash_slot = new_slot;
2266 return 1;
2269 /* Read declarations and other initializations for a FILE_DATA. */
2271 static void
2272 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2274 const char *data;
2275 size_t len;
2276 VEC(ld_plugin_symbol_resolution_t,heap) *resolutions = NULL;
2277 int i;
2278 res_pair *rp;
2280 /* Create vector for fast access of resolution. We do this lazily
2281 to save memory. */
2282 VEC_safe_grow_cleared (ld_plugin_symbol_resolution_t, heap,
2283 resolutions,
2284 file_data->max_index + 1);
2285 for (i = 0; VEC_iterate (res_pair, file_data->respairs, i, rp); i++)
2286 VEC_replace (ld_plugin_symbol_resolution_t, resolutions, rp->index, rp->res);
2287 VEC_free (res_pair, heap, file_data->respairs);
2289 file_data->renaming_hash_table = lto_create_renaming_table ();
2290 file_data->file_name = file->filename;
2291 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2292 if (data == NULL)
2294 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2295 return;
2297 /* Frees resolutions */
2298 lto_read_decls (file_data, data, resolutions);
2299 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2302 /* Finalize FILE_DATA in FILE and increase COUNT. */
2304 static int
2305 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2306 int *count)
2308 lto_file_finalize (file_data, file);
2309 if (cgraph_dump_file)
2310 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2311 file_data->file_name, file_data->id);
2312 (*count)++;
2313 return 0;
2316 /* Generate a TREE representation for all types and external decl
2317 entities in FILE.
2319 Read all of the globals out of the file. Then read the cgraph
2320 and process the .o index into the cgraph nodes so that it can open
2321 the .o file to load the functions and ipa information. */
2323 static struct lto_file_decl_data *
2324 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2326 struct lto_file_decl_data *file_data = NULL;
2327 splay_tree file_ids;
2328 htab_t section_hash_table;
2329 struct lto_section_slot *section;
2330 struct file_data_list file_list;
2331 struct lto_section_list section_list;
2333 memset (&section_list, 0, sizeof (struct lto_section_list));
2334 section_hash_table = lto_obj_build_section_table (file, &section_list);
2336 /* Find all sub modules in the object and put their sections into new hash
2337 tables in a splay tree. */
2338 file_ids = lto_splay_tree_new ();
2339 memset (&file_list, 0, sizeof (struct file_data_list));
2340 for (section = section_list.first; section != NULL; section = section->next)
2341 create_subid_section_table (section, file_ids, &file_list);
2343 /* Add resolutions to file ids */
2344 lto_resolution_read (file_ids, resolution_file, file);
2346 /* Finalize each lto file for each submodule in the merged object */
2347 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2348 lto_create_files_from_ids (file, file_data, count);
2350 splay_tree_delete (file_ids);
2351 htab_delete (section_hash_table);
2353 return file_list.first;
2356 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2357 #define LTO_MMAP_IO 1
2358 #endif
2360 #if LTO_MMAP_IO
2361 /* Mask derived from the machine's page size; used for mmap and munmap calls. */
2362 static size_t page_mask;
2363 #endif
2365 /* Get the section data of length LEN from FILE_DATA's file starting at
2366 OFFSET. The data segment must be freed by the caller when the
2367 caller is finished. Returns NULL if all was not well. */
2369 static char *
2370 lto_read_section_data (struct lto_file_decl_data *file_data,
2371 intptr_t offset, size_t len)
2373 char *result;
2374 static int fd = -1;
2375 static char *fd_name;
2376 #if LTO_MMAP_IO
2377 intptr_t computed_len;
2378 intptr_t computed_offset;
2379 intptr_t diff;
2380 #endif
2382 /* Keep a single-entry file-descriptor cache. The last file we
2383 touched will get closed at exit.
2384 ??? Eventually we want to add a more sophisticated larger cache
2385 or rather fix function body streaming to not stream them in
2386 practically random order. */
2387 if (fd != -1
2388 && filename_cmp (fd_name, file_data->file_name) != 0)
2390 free (fd_name);
2391 close (fd);
2392 fd = -1;
2394 if (fd == -1)
2396 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2397 if (fd == -1)
2399 fatal_error ("Cannot open %s", file_data->file_name);
2400 return NULL;
2402 fd_name = xstrdup (file_data->file_name);
2405 #if LTO_MMAP_IO
2406 if (!page_mask)
2408 size_t page_size = sysconf (_SC_PAGE_SIZE);
2409 page_mask = ~(page_size - 1);
2412 computed_offset = offset & page_mask;
2413 diff = offset - computed_offset;
2414 computed_len = len + diff;
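   /* Example of the rounding above (page size 4096, illustrative):
      OFFSET 0x1234 gives computed_offset 0x1000, diff 0x234 and
      computed_len = LEN + 0x234; mmap starts at the page boundary and
      "result + diff" below points at the requested OFFSET.  */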
2416 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2417 fd, computed_offset);
2418 if (result == MAP_FAILED)
2420 fatal_error ("Cannot map %s", file_data->file_name);
2421 return NULL;
2424 return result + diff;
2425 #else
2426 result = (char *) xmalloc (len);
2427 if (lseek (fd, offset, SEEK_SET) != offset
2428 || read (fd, result, len) != (ssize_t) len)
2430 free (result);
2431 fatal_error ("Cannot read %s", file_data->file_name);
2432 result = NULL;
2434 #ifdef __MINGW32__
2435 /* Native Windows doesn't support delayed unlink of an open file, so
2436 we close the file here again. This produces a higher I/O load, but at
2437 least it prevents dangling file handles from blocking the unlink. */
2438 free (fd_name);
2439 fd_name = NULL;
2440 close (fd);
2441 fd = -1;
2442 #endif
2443 return result;
2444 #endif
2448 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2449 NAME will be NULL unless the section type is for a function
2450 body. */
2452 static const char *
2453 get_section_data (struct lto_file_decl_data *file_data,
2454 enum lto_section_type section_type,
2455 const char *name,
2456 size_t *len)
2458 htab_t section_hash_table = file_data->section_hash_table;
2459 struct lto_section_slot *f_slot;
2460 struct lto_section_slot s_slot;
2461 const char *section_name = lto_get_section_name (section_type, name, file_data);
2462 char *data = NULL;
2464 *len = 0;
2465 s_slot.name = section_name;
2466 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2467 if (f_slot)
2469 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2470 *len = f_slot->len;
2473 free (CONST_CAST (char *, section_name));
2474 return data;
2478 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2479 starts at OFFSET and has LEN bytes. */
2481 static void
2482 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2483 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2484 const char *name ATTRIBUTE_UNUSED,
2485 const char *offset, size_t len ATTRIBUTE_UNUSED)
2487 #if LTO_MMAP_IO
2488 intptr_t computed_len;
2489 intptr_t computed_offset;
2490 intptr_t diff;
2491 #endif
2493 #if LTO_MMAP_IO
2494 computed_offset = ((intptr_t) offset) & page_mask;
2495 diff = (intptr_t) offset - computed_offset;
2496 computed_len = len + diff;
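   /* Undo the page rounding applied in lto_read_section_data so the
      munmap arguments match the originally mapped region.  */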
2498 munmap ((caddr_t) computed_offset, computed_len);
2499 #else
2500 free (CONST_CAST(char *, offset));
2501 #endif
2504 static lto_file *current_lto_file;
2506 /* Helper for qsort; compare partitions by size, larger first.
2507 We sort from greatest to smallest so a parallel build doesn't stall on
2508 the longest compilation being started too late. */
2510 static int
2511 cmp_partitions_size (const void *a, const void *b)
2513 const struct ltrans_partition_def *pa
2514 = *(struct ltrans_partition_def *const *)a;
2515 const struct ltrans_partition_def *pb
2516 = *(struct ltrans_partition_def *const *)b;
2517 return pb->insns - pa->insns;
2520 /* Helper for qsort; compare partitions by the symbol order of their first node. */
2522 static int
2523 cmp_partitions_order (const void *a, const void *b)
2525 const struct ltrans_partition_def *pa
2526 = *(struct ltrans_partition_def *const *)a;
2527 const struct ltrans_partition_def *pb
2528 = *(struct ltrans_partition_def *const *)b;
2529 int ordera = -1, orderb = -1;
2531 if (lto_symtab_encoder_size (pa->encoder))
2532 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
2533 if (lto_symtab_encoder_size (pb->encoder))
2534 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
2535 return orderb - ordera;
2538 /* Write all output files in WPA mode and the file with the list of
2539 LTRANS units. */
2541 static void
2542 lto_wpa_write_files (void)
2544 unsigned i, n_sets;
2545 lto_file *file;
2546 ltrans_partition part;
2547 FILE *ltrans_output_list_stream;
2548 char *temp_filename;
2549 size_t blen;
2551 /* Open the LTRANS output list. */
2552 if (!ltrans_output_list)
2553 fatal_error ("no LTRANS output list filename provided");
2554 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2555 if (ltrans_output_list_stream == NULL)
2556 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2558 timevar_push (TV_WHOPR_WPA);
2560 FOR_EACH_VEC_ELT (ltrans_partition, ltrans_partitions, i, part)
2561 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2563 /* Find out statics that need to be promoted
2564 to globals with hidden visibility because they are accessed from multiple
2565 partitions. */
2566 lto_promote_cross_file_statics ();
2568 timevar_pop (TV_WHOPR_WPA);
2570 timevar_push (TV_WHOPR_WPA_IO);
2572 /* Generate a prefix for the LTRANS unit files. */
2573 blen = strlen (ltrans_output_list);
2574 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2575 strcpy (temp_filename, ltrans_output_list);
2576 if (blen > sizeof (".out")
2577 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2578 ".out") == 0)
2579 temp_filename[blen - sizeof (".out") + 1] = '\0';
2580 blen = strlen (temp_filename);
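   /* For example (illustrative name), an output list "foo.ltrans.out"
      yields LTRANS unit files "foo.ltrans0.o", "foo.ltrans1.o", ...  */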
2582 n_sets = VEC_length (ltrans_partition, ltrans_partitions);
2584 /* Sort partitions by size so small ones are compiled last.
2585 FIXME: Even when not reordering we may want to output one list for parallel make
2586 and another for the final link command. */
2587 VEC_qsort (ltrans_partition, ltrans_partitions,
2588 flag_toplevel_reorder ? cmp_partitions_size : cmp_partitions_order);
2589 for (i = 0; i < n_sets; i++)
2591 size_t len;
2592 ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);
2594 /* Write all the nodes in the partition. */
2595 sprintf (temp_filename + blen, "%u.o", i);
2596 file = lto_obj_file_open (temp_filename, true);
2597 if (!file)
2598 fatal_error ("lto_obj_file_open() failed");
2600 if (!quiet_flag)
2601 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2602 if (cgraph_dump_file)
2604 lto_symtab_encoder_iterator lsei;
2606 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2607 part->name, temp_filename, part->insns);
2608 fprintf (cgraph_dump_file, " Symbols in partition: ");
2609 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2610 lsei_next_in_partition (&lsei))
2612 symtab_node node = lsei_node (lsei);
2613 fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
2615 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2616 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2617 lsei_next (&lsei))
2619 symtab_node node = lsei_node (lsei);
2620 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2622 fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
2623 if (symtab_function_p (node)
2624 && lto_symtab_encoder_encode_body_p (part->encoder, cgraph (node)))
2625 fprintf (cgraph_dump_file, "(body included)");
2626 else if (symtab_variable_p (node)
2627 && lto_symtab_encoder_encode_initializer_p (part->encoder, varpool (node)))
2628 fprintf (cgraph_dump_file, "(initializer included)");
2631 fprintf (cgraph_dump_file, "\n");
2633 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2635 lto_set_current_out_file (file);
2637 ipa_write_optimization_summaries (part->encoder);
2639 lto_set_current_out_file (NULL);
2640 lto_obj_file_close (file);
2641 part->encoder = NULL;
2643 len = strlen (temp_filename);
2644 if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
2645 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2646 fatal_error ("writing to LTRANS output list %s: %m",
2647 ltrans_output_list);
2650 lto_stats.num_output_files += n_sets;
2652 /* Close the LTRANS output list. */
2653 if (fclose (ltrans_output_list_stream))
2654 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2656 free_ltrans_partitions();
2658 timevar_pop (TV_WHOPR_WPA_IO);
2662 /* If TT is a variable or function decl replace it with its
2663 prevailing variant. */
2664 #define LTO_SET_PREVAIL(tt) \
2665 do {\
2666 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
2667 tt = lto_symtab_prevailing_decl (tt); \
2668 } while (0)
2670 /* Ensure that TT isn't a replaceable var or function decl. */
2671 #define LTO_NO_PREVAIL(tt) \
2672 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2674 /* Given a tree T replace all fields referring to variables or functions
2675 with their prevailing variant. */
2676 static void
2677 lto_fixup_prevailing_decls (tree t)
2679 enum tree_code code = TREE_CODE (t);
2680 LTO_NO_PREVAIL (TREE_TYPE (t));
2681 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2682 LTO_NO_PREVAIL (TREE_CHAIN (t));
2683 if (DECL_P (t))
2685 LTO_NO_PREVAIL (DECL_NAME (t));
2686 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2687 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2689 LTO_SET_PREVAIL (DECL_SIZE (t));
2690 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2691 LTO_SET_PREVAIL (DECL_INITIAL (t));
2692 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2693 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2695 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2697 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2698 LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
2700 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2702 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2703 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2704 LTO_NO_PREVAIL (DECL_VINDEX (t));
2706 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2707 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2708 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2710 LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t));
2711 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2712 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2713 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2714 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2717 else if (TYPE_P (t))
2719 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2720 LTO_SET_PREVAIL (TYPE_SIZE (t));
2721 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2722 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2723 LTO_NO_PREVAIL (TYPE_NAME (t));
2725 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2726 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2727 LTO_SET_PREVAIL (t->type_non_common.binfo);
2729 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2731 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2732 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2733 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2735 else if (EXPR_P (t))
2737 int i;
2738 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2739 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2741 else
2743 switch (code)
2745 case TREE_LIST:
2746 LTO_SET_PREVAIL (TREE_VALUE (t));
2747 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2748 break;
2749 default:
2750 gcc_unreachable ();
2754 #undef LTO_SET_PREVAIL
2755 #undef LTO_NO_PREVAIL
2757 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2758 replaces var and function decls with the corresponding prevailing def. */
2760 static void
2761 lto_fixup_state (struct lto_in_decl_state *state)
2763 unsigned i, si;
2764 struct lto_tree_ref_table *table;
2766 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2767 we still need to walk from all DECLs to find the reachable
2768 FUNCTION_DECLs and VAR_DECLs. */
2769 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2771 table = &state->streams[si];
2772 for (i = 0; i < table->size; i++)
2774 tree *tp = table->trees + i;
2775 if (VAR_OR_FUNCTION_DECL_P (*tp))
2776 *tp = lto_symtab_prevailing_decl (*tp);
2781 /* A callback of htab_traverse. Just extracts a state from SLOT
2782 and calls lto_fixup_state. */
2784 static int
2785 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2787 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2788 lto_fixup_state (state);
2789 return 1;
2792 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2793 prevailing one. */
2795 static void
2796 lto_fixup_decls (struct lto_file_decl_data **files)
2798 unsigned int i;
2799 htab_iterator hi;
2800 tree t;
2802 FOR_EACH_HTAB_ELEMENT (tree_with_vars, t, tree, hi)
2803 lto_fixup_prevailing_decls (t);
2805 for (i = 0; files[i]; i++)
2807 struct lto_file_decl_data *file = files[i];
2808 struct lto_in_decl_state *state = file->global_decl_state;
2809 lto_fixup_state (state);
2811 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2815 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2817 /* Turn file datas for sub files into a single array, so that they look
2818 like separate files for further passes. */
2820 static void
2821 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2823 struct lto_file_decl_data *n, *next;
2824 int i, k;
2826 lto_stats.num_input_files = count;
2827 all_file_decl_data
2828 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2829 /* Set the hooks so that all of the ipa passes can read in their data. */
2830 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2831 for (i = 0, k = 0; i < last_file_ix; i++)
2833 for (n = orig[i]; n != NULL; n = next)
2835 all_file_decl_data[k++] = n;
2836 next = n->next;
2837 n->next = NULL;
2840 all_file_decl_data[k] = NULL;
2841 gcc_assert (k == count);
2844 /* Input file data before flattening (i.e. before splitting them into
2845 subfiles to support incremental linking). */
2846 static int real_file_count;
2847 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2849 /* Read all the symbols from the input files FNAMES. NFILES is the
2850 number of files requested in the command line. Instantiate a
2851 global call graph by aggregating all the sub-graphs found in each
2852 file. */
2854 static void
2855 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2857 unsigned int i, last_file_ix;
2858 FILE *resolution;
2859 struct cgraph_node *node;
2860 int count = 0;
2861 struct lto_file_decl_data **decl_data;
2863 init_cgraph ();
2865 timevar_push (TV_IPA_LTO_DECL_IN);
2867 real_file_decl_data
2868 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2869 real_file_count = nfiles;
2871 /* Read the resolution file. */
2872 resolution = NULL;
2873 if (resolution_file_name)
2875 int t;
2876 unsigned num_objects;
2878 resolution = fopen (resolution_file_name, "r");
2879 if (resolution == NULL)
2880 fatal_error ("could not open symbol resolution file: %m");
2882 t = fscanf (resolution, "%u", &num_objects);
2883 gcc_assert (t == 1);
2885 /* True, since the plugin splits the archives. */
2886 gcc_assert (num_objects == nfiles);
2889 tree_with_vars = htab_create_ggc (101, htab_hash_pointer, htab_eq_pointer,
2890 NULL);
2891 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
2892 tree_int_map_eq, NULL);
2893 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
2894 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
2895 (GIMPLE_TYPE_LEADER_SIZE);
2896 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
2898 if (!quiet_flag)
2899 fprintf (stderr, "Reading object files:");
2901 /* Read all of the object files specified on the command line. */
2902 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2904 struct lto_file_decl_data *file_data = NULL;
2905 if (!quiet_flag)
2907 fprintf (stderr, " %s", fnames[i]);
2908 fflush (stderr);
2911 current_lto_file = lto_obj_file_open (fnames[i], false);
2912 if (!current_lto_file)
2913 break;
2915 file_data = lto_file_read (current_lto_file, resolution, &count);
2916 if (!file_data)
2918 lto_obj_file_close (current_lto_file);
2919 current_lto_file = NULL;
2920 break;
2923 decl_data[last_file_ix++] = file_data;
2925 lto_obj_file_close (current_lto_file);
2926 current_lto_file = NULL;
2927 ggc_collect ();
2930 lto_flatten_files (decl_data, count, last_file_ix);
2931 lto_stats.num_input_files = count;
2932 ggc_free(decl_data);
2933 real_file_decl_data = NULL;
2935 if (resolution_file_name)
2936 fclose (resolution);
2938 /* Set the hooks so that all of the ipa passes can read in their data. */
2939 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2941 timevar_pop (TV_IPA_LTO_DECL_IN);
2943 if (!quiet_flag)
2944 fprintf (stderr, "\nReading the callgraph\n");
2946 timevar_push (TV_IPA_LTO_CGRAPH_IO);
2947 /* Read the symtab. */
2948 input_symtab ();
2949 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
2951 if (!quiet_flag)
2952 fprintf (stderr, "Merging declarations\n");
2954 timevar_push (TV_IPA_LTO_DECL_MERGE);
2955 /* Merge global decls. */
2956 lto_symtab_merge_decls ();
2958 /* If there were errors during symbol merging bail out, we have no
2959 good way to recover here. */
2960 if (seen_error ())
2961 fatal_error ("errors during merging of translation units");
2963 /* Fixup all decls and types and free the type hash tables. */
2964 lto_fixup_decls (all_file_decl_data);
2965 htab_delete (tree_with_vars);
2966 tree_with_vars = NULL;
2967 htab_delete (gimple_types);
2968 gimple_types = NULL;
2969 htab_delete (type_hash_cache);
2970 type_hash_cache = NULL;
2971 free (type_pair_cache);
2972 type_pair_cache = NULL;
2973 gimple_type_leader = NULL;
2974 free_gimple_type_tables ();
2975 ggc_collect ();
2977 timevar_pop (TV_IPA_LTO_DECL_MERGE);
2978 /* Each pass will set the appropriate timer. */
2980 if (!quiet_flag)
2981 fprintf (stderr, "Reading summaries\n");
2983 /* Read the IPA summary data. */
2984 if (flag_ltrans)
2985 ipa_read_optimization_summaries ();
2986 else
2987 ipa_read_summaries ();
2989 /* Finally merge the cgraph according to the decl merging decisions. */
2990 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
2991 if (cgraph_dump_file)
2993 fprintf (cgraph_dump_file, "Before merging:\n");
2994 dump_cgraph (cgraph_dump_file);
2995 dump_varpool (cgraph_dump_file);
2997 lto_symtab_merge_cgraph_nodes ();
2998 ggc_collect ();
3000 /* FIXME: ipa_transforms_to_apply holds the list of passes that have computed
3001 optimization summaries and need to apply their changes. At the moment WHOPR
3002 only supports inlining, so we can push it here by hand. In the future we need
3003 to stream this field into the ltrans compilation. */
3004 if (flag_ltrans)
3005 FOR_EACH_DEFINED_FUNCTION (node)
3006 VEC_safe_push (ipa_opt_pass, heap,
3007 node->ipa_transforms_to_apply,
3008 (ipa_opt_pass)&pass_ipa_inline);
3010 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3012 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3014 /* Indicate that the cgraph is built and ready. */
3015 cgraph_function_flags_ready = true;
3017 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3018 ggc_free (all_file_decl_data);
3019 all_file_decl_data = NULL;
3023 /* Materialize all the bodies for all the nodes in the callgraph. */
3025 static void
3026 materialize_cgraph (void)
3028 tree decl;
3029 struct cgraph_node *node;
3030 unsigned i;
3031 timevar_id_t lto_timer;
3033 if (!quiet_flag)
3034 fprintf (stderr,
3035 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3037 /* Now that we have input the cgraph, we need to clear all of the aux
3038 nodes and read the functions if we are not running in WPA mode. */
3039 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3041 FOR_EACH_FUNCTION (node)
3043 if (node->symbol.lto_file_data)
3045 lto_materialize_function (node);
3046 lto_stats.num_input_cgraph_nodes++;
3050 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3052 /* Start the appropriate timer depending on the mode that we are
3053 operating in. */
3054 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3055 : (flag_ltrans) ? TV_WHOPR_LTRANS
3056 : TV_LTO;
3057 timevar_push (lto_timer);
3059 current_function_decl = NULL;
3060 set_cfun (NULL);
3062 /* Inform the middle end about the global variables we have seen. */
3063 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
3064 rest_of_decl_compilation (decl, 1, 0);
3066 if (!quiet_flag)
3067 fprintf (stderr, "\n");
3069 timevar_pop (lto_timer);
3073 /* Show various memory usage statistics related to LTO. */
3074 static void
3075 print_lto_report_1 (void)
3077 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3078 fprintf (stderr, "%s statistics\n", pfx);
3080 if (gimple_types)
3081 fprintf (stderr, "[%s] GIMPLE type table: size %ld, %ld elements, "
3082 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3083 (long) htab_size (gimple_types),
3084 (long) htab_elements (gimple_types),
3085 (long) gimple_types->searches,
3086 (long) gimple_types->collisions,
3087 htab_collisions (gimple_types));
3088 else
3089 fprintf (stderr, "[%s] GIMPLE type table is empty\n", pfx);
3090 if (type_hash_cache)
3091 fprintf (stderr, "[%s] GIMPLE type hash table: size %ld, %ld elements, "
3092 "%ld searches, %ld collisions (ratio: %f)\n", pfx,
3093 (long) htab_size (type_hash_cache),
3094 (long) htab_elements (type_hash_cache),
3095 (long) type_hash_cache->searches,
3096 (long) type_hash_cache->collisions,
3097 htab_collisions (type_hash_cache));
3098 else
3099 fprintf (stderr, "[%s] GIMPLE type hash table is empty\n", pfx);
3101 print_gimple_types_stats (pfx);
3102 print_lto_report (pfx);
3105 /* Perform whole program analysis (WPA) on the callgraph and write out the
3106 optimization plan. */
3108 static void
3109 do_whole_program_analysis (void)
3111 symtab_node node;
3113 timevar_start (TV_PHASE_OPT_GEN);
3115 /* Note that since we are in WPA mode, materialize_cgraph will not
3116 actually read in all the function bodies. It only materializes
3117 the decls and cgraph nodes so that analysis can be performed. */
3118 materialize_cgraph ();
3120 /* Reading in the cgraph uses different timers, start timing WPA now. */
3121 timevar_push (TV_WHOPR_WPA);
3123 if (pre_ipa_mem_report)
3125 fprintf (stderr, "Memory consumption before IPA\n");
3126 dump_memory_report (false);
3129 cgraph_function_flags_ready = true;
3131 if (cgraph_dump_file)
3133 dump_cgraph (cgraph_dump_file);
3134 dump_varpool (cgraph_dump_file);
3136 bitmap_obstack_initialize (NULL);
3137 cgraph_state = CGRAPH_STATE_IPA_SSA;
3139 execute_ipa_pass_list (all_regular_ipa_passes);
3141 if (cgraph_dump_file)
3143 fprintf (cgraph_dump_file, "Optimized ");
3144 dump_cgraph (cgraph_dump_file);
3145 dump_varpool (cgraph_dump_file);
3147 #ifdef ENABLE_CHECKING
3148 verify_cgraph ();
3149 #endif
3150 bitmap_obstack_release (NULL);
3152 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3153 timevar_pop (TV_WHOPR_WPA);
3155 timevar_push (TV_WHOPR_PARTITIONING);
3156 if (flag_lto_partition_1to1)
3157 lto_1_to_1_map ();
3158 else if (flag_lto_partition_max)
3159 lto_max_map ();
3160 else
3161 lto_balanced_map ();
3163 /* AUX pointers are used by the partitioning code to keep track of the number
3164 of partitions a symbol is in. This is no longer needed. */
3165 FOR_EACH_SYMBOL (node)
3166 node->symbol.aux = NULL;
3168 lto_stats.num_cgraph_partitions += VEC_length (ltrans_partition,
3169 ltrans_partitions);
3170 timevar_pop (TV_WHOPR_PARTITIONING);
3172 timevar_stop (TV_PHASE_OPT_GEN);
3173 timevar_start (TV_PHASE_STREAM_OUT);
3175 if (!quiet_flag)
3177 fprintf (stderr, "\nStreaming out");
3178 fflush (stderr);
3180 lto_wpa_write_files ();
3181 if (!quiet_flag)
3182 fprintf (stderr, "\n");
3184 timevar_stop (TV_PHASE_STREAM_OUT);
3186 ggc_collect ();
3187 if (post_ipa_mem_report)
3189 fprintf (stderr, "Memory consumption after IPA\n");
3190 dump_memory_report (false);
3193 /* Show the LTO report before launching LTRANS. */
3194 if (flag_lto_report)
3195 print_lto_report_1 ();
3196 if (mem_report_wpa)
3197 dump_memory_report (true);
3201 static GTY(()) tree lto_eh_personality_decl;
3203 /* Return the LTO personality function decl. */
3205 tree
3206 lto_eh_personality (void)
3208 if (!lto_eh_personality_decl)
3210 /* Use the first personality DECL for our personality if we don't
3211 support multiple ones. This ensures that we don't artificially
3212 create the need for them in a single-language program. */
3213 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3214 lto_eh_personality_decl = first_personality_decl;
3215 else
3216 lto_eh_personality_decl = lhd_gcc_personality ();
3219 return lto_eh_personality_decl;
3222 /* Set the process name based on the LTO mode. */
3224 static void
3225 lto_process_name (void)
3227 if (flag_lto)
3228 setproctitle ("lto1-lto");
3229 if (flag_wpa)
3230 setproctitle ("lto1-wpa");
3231 if (flag_ltrans)
3232 setproctitle ("lto1-ltrans");
3236 /* Initialize the LTO front end. */
3238 static void
3239 lto_init (void)
3241 lto_process_name ();
3242 lto_streamer_hooks_init ();
3243 lto_reader_init ();
3244 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3245 memset (&lto_stats, 0, sizeof (lto_stats));
3246 bitmap_obstack_initialize (NULL);
3247 gimple_register_cfg_hooks ();
3251 /* Main entry point for the GIMPLE front end. This front end has
3252 three main personalities:
3254 - LTO (-flto). All the object files on the command line are
3255 loaded in memory and processed as a single translation unit.
3256 This is the traditional link-time optimization behavior.
3258 - WPA (-fwpa). Only the callgraph and summary information for
3259 files in the command file are loaded. A single callgraph
3260 (without function bodies) is instantiated for the whole set of
3261 files. IPA passes are only allowed to analyze the call graph
3262 and make transformation decisions. The callgraph is
3263 partitioned, each partition is written to a new object file
3264 together with the transformation decisions.
3266 - LTRANS (-fltrans). Similar to -flto but it avoids running the IPA
3267 summary passes again. Since WPA computed summary
3268 information and decided what transformations to apply, LTRANS
3269 simply applies them. */
3271 void
3272 lto_main (void)
3274 /* LTO is called as a front end, even though it is not a front end.
3275 Because it is called as a front end, TV_PHASE_PARSING and
3276 TV_PARSE_GLOBAL are active, and we need to turn them off while
3277 doing LTO. Later we turn them back on so they are active up in
3278 toplev.c. */
3279 timevar_pop (TV_PARSE_GLOBAL);
3280 timevar_stop (TV_PHASE_PARSING);
3282 timevar_start (TV_PHASE_SETUP);
3284 /* Initialize the LTO front end. */
3285 lto_init ();
3287 timevar_stop (TV_PHASE_SETUP);
3288 timevar_start (TV_PHASE_STREAM_IN);
3290 /* Read all the symbols and call graph from all the files in the
3291 command line. */
3292 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3294 timevar_stop (TV_PHASE_STREAM_IN);
3296 if (!seen_error ())
3298 /* If WPA is enabled analyze the whole call graph and create an
3299 optimization plan. Otherwise, read in all the function
3300 bodies and continue with optimization. */
3301 if (flag_wpa)
3302 do_whole_program_analysis ();
3303 else
3305 timevar_start (TV_PHASE_OPT_GEN);
3307 materialize_cgraph ();
3309 /* Let the middle end know that we have read and merged all of
3310 the input files. */
3311 compile ();
3313 timevar_stop (TV_PHASE_OPT_GEN);
3315 /* FIXME lto, if the processes spawned by WPA fail, we miss
3316 the chance to print WPA's report, so WPA will call
3317 print_lto_report before launching LTRANS. If LTRANS was
3318 launched directly by the driver we would not need to do
3319 this. */
3320 if (flag_lto_report)
3321 print_lto_report_1 ();
3325 /* Here we make LTO pretend to be a parser. */
3326 timevar_start (TV_PHASE_PARSING);
3327 timevar_push (TV_PARSE_GLOBAL);
3330 #include "gt-lto-lto.h"