gcc/lto-streamer-in.c
1 /* Read the GIMPLE representation from a file stream.
3 Copyright (C) 2009-2017 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
47 struct freeing_string_slot_hasher : string_slot_hasher
49 static inline void remove (value_type *);
52 inline void
53 freeing_string_slot_hasher::remove (value_type *v)
55 free (v);
58 /* The table to hold the file names. */
59 static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
62 /* Check that tag ACTUAL has one of the given values. NTAGS is the
63 number of valid tag values to check. */
65 void
66 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
68 va_list ap;
69 int i;
71 va_start (ap, ntags);
72 for (i = 0; i < ntags; i++)
73 if ((unsigned) actual == va_arg (ap, unsigned))
75 va_end (ap);
76 return;
79 va_end (ap);
80 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
84 /* Read LENGTH bytes from input block IB to ADDR. */
86 void
87 lto_input_data_block (struct lto_input_block *ib, void *addr, size_t length)
89 size_t i;
90 unsigned char *const buffer = (unsigned char *) addr;
92 for (i = 0; i < length; i++)
93 buffer[i] = streamer_read_uchar (ib);
97 /* Lookup STRING in file_name_hash_table. If found, return the existing
98 string, otherwise insert STRING as the canonical version. */
100 static const char *
101 canon_file_name (const char *string)
103 string_slot **slot;
104 struct string_slot s_slot;
105 size_t len = strlen (string);
107 s_slot.s = string;
108 s_slot.len = len;
110 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
111 if (*slot == NULL)
113 char *saved_string;
114 struct string_slot *new_slot;
116 saved_string = (char *) xmalloc (len + 1);
117 new_slot = XCNEW (struct string_slot);
118 memcpy (saved_string, string, len + 1);
119 new_slot->s = saved_string;
120 new_slot->len = len;
121 *slot = new_slot;
122 return saved_string;
124 else
126 struct string_slot *old_slot = *slot;
127 return old_slot->s;
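/* Illustrative sketch (not part of this file): canonicalizing file names
   means every equal string is represented by a single stored copy, so the
   location code below (e.g. cmp_loc) can compare files with plain pointer
   equality instead of strcmp.  A minimal stand-alone version of the same
   idea, using a linear table instead of GCC's hash_table, might look like
   this.  */
#if 0
#include <stdlib.h>
#include <string.h>

static const char *interned[1024];
static size_t n_interned;

static const char *
intern_string (const char *s)
{
  for (size_t i = 0; i < n_interned; i++)
    if (strcmp (interned[i], s) == 0)
      return interned[i];               /* Existing canonical copy.  */
  interned[n_interned] = strdup (s);    /* First occurrence becomes canonical.  */
  return interned[n_interned++];
}
#endif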
131 /* Pointer to currently alive instance of lto_location_cache. */
133 lto_location_cache *lto_location_cache::current_cache;
135 /* Sort locations in source order. Start with file from last application. */
138 lto_location_cache::cmp_loc (const void *pa, const void *pb)
140 const cached_location *a = ((const cached_location *)pa);
141 const cached_location *b = ((const cached_location *)pb);
142 const char *current_file = current_cache->current_file;
143 int current_line = current_cache->current_line;
145 if (a->file == current_file && b->file != current_file)
146 return -1;
147 if (a->file != current_file && b->file == current_file)
148 return 1;
149 if (a->file == current_file && b->file == current_file)
151 if (a->line == current_line && b->line != current_line)
152 return -1;
153 if (a->line != current_line && b->line == current_line)
154 return 1;
156 if (a->file != b->file)
157 return strcmp (a->file, b->file);
158 if (a->sysp != b->sysp)
159 return a->sysp ? 1 : -1;
160 if (a->line != b->line)
161 return a->line - b->line;
162 return a->col - b->col;
165 /* Apply all changes in location cache. Add locations into linemap and patch
166 trees. */
168 bool
169 lto_location_cache::apply_location_cache ()
171 static const char *prev_file;
172 if (!loc_cache.length ())
173 return false;
174 if (loc_cache.length () > 1)
175 loc_cache.qsort (cmp_loc);
177 for (unsigned int i = 0; i < loc_cache.length (); i++)
179 struct cached_location loc = loc_cache[i];
181 if (current_file != loc.file)
182 linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
183 loc.sysp, loc.file, loc.line);
184 else if (current_line != loc.line)
186 int max = loc.col;
188 for (unsigned int j = i + 1; j < loc_cache.length (); j++)
189 if (loc.file != loc_cache[j].file
190 || loc.line != loc_cache[j].line)
191 break;
192 else if (max < loc_cache[j].col)
193 max = loc_cache[j].col;
194 linemap_line_start (line_table, loc.line, max + 1);
196 gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
197 if (current_file == loc.file && current_line == loc.line
198 && current_col == loc.col)
199 *loc.loc = current_loc;
200 else
201 current_loc = *loc.loc = linemap_position_for_column (line_table,
202 loc.col);
203 current_line = loc.line;
204 prev_file = current_file = loc.file;
205 current_col = loc.col;
207 loc_cache.truncate (0);
208 accepted_length = 0;
209 return true;
212 /* Tree merging did not succeed; mark all changes in the cache as accepted. */
214 void
215 lto_location_cache::accept_location_cache ()
217 gcc_assert (current_cache == this);
218 accepted_length = loc_cache.length ();
221 /* Tree merging did succeed; throw away recent changes. */
223 void
224 lto_location_cache::revert_location_cache ()
226 loc_cache.truncate (accepted_length);
229 /* Read a location bitpack from input block IB and either update *LOC directly
230 or add it to the location cache.
231 It is necessary to call apply_location_cache to get *LOC updated. */
233 void
234 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
235 struct data_in *data_in)
237 static const char *stream_file;
238 static int stream_line;
239 static int stream_col;
240 static bool stream_sysp;
241 bool file_change, line_change, column_change;
243 gcc_assert (current_cache == this);
245 *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);
247 if (*loc < RESERVED_LOCATION_COUNT)
248 return;
250 /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
251 ICE on it. */
253 file_change = bp_unpack_value (bp, 1);
254 line_change = bp_unpack_value (bp, 1);
255 column_change = bp_unpack_value (bp, 1);
257 if (file_change)
259 stream_file = canon_file_name (bp_unpack_string (data_in, bp));
260 stream_sysp = bp_unpack_value (bp, 1);
263 if (line_change)
264 stream_line = bp_unpack_var_len_unsigned (bp);
266 if (column_change)
267 stream_col = bp_unpack_var_len_unsigned (bp);
269 /* This optimization saves location cache operations during gimple
270 streaming. */
272 if (current_file == stream_file && current_line == stream_line
273 && current_col == stream_col && current_sysp == stream_sysp)
275 *loc = current_loc;
276 return;
279 struct cached_location entry
280 = {stream_file, loc, stream_line, stream_col, stream_sysp};
281 loc_cache.safe_push (entry);
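/* Illustrative sketch (not part of this file): locations are streamed as
   deltas against the previously read location.  Three change bits say
   which of file/line/column follow; components that did not change are
   simply reused (the file is accompanied by its system-header flag, which
   is omitted here).  Assuming hypothetical read_bit/read_uint/read_string
   helpers, the decode side of such a scheme looks roughly like this.  */
#if 0
struct sloc { const char *file; int line, col; };

static struct sloc
decode_location (struct sloc prev)
{
  struct sloc cur = prev;          /* Start from the previous location.  */
  int file_change = read_bit ();
  int line_change = read_bit ();
  int col_change = read_bit ();
  if (file_change)
    cur.file = read_string ();     /* Only streamed when it changed.  */
  if (line_change)
    cur.line = read_uint ();
  if (col_change)
    cur.col = read_uint ();
  return cur;
}
#endif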
284 /* Read a location bitpack from input block IB and either update *LOC directly
285 or add it to the location cache.
286 It is necessary to call apply_location_cache to get *LOC updated. */
288 void
289 lto_input_location (location_t *loc, struct bitpack_d *bp,
290 struct data_in *data_in)
292 data_in->location_cache.input_location (loc, bp, data_in);
295 /* Read location and return it instead of going through location caching.
296 This should be used only when the resulting location is not going to be
297 discarded. */
299 location_t
300 stream_input_location_now (struct bitpack_d *bp, struct data_in *data_in)
302 location_t loc;
303 stream_input_location (&loc, bp, data_in);
304 data_in->location_cache.apply_location_cache ();
305 return loc;
308 /* Read a reference to a tree node from DATA_IN using input block IB.
309 TAG is the expected node that should be found in IB. If TAG belongs
310 to one of the indexable trees, expect to read a reference index to
311 be looked up in one of the symbol tables; otherwise read the physical
312 representation of the tree using stream_read_tree. FN is the
313 function scope for the read tree. */
315 tree
316 lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
317 struct function *fn, enum LTO_tags tag)
319 unsigned HOST_WIDE_INT ix_u;
320 tree result = NULL_TREE;
322 lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
324 switch (tag)
326 case LTO_type_ref:
327 ix_u = streamer_read_uhwi (ib);
328 result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
329 break;
331 case LTO_ssa_name_ref:
332 ix_u = streamer_read_uhwi (ib);
333 result = (*SSANAMES (fn))[ix_u];
334 break;
336 case LTO_field_decl_ref:
337 ix_u = streamer_read_uhwi (ib);
338 result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
339 break;
341 case LTO_function_decl_ref:
342 ix_u = streamer_read_uhwi (ib);
343 result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
344 break;
346 case LTO_type_decl_ref:
347 ix_u = streamer_read_uhwi (ib);
348 result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
349 break;
351 case LTO_namespace_decl_ref:
352 ix_u = streamer_read_uhwi (ib);
353 result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
354 break;
356 case LTO_global_decl_ref:
357 case LTO_result_decl_ref:
358 case LTO_const_decl_ref:
359 case LTO_imported_decl_ref:
360 case LTO_label_decl_ref:
361 case LTO_translation_unit_decl_ref:
362 case LTO_namelist_decl_ref:
363 ix_u = streamer_read_uhwi (ib);
364 result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
365 break;
367 default:
368 gcc_unreachable ();
371 gcc_assert (result);
373 return result;
377 /* Read and return a doubly-linked list of catch handlers from input
378 block IB, using descriptors in DATA_IN. */
380 static struct eh_catch_d *
381 lto_input_eh_catch_list (struct lto_input_block *ib, struct data_in *data_in,
382 eh_catch *last_p)
384 eh_catch first;
385 enum LTO_tags tag;
387 *last_p = first = NULL;
388 tag = streamer_read_record_start (ib);
389 while (tag)
391 tree list;
392 eh_catch n;
394 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
396 /* Read the catch node. */
397 n = ggc_cleared_alloc<eh_catch_d> ();
398 n->type_list = stream_read_tree (ib, data_in);
399 n->filter_list = stream_read_tree (ib, data_in);
400 n->label = stream_read_tree (ib, data_in);
402 /* Register all the types in N->FILTER_LIST. */
403 for (list = n->filter_list; list; list = TREE_CHAIN (list))
404 add_type_for_runtime (TREE_VALUE (list));
406 /* Chain N to the end of the list. */
407 if (*last_p)
408 (*last_p)->next_catch = n;
409 n->prev_catch = *last_p;
410 *last_p = n;
412 /* Set the head of the list the first time through the loop. */
413 if (first == NULL)
414 first = n;
416 tag = streamer_read_record_start (ib);
419 return first;
423 /* Read and return EH region IX from input block IB, using descriptors
424 in DATA_IN. */
426 static eh_region
427 input_eh_region (struct lto_input_block *ib, struct data_in *data_in, int ix)
429 enum LTO_tags tag;
430 eh_region r;
432 /* Read the region header. */
433 tag = streamer_read_record_start (ib);
434 if (tag == LTO_null)
435 return NULL;
437 r = ggc_cleared_alloc<eh_region_d> ();
438 r->index = streamer_read_hwi (ib);
440 gcc_assert (r->index == ix);
442 /* Read all the region pointers as region numbers. We'll fix up
443 the pointers once the whole array has been read. */
444 r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
445 r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
446 r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
448 switch (tag)
450 case LTO_ert_cleanup:
451 r->type = ERT_CLEANUP;
452 break;
454 case LTO_ert_try:
456 struct eh_catch_d *last_catch;
457 r->type = ERT_TRY;
458 r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
459 &last_catch);
460 r->u.eh_try.last_catch = last_catch;
461 break;
464 case LTO_ert_allowed_exceptions:
466 tree l;
468 r->type = ERT_ALLOWED_EXCEPTIONS;
469 r->u.allowed.type_list = stream_read_tree (ib, data_in);
470 r->u.allowed.label = stream_read_tree (ib, data_in);
471 r->u.allowed.filter = streamer_read_uhwi (ib);
473 for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
474 add_type_for_runtime (TREE_VALUE (l));
476 break;
478 case LTO_ert_must_not_throw:
480 r->type = ERT_MUST_NOT_THROW;
481 r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
482 bitpack_d bp = streamer_read_bitpack (ib);
483 r->u.must_not_throw.failure_loc
484 = stream_input_location_now (&bp, data_in);
486 break;
488 default:
489 gcc_unreachable ();
492 r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
494 return r;
498 /* Read and return EH landing pad IX from input block IB, using descriptors
499 in DATA_IN. */
501 static eh_landing_pad
502 input_eh_lp (struct lto_input_block *ib, struct data_in *data_in, int ix)
504 enum LTO_tags tag;
505 eh_landing_pad lp;
507 /* Read the landing pad header. */
508 tag = streamer_read_record_start (ib);
509 if (tag == LTO_null)
510 return NULL;
512 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
514 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
515 lp->index = streamer_read_hwi (ib);
516 gcc_assert (lp->index == ix);
517 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
518 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
519 lp->post_landing_pad = stream_read_tree (ib, data_in);
521 return lp;
525 /* After reading the EH regions, pointers to peer and children regions
526 are region numbers. This converts all these region numbers into
527 real pointers into the rematerialized regions for FN. ROOT_REGION
528 is the region number for the root EH region in FN. */
530 static void
531 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
533 unsigned i;
534 vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
535 vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
536 eh_region r;
537 eh_landing_pad lp;
539 gcc_assert (eh_array && lp_array);
541 gcc_assert (root_region >= 0);
542 fn->eh->region_tree = (*eh_array)[root_region];
544 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
545 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
547 /* Convert all the index numbers stored in pointer fields into
548 pointers to the corresponding slots in the EH region array. */
549 FOR_EACH_VEC_ELT (*eh_array, i, r)
551 /* The array may contain NULL regions. */
552 if (r == NULL)
553 continue;
555 gcc_assert (i == (unsigned) r->index);
556 FIXUP_EH_REGION (r->outer);
557 FIXUP_EH_REGION (r->inner);
558 FIXUP_EH_REGION (r->next_peer);
559 FIXUP_EH_LP (r->landing_pads);
562 /* Convert all the index numbers stored in pointer fields into
563 pointers to the corresponding slots in the EH landing pad array. */
564 FOR_EACH_VEC_ELT (*lp_array, i, lp)
566 /* The array may contain NULL landing pads. */
567 if (lp == NULL)
568 continue;
570 gcc_assert (i == (unsigned) lp->index);
571 FIXUP_EH_LP (lp->next_lp);
572 FIXUP_EH_REGION (lp->region);
575 #undef FIXUP_EH_REGION
576 #undef FIXUP_EH_LP
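/* Illustrative sketch (not part of this file): the FIXUP_EH_* macros are
   an instance of a generic two-pass technique for serializing linked
   structures: on input every pointer field temporarily holds an array
   index, and once the whole array has been rebuilt a second pass turns
   the indices back into real pointers (a full implementation also needs
   an encoding for a null link, elided here).  In isolation:  */
#if 0
#include <stddef.h>
#include <stdint.h>

struct node { struct node *next; };

static void
fixup_pointers (struct node **array, size_t n)
{
  for (size_t i = 0; i < n; i++)
    if (array[i])
      {
        /* The index was smuggled through the pointer field on input.  */
        size_t idx = (size_t) (intptr_t) array[i]->next;
        array[i]->next = array[idx];
      }
}
#endif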
580 /* Initialize EH support. */
582 void
583 lto_init_eh (void)
585 static bool eh_initialized_p = false;
587 if (eh_initialized_p)
588 return;
590 /* Contrary to most other FEs, we only initialize EH support when at
591 least one of the files in the set contains exception regions in
592 it. Since this happens much later than the call to init_eh in
593 lang_dependent_init, we have to set flag_exceptions and call
594 init_eh again to initialize the EH tables. */
595 flag_exceptions = 1;
596 init_eh ();
598 eh_initialized_p = true;
602 /* Read the exception table for FN from IB using the data descriptors
603 in DATA_IN. */
605 static void
606 input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
607 struct function *fn)
609 HOST_WIDE_INT i, root_region, len;
610 enum LTO_tags tag;
612 tag = streamer_read_record_start (ib);
613 if (tag == LTO_null)
614 return;
616 lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);
618 /* If the file contains EH regions, then it was compiled with
619 -fexceptions. In that case, initialize the backend EH
620 machinery. */
621 lto_init_eh ();
623 gcc_assert (fn->eh);
625 root_region = streamer_read_hwi (ib);
626 gcc_assert (root_region == (int) root_region);
628 /* Read the EH region array. */
629 len = streamer_read_hwi (ib);
630 gcc_assert (len == (int) len);
631 if (len > 0)
633 vec_safe_grow_cleared (fn->eh->region_array, len);
634 for (i = 0; i < len; i++)
636 eh_region r = input_eh_region (ib, data_in, i);
637 (*fn->eh->region_array)[i] = r;
641 /* Read the landing pads. */
642 len = streamer_read_hwi (ib);
643 gcc_assert (len == (int) len);
644 if (len > 0)
646 vec_safe_grow_cleared (fn->eh->lp_array, len);
647 for (i = 0; i < len; i++)
649 eh_landing_pad lp = input_eh_lp (ib, data_in, i);
650 (*fn->eh->lp_array)[i] = lp;
654 /* Read the runtime type data. */
655 len = streamer_read_hwi (ib);
656 gcc_assert (len == (int) len);
657 if (len > 0)
659 vec_safe_grow_cleared (fn->eh->ttype_data, len);
660 for (i = 0; i < len; i++)
662 tree ttype = stream_read_tree (ib, data_in);
663 (*fn->eh->ttype_data)[i] = ttype;
667 /* Read the table of action chains. */
668 len = streamer_read_hwi (ib);
669 gcc_assert (len == (int) len);
670 if (len > 0)
672 if (targetm.arm_eabi_unwinder)
674 vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
675 for (i = 0; i < len; i++)
677 tree t = stream_read_tree (ib, data_in);
678 (*fn->eh->ehspec_data.arm_eabi)[i] = t;
681 else
683 vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
684 for (i = 0; i < len; i++)
686 uchar c = streamer_read_uchar (ib);
687 (*fn->eh->ehspec_data.other)[i] = c;
692 /* Reconstruct the EH region tree by fixing up the peer/children
693 pointers. */
694 fixup_eh_region_pointers (fn, root_region);
696 tag = streamer_read_record_start (ib);
697 lto_tag_check_range (tag, LTO_null, LTO_null);
701 /* Make a new basic block with index INDEX in function FN. */
703 static basic_block
704 make_new_block (struct function *fn, unsigned int index)
706 basic_block bb = alloc_block ();
707 bb->index = index;
708 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
709 n_basic_blocks_for_fn (fn)++;
710 return bb;
714 /* Read the CFG for function FN from input block IB. */
716 static void
717 input_cfg (struct lto_input_block *ib, struct data_in *data_in,
718 struct function *fn)
720 unsigned int bb_count;
721 basic_block p_bb;
722 unsigned int i;
723 int index;
725 init_empty_tree_cfg_for_function (fn);
726 init_ssa_operands (fn);
728 profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
729 PROFILE_LAST);
731 bb_count = streamer_read_uhwi (ib);
733 last_basic_block_for_fn (fn) = bb_count;
734 if (bb_count > basic_block_info_for_fn (fn)->length ())
735 vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);
737 if (bb_count > label_to_block_map_for_fn (fn)->length ())
738 vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);
740 index = streamer_read_hwi (ib);
741 while (index != -1)
743 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
744 unsigned int edge_count;
746 if (bb == NULL)
747 bb = make_new_block (fn, index);
749 edge_count = streamer_read_uhwi (ib);
751 /* Connect up the CFG. */
752 for (i = 0; i < edge_count; i++)
754 unsigned int dest_index;
755 unsigned int edge_flags;
756 basic_block dest;
757 profile_probability probability;
758 edge e;
760 dest_index = streamer_read_uhwi (ib);
761 probability = profile_probability::stream_in (ib);
762 edge_flags = streamer_read_uhwi (ib);
764 dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
766 if (dest == NULL)
767 dest = make_new_block (fn, dest_index);
769 e = make_edge (bb, dest, edge_flags);
770 e->probability = probability;
773 index = streamer_read_hwi (ib);
776 p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
777 index = streamer_read_hwi (ib);
778 while (index != -1)
780 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
781 bb->prev_bb = p_bb;
782 p_bb->next_bb = bb;
783 p_bb = bb;
784 index = streamer_read_hwi (ib);
787 /* ??? The cfgloop interface is tied to cfun. */
788 gcc_assert (cfun == fn);
790 /* Input the loop tree. */
791 unsigned n_loops = streamer_read_uhwi (ib);
792 if (n_loops == 0)
793 return;
795 struct loops *loops = ggc_cleared_alloc<struct loops> ();
796 init_loops_structure (fn, loops, n_loops);
797 set_loops_for_fn (fn, loops);
799 /* Input each loop and associate it with its loop header so
800 flow_loops_find can rebuild the loop tree. */
801 for (unsigned i = 1; i < n_loops; ++i)
803 int header_index = streamer_read_hwi (ib);
804 if (header_index == -1)
806 loops->larray->quick_push (NULL);
807 continue;
810 struct loop *loop = alloc_loop ();
811 loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
812 loop->header->loop_father = loop;
814 /* Read everything copy_loop_info copies. */
815 loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
816 loop->any_upper_bound = streamer_read_hwi (ib);
817 if (loop->any_upper_bound)
818 loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
819 loop->any_likely_upper_bound = streamer_read_hwi (ib);
820 if (loop->any_likely_upper_bound)
821 loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
822 loop->any_estimate = streamer_read_hwi (ib);
823 if (loop->any_estimate)
824 loop->nb_iterations_estimate = streamer_read_widest_int (ib);
826 /* Read OMP SIMD related info. */
827 loop->safelen = streamer_read_hwi (ib);
828 loop->dont_vectorize = streamer_read_hwi (ib);
829 loop->force_vectorize = streamer_read_hwi (ib);
830 loop->simduid = stream_read_tree (ib, data_in);
832 place_new_loop (fn, loop);
834 /* flow_loops_find doesn't like loops not in the tree, hook them
835 all as siblings of the tree root temporarily. */
836 flow_loop_tree_node_add (loops->tree_root, loop);
839 /* Rebuild the loop tree. */
840 flow_loops_find (loops);
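/* Illustrative sketch (not part of this file): the CFG is streamed as one
   record per basic block -- the block index followed by its outgoing
   edges (destination index, probability, flags) -- terminated by an index
   of -1, after which a second -1-terminated list of block indices gives
   the linear prev_bb/next_bb chain.  With hypothetical read_int/read_uint
   helpers, a reader for the edge records looks roughly like this.  */
#if 0
static void
read_cfg_edges (void)
{
  for (int index = read_int (); index != -1; index = read_int ())
    {
      unsigned n_edges = read_uint ();
      for (unsigned i = 0; i < n_edges; i++)
        {
          unsigned dest = read_uint ();
          unsigned prob = read_uint ();
          unsigned flags = read_uint ();
          add_edge (index, dest, prob, flags);   /* hypothetical */
        }
    }
}
#endif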
844 /* Read the SSA names array for function FN from DATA_IN using input
845 block IB. */
847 static void
848 input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
849 struct function *fn)
851 unsigned int i, size;
853 size = streamer_read_uhwi (ib);
854 init_ssanames (fn, size);
856 i = streamer_read_uhwi (ib);
857 while (i)
859 tree ssa_name, name;
860 bool is_default_def;
862 /* Skip over the elements that had been freed. */
863 while (SSANAMES (fn)->length () < i)
864 SSANAMES (fn)->quick_push (NULL_TREE);
866 is_default_def = (streamer_read_uchar (ib) != 0);
867 name = stream_read_tree (ib, data_in);
868 ssa_name = make_ssa_name_fn (fn, name, NULL);
870 if (is_default_def)
872 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
873 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
876 i = streamer_read_uhwi (ib);
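/* Illustrative sketch (not part of this file): the SSA name array is
   streamed sparsely -- only live names are written, each prefixed by its
   index, and an index of 0 terminates the list -- so slots belonging to
   freed names simply stay NULL on the reader side.  Assuming a
   pre-allocated, zero-initialized array and hypothetical
   read_uint/read_payload helpers:  */
#if 0
static void
read_sparse_array (void *array[], unsigned n)
{
  for (unsigned i = read_uint (); i != 0; i = read_uint ())
    if (i < n)
      array[i] = read_payload ();   /* Unwritten slots stay NULL.  */
}
#endif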
881 /* Go through all NODE edges and fixup call_stmt pointers
882 so they point to STMTS. */
884 static void
885 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
886 struct function *fn)
888 #define STMT_UID_NOT_IN_RANGE(uid) \
889 (gimple_stmt_max_uid (fn) < uid || uid == 0)
891 struct cgraph_edge *cedge;
892 struct ipa_ref *ref = NULL;
893 unsigned int i;
895 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
897 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
898 fatal_error (input_location,
899 "Cgraph edge statement index out of range");
900 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
901 if (!cedge->call_stmt)
902 fatal_error (input_location,
903 "Cgraph edge statement index not found");
905 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
907 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
908 fatal_error (input_location,
909 "Cgraph edge statement index out of range");
910 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
911 if (!cedge->call_stmt)
912 fatal_error (input_location, "Cgraph edge statement index not found");
914 for (i = 0; node->iterate_reference (i, ref); i++)
915 if (ref->lto_stmt_uid)
917 if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
918 fatal_error (input_location,
919 "Reference statement index out of range");
920 ref->stmt = stmts[ref->lto_stmt_uid - 1];
921 if (!ref->stmt)
922 fatal_error (input_location, "Reference statement index not found");
927 /* Fixup call_stmt pointers in NODE and all clones. */
929 static void
930 fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
932 struct cgraph_node *node;
933 struct function *fn;
935 while (orig->clone_of)
936 orig = orig->clone_of;
937 fn = DECL_STRUCT_FUNCTION (orig->decl);
939 if (!orig->thunk.thunk_p)
940 fixup_call_stmt_edges_1 (orig, stmts, fn);
941 if (orig->clones)
942 for (node = orig->clones; node != orig;)
944 if (!node->thunk.thunk_p)
945 fixup_call_stmt_edges_1 (node, stmts, fn);
946 if (node->clones)
947 node = node->clones;
948 else if (node->next_sibling_clone)
949 node = node->next_sibling_clone;
950 else
952 while (node != orig && !node->next_sibling_clone)
953 node = node->clone_of;
954 if (node != orig)
955 node = node->next_sibling_clone;
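/* Illustrative sketch (not part of this file): the clone walk above is an
   iterative pre-order traversal of a first-child/next-sibling tree that
   uses parent pointers instead of an explicit stack (clones /
   next_sibling_clone / clone_of play the three roles).  The bare pattern:  */
#if 0
struct n { struct n *child, *sibling, *parent; };

static void
walk (struct n *root, void (*visit) (struct n *))
{
  for (struct n *p = root->child; p && p != root;)
    {
      visit (p);
      if (p->child)
        p = p->child;                   /* Descend first.  */
      else if (p->sibling)
        p = p->sibling;                 /* Then try the next sibling.  */
      else
        {
          while (p != root && !p->sibling)
            p = p->parent;              /* Climb until a sibling exists.  */
          if (p != root)
            p = p->sibling;
        }
    }
}
#endif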
961 /* Input the base body of struct function FN from DATA_IN
962 using input block IB. */
964 static void
965 input_struct_function_base (struct function *fn, struct data_in *data_in,
966 struct lto_input_block *ib)
968 struct bitpack_d bp;
969 int len;
971 /* Read the static chain and non-local goto save area. */
972 fn->static_chain_decl = stream_read_tree (ib, data_in);
973 fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);
975 /* Read all the local symbols. */
976 len = streamer_read_hwi (ib);
977 if (len > 0)
979 int i;
980 vec_safe_grow_cleared (fn->local_decls, len);
981 for (i = 0; i < len; i++)
983 tree t = stream_read_tree (ib, data_in);
984 (*fn->local_decls)[i] = t;
988 /* Input the current IL state of the function. */
989 fn->curr_properties = streamer_read_uhwi (ib);
991 /* Read all the attributes for FN. */
992 bp = streamer_read_bitpack (ib);
993 fn->is_thunk = bp_unpack_value (&bp, 1);
994 fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
995 fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
996 fn->returns_struct = bp_unpack_value (&bp, 1);
997 fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
998 fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
999 fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
1000 fn->after_inlining = bp_unpack_value (&bp, 1);
1001 fn->stdarg = bp_unpack_value (&bp, 1);
1002 fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
1003 fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
1004 fn->calls_alloca = bp_unpack_value (&bp, 1);
1005 fn->calls_setjmp = bp_unpack_value (&bp, 1);
1006 fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
1007 fn->has_simduid_loops = bp_unpack_value (&bp, 1);
1008 fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
1009 fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
1010 fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);
1012 /* Input the function start and end loci. */
1013 fn->function_start_locus = stream_input_location_now (&bp, data_in);
1014 fn->function_end_locus = stream_input_location_now (&bp, data_in);
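/* Illustrative sketch (not part of this file): the attribute block above
   packs many one-bit flags (plus a few wider fields such as the 8-bit
   va_list sizes and the 16-bit last_clique) into a single bitpack, so the
   reader must unpack values in exactly the order and width the writer
   used.  One simple LSB-first unpacker over a single word:  */
#if 0
struct bitreader { unsigned long long word; unsigned pos; };

static unsigned long long
unpack_bits (struct bitreader *br, unsigned nbits)
{
  /* Extract NBITS bits starting at the current position (nbits < 64).  */
  unsigned long long v = (br->word >> br->pos) & ((1ULL << nbits) - 1);
  br->pos += nbits;
  return v;
}
#endif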
1018 /* Read the body of function FN_DECL from DATA_IN using input block IB. */
1020 static void
1021 input_function (tree fn_decl, struct data_in *data_in,
1022 struct lto_input_block *ib, struct lto_input_block *ib_cfg)
1024 struct function *fn;
1025 enum LTO_tags tag;
1026 gimple **stmts;
1027 basic_block bb;
1028 struct cgraph_node *node;
1030 tag = streamer_read_record_start (ib);
1031 lto_tag_check (tag, LTO_function);
1033 /* Read decls for parameters and args. */
1034 DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1035 DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1037 /* Read debug args if available. */
1038 unsigned n_debugargs = streamer_read_uhwi (ib);
1039 if (n_debugargs)
1041 vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
1042 vec_safe_grow (*debugargs, n_debugargs);
1043 for (unsigned i = 0; i < n_debugargs; ++i)
1044 (**debugargs)[i] = stream_read_tree (ib, data_in);
1047 /* Read the tree of lexical scopes for the function. */
1048 DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1049 unsigned block_leaf_count = streamer_read_uhwi (ib);
1050 while (block_leaf_count--)
1051 stream_read_tree (ib, data_in);
1053 if (!streamer_read_uhwi (ib))
1054 return;
1056 push_struct_function (fn_decl);
1057 fn = DECL_STRUCT_FUNCTION (fn_decl);
1058 init_tree_ssa (fn);
1059 /* We input IL in SSA form. */
1060 cfun->gimple_df->in_ssa_p = true;
1062 gimple_register_cfg_hooks ();
1064 node = cgraph_node::get (fn_decl);
1065 if (!node)
1066 node = cgraph_node::create (fn_decl);
1067 input_struct_function_base (fn, data_in, ib);
1068 input_cfg (ib_cfg, data_in, fn);
1070 /* Read all the SSA names. */
1071 input_ssa_names (ib, data_in, fn);
1073 /* Read the exception handling regions in the function. */
1074 input_eh_regions (ib, data_in, fn);
1076 gcc_assert (DECL_INITIAL (fn_decl));
1077 DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1079 /* Read all the basic blocks. */
1080 tag = streamer_read_record_start (ib);
1081 while (tag)
1083 input_bb (ib, tag, data_in, fn,
1084 node->count_materialization_scale);
1085 tag = streamer_read_record_start (ib);
1088 /* Fix up the call statements that are mentioned in the callgraph
1089 edges. */
1090 set_gimple_stmt_max_uid (cfun, 0);
1091 FOR_ALL_BB_FN (bb, cfun)
1093 gimple_stmt_iterator gsi;
1094 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1096 gimple *stmt = gsi_stmt (gsi);
1097 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1099 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1101 gimple *stmt = gsi_stmt (gsi);
1102 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1105 stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1106 FOR_ALL_BB_FN (bb, cfun)
1108 gimple_stmt_iterator bsi = gsi_start_phis (bb);
1109 while (!gsi_end_p (bsi))
1111 gimple *stmt = gsi_stmt (bsi);
1112 gsi_next (&bsi);
1113 stmts[gimple_uid (stmt)] = stmt;
1115 bsi = gsi_start_bb (bb);
1116 while (!gsi_end_p (bsi))
1118 gimple *stmt = gsi_stmt (bsi);
1119 bool remove = false;
1120 /* If we're recompiling LTO objects with debug stmts but
1121 we're not supposed to have debug stmts, remove them now.
1122 We can't remove them earlier because this would cause uid
1123 mismatches in fixups, but we can do it at this point, as
1124 long as debug stmts don't require fixups.
1125 Similarly, remove all IFN_*SAN_* internal calls.  */
1126 if (!flag_wpa)
1128 if (!MAY_HAVE_DEBUG_STMTS && is_gimple_debug (stmt))
1129 remove = true;
1130 if (is_gimple_call (stmt)
1131 && gimple_call_internal_p (stmt))
1133 switch (gimple_call_internal_fn (stmt))
1135 case IFN_UBSAN_NULL:
1136 if ((flag_sanitize
1137 & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
1138 remove = true;
1139 break;
1140 case IFN_UBSAN_BOUNDS:
1141 if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
1142 remove = true;
1143 break;
1144 case IFN_UBSAN_VPTR:
1145 if ((flag_sanitize & SANITIZE_VPTR) == 0)
1146 remove = true;
1147 break;
1148 case IFN_UBSAN_OBJECT_SIZE:
1149 if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
1150 remove = true;
1151 break;
1152 case IFN_UBSAN_PTR:
1153 if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
1154 remove = true;
1155 break;
1156 case IFN_ASAN_MARK:
1157 if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
1158 remove = true;
1159 break;
1160 case IFN_TSAN_FUNC_EXIT:
1161 if ((flag_sanitize & SANITIZE_THREAD) == 0)
1162 remove = true;
1163 break;
1164 default:
1165 break;
1167 gcc_assert (!remove || gimple_call_lhs (stmt) == NULL_TREE);
1170 if (remove)
1172 gimple_stmt_iterator gsi = bsi;
1173 gsi_next (&bsi);
1174 unlink_stmt_vdef (stmt);
1175 release_defs (stmt);
1176 gsi_remove (&gsi, true);
1178 else
1180 gsi_next (&bsi);
1181 stmts[gimple_uid (stmt)] = stmt;
1186 /* Set the gimple body to the statement sequence in the entry
1187 basic block. FIXME lto, this is fairly hacky. The existence
1188 of a gimple body is used by the cgraph routines, but we should
1189 really use the presence of the CFG. */
1191 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1192 gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1195 fixup_call_stmt_edges (node, stmts);
1196 execute_all_ipa_stmt_fixups (node, stmts);
1198 update_ssa (TODO_update_ssa_only_virtuals);
1199 free_dominance_info (CDI_DOMINATORS);
1200 free_dominance_info (CDI_POST_DOMINATORS);
1201 free (stmts);
1202 pop_cfun ();
1205 /* Read the initializer of variable VAR from DATA_IN using input block IB. */
1207 static void
1208 input_constructor (tree var, struct data_in *data_in,
1209 struct lto_input_block *ib)
1211 DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1215 /* Read the body from DATA for function NODE and fill it in.
1216 FILE_DATA holds the global decls and types. SECTION_TYPE is either
1217 LTO_section_function_body or LTO_section_static_initializer. If
1218 section type is LTO_section_function_body, FN must be the decl for
1219 that function. */
1221 static void
1222 lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
1223 const char *data, enum lto_section_type section_type)
1225 const struct lto_function_header *header;
1226 struct data_in *data_in;
1227 int cfg_offset;
1228 int main_offset;
1229 int string_offset;
1230 tree fn_decl = node->decl;
1232 header = (const struct lto_function_header *) data;
1233 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1235 cfg_offset = sizeof (struct lto_function_header);
1236 main_offset = cfg_offset + header->cfg_size;
1237 string_offset = main_offset + header->main_size;
1239 else
1241 main_offset = sizeof (struct lto_function_header);
1242 string_offset = main_offset + header->main_size;
1245 data_in = lto_data_in_create (file_data, data + string_offset,
1246 header->string_size, vNULL);
1248 if (section_type == LTO_section_function_body)
1250 struct lto_in_decl_state *decl_state;
1251 unsigned from;
1253 gcc_checking_assert (node);
1255 /* Use the function's decl state. */
1256 decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
1257 gcc_assert (decl_state);
1258 file_data->current_decl_state = decl_state;
1261 /* Set up the struct function. */
1262 from = data_in->reader_cache->nodes.length ();
1263 lto_input_block ib_main (data + main_offset, header->main_size,
1264 file_data->mode_table);
1265 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1267 lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
1268 file_data->mode_table);
1269 input_function (fn_decl, data_in, &ib_main, &ib_cfg);
1271 else
1272 input_constructor (fn_decl, data_in, &ib_main);
1273 data_in->location_cache.apply_location_cache ();
1274 /* And fixup types we streamed locally. */
1276 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1277 unsigned len = cache->nodes.length ();
1278 unsigned i;
1279 for (i = len; i-- > from;)
1281 tree t = streamer_tree_cache_get_tree (cache, i);
1282 if (t == NULL_TREE)
1283 continue;
1285 if (TYPE_P (t))
1287 gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
1288 if (type_with_alias_set_p (t)
1289 && canonical_type_used_p (t))
1290 TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
1291 if (TYPE_MAIN_VARIANT (t) != t)
1293 gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
1294 TYPE_NEXT_VARIANT (t)
1295 = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
1296 TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
1302 /* Restore decl state */
1303 file_data->current_decl_state = file_data->global_decl_state;
1306 lto_data_in_delete (data_in);
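/* Illustrative note (not part of this file): a function-body section is a
   header followed by the CFG stream, the main stream and the string
   table, so each part above is located purely by summing sizes recorded
   in the header:

     [lto_function_header][cfg (cfg_size)][main (main_size)][strings]

     cfg_offset    = sizeof (struct lto_function_header)
     main_offset   = cfg_offset + header->cfg_size
     string_offset = main_offset + header->main_size

   For a variable initializer there is no CFG part, so the main stream
   starts right after the header.  */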
1310 /* Read the body of NODE using DATA. FILE_DATA holds the global
1311 decls and types. */
1313 void
1314 lto_input_function_body (struct lto_file_decl_data *file_data,
1315 struct cgraph_node *node, const char *data)
1317 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1320 /* Read the initializer of variable NODE using DATA. FILE_DATA holds the
1321 global decls and types. */
1323 void
1324 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1325 struct varpool_node *node, const char *data)
1327 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1331 /* Queue of accumulated decl -> DIE mappings.  Like locations, these
1332 are only applied to prevailing tree nodes during tree merging. */
1333 vec<dref_entry> dref_queue;
1335 /* Read the physical representation of a tree node EXPR from
1336 input block IB using the per-file context in DATA_IN. */
1338 static void
1339 lto_read_tree_1 (struct lto_input_block *ib, struct data_in *data_in, tree expr)
1341 /* Read all the bitfield values in EXPR. Note that for LTO, we
1342 only write language-independent bitfields, so no more unpacking is
1343 needed. */
1344 streamer_read_tree_bitfields (ib, data_in, expr);
1346 /* Read all the pointer fields in EXPR. */
1347 streamer_read_tree_body (ib, data_in, expr);
1349 /* Read any LTO-specific data not read by the tree streamer. */
1350 if (DECL_P (expr)
1351 && TREE_CODE (expr) != FUNCTION_DECL
1352 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1353 DECL_INITIAL (expr) = stream_read_tree (ib, data_in);
1355 /* Stream references to early generated DIEs. Keep in sync with the
1356 trees handled in dwarf2out_register_external_die. */
1357 if ((DECL_P (expr)
1358 && TREE_CODE (expr) != FIELD_DECL
1359 && TREE_CODE (expr) != DEBUG_EXPR_DECL
1360 && TREE_CODE (expr) != TYPE_DECL)
1361 || TREE_CODE (expr) == BLOCK)
1363 const char *str = streamer_read_string (data_in, ib);
1364 if (str)
1366 unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
1367 dref_entry e = { expr, str, off };
1368 dref_queue.safe_push (e);
1373 /* Read the physical representation of a tree node with tag TAG from
1374 input block IB using the per-file context in DATA_IN. */
1376 static tree
1377 lto_read_tree (struct lto_input_block *ib, struct data_in *data_in,
1378 enum LTO_tags tag, hashval_t hash)
1380 /* Instantiate a new tree node. */
1381 tree result = streamer_alloc_tree (ib, data_in, tag);
1383 /* Enter RESULT in the reader cache. This will make RESULT
1384 available so that circular references in the rest of the tree
1385 structure can be resolved in subsequent calls to stream_read_tree. */
1386 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1388 lto_read_tree_1 (ib, data_in, result);
1390 /* end_marker = */ streamer_read_uchar (ib);
1392 return result;
1396 /* Populate the reader cache with trees materialized from the SCC
1397 following in the IB, DATA_IN stream. */
1399 hashval_t
1400 lto_input_scc (struct lto_input_block *ib, struct data_in *data_in,
1401 unsigned *len, unsigned *entry_len)
1403 /* A blob of unnamed tree nodes; fill the cache from it and
1404 recurse. */
1405 unsigned size = streamer_read_uhwi (ib);
1406 hashval_t scc_hash = streamer_read_uhwi (ib);
1407 unsigned scc_entry_len = 1;
1409 if (size == 1)
1411 enum LTO_tags tag = streamer_read_record_start (ib);
1412 lto_input_tree_1 (ib, data_in, tag, scc_hash);
1414 else
1416 unsigned int first = data_in->reader_cache->nodes.length ();
1417 tree result;
1419 scc_entry_len = streamer_read_uhwi (ib);
1421 /* Materialize size trees by reading their headers. */
1422 for (unsigned i = 0; i < size; ++i)
1424 enum LTO_tags tag = streamer_read_record_start (ib);
1425 if (tag == LTO_null
1426 || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
1427 || tag == LTO_tree_pickle_reference
1428 || tag == LTO_integer_cst
1429 || tag == LTO_tree_scc)
1430 gcc_unreachable ();
1432 result = streamer_alloc_tree (ib, data_in, tag);
1433 streamer_tree_cache_append (data_in->reader_cache, result, 0);
1436 /* Read the tree bitpacks and references. */
1437 for (unsigned i = 0; i < size; ++i)
1439 result = streamer_tree_cache_get_tree (data_in->reader_cache,
1440 first + i);
1441 lto_read_tree_1 (ib, data_in, result);
1442 /* end_marker = */ streamer_read_uchar (ib);
1446 *len = size;
1447 *entry_len = scc_entry_len;
1448 return scc_hash;
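/* Illustrative sketch (not part of this file): an SCC is read in two
   phases precisely so that cyclic references can be resolved -- first
   every node is allocated from its header and registered in the cache,
   then the bodies are filled in, at which point any reference into the
   SCC already has an address to resolve to.  Generically, with
   hypothetical helpers:  */
#if 0
#include <stddef.h>

static void
read_scc (void **cache, size_t first, size_t size)
{
  /* Phase 1: allocate placeholders so entries first..first+size-1 exist.  */
  for (size_t i = 0; i < size; i++)
    cache[first + i] = alloc_node_from_header ();   /* hypothetical */

  /* Phase 2: fill in bodies; back-references hit the cache.  */
  for (size_t i = 0; i < size; i++)
    fill_node_body (cache[first + i]);              /* hypothetical */
}
#endif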
1452 /* Read a tree from input block IB using the per-file context in
1453 DATA_IN. This context is used, for example, to resolve references
1454 to previously read nodes. */
1456 tree
1457 lto_input_tree_1 (struct lto_input_block *ib, struct data_in *data_in,
1458 enum LTO_tags tag, hashval_t hash)
1460 tree result;
1462 gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);
1464 if (tag == LTO_null)
1465 result = NULL_TREE;
1466 else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
1468 /* If TAG is a reference to an indexable tree, the next value
1469 in IB is the index into the table where we expect to find
1470 that tree. */
1471 result = lto_input_tree_ref (ib, data_in, cfun, tag);
1473 else if (tag == LTO_tree_pickle_reference)
1475 /* If TAG is a reference to a previously read tree, look it up in
1476 the reader cache. */
1477 result = streamer_get_pickled_tree (ib, data_in);
1479 else if (tag == LTO_integer_cst)
1481 /* For shared integer constants in singletons we can use the
1482 existing tree integer constant merging code. */
1483 tree type = stream_read_tree (ib, data_in);
1484 unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
1485 unsigned HOST_WIDE_INT i;
1486 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
1488 for (i = 0; i < len; i++)
1489 a[i] = streamer_read_hwi (ib);
1490 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
1491 result = wide_int_to_tree (type, wide_int::from_array
1492 (a, len, TYPE_PRECISION (type)));
1493 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1495 else if (tag == LTO_tree_scc)
1496 gcc_unreachable ();
1497 else
1499 /* Otherwise, materialize a new node from IB. */
1500 result = lto_read_tree (ib, data_in, tag, hash);
1503 return result;
1506 tree
1507 lto_input_tree (struct lto_input_block *ib, struct data_in *data_in)
1509 enum LTO_tags tag;
1511 /* Input and skip SCCs. */
1512 while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1514 unsigned len, entry_len;
1515 lto_input_scc (ib, data_in, &len, &entry_len);
1517 /* Register DECLs with the debuginfo machinery. */
1518 while (!dref_queue.is_empty ())
1520 dref_entry e = dref_queue.pop ();
1521 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1524 return lto_input_tree_1 (ib, data_in, tag, 0);
1528 /* Input toplevel asms. */
1530 void
1531 lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
1533 size_t len;
1534 const char *data = lto_get_section_data (file_data, LTO_section_asm,
1535 NULL, &len);
1536 const struct lto_simple_header_with_strings *header
1537 = (const struct lto_simple_header_with_strings *) data;
1538 int string_offset;
1539 struct data_in *data_in;
1540 tree str;
1542 if (! data)
1543 return;
1545 string_offset = sizeof (*header) + header->main_size;
1547 lto_input_block ib (data + sizeof (*header), header->main_size,
1548 file_data->mode_table);
1550 data_in = lto_data_in_create (file_data, data + string_offset,
1551 header->string_size, vNULL);
1553 while ((str = streamer_read_string_cst (data_in, &ib)))
1555 asm_node *node = symtab->finalize_toplevel_asm (str);
1556 node->order = streamer_read_hwi (&ib) + order_base;
1557 if (node->order >= symtab->order)
1558 symtab->order = node->order + 1;
1561 lto_data_in_delete (data_in);
1563 lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
1567 /* Input mode table. */
1569 void
1570 lto_input_mode_table (struct lto_file_decl_data *file_data)
1572 size_t len;
1573 const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
1574 NULL, &len);
1575 if (! data)
1577 internal_error ("cannot read LTO mode table from %s",
1578 file_data->file_name);
1579 return;
1582 unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
1583 file_data->mode_table = table;
1584 const struct lto_simple_header_with_strings *header
1585 = (const struct lto_simple_header_with_strings *) data;
1586 int string_offset;
1587 struct data_in *data_in;
1588 string_offset = sizeof (*header) + header->main_size;
1590 lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
1591 data_in = lto_data_in_create (file_data, data + string_offset,
1592 header->string_size, vNULL);
1593 bitpack_d bp = streamer_read_bitpack (&ib);
1595 table[VOIDmode] = VOIDmode;
1596 table[BLKmode] = BLKmode;
1597 unsigned int m;
1598 while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
1600 enum mode_class mclass
1601 = bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
1602 unsigned int size = bp_unpack_value (&bp, 8);
1603 unsigned int prec = bp_unpack_value (&bp, 16);
1604 machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
1605 unsigned int nunits = bp_unpack_value (&bp, 8);
1606 unsigned int ibit = 0, fbit = 0;
1607 unsigned int real_fmt_len = 0;
1608 const char *real_fmt_name = NULL;
1609 switch (mclass)
1611 case MODE_FRACT:
1612 case MODE_UFRACT:
1613 case MODE_ACCUM:
1614 case MODE_UACCUM:
1615 ibit = bp_unpack_value (&bp, 8);
1616 fbit = bp_unpack_value (&bp, 8);
1617 break;
1618 case MODE_FLOAT:
1619 case MODE_DECIMAL_FLOAT:
1620 real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
1621 &real_fmt_len);
1622 break;
1623 default:
1624 break;
1626 /* First search from GET_CLASS_NARROWEST_MODE towards wider modes;
1627 if not found, fall back to scanning all modes. */
1628 int pass;
1629 for (pass = 0; pass < 2; pass++)
1630 for (machine_mode mr = pass ? VOIDmode
1631 : GET_CLASS_NARROWEST_MODE (mclass);
1632 pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
1633 pass ? mr = (machine_mode) (mr + 1)
1634 : mr = GET_MODE_WIDER_MODE (mr).else_void ())
1635 if (GET_MODE_CLASS (mr) != mclass
1636 || GET_MODE_SIZE (mr) != size
1637 || GET_MODE_PRECISION (mr) != prec
1638 || (inner == m
1639 ? GET_MODE_INNER (mr) != mr
1640 : GET_MODE_INNER (mr) != table[(int) inner])
1641 || GET_MODE_IBIT (mr) != ibit
1642 || GET_MODE_FBIT (mr) != fbit
1643 || GET_MODE_NUNITS (mr) != nunits)
1644 continue;
1645 else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
1646 && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
1647 continue;
1648 else
1650 table[m] = mr;
1651 pass = 2;
1652 break;
1654 unsigned int mname_len;
1655 const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
1656 if (pass == 2)
1658 switch (mclass)
1660 case MODE_VECTOR_INT:
1661 case MODE_VECTOR_FLOAT:
1662 case MODE_VECTOR_FRACT:
1663 case MODE_VECTOR_UFRACT:
1664 case MODE_VECTOR_ACCUM:
1665 case MODE_VECTOR_UACCUM:
1666 /* For unsupported vector modes just use BLKmode,
1667 if the scalar mode is supported. */
1668 if (table[(int) inner] != VOIDmode)
1670 table[m] = BLKmode;
1671 break;
1673 /* FALLTHRU */
1674 default:
1675 fatal_error (UNKNOWN_LOCATION, "unsupported mode %s\n", mname);
1676 break;
1680 lto_data_in_delete (data_in);
1682 lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
1686 /* Initialization for the LTO reader. */
1688 void
1689 lto_reader_init (void)
1691 lto_streamer_init ();
1692 file_name_hash_table
1693 = new hash_table<freeing_string_slot_hasher> (37);
1697 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1698 table to use with LEN strings. RESOLUTIONS is the vector of linker
1699 resolutions (NULL if not using a linker plugin). */
1701 struct data_in *
1702 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1703 unsigned len,
1704 vec<ld_plugin_symbol_resolution_t> resolutions)
1706 struct data_in *data_in = new (struct data_in);
1707 data_in->file_data = file_data;
1708 data_in->strings = strings;
1709 data_in->strings_len = len;
1710 data_in->globals_resolution = resolutions;
1711 data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1712 return data_in;
1716 /* Remove DATA_IN. */
1718 void
1719 lto_data_in_delete (struct data_in *data_in)
1721 data_in->globals_resolution.release ();
1722 streamer_tree_cache_delete (data_in->reader_cache);
1723 delete data_in;