/* Read the GIMPLE representation from a file stream.

   Copyright (C) 2009-2019 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
   Re-implemented by Diego Novillo <dnovillo@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "gimple-streamer.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
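
/* Hash traits for the file-name table below: each entry owns its string,
   so both the string and the slot itself are freed when an entry is
   removed.  */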
struct freeing_string_slot_hasher : string_slot_hasher
{
  static inline void remove (value_type *);
};

inline void
freeing_string_slot_hasher::remove (value_type *v)
{
  free (CONST_CAST (void *, (const void *) v->s));
  free (v);
}

/* The table to hold the file names.  */
static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
/* Check that tag ACTUAL has one of the given values.  NTAGS is the
   number of valid tag values to check.  */

void
lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
{
  va_list ap;
  int i;

  va_start (ap, ntags);
  for (i = 0; i < ntags; i++)
    if ((unsigned) actual == va_arg (ap, unsigned))
      {
        va_end (ap);
        return;
      }

  va_end (ap);
  internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
}
/* Read LENGTH bytes from input block IB to ADDR.  */

void
lto_input_data_block (struct lto_input_block *ib, void *addr, size_t length)
{
  size_t i;
  unsigned char *const buffer = (unsigned char *) addr;

  for (i = 0; i < length; i++)
    buffer[i] = streamer_read_uchar (ib);
}
/* Lookup STRING in file_name_hash_table.  If found, return the existing
   string, otherwise insert STRING as the canonical version.  */

static const char *
canon_file_name (const char *string)
{
  string_slot **slot;
  struct string_slot s_slot;
  size_t len = strlen (string);

  s_slot.s = string;
  s_slot.len = len;

  slot = file_name_hash_table->find_slot (&s_slot, INSERT);
  if (*slot == NULL)
    {
      char *saved_string;
      struct string_slot *new_slot;

      saved_string = (char *) xmalloc (len + 1);
      new_slot = XCNEW (struct string_slot);
      memcpy (saved_string, string, len + 1);
      new_slot->s = saved_string;
      new_slot->len = len;
      *slot = new_slot;
      return saved_string;
    }
  else
    {
      struct string_slot *old_slot = *slot;
      return old_slot->s;
    }
}
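
/* Because canon_file_name hands out a single canonical copy per file name,
   the location cache below can compare file names with pointer equality
   instead of strcmp when deciding whether the line table needs a new file
   entry.  */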
/* Pointer to currently alive instance of lto_location_cache.  */

lto_location_cache *lto_location_cache::current_cache;

/* Sort locations in source order.  Start with file from last application.  */

int
lto_location_cache::cmp_loc (const void *pa, const void *pb)
{
  const cached_location *a = ((const cached_location *)pa);
  const cached_location *b = ((const cached_location *)pb);
  const char *current_file = current_cache->current_file;
  int current_line = current_cache->current_line;

  if (a->file == current_file && b->file != current_file)
    return -1;
  if (a->file != current_file && b->file == current_file)
    return 1;
  if (a->file == current_file && b->file == current_file)
    {
      if (a->line == current_line && b->line != current_line)
        return -1;
      if (a->line != current_line && b->line == current_line)
        return 1;
    }
  if (a->file != b->file)
    return strcmp (a->file, b->file);
  if (a->sysp != b->sysp)
    return a->sysp ? 1 : -1;
  if (a->line != b->line)
    return a->line - b->line;
  return a->col - b->col;
}
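
/* The ordering above is chosen so that entries for the file (and line) the
   line table currently has open sort first; apply_location_cache below can
   then reuse the open map and needs as few LC_RENAME entries and
   linemap_line_start calls as possible.  */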
/* Apply all changes in location cache.  Add locations into linemap and patch
   trees.  */

void
lto_location_cache::apply_location_cache ()
{
  static const char *prev_file;
  if (!loc_cache.length ())
    return;
  if (loc_cache.length () > 1)
    loc_cache.qsort (cmp_loc);

  for (unsigned int i = 0; i < loc_cache.length (); i++)
    {
      struct cached_location loc = loc_cache[i];

      if (current_file != loc.file)
        linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
                     loc.sysp, loc.file, loc.line);
      else if (current_line != loc.line)
        {
          int max = loc.col;

          for (unsigned int j = i + 1; j < loc_cache.length (); j++)
            if (loc.file != loc_cache[j].file
                || loc.line != loc_cache[j].line)
              break;
            else if (max < loc_cache[j].col)
              max = loc_cache[j].col;
          linemap_line_start (line_table, loc.line, max + 1);
        }
      gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
      if (current_file == loc.file && current_line == loc.line
          && current_col == loc.col)
        *loc.loc = current_loc;
      else
        current_loc = *loc.loc = linemap_position_for_column (line_table,
                                                              loc.col);
      current_line = loc.line;
      prev_file = current_file = loc.file;
      current_col = loc.col;
    }
  loc_cache.truncate (0);
}
/* Tree merging did not succeed; mark all changes in the cache as accepted.  */

void
lto_location_cache::accept_location_cache ()
{
  gcc_assert (current_cache == this);
  accepted_length = loc_cache.length ();
}

/* Tree merging did succeed; throw away recent changes.  */

void
lto_location_cache::revert_location_cache ()
{
  loc_cache.truncate (accepted_length);
}
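
/* Usage pattern: locations are pushed into the cache while trees are being
   streamed in.  Once the tree merger has decided the fate of the freshly
   read trees, the caller either accepts the pending entries (merging did not
   succeed, the new trees prevail) or reverts them (merging succeeded, the
   new trees and their locations are discarded) before apply_location_cache
   materializes the line table entries.  */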
/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */

void
lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
                                    struct data_in *data_in)
{
  static const char *stream_file;
  static int stream_line;
  static int stream_col;
  static bool stream_sysp;
  bool file_change, line_change, column_change;

  gcc_assert (current_cache == this);

  *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);

  if (*loc < RESERVED_LOCATION_COUNT)
    return;

  /* Keep value RESERVED_LOCATION_COUNT in *loc as a marker; the real
     location is filled in below or when apply_location_cache runs.  */

  file_change = bp_unpack_value (bp, 1);
  line_change = bp_unpack_value (bp, 1);
  column_change = bp_unpack_value (bp, 1);

  if (file_change)
    {
      stream_file = canon_file_name (bp_unpack_string (data_in, bp));
      stream_sysp = bp_unpack_value (bp, 1);
    }

  if (line_change)
    stream_line = bp_unpack_var_len_unsigned (bp);

  if (column_change)
    stream_col = bp_unpack_var_len_unsigned (bp);

  /* This optimization saves location cache operations during gimple
     streaming.  */
  if (current_file == stream_file && current_line == stream_line
      && current_col == stream_col && current_sysp == stream_sysp)
    {
      *loc = current_loc;
      return;
    }

  struct cached_location entry
    = {stream_file, loc, stream_line, stream_col, stream_sysp};
  loc_cache.safe_push (entry);
}
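
/* The on-disk encoding unpacked above: first a small integer in the range
   [0, RESERVED_LOCATION_COUNT].  Reserved locations are stored directly,
   while the maximum value means an ordinary location follows as three
   "changed" bits plus the file name and system-header flag, the line, and
   the column, each streamed only when it differs from the previously
   streamed location.  */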
/* Read a location bitpack from input block IB and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */

void
lto_input_location (location_t *loc, struct bitpack_d *bp,
                    struct data_in *data_in)
{
  data_in->location_cache.input_location (loc, bp, data_in);
}

/* Read location and return it instead of going through location caching.
   This should be used only when the resulting location is not going to be
   discarded.  */

location_t
stream_input_location_now (struct bitpack_d *bp, struct data_in *data_in)
{
  location_t loc;
  stream_input_location (&loc, bp, data_in);
  data_in->location_cache.apply_location_cache ();
  return loc;
}
/* Read a reference to a tree node from DATA_IN using input block IB.
   TAG is the expected node that should be found in IB, if TAG belongs
   to one of the indexable trees, expect to read a reference index to
   be looked up in one of the symbol tables, otherwise read the physical
   representation of the tree using stream_read_tree.  FN is the
   function scope for the read tree.  */

static tree
lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
                    struct function *fn, enum LTO_tags tag)
{
  unsigned HOST_WIDE_INT ix_u;
  tree result = NULL_TREE;

  lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);

  switch (tag)
    {
    case LTO_type_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
      break;

    case LTO_ssa_name_ref:
      ix_u = streamer_read_uhwi (ib);
      result = (*SSANAMES (fn))[ix_u];
      break;

    case LTO_field_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
      break;

    case LTO_function_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
      break;

    case LTO_type_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
      break;

    case LTO_namespace_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
      break;

    case LTO_global_decl_ref:
    case LTO_result_decl_ref:
    case LTO_const_decl_ref:
    case LTO_imported_decl_ref:
    case LTO_label_decl_ref:
    case LTO_translation_unit_decl_ref:
    case LTO_namelist_decl_ref:
      ix_u = streamer_read_uhwi (ib);
      result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (result);

  return result;
}
/* Read and return a double-linked list of catch handlers from input
   block IB, using descriptors in DATA_IN.  */

static struct eh_catch_d *
lto_input_eh_catch_list (struct lto_input_block *ib, struct data_in *data_in,
                         eh_catch *last_p)
{
  eh_catch first;
  enum LTO_tags tag;

  *last_p = first = NULL;
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      tree list;
      eh_catch n;

      lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);

      /* Read the catch node.  */
      n = ggc_cleared_alloc<eh_catch_d> ();
      n->type_list = stream_read_tree (ib, data_in);
      n->filter_list = stream_read_tree (ib, data_in);
      n->label = stream_read_tree (ib, data_in);

      /* Register all the types in N->FILTER_LIST.  */
      for (list = n->filter_list; list; list = TREE_CHAIN (list))
        add_type_for_runtime (TREE_VALUE (list));

      /* Chain N to the end of the list.  */
      if (*last_p)
        (*last_p)->next_catch = n;
      n->prev_catch = *last_p;
      *last_p = n;

      /* Set the head of the list the first time through the loop.  */
      if (first == NULL)
        first = n;

      tag = streamer_read_record_start (ib);
    }

  return first;
}
/* Read and return EH region IX from input block IB, using descriptors
   in DATA_IN.  */

static eh_region
input_eh_region (struct lto_input_block *ib, struct data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_region r;

  /* Read the region header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  r = ggc_cleared_alloc<eh_region_d> ();
  r->index = streamer_read_hwi (ib);

  gcc_assert (r->index == ix);

  /* Read all the region pointers as region numbers.  We'll fix up
     the pointers once the whole array has been read.  */
  r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
  r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);

  switch (tag)
    {
    case LTO_ert_cleanup:
      r->type = ERT_CLEANUP;
      break;

    case LTO_ert_try:
      {
        struct eh_catch_d *last_catch;
        r->type = ERT_TRY;
        r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
                                                           &last_catch);
        r->u.eh_try.last_catch = last_catch;
        break;
      }

    case LTO_ert_allowed_exceptions:
      {
        tree l;

        r->type = ERT_ALLOWED_EXCEPTIONS;
        r->u.allowed.type_list = stream_read_tree (ib, data_in);
        r->u.allowed.label = stream_read_tree (ib, data_in);
        r->u.allowed.filter = streamer_read_uhwi (ib);

        for (l = r->u.allowed.type_list; l; l = TREE_CHAIN (l))
          add_type_for_runtime (TREE_VALUE (l));
      }
      break;

    case LTO_ert_must_not_throw:
      {
        r->type = ERT_MUST_NOT_THROW;
        r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
        bitpack_d bp = streamer_read_bitpack (ib);
        r->u.must_not_throw.failure_loc
          = stream_input_location_now (&bp, data_in);
      }
      break;

    default:
      gcc_unreachable ();
    }

  r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);

  return r;
}
/* Read and return EH landing pad IX from input block IB, using descriptors
   in DATA_IN.  */

static eh_landing_pad
input_eh_lp (struct lto_input_block *ib, struct data_in *data_in, int ix)
{
  enum LTO_tags tag;
  eh_landing_pad lp;

  /* Read the landing pad header.  */
  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return NULL;

  lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);

  lp = ggc_cleared_alloc<eh_landing_pad_d> ();
  lp->index = streamer_read_hwi (ib);
  gcc_assert (lp->index == ix);
  lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
  lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
  lp->post_landing_pad = stream_read_tree (ib, data_in);

  return lp;
}
/* After reading the EH regions, pointers to peer and children regions
   are region numbers.  This converts all these region numbers into
   real pointers into the rematerialized regions for FN.  ROOT_REGION
   is the region number for the root EH region in FN.  */

static void
fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
{
  unsigned i;
  vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
  vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
  eh_region r;
  eh_landing_pad lp;

  gcc_assert (eh_array && lp_array);

  gcc_assert (root_region >= 0);
  fn->eh->region_tree = (*eh_array)[root_region];

#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
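
  /* Each streamed pointer field currently holds the index that was written
     out, cast to a pointer.  The two macros above cast it back to an index
     and replace it with the address of the already rematerialized region or
     landing pad stored at that slot.  */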
  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH region array.  */
  FOR_EACH_VEC_ELT (*eh_array, i, r)
    {
      /* The array may contain NULL regions.  */
      if (r == NULL)
        continue;

      gcc_assert (i == (unsigned) r->index);
      FIXUP_EH_REGION (r->outer);
      FIXUP_EH_REGION (r->inner);
      FIXUP_EH_REGION (r->next_peer);
      FIXUP_EH_LP (r->landing_pads);
    }

  /* Convert all the index numbers stored in pointer fields into
     pointers to the corresponding slots in the EH landing pad array.  */
  FOR_EACH_VEC_ELT (*lp_array, i, lp)
    {
      /* The array may contain NULL landing pads.  */
      if (lp == NULL)
        continue;

      gcc_assert (i == (unsigned) lp->index);
      FIXUP_EH_LP (lp->next_lp);
      FIXUP_EH_REGION (lp->region);
    }

#undef FIXUP_EH_REGION
#undef FIXUP_EH_LP
}
/* Initialize EH support.  */

static void
lto_init_eh (void)
{
  static bool eh_initialized_p = false;

  if (eh_initialized_p)
    return;

  /* Contrary to most other FEs, we only initialize EH support when at
     least one of the files in the set contains exception regions in
     it.  Since this happens much later than the call to init_eh in
     lang_dependent_init, we have to set flag_exceptions and call
     init_eh again to initialize the EH tables.  */
  flag_exceptions = 1;
  init_eh ();

  eh_initialized_p = true;
}
/* Read the exception table for FN from IB using the data descriptors
   in DATA_IN.  */

static void
input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
                  struct function *fn)
{
  HOST_WIDE_INT i, root_region, len;
  enum LTO_tags tag;

  tag = streamer_read_record_start (ib);
  if (tag == LTO_null)
    return;

  lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);

  /* If the file contains EH regions, then it was compiled with
     -fexceptions.  In that case, initialize the backend EH
     machinery.  */
  lto_init_eh ();

  gcc_assert (fn->eh);

  root_region = streamer_read_hwi (ib);
  gcc_assert (root_region == (int) root_region);

  /* Read the EH region array.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->region_array, len);
      for (i = 0; i < len; i++)
        {
          eh_region r = input_eh_region (ib, data_in, i);
          (*fn->eh->region_array)[i] = r;
        }
    }

  /* Read the landing pads.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->lp_array, len);
      for (i = 0; i < len; i++)
        {
          eh_landing_pad lp = input_eh_lp (ib, data_in, i);
          (*fn->eh->lp_array)[i] = lp;
        }
    }

  /* Read the runtime type data.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      vec_safe_grow_cleared (fn->eh->ttype_data, len);
      for (i = 0; i < len; i++)
        {
          tree ttype = stream_read_tree (ib, data_in);
          (*fn->eh->ttype_data)[i] = ttype;
        }
    }

  /* Read the table of action chains.  */
  len = streamer_read_hwi (ib);
  gcc_assert (len == (int) len);
  if (len > 0)
    {
      if (targetm.arm_eabi_unwinder)
        {
          vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
          for (i = 0; i < len; i++)
            {
              tree t = stream_read_tree (ib, data_in);
              (*fn->eh->ehspec_data.arm_eabi)[i] = t;
            }
        }
      else
        {
          vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
          for (i = 0; i < len; i++)
            {
              uchar c = streamer_read_uchar (ib);
              (*fn->eh->ehspec_data.other)[i] = c;
            }
        }
    }

  /* Reconstruct the EH region tree by fixing up the peer/children
     pointers.  */
  fixup_eh_region_pointers (fn, root_region);

  tag = streamer_read_record_start (ib);
  lto_tag_check_range (tag, LTO_null, LTO_null);
}
/* Make a new basic block with index INDEX in function FN.  */

static basic_block
make_new_block (struct function *fn, unsigned int index)
{
  basic_block bb = alloc_block ();
  bb->index = index;
  SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
  n_basic_blocks_for_fn (fn)++;
  return bb;
}
/* Read the CFG for function FN from input block IB.  */

static void
input_cfg (struct lto_input_block *ib, struct data_in *data_in,
           struct function *fn)
{
  unsigned int bb_count;
  basic_block p_bb;
  unsigned int i;
  int index;

  init_empty_tree_cfg_for_function (fn);
  init_ssa_operands (fn);

  profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
                                                   PROFILE_LAST);

  bb_count = streamer_read_uhwi (ib);

  last_basic_block_for_fn (fn) = bb_count;
  if (bb_count > basic_block_info_for_fn (fn)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);

  if (bb_count > label_to_block_map_for_fn (fn)->length ())
    vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);

  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      unsigned int edge_count;

      if (bb == NULL)
        bb = make_new_block (fn, index);

      edge_count = streamer_read_uhwi (ib);

      /* Connect up the CFG.  */
      for (i = 0; i < edge_count; i++)
        {
          unsigned int dest_index;
          unsigned int edge_flags;
          basic_block dest;
          profile_probability probability;
          edge e;

          dest_index = streamer_read_uhwi (ib);
          probability = profile_probability::stream_in (ib);
          edge_flags = streamer_read_uhwi (ib);

          dest = BASIC_BLOCK_FOR_FN (fn, dest_index);

          if (dest == NULL)
            dest = make_new_block (fn, dest_index);

          e = make_edge (bb, dest, edge_flags);
          e->probability = probability;
        }

      index = streamer_read_hwi (ib);
    }

  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  index = streamer_read_hwi (ib);
  while (index != -1)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
      bb->prev_bb = p_bb;
      p_bb->next_bb = bb;
      p_bb = bb;
      index = streamer_read_hwi (ib);
    }

  /* ??? The cfgloop interface is tied to cfun.  */
  gcc_assert (cfun == fn);

  /* Input the loop tree.  */
  unsigned n_loops = streamer_read_uhwi (ib);
  if (n_loops == 0)
    return;

  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (fn, loops, n_loops);
  set_loops_for_fn (fn, loops);

  /* Input each loop and associate it with its loop header so
     flow_loops_find can rebuild the loop tree.  */
  for (unsigned i = 1; i < n_loops; ++i)
    {
      int header_index = streamer_read_hwi (ib);
      if (header_index == -1)
        {
          loops->larray->quick_push (NULL);
          continue;
        }

      struct loop *loop = alloc_loop ();
      loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
      loop->header->loop_father = loop;

      /* Read everything copy_loop_info copies.  */
      loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
      loop->any_upper_bound = streamer_read_hwi (ib);
      if (loop->any_upper_bound)
        loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
      loop->any_likely_upper_bound = streamer_read_hwi (ib);
      if (loop->any_likely_upper_bound)
        loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
      loop->any_estimate = streamer_read_hwi (ib);
      if (loop->any_estimate)
        loop->nb_iterations_estimate = streamer_read_widest_int (ib);

      /* Read OMP SIMD related info.  */
      loop->safelen = streamer_read_hwi (ib);
      loop->unroll = streamer_read_hwi (ib);
      loop->owned_clique = streamer_read_hwi (ib);
      loop->dont_vectorize = streamer_read_hwi (ib);
      loop->force_vectorize = streamer_read_hwi (ib);
      loop->simduid = stream_read_tree (ib, data_in);

      place_new_loop (fn, loop);

      /* flow_loops_find doesn't like loops not in the tree, hook them
         all as siblings of the tree root temporarily.  */
      flow_loop_tree_node_add (loops->tree_root, loop);
    }

  /* Rebuild the loop tree.  */
  flow_loops_find (loops);
}
/* Read the SSA names array for function FN from DATA_IN using input
   block IB.  */

static void
input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
                 struct function *fn)
{
  unsigned int i, size;

  size = streamer_read_uhwi (ib);
  init_ssanames (fn, size);

  i = streamer_read_uhwi (ib);
  while (i)
    {
      tree ssa_name, name;
      bool is_default_def;

      /* Skip over the elements that had been freed.  */
      while (SSANAMES (fn)->length () < i)
        SSANAMES (fn)->quick_push (NULL_TREE);

      is_default_def = (streamer_read_uchar (ib) != 0);
      name = stream_read_tree (ib, data_in);
      ssa_name = make_ssa_name_fn (fn, name, NULL);

      if (is_default_def)
        {
          set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
          SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
        }

      i = streamer_read_uhwi (ib);
    }
}
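
/* On-disk layout read above: the total size of the SSA name array, followed
   by a sequence of (index, is-default-def byte, SSA_NAME_VAR tree) records,
   terminated by an index of zero.  Gaps between indices correspond to SSA
   names that had been released before streaming.  */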
/* Go through all NODE edges and fixup call_stmt pointers
   so they point to STMTS.  */

static void
fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
                         struct function *fn)
{
#define STMT_UID_NOT_IN_RANGE(uid) \
  (gimple_stmt_max_uid (fn) < uid || uid == 0)

  struct cgraph_edge *cedge;
  struct ipa_ref *ref = NULL;
  unsigned int i;

  for (cedge = node->callees; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
        fatal_error (input_location,
                     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      if (!cedge->call_stmt)
        fatal_error (input_location,
                     "Cgraph edge statement index not found");
    }
  for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
    {
      if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
        fatal_error (input_location,
                     "Cgraph edge statement index out of range");
      cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
      if (!cedge->call_stmt)
        fatal_error (input_location, "Cgraph edge statement index not found");
    }
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->lto_stmt_uid)
      {
        if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
          fatal_error (input_location,
                       "Reference statement index out of range");
        ref->stmt = stmts[ref->lto_stmt_uid - 1];
        if (!ref->stmt)
          fatal_error (input_location, "Reference statement index not found");
      }
}
/* Fixup call_stmt pointers in NODE and all clones.  */

static void
fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
  struct cgraph_node *node;
  struct function *fn;

  while (orig->clone_of)
    orig = orig->clone_of;
  fn = DECL_STRUCT_FUNCTION (orig->decl);

  if (!orig->thunk.thunk_p)
    fixup_call_stmt_edges_1 (orig, stmts, fn);
  if (orig->clones)
    for (node = orig->clones; node != orig;)
      {
        if (!node->thunk.thunk_p)
          fixup_call_stmt_edges_1 (node, stmts, fn);
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != orig && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != orig)
              node = node->next_sibling_clone;
          }
      }
}
/* Input the base body of struct function FN from DATA_IN
   using input block IB.  */

static void
input_struct_function_base (struct function *fn, struct data_in *data_in,
                            struct lto_input_block *ib)
{
  struct bitpack_d bp;
  int len;

  /* Read the static chain and non-local goto save area.  */
  fn->static_chain_decl = stream_read_tree (ib, data_in);
  fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);

  /* Read all the local symbols.  */
  len = streamer_read_hwi (ib);
  if (len > 0)
    {
      int i;
      vec_safe_grow_cleared (fn->local_decls, len);
      for (i = 0; i < len; i++)
        {
          tree t = stream_read_tree (ib, data_in);
          (*fn->local_decls)[i] = t;
        }
    }

  /* Input the current IL state of the function.  */
  fn->curr_properties = streamer_read_uhwi (ib);

  /* Read all the attributes for FN.  */
  bp = streamer_read_bitpack (ib);
  fn->is_thunk = bp_unpack_value (&bp, 1);
  fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
  fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
  fn->returns_struct = bp_unpack_value (&bp, 1);
  fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
  fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
  fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
  fn->after_inlining = bp_unpack_value (&bp, 1);
  fn->stdarg = bp_unpack_value (&bp, 1);
  fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
  fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
  fn->calls_alloca = bp_unpack_value (&bp, 1);
  fn->calls_setjmp = bp_unpack_value (&bp, 1);
  fn->calls_eh_return = bp_unpack_value (&bp, 1);
  fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
  fn->has_simduid_loops = bp_unpack_value (&bp, 1);
  fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
  fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
  fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);

  /* Input the function start and end loci.  */
  fn->function_start_locus = stream_input_location_now (&bp, data_in);
  fn->function_end_locus = stream_input_location_now (&bp, data_in);

  /* Restore the instance discriminators if present.  */
  int instance_number = bp_unpack_value (&bp, 1);
  if (instance_number)
    {
      instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
      maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
    }
}
/* Read the body of function FN_DECL from DATA_IN using input block IB.  */

static void
input_function (tree fn_decl, struct data_in *data_in,
                struct lto_input_block *ib, struct lto_input_block *ib_cfg)
{
  struct function *fn;
  enum LTO_tags tag;
  gimple **stmts;
  basic_block bb;
  struct cgraph_node *node;

  tag = streamer_read_record_start (ib);
  lto_tag_check (tag, LTO_function);

  /* Read decls for parameters and args.  */
  DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
  DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);

  /* Read debug args if available.  */
  unsigned n_debugargs = streamer_read_uhwi (ib);
  if (n_debugargs)
    {
      vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
      vec_safe_grow (*debugargs, n_debugargs);
      for (unsigned i = 0; i < n_debugargs; ++i)
        (**debugargs)[i] = stream_read_tree (ib, data_in);
    }

  /* Read the tree of lexical scopes for the function.  */
  DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
  unsigned block_leaf_count = streamer_read_uhwi (ib);
  while (block_leaf_count--)
    stream_read_tree (ib, data_in);

  if (!streamer_read_uhwi (ib))
    return;

  push_struct_function (fn_decl);
  fn = DECL_STRUCT_FUNCTION (fn_decl);

  /* We input IL in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;

  gimple_register_cfg_hooks ();

  node = cgraph_node::get (fn_decl);
  if (!node)
    node = cgraph_node::create (fn_decl);
  input_struct_function_base (fn, data_in, ib);
  input_cfg (ib_cfg, data_in, fn);

  /* Read all the SSA names.  */
  input_ssa_names (ib, data_in, fn);

  /* Read the exception handling regions in the function.  */
  input_eh_regions (ib, data_in, fn);

  gcc_assert (DECL_INITIAL (fn_decl));
  DECL_SAVED_TREE (fn_decl) = NULL_TREE;

  /* Read all the basic blocks.  */
  tag = streamer_read_record_start (ib);
  while (tag)
    {
      input_bb (ib, tag, data_in, fn,
                node->count_materialization_scale);
      tag = streamer_read_record_start (ib);
    }

  /* Fix up the call statements that are mentioned in the callgraph
     edges.  */
  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
        }
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
        }
    }
  stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi = gsi_start_phis (bb);
      while (!gsi_end_p (bsi))
        {
          gimple *stmt = gsi_stmt (bsi);
          gsi_next (&bsi);
          stmts[gimple_uid (stmt)] = stmt;
        }
      bsi = gsi_start_bb (bb);
      while (!gsi_end_p (bsi))
        {
          gimple *stmt = gsi_stmt (bsi);
          bool remove = false;
          /* If we're recompiling LTO objects with debug stmts but
             we're not supposed to have debug stmts, remove them now.
             We can't remove them earlier because this would cause uid
             mismatches in fixups, but we can do it at this point, as
             long as debug stmts don't require fixups.
             Similarly remove all IFN_*SAN_* internal calls.  */
          if (is_gimple_debug (stmt)
              && (gimple_debug_nonbind_marker_p (stmt)
                  ? !MAY_HAVE_DEBUG_MARKER_STMTS
                  : !MAY_HAVE_DEBUG_BIND_STMTS))
            remove = true;
          if (is_gimple_call (stmt)
              && gimple_call_internal_p (stmt))
            {
              bool replace = false;
              switch (gimple_call_internal_fn (stmt))
                {
                case IFN_UBSAN_NULL:
                  if ((flag_sanitize
                       & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
                    replace = true;
                  break;
                case IFN_UBSAN_BOUNDS:
                  if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
                    replace = true;
                  break;
                case IFN_UBSAN_VPTR:
                  if ((flag_sanitize & SANITIZE_VPTR) == 0)
                    replace = true;
                  break;
                case IFN_UBSAN_OBJECT_SIZE:
                  if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
                    replace = true;
                  break;
                case IFN_UBSAN_PTR:
                  if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
                    replace = true;
                  break;
                case IFN_ASAN_MARK:
                  if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
                    replace = true;
                  break;
                case IFN_TSAN_FUNC_EXIT:
                  if ((flag_sanitize & SANITIZE_THREAD) == 0)
                    replace = true;
                  break;
                default:
                  break;
                }
              if (replace)
                {
                  gimple_call_set_internal_fn (as_a <gcall *> (stmt),
                                               IFN_NOP);
                  update_stmt (stmt);
                }
            }
          if (remove)
            {
              gimple_stmt_iterator gsi = bsi;
              gsi_next (&bsi);
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              gsi_remove (&gsi, true);
            }
          else
            {
              gsi_next (&bsi);
              stmts[gimple_uid (stmt)] = stmt;

              /* Remember that the input function has begin stmt
                 markers, so that we know to expect them when emitting
                 debug info.  */
              if (!cfun->debug_nonbind_markers
                  && gimple_debug_nonbind_marker_p (stmt))
                cfun->debug_nonbind_markers = true;
            }
        }
    }

  /* Set the gimple body to the statement sequence in the entry
     basic block.  FIXME lto, this is fairly hacky.  The existence
     of a gimple body is used by the cgraph routines, but we should
     really use the presence of the CFG.  */
  {
    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
    gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
  }

  update_max_bb_count ();
  fixup_call_stmt_edges (node, stmts);
  execute_all_ipa_stmt_fixups (node, stmts);

  update_ssa (TODO_update_ssa_only_virtuals);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  free (stmts);
  pop_cfun ();
}
/* Read the initializer of variable VAR from DATA_IN using input block IB.  */

static void
input_constructor (tree var, struct data_in *data_in,
                   struct lto_input_block *ib)
{
  DECL_INITIAL (var) = stream_read_tree (ib, data_in);
}
/* Read the body from DATA for function NODE and fill it in.
   FILE_DATA are the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   section type is LTO_section_function_body, FN must be the decl for
   that function.  */

static void
lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
                              const char *data, enum lto_section_type section_type)
{
  const struct lto_function_header *header;
  struct data_in *data_in;
  int cfg_offset;
  int main_offset;
  int string_offset;
  tree fn_decl = node->decl;

  header = (const struct lto_function_header *) data;
  if (TREE_CODE (node->decl) == FUNCTION_DECL)
    {
      cfg_offset = sizeof (struct lto_function_header);
      main_offset = cfg_offset + header->cfg_size;
      string_offset = main_offset + header->main_size;
    }
  else
    {
      main_offset = sizeof (struct lto_function_header);
      string_offset = main_offset + header->main_size;
    }

  data_in = lto_data_in_create (file_data, data + string_offset,
                                header->string_size, vNULL);

  if (section_type == LTO_section_function_body)
    {
      struct lto_in_decl_state *decl_state;
      unsigned from;

      gcc_checking_assert (node);

      /* Use the function's decl state.  */
      decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
      gcc_assert (decl_state);
      file_data->current_decl_state = decl_state;

      /* Set up the struct function.  */
      from = data_in->reader_cache->nodes.length ();
      lto_input_block ib_main (data + main_offset, header->main_size,
                               file_data->mode_table);
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
        {
          lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
                                  file_data->mode_table);
          input_function (fn_decl, data_in, &ib_main, &ib_cfg);
        }
      else
        input_constructor (fn_decl, data_in, &ib_main);
      data_in->location_cache.apply_location_cache ();
      /* And fixup types we streamed locally.  */
      {
        struct streamer_tree_cache_d *cache = data_in->reader_cache;
        unsigned len = cache->nodes.length ();
        unsigned i;
        for (i = len; i-- > from;)
          {
            tree t = streamer_tree_cache_get_tree (cache, i);
            if (t == NULL_TREE)
              continue;

            if (TYPE_P (t))
              {
                gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
                if (type_with_alias_set_p (t)
                    && canonical_type_used_p (t))
                  TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
                if (TYPE_MAIN_VARIANT (t) != t)
                  {
                    gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
                    TYPE_NEXT_VARIANT (t)
                      = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
                    TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
                  }
              }
          }
      }

      /* Restore decl state.  */
      file_data->current_decl_state = file_data->global_decl_state;
    }

  lto_data_in_delete (data_in);
}
/* Read the body of NODE using DATA.  FILE_DATA holds the global
   decls and types.  */

void
lto_input_function_body (struct lto_file_decl_data *file_data,
                         struct cgraph_node *node, const char *data)
{
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}

/* Read the body of NODE using DATA.  FILE_DATA holds the global
   decls and types.  */

void
lto_input_variable_constructor (struct lto_file_decl_data *file_data,
                                struct varpool_node *node, const char *data)
{
  lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
}
/* Queue of accumulated decl -> DIE mappings.  Similar to locations, these
   are only applied to prevailing tree nodes during tree merging.  */
vec<dref_entry> dref_queue;
/* Read the physical representation of a tree node EXPR from
   input block IB using the per-file context in DATA_IN.  */

static void
lto_read_tree_1 (struct lto_input_block *ib, struct data_in *data_in, tree expr)
{
  /* Read all the bitfield values in EXPR.  Note that for LTO, we
     only write language-independent bitfields, so no more unpacking is
     needed.  */
  streamer_read_tree_bitfields (ib, data_in, expr);

  /* Read all the pointer fields in EXPR.  */
  streamer_read_tree_body (ib, data_in, expr);

  /* Read any LTO-specific data not read by the tree streamer.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    DECL_INITIAL (expr) = stream_read_tree (ib, data_in);

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_register_external_die.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *str = streamer_read_string (data_in, ib);
      if (str)
        {
          unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
          dref_entry e = { expr, str, off };
          dref_queue.safe_push (e);
        }
    }
}
/* Read the physical representation of a tree node with tag TAG from
   input block IB using the per-file context in DATA_IN.  */

static tree
lto_read_tree (struct lto_input_block *ib, struct data_in *data_in,
               enum LTO_tags tag, hashval_t hash)
{
  /* Instantiate a new tree node.  */
  tree result = streamer_alloc_tree (ib, data_in, tag);

  /* Enter RESULT in the reader cache.  This will make RESULT
     available so that circular references in the rest of the tree
     structure can be resolved in subsequent calls to stream_read_tree.  */
  streamer_tree_cache_append (data_in->reader_cache, result, hash);

  lto_read_tree_1 (ib, data_in, result);

  /* end_marker = */ streamer_read_uchar (ib);

  return result;
}
/* Populate the reader cache with trees materialized from the SCC
   following in the IB, DATA_IN stream.  */

tree
lto_input_scc (struct lto_input_block *ib, struct data_in *data_in,
               unsigned *len, unsigned *entry_len)
{
  /* A blob of unnamed tree nodes, fill the cache from it and
     recurse.  */
  tree result = NULL_TREE;
  unsigned size = streamer_read_uhwi (ib);
  hashval_t scc_hash = streamer_read_uhwi (ib);
  unsigned scc_entry_len = 1;

  if (size == 1)
    {
      enum LTO_tags tag = streamer_read_record_start (ib);
      result = lto_input_tree_1 (ib, data_in, tag, scc_hash);
    }
  else
    {
      unsigned int first = data_in->reader_cache->nodes.length ();

      scc_entry_len = streamer_read_uhwi (ib);

      /* Materialize size trees by reading their headers.  */
      for (unsigned i = 0; i < size; ++i)
        {
          enum LTO_tags tag = streamer_read_record_start (ib);
          if (tag == LTO_null
              || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
              || tag == LTO_tree_pickle_reference
              || tag == LTO_integer_cst
              || tag == LTO_tree_scc)
            gcc_unreachable ();

          result = streamer_alloc_tree (ib, data_in, tag);
          streamer_tree_cache_append (data_in->reader_cache, result, 0);
        }

      /* Read the tree bitpacks and references.  */
      for (unsigned i = 0; i < size; ++i)
        {
          result = streamer_tree_cache_get_tree (data_in->reader_cache,
                                                 first + i);
          lto_read_tree_1 (ib, data_in, result);
          /* end_marker = */ streamer_read_uchar (ib);
        }
    }

  *len = size;
  *entry_len = scc_entry_len;
  return result;
}
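
/* *LEN returns the number of trees in the SCC and *ENTRY_LEN the length of
   the SCC entry as recorded by the streamer-out (used by the tree merger);
   the plain lto_input_tree path below only needs the side effect of filling
   the reader cache and ignores both values.  */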
/* Read a tree from input block IB using the per-file context in
   DATA_IN.  This context is used, for example, to resolve references
   to previously read nodes.  */

static tree
lto_input_tree_1 (struct lto_input_block *ib, struct data_in *data_in,
                  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
         in IB is the index into the table where we expect to find
         that tree.  */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
         the reader cache.  */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
         existing tree integer constant merging code.  */
      tree type = stream_read_tree (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];

      for (i = 0; i < len; i++)
        a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
      result = wide_int_to_tree (type, wide_int::from_array
                                 (a, len, TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc)
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB.  */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}
tree
lto_input_tree (struct lto_input_block *ib, struct data_in *data_in)
{
  enum LTO_tags tag;

  /* Input and skip SCCs.  */
  while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
    {
      unsigned len, entry_len;
      lto_input_scc (ib, data_in, &len, &entry_len);

      /* Register DECLs with the debuginfo machinery.  */
      while (!dref_queue.is_empty ())
        {
          dref_entry e = dref_queue.pop ();
          debug_hooks->register_external_die (e.decl, e.sym, e.off);
        }
    }
  return lto_input_tree_1 (ib, data_in, tag, 0);
}
/* Input toplevel asms.  */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data = lto_get_section_data (file_data, LTO_section_asm,
                                           NULL, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  struct data_in *data_in;
  tree str;

  if (!data)
    return;

  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
                      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
                                header->string_size, vNULL);

  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      if (node->order >= symtab->order)
        symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}
/* Input mode table.  */

void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
                                           NULL, &len);
  if (!data)
    internal_error ("cannot read LTO mode table from %s",
                    file_data->file_name);

  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  struct data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
                                header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
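
  /* TABLE maps the 8-bit mode ids used in this file's LTO byte code onto the
     host compiler's machine_mode values; the loop below decodes each streamed
     mode's properties and searches the local mode list for an equivalent
     mode, starting from the narrowest mode of the matching class.  */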
  unsigned int m;
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      enum mode_class mclass
        = bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;

      switch (mclass)
        {
        case MODE_FRACT:
        case MODE_UFRACT:
        case MODE_ACCUM:
        case MODE_UACCUM:
          ibit = bp_unpack_value (&bp, 8);
          fbit = bp_unpack_value (&bp, 8);
          break;
        case MODE_FLOAT:
        case MODE_DECIMAL_FLOAT:
          real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
                                                    &real_fmt_len);
          break;
        default:
          break;
        }

      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes,
         if not found, fallback to all modes.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
        for (machine_mode mr = pass ? VOIDmode
                                    : GET_CLASS_NARROWEST_MODE (mclass);
             pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
             pass ? mr = (machine_mode) (mr + 1)
                  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
          if (GET_MODE_CLASS (mr) != mclass
              || maybe_ne (GET_MODE_SIZE (mr), size)
              || maybe_ne (GET_MODE_PRECISION (mr), prec)
              || (inner == m
                  ? GET_MODE_INNER (mr) != mr
                  : GET_MODE_INNER (mr) != table[(int) inner])
              || GET_MODE_IBIT (mr) != ibit
              || GET_MODE_FBIT (mr) != fbit
              || maybe_ne (GET_MODE_NUNITS (mr), nunits))
            continue;
          else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
                   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
            continue;
          else
            {
              table[m] = mr;
              pass = 2;
              break;
            }

      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      if (pass == 2)
        {
          switch (mclass)
            {
            case MODE_VECTOR_BOOL:
            case MODE_VECTOR_INT:
            case MODE_VECTOR_FLOAT:
            case MODE_VECTOR_FRACT:
            case MODE_VECTOR_UFRACT:
            case MODE_VECTOR_ACCUM:
            case MODE_VECTOR_UACCUM:
              /* For unsupported vector modes just use BLKmode,
                 if the scalar mode is supported.  */
              if (table[(int) inner] != VOIDmode)
                {
                  table[m] = BLKmode;
                  break;
                }
              /* FALLTHRU */
            default:
              fatal_error (UNKNOWN_LOCATION, "unsupported mode %qs", mname);
              break;
            }
        }
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}
/* Initialization for the LTO reader.  */

void
lto_reader_init (void)
{
  lto_streamer_init ();
  file_name_hash_table
    = new hash_table<freeing_string_slot_hasher> (37);
}
/* Create a new data_in object for FILE_DATA.  STRINGS is the string
   table to use with LEN strings.  RESOLUTIONS is the vector of linker
   resolutions (NULL if not using a linker plugin).  */

struct data_in *
lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
                    unsigned len,
                    vec<ld_plugin_symbol_resolution_t> resolutions)
{
  struct data_in *data_in = new (struct data_in);
  data_in->file_data = file_data;
  data_in->strings = strings;
  data_in->strings_len = len;
  data_in->globals_resolution = resolutions;
  data_in->reader_cache = streamer_tree_cache_create (false, false, true);
  return data_in;
}
/* Remove DATA_IN.  */

void
lto_data_in_delete (struct data_in *data_in)
{
  data_in->globals_resolution.release ();
  streamer_tree_cache_delete (data_in->reader_cache);
  delete data_in;
}