/* Support for thunks in symbol table.
   Copyright (C) 2003-2023 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
#include "lto-streamer.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "cfgcleanup.h"
#include "tree-pass.h"
#include "data-streamer.h"
#include "langhooks.h"

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;

struct GTY (()) unprocessed_thunk
{
  cgraph_node *node;
  thunk_info *info;
};

/* To be PCH safe we store thunks into a vector before end of compilation.  */
static GTY (()) vec<unprocessed_thunk, va_gc> *thunks;

namespace {

/* Function summary for thunk_infos.  */
class GTY ((user)) thunk_infos_t: public function_summary <thunk_info *>
{
public:
  thunk_infos_t (symbol_table *table, bool ggc):
    function_summary<thunk_info *> (table, ggc) { }

  /* Hook that is called by summary when a node is duplicated.  */
  void duplicate (cgraph_node *node,
		  cgraph_node *node2,
		  thunk_info *data,
		  thunk_info *data2) final override;
};

/* Duplication hook.  */
void
thunk_infos_t::duplicate (cgraph_node *, cgraph_node *,
			  thunk_info *src, thunk_info *dst)
{
  *dst = *src;
}

} /* anon namespace */

/* Return thunk_info possibly creating new one.  */
thunk_info *
thunk_info::get_create (cgraph_node *node)
{
  if (!symtab->m_thunks)
    {
      symtab->m_thunks
	= new (ggc_alloc_no_dtor<thunk_infos_t> ())
	  thunk_infos_t (symtab, true);
      symtab->m_thunks->disable_insertion_hook ();
    }
  return symtab->m_thunks->get_create (node);
}
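
/* For illustration, a typical caller that wants to attach thunk data to a
   node does something like the following (the particular field values are
   made up for the example):

     thunk_info *ti = thunk_info::get_create (node);
     ti->fixed_offset = offset;
     ti->this_adjusting = true;

   The summary is allocated lazily on first use and its insertion hook is
   disabled, so creating further cgraph nodes does not allocate empty
   thunk_info entries behind our back.  */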

/* Stream out THIS to OB.  */
void
thunk_info::stream_out (lto_simple_output_block *ob)
{
  streamer_write_uhwi_stream
     (ob->main_stream,
      1 + (this_adjusting != 0) * 2
      + (virtual_offset_p != 0) * 4);
  streamer_write_uhwi_stream (ob->main_stream, fixed_offset);
  streamer_write_uhwi_stream (ob->main_stream, virtual_value);
  streamer_write_uhwi_stream (ob->main_stream, indirect_offset);
}

/* Stream in THIS from IB.  */
void
thunk_info::stream_in (class lto_input_block *ib)
{
  int type = streamer_read_uhwi (ib);
  fixed_offset = streamer_read_uhwi (ib);
  virtual_value = streamer_read_uhwi (ib);
  indirect_offset = streamer_read_uhwi (ib);

  this_adjusting = (type & 2);
  virtual_offset_p = (type & 4);
}
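
/* A note on the encoding used by stream_out/stream_in above: the flags are
   packed into a single uhwi whose bit 0 is always set (so a present
   thunk_info never streams as zero), bit 1 is this_adjusting and bit 2 is
   virtual_offset_p.  For example, a this-adjusting thunk with a virtual
   offset is written as 1 + 2 + 4 = 7 and decoded again with (type & 2)
   and (type & 4).  */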

/* Dump THIS to F.  */
void
thunk_info::dump (FILE *f)
{
  if (alias)
    fprintf (f, " of %s (asm:%s)",
	     lang_hooks.decl_printable_name (alias, 2),
	     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (alias)));
  fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
	      "has virtual offset %i\n",
	   (int)fixed_offset,
	   (int)virtual_value,
	   (int)indirect_offset,
	   (int)virtual_offset_p);
}

/* Hash THIS.  */
hashval_t
thunk_info::hash ()
{
  inchash::hash hstate;
  hstate.add_hwi (fixed_offset);
  hstate.add_hwi (virtual_value);
  hstate.add_flag (this_adjusting);
  hstate.add_flag (virtual_offset_p);
  return hstate.end ();
}

/* Add unprocessed thunk.  */
void
thunk_info::register_early (cgraph_node *node)
{
  unprocessed_thunk entry = {node, new (ggc_alloc <thunk_info> ())
			     thunk_info (*this)};
  vec_safe_push (thunks, entry);
}

/* Attach recorded thunks to cgraph_nodes.
   All this is done only to avoid need to stream summaries to PCH.  */
void
thunk_info::process_early_thunks ()
{
  unprocessed_thunk *e;
  unsigned int i;
  if (!thunks)
    return;

  FOR_EACH_VEC_ELT (*thunks, i, e)
    *thunk_info::get_create (e->node) = *e->info;
  vec_free (thunks);
  thunks = NULL;
}
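
/* For illustration, the intended calling pattern is roughly the following
   (the local variable names are only for the example): while the symbol
   table is still being constructed, a creator of a thunk fills a local
   thunk_info and queues it,

     thunk_info local;
     local.fixed_offset = offset;
     local.this_adjusting = true;
     local.register_early (node);

   and once the symbol table exists, process_early_thunks () copies every
   queued entry into the real per-node summary.  */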

/* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
   VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
   it is non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  */

tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset,
	      HOST_WIDE_INT indirect_offset)
{
  gassign *stmt;
  tree ret;

  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
	      (ptr, fold_build_pointer_plus_hwi_loc (input_location,
						     ptr,
						     fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
    {
      tree vfunc_type = make_node (FUNCTION_TYPE);
      TREE_TYPE (vfunc_type) = integer_type_node;
      TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
      layout_type (vfunc_type);

      vtable_entry_type = build_pointer_type (vfunc_type);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      vtabletmp
	= create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* Likewise for an offset that is stored in the object that contains the
     vtable.  */
  if (indirect_offset != 0)
    {
      tree offset_ptr, offset_tree;

      /* Get the address of the offset.  */
      offset_ptr
	= create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)),
			  "offset_ptr");
      stmt = gimple_build_assign (offset_ptr,
				  build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      stmt = gimple_build_assign
	      (offset_ptr,
	       fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
						indirect_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
				    "offset");
      stmt = gimple_build_assign (offset_tree,
				  build_simple_mem_ref (offset_ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (VAR_P (ptr))
	ptrtmp = ptr;
      else
	{
	  ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
	  stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
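
/* As a concrete illustration of the adjustments above, consider a multiple
   inheritance hierarchy (example only):

     struct A { virtual int f (); long a; };
     struct B { virtual int g (); long b; };
     struct C : A, B { int g () override; };

   Calling C::g through a B* hands the callee a pointer to the B subobject,
   so the compiler emits a thunk whose body is conceptually

     this' = this + FIXED_OFFSET                        (constant part)
     this' = this' + vcall offset loaded from the vtable  (if VIRTUAL_OFFSET)
     tail call to the real target with this'

   thunk_adjust emits the corresponding pointer arithmetic as GIMPLE at BSI,
   one temporary per memory load, and returns the adjusted pointer.  */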

/* Expand thunk NODE to gimple if possible.
   When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
   no assembler is produced.
   When OUTPUT_ASM_THUNK is true, also produce assembler for
   thunks that are not lowered.  */

bool
expand_thunk (cgraph_node *node, bool output_asm_thunks,
	      bool force_gimple_thunk)
{
  thunk_info *info = thunk_info::get (node);
  bool this_adjusting = info->this_adjusting;
  HOST_WIDE_INT fixed_offset = info->fixed_offset;
  HOST_WIDE_INT virtual_value = info->virtual_value;
  HOST_WIDE_INT indirect_offset = info->indirect_offset;
  tree virtual_offset = NULL;
  tree alias = node->callees->callee->decl;
  tree thunk_fndecl = node->decl;
  tree a;

  if (!force_gimple_thunk
      && this_adjusting
      && indirect_offset == 0
      && !DECL_EXTERNAL (alias)
      && !DECL_STATIC_CHAIN (alias)
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      if (!output_asm_thunks)
	{
	  node->analyzed = true;
	  return false;
	}

      if (in_lto_p)
	node->get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0,
			      flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
      allocate_struct_function (thunk_fndecl, false);
      init_function_start (thunk_fndecl);

      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      node->thunk = false;
      node->analyzed = false;
    }
  else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
    {
      error ("generic thunk code fails for method %qD which uses %<...%>",
	     thunk_fndecl);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      node->analyzed = true;
    }
  else
    {
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;

      gcall *call;
      greturn *ret;
      bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);

      /* We may be called from expand_thunk that releases body except for
	 DECL_ARGUMENTS.  In this case force_gimple_thunk is true.  */
      if (in_lto_p && !force_gimple_thunk)
	node->get_untransformed_body ();

      /* We need to force DECL_IGNORED_P when the thunk is created
	 after early debug was run.  */
      if (force_gimple_thunk)
	DECL_IGNORED_P (thunk_fndecl) = 1;

      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0,
			      flag_function_sections);

      bitmap_obstack_initialize (NULL);

      if (info->virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_CONTEXT (resdecl) = thunk_fndecl;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      profile_count cfg_count = node->count;
      if (!cfg_count.initialized_p ())
	cfg_count = profile_count::from_gcov_type
		      (BB_FREQ_MAX).guessed_local ();

      bb = then_bb = else_bb = return_bb
	= init_lowered_empty_function (thunk_fndecl, true, cfg_count);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype)
	  && (!alias_is_noreturn
	      || TREE_ADDRESSABLE (restype)
	      || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    {
	      restmp = gimple_fold_indirect_ref (resdecl);
	      if (restmp == NULL_TREE)
		restmp = build2 (MEM_REF,
				 TREE_TYPE (TREE_TYPE (resdecl)),
				 resdecl,
				 build_int_cst (TREE_TYPE (resdecl), 0));
	    }
	  else if (!is_gimple_reg_type (restype))
	    {
	      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
		{
		  restmp = resdecl;

		  if (VAR_P (restmp))
		    {
		      add_local_decl (cfun, restmp);
		      BLOCK_VARS (DECL_INITIAL (current_function_decl))
			= restmp;
		    }
		}
	      else
		restmp = create_tmp_var (restype, "retval");
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
	nargs++;
      auto_vec<tree> vargs (nargs);
      i = 0;
      arg = a;
      if (this_adjusting)
	{
	  vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					  virtual_offset, indirect_offset));
	  arg = DECL_CHAIN (a);
	  i = 1;
	}

      if (nargs)
	for (; i < nargs; i++, arg = DECL_CHAIN (arg))
	  {
	    tree tmp = arg;
	    DECL_NOT_GIMPLE_REG_P (arg) = 0;
	    if (!is_gimple_val (arg))
	      {
		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
				      (TREE_TYPE (arg)), "arg");
		gimple *stmt = gimple_build_assign (tmp, arg);
		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      }
	    vargs.quick_push (tmp);
	  }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      node->callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);
      if (DECL_STATIC_CHAIN (alias))
	{
	  tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
	  tree type = TREE_TYPE (p);
	  tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
				  PARM_DECL, create_tmp_var_name ("CHAIN"),
				  type);
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  TREE_USED (decl) = 1;
	  DECL_CONTEXT (decl) = thunk_fndecl;
	  DECL_ARG_TYPE (decl) = type;
	  TREE_READONLY (decl) = 1;

	  struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
	  sf->static_chain_decl = decl;

	  gimple_call_set_chain (call, decl);
	}

      /* Return slot optimization is always possible and in fact required to
	 return values with DECL_BY_REFERENCE.  */
      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
	  && (!is_gimple_reg_type (TREE_TYPE (resdecl))
	      || DECL_BY_REFERENCE (resdecl)))
	gimple_call_set_return_slot_opt (call, true);

      if (restmp)
	{
	  gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!alias_is_noreturn)
	{
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple *stmt;
		  edge e;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, bb);
		  then_bb->count = cfg_count - cfg_count / 16;
		  return_bb = create_basic_block (NULL, then_bb);
		  return_bb->count = cfg_count;
		  else_bb = create_basic_block (NULL, else_bb);
		  else_bb->count = cfg_count / 16;
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  e->probability = profile_probability::guessed_always () / 16;
		  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  e->probability = profile_probability::guessed_always () / 16;
		  make_single_succ_edge (return_bb,
					 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  e->probability = profile_probability::always ();
		  bsi = gsi_last_bb (then_bb);
		}

	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset,
				     indirect_offset);
	      if (true_label)
		{
		  gimple *stmt;
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst
						(TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    {
	      gimple_call_set_tail (call, true);
	      cfun->tail_call_marked = true;
	    }

	  /* Build return value.  */
	  if (!DECL_BY_REFERENCE (resdecl))
	    ret = gimple_build_return (restmp);
	  else
	    ret = gimple_build_return (resdecl);

	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  gimple_call_set_ctrl_altering (call, true);
	  gimple_call_set_tail (call, true);
	  cfun->tail_call_marked = true;
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      update_max_bb_count ();
      profile_status_for_fn (cfun)
	= cfg_count.initialized_p () && cfg_count.ipa_p ()
	  ? PROFILE_READ : PROFILE_GUESSED;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
      checking_verify_flow_info ();
      free_dominance_info (CDI_DOMINATORS);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      node->thunk = false;
      node->lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
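
/* The GIMPLE produced above for a return value adjusting (covariant return)
   thunk has roughly this shape, using the temporaries created by the code
   above:

     retval = target (arg0, arg1, ...);
     if (retval != 0) goto then_bb; else goto else_bb;
     then_bb:   retval = retval + adjustment;
     else_bb:   retval = 0;
     return_bb: return retval;

   For a plain this adjusting thunk no branch is needed: the first argument
   is rewritten via thunk_adjust and the call is marked as a tail call.  */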

void
symtab_thunks_cc_finalize (void)
{
  vtable_entry_type = NULL;
}

#include "gt-symtab-thunks.h"