/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
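/* For illustration only -- this C++ fragment is not part of GCC, and
   the names Foo, init, use and report are hypothetical.  It shows the
   kind of source construct the machinery in this file implements: an
   error in Foo's constructor propagates out of "new Foo" and is
   trapped a frame further up, with no global error flags involved:

	struct Foo
	{
	  Foo () { if (! init ()) throw std::runtime_error ("init"); }
	};

	void
	caller (void)
	{
	  try { use (new Foo); }
	  catch (const std::runtime_error &e) { report (e); }
	}
*/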
#include "coretypes.h"
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-pass.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}


void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
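/* For reference only: the record built above corresponds roughly to
   the following C sketch.  This layout is an assumption made for
   illustration; the authoritative definition lives in unwind-sjlj.c,
   and the size of __jbuf varies with DONT_USE_BUILTIN_SETJMP as
   coded above:

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;	(the __prev field)
	  int call_site;			(__call_site)
	  unsigned long data[4];		(__data, one word each)
	  void *personality;			(__personality)
	  void *lsda;				(__lsda)
	  void *jbuf[];				(__jbuf)
	};

   The cached sjlj_fc_*_ofs values are simply the byte offsets of
   these fields once layout_type has assigned them.  */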
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = cfun->eh->region_array[region_nr];

  gcc_assert (!reg->resume);
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof (int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof (bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* TRY regions are reachable if any of its CATCH regions
		   are reachable.  */
		struct eh_region *c;
		for (c = r->u.try.catch; c; c = c->u.catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels that
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->tree_label)
	region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (region
	  && region->region_number == i
	  && region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  *n = *o;

  n->region_number = o->region_number + cfun->eh->last_region_number;
  gcc_assert (!o->aka);

  return n;
}

static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array,
		       struct eh_region *prev_try)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      if (o->u.try.catch)
	n->u.try.catch = n_array[o->u.try.catch->region_number];
      if (o->u.try.last_catch)
	n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    case ERT_CLEANUP:
      if (o->u.cleanup.prev_try)
	n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number];
      else
	n->u.cleanup.prev_try = prev_try;
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
/* Duplicate the EH regions of IFUN into current function, root the tree in
   OUTER_REGION and remap labels using MAP callback.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
		      void *data, int outer_region)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur, *prev_try;
  int i;

  if (ifun_last_region_number == 0 || !ifun->eh->region_tree)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try = cfun->eh->region_array[outer_region];
	 prev_try && prev_try->type != ERT_TRY;
	 prev_try = prev_try->outer)
      ;

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur);
      if (cur->tree_label)
	{
	  tree newlabel = map (cur->tree_label, data);
	  n_array[i]->tree_label = newlabel;
	}
      else
	n_array[i]->tree_label = NULL;
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array, prev_try);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  gcc_assert (root->outer == NULL);
  if (outer_region > 0)
    {
      struct eh_region *cur = cfun->eh->region_array[outer_region];
      struct eh_region *p = cur->inner;

      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;
      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;

  collect_eh_region_array ();

  return i;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    VARRAY_PUSH_TREE (cfun->eh->ehspec_data, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	VARRAY_PUSH_TREE (cfun->eh->ehspec_data, NULL_TREE);
      else
	VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
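/* For illustration only (a sketch, not GCC's implementation):
   push_uleb128, defined later in this file, appends the DWARF
   "unsigned LEB128" encoding used for the filter values above.
   Each output byte carries the low 7 bits of the remaining value,
   and the high bit is set on every byte except the last:

	static void
	uleb128_sketch (unsigned char *buf, int *len, unsigned int value)
	{
	  do
	    {
	      unsigned char byte = value & 0x7f;
	      value >>= 7;
	      if (value)
		byte |= 0x80;
	      buf[(*len)++] = byte;
	    }
	  while (value);
	}

   E.g. the value 624485 encodes as the three bytes 0xE5 0x8E 0x26.  */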
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
/* Emit SEQ into basic block just before INSN (that is assumed to be
   first instruction of some existing BB and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0,
				   region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;
  basic_block bb;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      edge e;
      int j;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin)
	&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
	    || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
      break;
  if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    {
      rtx last = BB_END (single_succ (ENTRY_BLOCK_PTR));
      for (; ; fn_begin = NEXT_INSN (fn_begin))
	if ((NOTE_P (fn_begin)
	     && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	    || fn_begin == last)
	  break;
      emit_insn_after (seq, fn_begin);
    }
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert libcall
	 is inside the last basic block or after it.  In the other case
	 we need to emit to edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
	{
	  if (insn == cfun->eh->sjlj_exit_after)
	    {
	      if (LABEL_P (insn))
		insn = NEXT_INSN (insn);
	      emit_insn_after (seq, insn);
	      return;
	    }
	  if (insn == BB_END (e->src))
	    break;
	}
      insert_insn_on_edge (seq, e);
    }
}
static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = cfun->eh->region_array[first_reachable]->post_landing_pad;

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = xcalloc (cfun->eh->last_region_number + 1,
		     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (eh)
	rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}
static hashval_t
ehl_hash (const void *pentry)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}

static int
ehl_eq (const void *pentry, const void *pdata)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
	{
	  cfun->eh->region_array[i] = outer;
	}
    }

  if (outer)
    {
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      gcc_assert (try->type == ERT_TRY);

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
		 (void *) &callback);
}

static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}

/* Invoke CALLBACK for every exception region in the current function.  */

void
for_each_eh_region (void (*callback) (struct eh_region *))
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region)
	(*callback) (region);
    }
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
  bool saw_any_handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (tree handled, tree type)
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) == type)
	  return 1;
    }
  else
    {
      for (t = handled; t; t = TREE_CHAIN (t))
	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
	  return 1;
    }

  return 0;
}

/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (struct reachable_info *info,
		       struct eh_region *lp_region, struct eh_region *region)
{
  if (! info)
    return;

  info->saw_any_handlers = true;

  if (cfun->eh->built_landing_pads)
    info->callback (lp_region, info->callback_data);
  else
    info->callback (region, info->callback_data);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
                      struct reachable_info *info)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
         directly to the individual handlers.  In this way we can
         see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
        struct eh_region *c;
        enum reachable_code ret = RNL_NOT_CAUGHT;

        for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
          {
            /* A catch-all handler ends the search.  */
            if (c->u.catch.type_list == NULL)
              {
                add_reachable_handler (info, region, c);
                return RNL_CAUGHT;
              }

            if (type_thrown)
              {
                /* If we have at least one type match, end the search.  */
                tree tp_node = c->u.catch.type_list;

                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (type == type_thrown
                        || (lang_eh_type_covers
                            && (*lang_eh_type_covers) (type, type_thrown)))
                      {
                        add_reachable_handler (info, region, c);
                        return RNL_CAUGHT;
                      }
                  }

                /* If we have definitive information of a match failure,
                   the catch won't trigger.  */
                if (lang_eh_type_covers)
                  return RNL_NOT_CAUGHT;
              }

            /* At this point, we either don't know what type is thrown or
               don't have front-end assistance to help deciding if it is
               covered by one of the types in the list for this region.

               We'd then like to add this region to the list of reachable
               handlers since it is indeed potentially reachable based on the
               information we have.

               Actually, this handler is for sure not reachable if all the
               types it matches have already been caught.  That is, it is only
               potentially reachable if at least one of the types it catches
               has not been previously caught.  */

            if (! info)
              ret = RNL_MAYBE_CAUGHT;
            else
              {
                tree tp_node = c->u.catch.type_list;
                bool maybe_reachable = false;

                /* Compute the potential reachability of this handler and
                   update the list of types caught at the same time.  */
                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (! check_handled (info->types_caught, type))
                      {
                        info->types_caught
                          = tree_cons (NULL, type, info->types_caught);

                        maybe_reachable = true;
                      }
                  }

                if (maybe_reachable)
                  {
                    add_reachable_handler (info, region, c);

                    /* ??? If the catch type is a base class of every allowed
                       type, then we know we can stop the search.  */
                    ret = RNL_MAYBE_CAUGHT;
                  }
              }
          }

        return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }

      /* Collect a list of lists of allowed types for use in detecting
         when a catch may be transformed into a catch-all.  */
      if (info)
        info->types_allowed = tree_cons (NULL_TREE,
                                         region->u.allowed.type_list,
                                         info->types_allowed);

      /* If we have definitive information about the type hierarchy,
         then we can tell if the thrown type will pass through the
         filter.  */
      if (type_thrown && lang_eh_type_covers)
        {
          if (check_handled (region->u.allowed.type_list, type_thrown))
            return RNL_NOT_CAUGHT;
          else
            {
              add_reachable_handler (info, region, region);
              return RNL_CAUGHT;
            }
        }

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
         If we've touched down at some landing pad previous, then the
         explicit function call we generated may be used.  Otherwise
         the call is made by the runtime.

         Before inlining, do not perform this optimization.  We may
         inline a subroutine that contains handlers, and that will
         change the value of saw_any_handlers.  */

      if ((info && info->saw_any_handlers) || !cfun->after_inlining)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }
      else
        return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  gcc_unreachable ();
}
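/* For illustration only (this sketch is not from the original sources):
   the source-level situation the RNL_* codes above distinguish.  Given

	try { may_throw (); }
	catch (Base &b) { ... }
	catch (Derived &d) { ... }   // shadowed if Derived inherits Base

   a throw whose exact type matches a handler's type ends the search
   (RNL_CAUGHT).  Deciding that a thrown Derived is caught by the Base
   handler needs the front end's lang_eh_type_covers hook; without it,
   the walker can only report RNL_MAYBE_CAUGHT for each handler whose
   types have not all been caught already.  */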
/* Invoke CALLBACK on each region reachable from REGION_NUMBER.  */

void
foreach_reachable_handler (int region_number, bool is_resx,
                           void (*callback) (struct eh_region *, void *),
                           void *callback_data)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;

  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
         region itself may have been deleted out from under us.  */
      if (region == NULL)
        return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
        break;
      /* If we have processed one cleanup, there is no point in
         processing any more of them.  Each cleanup will have an edge
         to the next outer cleanup region, so the flow graph will be
         accurate.  */
      if (region->type == ERT_CLEANUP)
        region = region->u.cleanup.prev_try;
      else
        region = region->outer;
    }
}
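/* A minimal usage sketch of the callback interface above (hypothetical
   and not compiled; the helper names here are made up).  */
#if 0
static void
note_reachable (struct eh_region *region ATTRIBUTE_UNUSED, void *data)
{
  int *count = (int *) data;    /* user data threaded through unchanged */
  ++*count;
}

static int
count_reachable_handlers (int region_number)
{
  int count = 0;
  foreach_reachable_handler (region_number, /*is_resx=*/false,
                             note_reachable, &count);
  return count;
}
#endif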
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

static void
arh_to_landing_pad (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  if (! *p_handlers)
    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
}

static void
arh_to_label (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
}
rtx
reachable_handlers (rtx insn)
{
  bool is_resx = false;
  rtx handlers = NULL;
  int region_number;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      region_number = XINT (PATTERN (insn), 0);
      is_resx = true;
    }
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  foreach_reachable_handler (region_number, is_resx,
                             (cfun->eh->built_landing_pads
                              ? arh_to_landing_pad
                              : arh_to_label),
                             &handlers);

  return handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal_1 (int region_number, bool is_resx)
{
  struct eh_region *region;
  tree type_thrown;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
        return false;
      if (how != RNL_NOT_CAUGHT)
        return true;
    }

  return false;
}
bool
can_throw_internal (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external_1 (int region_number, bool is_resx)
{
  struct eh_region *region;
  tree type_thrown;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
bool
can_throw_external (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_external_1 (XINT (PATTERN (insn), 0), true);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
         exception handling region in this function.  We have to
         assume it might throw.  Given that the front end and middle
         ends mark known NOTHROW functions, this isn't so wildly
         inaccurate.  */
      return (CALL_P (insn)
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
}
/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls.  */

void
set_nothrow_function_flags (void)
{
  rtx insn;

  TREE_NOTHROW (current_function_decl) = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        TREE_NOTHROW (current_function_decl) = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
        TREE_NOTHROW (current_function_decl) = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }
}

struct tree_opt_pass pass_set_nothrow_function_flags =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
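/* As a concrete illustration (consult the target headers for the
   authoritative definitions): a target that keeps flag bits in its
   return-address register would define MASK_RETURN_ADDR to strip them
   before use, and a target whose call instruction records an address
   before the actual return point would define RETURN_ADDR_OFFSET so
   the code above can compensate.  */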
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
                          tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this
     also for consistency.  */
  extend = 1;
#endif

  return convert_modes (word_mode, ptr_mode, addr, extend);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};
static int
action_record_eq (const void *pentry, const void *pdata)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (const void *pentry)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
         record is a "self-relative" byte offset, or zero to indicate
         that there is no next record.  So convert the absolute 1 based
         indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
        next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
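/* Worked example (illustrative): starting from an empty array, adding
   a record with filter 1 and no next record gives it 1-based offset 1
   and emits sleb128(1) sleb128(0), leaving 2 bytes.  Adding a second
   record with filter 2 chained to the first gives offset 3; after
   pushing sleb128(2) the array holds 3 bytes, so the absolute index 1
   becomes the self-relative displacement 1 - (3 + 1) = -3, emitted as
   sleb128(-3), i.e. the single byte 0x7d.  */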
static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
         there are special cases to look out for.  If there are *only*
         cleanups along a path, then it compresses to a zero action.
         Further, if there are multiple cleanups along a path, we only
         need to represent one of them, as that is enough to trigger
         entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
        return 0;
      for (c = region->outer; c ; c = c->outer)
        if (c->type == ERT_CLEANUP)
          return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
         If there's a catch-all handler, then we don't need to
         search outer regions.  Use a magic -3 value to record
         that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
        {
          if (c->u.catch.type_list == NULL)
            {
              /* Retrieve the filter from the head of the filter list
                 where we have stored it (see assign_filter_values).  */
              int filter
                = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

              next = add_action_record (ar_hash, filter, 0);
            }
          else
            {
              /* Once the outer search is done, trigger an action record for
                 each filter we have.  */
              tree flt_node;

              if (next == -3)
                {
                  next = collect_one_action_chain (ar_hash, region->outer);

                  /* If there is no next action, terminate the chain.  */
                  if (next == -1)
                    next = 0;
                  /* If all outer actions are cleanups or must_not_throw,
                     we'll have no action record for it, since we had wanted
                     to encode these states in the call-site record directly.
                     Add a cleanup action to the chain to catch these.  */
                  else if (next <= 0)
                    next = add_action_record (ar_hash, 0, 0);
                }

              flt_node = c->u.catch.filter_list;
              for (; flt_node; flt_node = TREE_CHAIN (flt_node))
                {
                  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
                  next = add_action_record (ar_hash, filter, next);
                }
            }
        }
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
         beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
        next = 0;
      /* If all outer actions are cleanups or must_not_throw,
         we'll have no action record for it, since we had wanted
         to encode these states in the call-site record directly.
         Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
        next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
         requires no call-site entry.  Note that this differs from
         the no handler or cleanup case in that we do require an lsda
         to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
         for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}
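/* Worked example (illustrative): for a nesting of

	cleanup                 (outermost)
	  try/catch
	    cleanup             (innermost, around the call)

   the innermost cleanup first collects the chain of its outer regions;
   the try region contributes one action record per catch filter,
   chained to whatever the outermost cleanup produced; and since a
   cleanup already appears further out, the innermost region returns
   that chain unchanged rather than adding a second zero-filter
   record.  */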
static int
add_call_site (rtx landing_pad, int action)
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        struct eh_region *region;
        int this_action;
        rtx this_landing_pad;

        insn = iter;
        if (NONJUMP_INSN_P (insn)
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
        if (!note)
          {
            if (! (CALL_P (insn)
                   || (flag_non_call_exceptions
                       && may_trap_p (PATTERN (insn)))))
              continue;
            this_action = -1;
            region = NULL;
          }
        else
          {
            if (INTVAL (XEXP (note, 0)) <= 0)
              continue;
            region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
            this_action = collect_one_action_chain (ar_hash, region);
          }

        /* Existence of catch handlers, or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          cfun->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
          }

        /* Cleanups and handlers may share action chains but not
           landing pads.  Collect the landing pad for this region.  */
        if (this_action >= 0)
          {
            struct eh_region *o;
            for (o = region; ! o->landing_pad ; o = o->outer)
              continue;
            this_landing_pad = o->landing_pad;
          }
        else
          this_landing_pad = NULL_RTX;

        /* Differing actions or landing pads implies a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL_RTX;
                  }

                note = emit_note_after (NOTE_INSN_EH_REGION_END,
                                        last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }
        last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
struct tree_opt_pass pass_convert_to_eh_region_ranges =
{
  "eh-ranges",                          /* name */
  NULL,                                 /* gate */
  convert_to_eh_region_ranges,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
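/* Standalone sketch of the LEB128 encoding implemented above (for
   illustration only; not part of GCC and not compiled).  The classic
   DWARF examples: uleb128 of 624485 is 0xe5 0x8e 0x26, and sleb128 of
   -624485 is 0x9b 0xf1 0x59.  */
#if 0
#include <stdio.h>

static void
demo_uleb128 (unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;           /* high bit set: more bytes follow */
      printf ("0x%02x ", byte);
    }
  while (value);
  printf ("\n");
}

int
main (void)
{
  demo_uleb128 (624485);        /* prints 0xe5 0x8e 0x26 */
  return 0;
}
#endif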
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
                                    current_function_func_begin_label,
                                    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                    "length");
      if (cs->landing_pad)
        dw2_asm_output_delta_uleb128 (landing_pad_lab,
                                      current_function_func_begin_label,
                                      "landing pad");
      else
        dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
                            current_function_func_begin_label,
                            "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta (4, landing_pad_lab,
                              current_function_func_begin_label,
                              "landing pad");
      else
        dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
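/* Per call site, the loop above therefore emits roughly the following
   (label names follow the ASM_GENERATE_INTERNAL_LABEL patterns used
   above; the exact directives depend on HAVE_AS_LEB128):

	region start:  offset of LEHBn from the function begin label
	length:        LEHEn - LEHBn
	landing pad:   offset of Ln from the function begin label, or 0
	action:        1-based uleb128 index into the action table, or 0  */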
static void
sjlj_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
{
  if (targetm.have_named_sections)
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
        {
          int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

          flags = (! flag_pic
                   || ((tt_format & 0x70) != DW_EH_PE_absptr
                       && (tt_format & 0x70) != DW_EH_PE_aligned))
            ? 0 : SECTION_WRITE;
        }
      else
        flags = SECTION_WRITE;
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}
/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct cgraph_varpool_node *node;

      type = lookup_type_for_runtime (type);
      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
         paths below go through assemble_integer, which would take
         care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            {
              node = cgraph_varpool_node (type);
              if (node)
                cgraph_varpool_mark_needed_node (node);
            }
        }
      else if (TREE_CODE (type) != INTEGER_CST)
        gcc_unreachable ();
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
                      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
}
void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  assemble_external_libcall (eh_personality_libfunc);
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  targetm.asm_out.exception_section ();
#endif

  have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
                                   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
                                  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
                                   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
                    + (VEC_length (tree, cfun->eh->ttype_data)
                       * tt_format_size));

      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
                               current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
                               current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VEC_length (tree, cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, cfun->eh->ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
        {
          tree type = VARRAY_TREE (cfun->eh->ehspec_data, i);
          output_ttype (type, tt_format, tt_format_size);
        }
      else
        dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
                             (i ? NULL : "Exception specification table"));
    }

  current_function_section (current_function_decl);
}
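/* For reference, the LSDA emitted above has this overall shape (a
   sketch of the usual layout; field sizes depend on the encodings
   chosen above):

	@LPStart format byte    (DW_EH_PE_omit here: @LPStart == @Start)
	@TType format byte
	@TType base offset      (uleb128; only present with @TType data)
	call-site format byte
	call-site table length  (uleb128)
	call-site table
	action record table
	(alignment padding)
	@TType table, indexed backwards from its end  */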
void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
/* Dump EH information to OUT.  */
void
dump_eh_tree (FILE *out, struct function *fun)
{
  struct eh_region *i;
  int depth = 0;
  static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
                                           "allowed_exceptions", "must_not_throw",
                                           "throw"};

  i = fun->eh->region_tree;
  if (! i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
               i->region_number, type_name[(int) i->type]);
      if (i->tree_label)
        {
          fprintf (out, " tree_label:");
          print_generic_expr (out, i->tree_label, 0);
        }
      fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            depth--;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
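/* A hypothetical dump, roughly matching the fprintf formats above
   (region numbers and the label are invented):

	Eh tree:
	   1 try
	     2 catch tree_label:<L1>
	   3 cleanup  */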
/* Verify some basic invariants on EH datastructures.  Could be extended to
   catch more.  */
void
verify_eh_tree (struct function *fun)
{
  struct eh_region *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  i = fun->eh->region_tree;
  if (! i)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if (fun->eh->region_array[j])
      {
        count++;
        if (fun->eh->region_array[j]->region_number != j)
          {
            error ("region_array is corrupted for region %i", i->region_number);
            err = true;
          }
      }

  while (1)
    {
      if (fun->eh->region_array[i->region_number] != i)
        {
          error ("region_array is corrupted for region %i", i->region_number);
          err = true;
        }
      if (i->outer != outer)
        {
          error ("outer block of region %i is wrong", i->region_number);
          err = true;
        }
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
        {
          error ("region %i may contain throw and is contained in region that may not",
                 i->region_number);
          err = true;
        }
      if (depth < 0)
        {
          error ("negative nesting depth of region %i", i->region_number);
          err = true;
        }
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
        outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            depth--;
            if (i == NULL)
              {
                if (depth != -1)
                  {
                    error ("tree list ends on depth %i", depth + 1);
                    err = true;
                  }
                if (count != nvisited)
                  {
                    error ("array does not match the region tree");
                    err = true;
                  }
                if (err)
                  {
                    dump_eh_tree (stderr, fun);
                    internal_error ("verify_eh_tree failed");
                  }
                return;
              }
            outer = i->outer;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default c++ routines aren't actually c++ specific, so use those.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
                                            : "_Unwind_Resume");
}
static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static void
rest_of_handle_eh (void)
{
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}

struct tree_opt_pass pass_rtl_eh =
{
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'h'                                   /* letter */
};

#include "gt-except.h"