1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
52 #include "coretypes.h"
60 #include "insn-config.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
75 #include "langhooks.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
81 /* Provide defaults for stuff that may not be defined when using
83 #ifndef EH_RETURN_DATA_REGNO
84 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
87 /* Protect cleanup actions with must-not-throw regions, with a call
88 to the given failure handler. */
89 gimple (*lang_protect_cleanup_actions
) (void);
91 /* Return true if type A catches type B. */
92 int (*lang_eh_type_covers
) (tree a
, tree b
);
94 /* Map a type to a runtime object to match type. */
95 tree (*lang_eh_runtime_type
) (tree
);
97 /* A hash table of label to region number. */
99 struct ehl_map_entry
GTY(())
102 struct eh_region
*region
;
105 static GTY(()) int call_site_base
;
106 static GTY ((param_is (union tree_node
)))
107 htab_t type_to_runtime_map
;
109 /* Describe the SjLj_Function_Context structure. */
110 static GTY(()) tree sjlj_fc_type_node
;
111 static int sjlj_fc_call_site_ofs
;
112 static int sjlj_fc_data_ofs
;
113 static int sjlj_fc_personality_ofs
;
114 static int sjlj_fc_lsda_ofs
;
115 static int sjlj_fc_jbuf_ofs
;
117 /* Describes one exception region. */
118 struct eh_region
GTY(())
120 /* The immediately surrounding region. */
121 struct eh_region
*outer
;
123 /* The list of immediately contained regions. */
124 struct eh_region
*inner
;
125 struct eh_region
*next_peer
;
127 /* An identifier for this region. */
130 /* When a region is deleted, its parents inherit the REG_EH_REGION
131 numbers already assigned. */
134 /* Each region does exactly one thing. */
141 ERT_ALLOWED_EXCEPTIONS
,
146 /* Holds the action to perform based on the preceding type. */
148 /* A list of catch blocks, a surrounding try block,
149 and the label for continuing after a catch. */
150 struct eh_region_u_try
{
151 struct eh_region
*eh_catch
;
152 struct eh_region
*last_catch
;
153 } GTY ((tag ("ERT_TRY"))) eh_try
;
155 /* The list through the catch handlers, the list of type objects
156 matched, and the list of associated filters. */
157 struct eh_region_u_catch
{
158 struct eh_region
*next_catch
;
159 struct eh_region
*prev_catch
;
162 } GTY ((tag ("ERT_CATCH"))) eh_catch
;
164 /* A tree_list of allowed types. */
165 struct eh_region_u_allowed
{
168 } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed
;
170 /* The type given by a call to "throw foo();", or discovered
172 struct eh_region_u_throw
{
174 } GTY ((tag ("ERT_THROW"))) eh_throw
;
176 /* Retain the cleanup expression even after expansion so that
177 we can match up fixup regions. */
178 struct eh_region_u_cleanup
{
179 struct eh_region
*prev_try
;
180 } GTY ((tag ("ERT_CLEANUP"))) cleanup
;
181 } GTY ((desc ("%0.type"))) u
;
183 /* Entry point for this region's handler before landing pads are built. */
187 /* Entry point for this region's handler from the runtime eh library. */
190 /* Entry point for this region's handler from an inner region. */
191 rtx post_landing_pad
;
193 /* The RESX insn for handing off control to the next outermost handler,
197 /* True if something in this region may throw. */
198 unsigned may_contain_throw
: 1;
201 typedef struct eh_region
*eh_region
;
203 struct call_site_record
GTY(())
209 DEF_VEC_P(eh_region
);
210 DEF_VEC_ALLOC_P(eh_region
, gc
);
211 DEF_VEC_ALLOC_P(eh_region
, heap
);
213 /* Used to save exception status for each function. */
214 struct eh_status
GTY(())
216 /* The tree of all regions for this function. */
217 struct eh_region
*region_tree
;
219 /* The same information as an indexable array. */
220 VEC(eh_region
,gc
) *region_array
;
221 int last_region_number
;
223 htab_t
GTY((param_is (struct throw_stmt_node
))) throw_stmt_table
;
226 static int t2r_eq (const void *, const void *);
227 static hashval_t
t2r_hash (const void *);
228 static void add_type_for_runtime (tree
);
229 static tree
lookup_type_for_runtime (tree
);
231 static int ttypes_filter_eq (const void *, const void *);
232 static hashval_t
ttypes_filter_hash (const void *);
233 static int ehspec_filter_eq (const void *, const void *);
234 static hashval_t
ehspec_filter_hash (const void *);
235 static int add_ttypes_entry (htab_t
, tree
);
236 static int add_ehspec_entry (htab_t
, htab_t
, tree
);
237 static void assign_filter_values (void);
238 static void build_post_landing_pads (void);
239 static void connect_post_landing_pads (void);
240 static void dw2_build_landing_pads (void);
243 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info
*);
244 static void sjlj_assign_call_site_values (rtx
, struct sjlj_lp_info
*);
245 static void sjlj_mark_call_sites (struct sjlj_lp_info
*);
246 static void sjlj_emit_function_enter (rtx
);
247 static void sjlj_emit_function_exit (void);
248 static void sjlj_emit_dispatch_table (rtx
, struct sjlj_lp_info
*);
249 static void sjlj_build_landing_pads (void);
251 static void remove_eh_handler (struct eh_region
*);
252 static void remove_eh_handler_and_replace (struct eh_region
*,
255 /* The return value of reachable_next_level. */
258 /* The given exception is not processed by the given region. */
260 /* The given exception may need processing by the given region. */
262 /* The given exception is completely processed by the given region. */
264 /* The given exception is completely processed by the runtime. */
268 struct reachable_info
;
269 static enum reachable_code
reachable_next_level (struct eh_region
*, tree
,
270 struct reachable_info
*, bool);
272 static int action_record_eq (const void *, const void *);
273 static hashval_t
action_record_hash (const void *);
274 static int add_action_record (htab_t
, int, int);
275 static int collect_one_action_chain (htab_t
, struct eh_region
*);
276 static int add_call_site (rtx
, int);
278 static void push_uleb128 (varray_type
*, unsigned int);
279 static void push_sleb128 (varray_type
*, int);
280 #ifndef HAVE_AS_LEB128
281 static int dw2_size_of_call_site_table (void);
282 static int sjlj_size_of_call_site_table (void);
284 static void dw2_output_call_site_table (void);
285 static void sjlj_output_call_site_table (void);
288 /* Routine to see if exception handling is turned on.
289 DO_WARN is nonzero if we want to inform the user that exception
290 handling is turned off.
292 This is used to ensure that -fexceptions has been specified if the
293 compiler tries to use any exception-specific functions. */
296 doing_eh (int do_warn
)
298 if (! flag_exceptions
)
300 static int warned
= 0;
301 if (! warned
&& do_warn
)
303 error ("exception handling disabled, use -fexceptions to enable");
315 if (! flag_exceptions
)
318 type_to_runtime_map
= htab_create_ggc (31, t2r_hash
, t2r_eq
, NULL
);
320 /* Create the SjLj_Function_Context structure. This should match
321 the definition in unwind-sjlj.c. */
322 if (USING_SJLJ_EXCEPTIONS
)
324 tree f_jbuf
, f_per
, f_lsda
, f_prev
, f_cs
, f_data
, tmp
;
326 sjlj_fc_type_node
= lang_hooks
.types
.make_type (RECORD_TYPE
);
328 f_prev
= build_decl (FIELD_DECL
, get_identifier ("__prev"),
329 build_pointer_type (sjlj_fc_type_node
));
330 DECL_FIELD_CONTEXT (f_prev
) = sjlj_fc_type_node
;
332 f_cs
= build_decl (FIELD_DECL
, get_identifier ("__call_site"),
334 DECL_FIELD_CONTEXT (f_cs
) = sjlj_fc_type_node
;
336 tmp
= build_index_type (build_int_cst (NULL_TREE
, 4 - 1));
337 tmp
= build_array_type (lang_hooks
.types
.type_for_mode
338 (targetm
.unwind_word_mode (), 1),
340 f_data
= build_decl (FIELD_DECL
, get_identifier ("__data"), tmp
);
341 DECL_FIELD_CONTEXT (f_data
) = sjlj_fc_type_node
;
343 f_per
= build_decl (FIELD_DECL
, get_identifier ("__personality"),
345 DECL_FIELD_CONTEXT (f_per
) = sjlj_fc_type_node
;
347 f_lsda
= build_decl (FIELD_DECL
, get_identifier ("__lsda"),
349 DECL_FIELD_CONTEXT (f_lsda
) = sjlj_fc_type_node
;
351 #ifdef DONT_USE_BUILTIN_SETJMP
353 tmp
= build_int_cst (NULL_TREE
, JMP_BUF_SIZE
- 1);
355 /* Should be large enough for most systems, if it is not,
356 JMP_BUF_SIZE should be defined with the proper value. It will
357 also tend to be larger than necessary for most systems, a more
358 optimal port will define JMP_BUF_SIZE. */
359 tmp
= build_int_cst (NULL_TREE
, FIRST_PSEUDO_REGISTER
+ 2 - 1);
362 /* builtin_setjmp takes a pointer to 5 words. */
363 tmp
= build_int_cst (NULL_TREE
, 5 * BITS_PER_WORD
/ POINTER_SIZE
- 1);
365 tmp
= build_index_type (tmp
);
366 tmp
= build_array_type (ptr_type_node
, tmp
);
367 f_jbuf
= build_decl (FIELD_DECL
, get_identifier ("__jbuf"), tmp
);
368 #ifdef DONT_USE_BUILTIN_SETJMP
369 /* We don't know what the alignment requirements of the
370 runtime's jmp_buf has. Overestimate. */
371 DECL_ALIGN (f_jbuf
) = BIGGEST_ALIGNMENT
;
372 DECL_USER_ALIGN (f_jbuf
) = 1;
374 DECL_FIELD_CONTEXT (f_jbuf
) = sjlj_fc_type_node
;
376 TYPE_FIELDS (sjlj_fc_type_node
) = f_prev
;
377 TREE_CHAIN (f_prev
) = f_cs
;
378 TREE_CHAIN (f_cs
) = f_data
;
379 TREE_CHAIN (f_data
) = f_per
;
380 TREE_CHAIN (f_per
) = f_lsda
;
381 TREE_CHAIN (f_lsda
) = f_jbuf
;
383 layout_type (sjlj_fc_type_node
);
385 /* Cache the interesting field offsets so that we have
386 easy access from rtl. */
387 sjlj_fc_call_site_ofs
388 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs
), 1)
389 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs
), 1) / BITS_PER_UNIT
);
391 = (tree_low_cst (DECL_FIELD_OFFSET (f_data
), 1)
392 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data
), 1) / BITS_PER_UNIT
);
393 sjlj_fc_personality_ofs
394 = (tree_low_cst (DECL_FIELD_OFFSET (f_per
), 1)
395 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per
), 1) / BITS_PER_UNIT
);
397 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda
), 1)
398 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda
), 1) / BITS_PER_UNIT
);
400 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf
), 1)
401 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf
), 1) / BITS_PER_UNIT
);
406 init_eh_for_function (void)
408 cfun
->eh
= GGC_CNEW (struct eh_status
);
411 /* Routines to generate the exception tree somewhat directly.
412 These are used from tree-eh.c when processing exception related
413 nodes during tree optimization. */
415 static struct eh_region
*
416 gen_eh_region (enum eh_region_type type
, struct eh_region
*outer
)
418 struct eh_region
*new_eh
;
420 #ifdef ENABLE_CHECKING
421 gcc_assert (doing_eh (0));
424 /* Insert a new blank region as a leaf in the tree. */
425 new_eh
= GGC_CNEW (struct eh_region
);
427 new_eh
->outer
= outer
;
430 new_eh
->next_peer
= outer
->inner
;
431 outer
->inner
= new_eh
;
435 new_eh
->next_peer
= cfun
->eh
->region_tree
;
436 cfun
->eh
->region_tree
= new_eh
;
439 new_eh
->region_number
= ++cfun
->eh
->last_region_number
;
445 gen_eh_region_cleanup (struct eh_region
*outer
, struct eh_region
*prev_try
)
447 struct eh_region
*cleanup
= gen_eh_region (ERT_CLEANUP
, outer
);
448 cleanup
->u
.cleanup
.prev_try
= prev_try
;
453 gen_eh_region_try (struct eh_region
*outer
)
455 return gen_eh_region (ERT_TRY
, outer
);
459 gen_eh_region_catch (struct eh_region
*t
, tree type_or_list
)
461 struct eh_region
*c
, *l
;
462 tree type_list
, type_node
;
464 /* Ensure to always end up with a type list to normalize further
465 processing, then register each type against the runtime types map. */
466 type_list
= type_or_list
;
469 if (TREE_CODE (type_or_list
) != TREE_LIST
)
470 type_list
= tree_cons (NULL_TREE
, type_or_list
, NULL_TREE
);
472 type_node
= type_list
;
473 for (; type_node
; type_node
= TREE_CHAIN (type_node
))
474 add_type_for_runtime (TREE_VALUE (type_node
));
477 c
= gen_eh_region (ERT_CATCH
, t
->outer
);
478 c
->u
.eh_catch
.type_list
= type_list
;
479 l
= t
->u
.eh_try
.last_catch
;
480 c
->u
.eh_catch
.prev_catch
= l
;
482 l
->u
.eh_catch
.next_catch
= c
;
484 t
->u
.eh_try
.eh_catch
= c
;
485 t
->u
.eh_try
.last_catch
= c
;
491 gen_eh_region_allowed (struct eh_region
*outer
, tree allowed
)
493 struct eh_region
*region
= gen_eh_region (ERT_ALLOWED_EXCEPTIONS
, outer
);
494 region
->u
.allowed
.type_list
= allowed
;
496 for (; allowed
; allowed
= TREE_CHAIN (allowed
))
497 add_type_for_runtime (TREE_VALUE (allowed
));
503 gen_eh_region_must_not_throw (struct eh_region
*outer
)
505 return gen_eh_region (ERT_MUST_NOT_THROW
, outer
);
509 get_eh_region_number (struct eh_region
*region
)
511 return region
->region_number
;
515 get_eh_region_may_contain_throw (struct eh_region
*region
)
517 return region
->may_contain_throw
;
521 get_eh_region_tree_label (struct eh_region
*region
)
523 return region
->tree_label
;
527 get_eh_region_no_tree_label (int region
)
529 return VEC_index (eh_region
, cfun
->eh
->region_array
, region
)->tree_label
;
533 set_eh_region_tree_label (struct eh_region
*region
, tree lab
)
535 region
->tree_label
= lab
;
539 expand_resx_expr (tree exp
)
541 int region_nr
= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0));
542 struct eh_region
*reg
= VEC_index (eh_region
,
543 cfun
->eh
->region_array
, region_nr
);
545 gcc_assert (!reg
->resume
);
546 do_pending_stack_adjust ();
547 reg
->resume
= emit_jump_insn (gen_rtx_RESX (VOIDmode
, region_nr
));
551 /* Note that the current EH region (if any) may contain a throw, or a
552 call to a function which itself may contain a throw. */
555 note_eh_region_may_contain_throw (struct eh_region
*region
)
557 while (region
&& !region
->may_contain_throw
)
559 region
->may_contain_throw
= 1;
560 region
= region
->outer
;
565 /* Return an rtl expression for a pointer to the exception object
569 get_exception_pointer (void)
571 if (! crtl
->eh
.exc_ptr
)
572 crtl
->eh
.exc_ptr
= gen_reg_rtx (ptr_mode
);
573 return crtl
->eh
.exc_ptr
;
576 /* Return an rtl expression for the exception dispatch filter
580 get_exception_filter (void)
582 if (! crtl
->eh
.filter
)
583 crtl
->eh
.filter
= gen_reg_rtx (targetm
.eh_return_filter_mode ());
584 return crtl
->eh
.filter
;
587 /* This section is for the exception handling specific optimization pass. */
589 /* Random access the exception region tree. */
592 collect_eh_region_array (void)
596 i
= cfun
->eh
->region_tree
;
600 VEC_safe_grow (eh_region
, gc
, cfun
->eh
->region_array
,
601 cfun
->eh
->last_region_number
+ 1);
602 VEC_replace (eh_region
, cfun
->eh
->region_array
, 0, 0);
606 VEC_replace (eh_region
, cfun
->eh
->region_array
, i
->region_number
, i
);
608 /* If there are sub-regions, process them. */
611 /* If there are peers, process them. */
612 else if (i
->next_peer
)
614 /* Otherwise, step back up the tree to the next peer. */
621 } while (i
->next_peer
== NULL
);
627 /* R is MUST_NOT_THROW region that is not reachable via local
628 RESX instructions. It still must be kept in the tree in case runtime
629 can unwind through it, or we would eliminate the terminate call that the
630 runtime would otherwise make.  Return TRUE if R contains throwing statements
631 or some of the exceptions in inner regions can be unwound up to R.
633 CONTAINS_STMT is bitmap of all regions that contains some throwing
636 The function looks O(n^3) at first sight.  In fact the function is called at most
637 once for every MUST_NOT_THROW in EH tree from remove_unreachable_regions
638 Because the outer loop walking subregions does not dive in MUST_NOT_THROW,
639 the outer loop examines every region at most once. The inner loop
640 is doing unwinding from the throwing statement same way as we do during
641 CFG construction, so it is O(n^2) in size of EH tree, but O(n) in size
642 of CFG. In practice Eh trees are wide, not deep, so this is not
646 can_be_reached_by_runtime (sbitmap contains_stmt
, struct eh_region
*r
)
648 struct eh_region
*i
= r
->inner
;
652 if (TEST_BIT (contains_stmt
, r
->region_number
))
655 EXECUTE_IF_SET_IN_BITMAP (r
->aka
, 0, n
, bi
)
656 if (TEST_BIT (contains_stmt
, n
))
662 /* It is pointless to look into MUST_NOT_THROW
663 or dive into subregions. They never unwind up. */
664 if (i
->type
!= ERT_MUST_NOT_THROW
)
666 bool found
= TEST_BIT (contains_stmt
, i
->region_number
);
668 EXECUTE_IF_SET_IN_BITMAP (i
->aka
, 0, n
, bi
)
669 if (TEST_BIT (contains_stmt
, n
))
674 /* We have nested region that contains throwing statement.
675 See if resuming might lead up to the resx or we get locally
676 caught sooner. If we get locally caught sooner, we either
677 know region R is not reachable or it would have direct edge
678 from the EH resx and thus consider region reachable at
682 struct eh_region
*i1
= i
;
683 tree type_thrown
= NULL_TREE
;
685 if (i1
->type
== ERT_THROW
)
687 type_thrown
= i1
->u
.eh_throw
.type
;
690 for (; i1
!= r
; i1
= i1
->outer
)
691 if (reachable_next_level (i1
, type_thrown
, NULL
,
692 false) >= RNL_CAUGHT
)
698 /* If there are sub-regions, process them. */
699 if (i
->type
!= ERT_MUST_NOT_THROW
&& i
->inner
)
701 /* If there are peers, process them. */
702 else if (i
->next_peer
)
704 /* Otherwise, step back up the tree to the next peer. */
713 while (i
->next_peer
== NULL
);
719 /* Bring region R to the root of tree. */
722 bring_to_root (struct eh_region
*r
)
724 struct eh_region
**pp
;
725 struct eh_region
*outer
= r
->outer
;
728 for (pp
= &outer
->inner
; *pp
!= r
; pp
= &(*pp
)->next_peer
)
732 r
->next_peer
= cfun
->eh
->region_tree
;
733 cfun
->eh
->region_tree
= r
;
736 /* Remove all regions whose labels are not reachable.
737 REACHABLE is bitmap of all regions that are used by the function
738 CONTAINS_STMT is bitmap of all regions that contains stmt (or NULL). */
741 remove_unreachable_regions (sbitmap reachable
, sbitmap contains_stmt
)
745 VEC(eh_region
,heap
) *must_not_throws
= VEC_alloc (eh_region
, heap
, 16);
746 struct eh_region
*local_must_not_throw
= NULL
;
747 struct eh_region
*first_must_not_throw
= NULL
;
749 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
751 r
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
752 if (!r
|| r
->region_number
!= i
)
754 if (!TEST_BIT (reachable
, i
) && !r
->resume
)
758 r
->tree_label
= NULL
;
762 /* Don't remove ERT_THROW regions if their outer region
764 if (r
->outer
&& TEST_BIT (reachable
, r
->outer
->region_number
))
767 case ERT_MUST_NOT_THROW
:
768 /* MUST_NOT_THROW regions are implementable solely in the
769 runtime, but we need them when inlining function.
771 Keep them if outer region is not MUST_NOT_THROW a well
772 and if they contain some statement that might unwind through
774 if ((!r
->outer
|| r
->outer
->type
!= ERT_MUST_NOT_THROW
)
776 || can_be_reached_by_runtime (contains_stmt
, r
)))
781 /* TRY regions are reachable if any of its CATCH regions
784 for (c
= r
->u
.eh_try
.eh_catch
; c
;
785 c
= c
->u
.eh_catch
.next_catch
)
786 if (TEST_BIT (reachable
, c
->region_number
))
801 fprintf (dump_file
, "Removing unreachable eh region %i\n",
803 remove_eh_handler (r
);
805 else if (r
->type
== ERT_MUST_NOT_THROW
)
807 if (!first_must_not_throw
)
808 first_must_not_throw
= r
;
809 VEC_safe_push (eh_region
, heap
, must_not_throws
, r
);
813 if (r
->type
== ERT_MUST_NOT_THROW
)
815 if (!local_must_not_throw
)
816 local_must_not_throw
= r
;
818 VEC_safe_push (eh_region
, heap
, must_not_throws
, r
);
822 /* MUST_NOT_THROW regions without local handler are all the same; they
823 trigger terminate call in runtime.
824 MUST_NOT_THROW handled locally can differ in debug info associated
825 to std::terminate () call or if one is coming from Java and other
826 from C++ whether they call terminate or abort.
828 We merge all MUST_NOT_THROW regions handled by the run-time into one.
829 We also bring all local MUST_NOT_THROW regions to the roots of EH tree
830 (since unwinding never continues to the outer region anyway).
831 If MUST_NOT_THROW with local handler is present in the tree, we use
832 that region to merge into, since it will remain in tree anyway;
833 otherwise we use first MUST_NOT_THROW.
835 Merging of locally handled regions needs changes to the CFG. Crossjumping
836 should take care of this, by looking at the actual code and
837 ensuring that the cleanup actions are really the same. */
839 if (local_must_not_throw
)
840 first_must_not_throw
= local_must_not_throw
;
842 for (i
= 0; VEC_iterate (eh_region
, must_not_throws
, i
, r
); i
++)
844 if (!r
->label
&& !r
->tree_label
&& r
!= first_must_not_throw
)
847 fprintf (dump_file
, "Replacing MUST_NOT_THROW region %i by %i\n",
849 first_must_not_throw
->region_number
);
850 remove_eh_handler_and_replace (r
, first_must_not_throw
);
851 first_must_not_throw
->may_contain_throw
|= r
->may_contain_throw
;
856 #ifdef ENABLE_CHECKING
857 verify_eh_tree (cfun
);
859 VEC_free (eh_region
, heap
, must_not_throws
);
862 /* Return array mapping LABEL_DECL_UID to region such that region's tree_label
863 is identical to label. */
866 label_to_region_map (void)
868 VEC(int,heap
) * label_to_region
= NULL
;
871 VEC_safe_grow_cleared (int, heap
, label_to_region
,
872 cfun
->cfg
->last_label_uid
+ 1);
873 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
875 struct eh_region
*r
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
876 if (r
&& r
->region_number
== i
877 && r
->tree_label
&& LABEL_DECL_UID (r
->tree_label
) >= 0)
879 VEC_replace (int, label_to_region
, LABEL_DECL_UID (r
->tree_label
),
883 return label_to_region
;
886 /* Return number of EH regions. */
888 num_eh_regions (void)
890 return cfun
->eh
->last_region_number
+ 1;
893 /* Set up EH labels for RTL. */
896 convert_from_eh_region_ranges (void)
898 int i
, n
= cfun
->eh
->last_region_number
;
900 /* Most of the work is already done at the tree level. All we need to
901 do is collect the rtl labels that correspond to the tree labels that
902 collect the rtl labels that correspond to the tree labels
903 we allocated earlier. */
904 for (i
= 1; i
<= n
; ++i
)
906 struct eh_region
*region
;
908 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
909 if (region
&& region
->tree_label
)
910 region
->label
= DECL_RTL_IF_SET (region
->tree_label
);
915 find_exception_handler_labels (void)
919 if (cfun
->eh
->region_tree
== NULL
)
922 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
924 struct eh_region
*region
;
927 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
928 if (! region
|| region
->region_number
!= i
)
930 if (crtl
->eh
.built_landing_pads
)
931 lab
= region
->landing_pad
;
937 /* Returns true if the current function has exception handling regions. */
940 current_function_has_exception_handlers (void)
944 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
946 struct eh_region
*region
;
948 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
950 && region
->region_number
== i
951 && region
->type
!= ERT_THROW
)
958 /* A subroutine of duplicate_eh_regions. Search the region tree under O
959 for the minimum and maximum region numbers. Update *MIN and *MAX. */
962 duplicate_eh_regions_0 (eh_region o
, int *min
, int *max
)
968 i
= bitmap_first_set_bit (o
->aka
);
971 i
= bitmap_last_set_bit (o
->aka
);
975 if (o
->region_number
< *min
)
976 *min
= o
->region_number
;
977 if (o
->region_number
> *max
)
978 *max
= o
->region_number
;
983 duplicate_eh_regions_0 (o
, min
, max
);
987 duplicate_eh_regions_0 (o
, min
, max
);
992 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
993 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
994 about the other internal pointers just yet, just the tree-like pointers. */
997 duplicate_eh_regions_1 (eh_region old
, eh_region outer
, int eh_offset
)
1001 ret
= n
= GGC_NEW (struct eh_region
);
1005 n
->next_peer
= NULL
;
1010 n
->aka
= BITMAP_GGC_ALLOC ();
1012 EXECUTE_IF_SET_IN_BITMAP (old
->aka
, 0, i
, bi
)
1014 bitmap_set_bit (n
->aka
, i
+ eh_offset
);
1015 VEC_replace (eh_region
, cfun
->eh
->region_array
, i
+ eh_offset
, n
);
1019 n
->region_number
+= eh_offset
;
1020 VEC_replace (eh_region
, cfun
->eh
->region_array
, n
->region_number
, n
);
1025 n
= n
->inner
= duplicate_eh_regions_1 (old
, ret
, eh_offset
);
1026 while (old
->next_peer
)
1028 old
= old
->next_peer
;
1029 n
= n
->next_peer
= duplicate_eh_regions_1 (old
, ret
, eh_offset
);
1036 /* Return prev_try pointers catch subregions of R should
1039 static struct eh_region
*
1040 find_prev_try (struct eh_region
* r
)
1042 for (; r
&& r
->type
!= ERT_TRY
; r
= r
->outer
)
1043 if (r
->type
== ERT_MUST_NOT_THROW
1044 || (r
->type
== ERT_ALLOWED_EXCEPTIONS
1045 && !r
->u
.allowed
.type_list
))
1053 /* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
1054 function and root the tree below OUTER_REGION. Remap labels using MAP
1055 callback. The special case of COPY_REGION of 0 means all regions. */
1058 duplicate_eh_regions (struct function
*ifun
, duplicate_eh_regions_map map
,
1059 void *data
, int copy_region
, int outer_region
)
1061 eh_region cur
, prev_try
, old_prev_try
, outer
, *splice
;
1062 int i
, min_region
, max_region
, eh_offset
, cfun_last_region_number
;
1067 #ifdef ENABLE_CHECKING
1068 verify_eh_tree (ifun
);
1071 /* Find the range of region numbers to be copied. The interface we
1072 provide here mandates a single offset to find new number from old,
1073 which means we must look at the numbers present, instead of the
1074 count or something else. */
1075 if (copy_region
> 0)
1077 min_region
= INT_MAX
;
1080 cur
= VEC_index (eh_region
, ifun
->eh
->region_array
, copy_region
);
1081 old_prev_try
= find_prev_try (cur
);
1082 duplicate_eh_regions_0 (cur
, &min_region
, &max_region
);
1087 max_region
= ifun
->eh
->last_region_number
;
1088 old_prev_try
= NULL
;
1090 num_regions
= max_region
- min_region
+ 1;
1091 cfun_last_region_number
= cfun
->eh
->last_region_number
;
1092 eh_offset
= cfun_last_region_number
+ 1 - min_region
;
1094 /* If we've not yet created a region array, do so now. */
1095 cfun
->eh
->last_region_number
= cfun_last_region_number
+ num_regions
;
1096 VEC_safe_grow_cleared (eh_region
, gc
, cfun
->eh
->region_array
,
1097 cfun
->eh
->last_region_number
+ 1);
1099 /* Locate the spot at which to insert the new tree. */
1100 if (outer_region
> 0)
1102 outer
= VEC_index (eh_region
, cfun
->eh
->region_array
, outer_region
);
1104 splice
= &outer
->inner
;
1106 splice
= &cfun
->eh
->region_tree
;
1111 splice
= &cfun
->eh
->region_tree
;
1114 splice
= &(*splice
)->next_peer
;
1116 if (!ifun
->eh
->region_tree
)
1119 for (i
= cfun_last_region_number
+ 1;
1120 i
<= cfun
->eh
->last_region_number
; i
++)
1122 VEC_replace (eh_region
, cfun
->eh
->region_array
, i
, outer
);
1123 if (outer
->aka
== NULL
)
1124 outer
->aka
= BITMAP_GGC_ALLOC ();
1125 bitmap_set_bit (outer
->aka
, i
);
1130 /* Copy all the regions in the subtree. */
1131 if (copy_region
> 0)
1133 cur
= VEC_index (eh_region
, ifun
->eh
->region_array
, copy_region
);
1134 *splice
= duplicate_eh_regions_1 (cur
, outer
, eh_offset
);
1140 cur
= ifun
->eh
->region_tree
;
1141 *splice
= n
= duplicate_eh_regions_1 (cur
, outer
, eh_offset
);
1142 while (cur
->next_peer
)
1144 cur
= cur
->next_peer
;
1145 n
= n
->next_peer
= duplicate_eh_regions_1 (cur
, outer
, eh_offset
);
1149 /* Remap all the labels in the new regions. */
1150 for (i
= cfun_last_region_number
+ 1;
1151 VEC_iterate (eh_region
, cfun
->eh
->region_array
, i
, cur
); ++i
)
1152 if (cur
&& cur
->tree_label
)
1153 cur
->tree_label
= map (cur
->tree_label
, data
);
1155 /* Search for the containing ERT_TRY region to fix up
1156 the prev_try short-cuts for ERT_CLEANUP regions. */
1158 if (outer_region
> 0)
1159 prev_try
= find_prev_try (VEC_index (eh_region
, cfun
->eh
->region_array
, outer_region
));
1161 /* Remap all of the internal catch and cleanup linkages. Since we
1162 duplicate entire subtrees, all of the referenced regions will have
1163 been copied too. And since we renumbered them as a block, a simple
1164 bit of arithmetic finds us the index for the replacement region. */
1165 for (i
= cfun_last_region_number
+ 1;
1166 VEC_iterate (eh_region
, cfun
->eh
->region_array
, i
, cur
); ++i
)
1168 /* All removed EH that is toplevel in input function is now
1169 in outer EH of output function. */
1172 gcc_assert (VEC_index
1173 (eh_region
, ifun
->eh
->region_array
,
1174 i
- eh_offset
) == NULL
);
1177 VEC_replace (eh_region
, cfun
->eh
->region_array
, i
, outer
);
1178 if (outer
->aka
== NULL
)
1179 outer
->aka
= BITMAP_GGC_ALLOC ();
1180 bitmap_set_bit (outer
->aka
, i
);
1184 if (i
!= cur
->region_number
)
1187 #define REMAP(REG) \
1188 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1189 (REG)->region_number + eh_offset)
1194 if (cur
->u
.eh_try
.eh_catch
)
1195 REMAP (cur
->u
.eh_try
.eh_catch
);
1196 if (cur
->u
.eh_try
.last_catch
)
1197 REMAP (cur
->u
.eh_try
.last_catch
);
1201 if (cur
->u
.eh_catch
.next_catch
)
1202 REMAP (cur
->u
.eh_catch
.next_catch
);
1203 if (cur
->u
.eh_catch
.prev_catch
)
1204 REMAP (cur
->u
.eh_catch
.prev_catch
);
1208 if (cur
->u
.cleanup
.prev_try
!= old_prev_try
)
1209 REMAP (cur
->u
.cleanup
.prev_try
);
1211 cur
->u
.cleanup
.prev_try
= prev_try
;
1220 #ifdef ENABLE_CHECKING
1221 verify_eh_tree (cfun
);
1227 /* Return true if REGION_A is outer to REGION_B in IFUN. */
1230 eh_region_outer_p (struct function
*ifun
, int region_a
, int region_b
)
1232 struct eh_region
*rp_a
, *rp_b
;
1234 gcc_assert (ifun
->eh
->last_region_number
> 0);
1235 gcc_assert (ifun
->eh
->region_tree
);
1237 rp_a
= VEC_index (eh_region
, ifun
->eh
->region_array
, region_a
);
1238 rp_b
= VEC_index (eh_region
, ifun
->eh
->region_array
, region_b
);
1239 gcc_assert (rp_a
!= NULL
);
1240 gcc_assert (rp_b
!= NULL
);
1253 /* Return region number of region that is outer to both if REGION_A and
1254 REGION_B in IFUN. */
1257 eh_region_outermost (struct function
*ifun
, int region_a
, int region_b
)
1259 struct eh_region
*rp_a
, *rp_b
;
1262 gcc_assert (ifun
->eh
->last_region_number
> 0);
1263 gcc_assert (ifun
->eh
->region_tree
);
1265 rp_a
= VEC_index (eh_region
, ifun
->eh
->region_array
, region_a
);
1266 rp_b
= VEC_index (eh_region
, ifun
->eh
->region_array
, region_b
);
1267 gcc_assert (rp_a
!= NULL
);
1268 gcc_assert (rp_b
!= NULL
);
1270 b_outer
= sbitmap_alloc (ifun
->eh
->last_region_number
+ 1);
1271 sbitmap_zero (b_outer
);
1275 SET_BIT (b_outer
, rp_b
->region_number
);
1282 if (TEST_BIT (b_outer
, rp_a
->region_number
))
1284 sbitmap_free (b_outer
);
1285 return rp_a
->region_number
;
1291 sbitmap_free (b_outer
);
1296 t2r_eq (const void *pentry
, const void *pdata
)
1298 const_tree
const entry
= (const_tree
) pentry
;
1299 const_tree
const data
= (const_tree
) pdata
;
1301 return TREE_PURPOSE (entry
) == data
;
1305 t2r_hash (const void *pentry
)
1307 const_tree
const entry
= (const_tree
) pentry
;
1308 return TREE_HASH (TREE_PURPOSE (entry
));
1312 add_type_for_runtime (tree type
)
1316 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1317 TREE_HASH (type
), INSERT
);
1320 tree runtime
= (*lang_eh_runtime_type
) (type
);
1321 *slot
= tree_cons (type
, runtime
, NULL_TREE
);
1326 lookup_type_for_runtime (tree type
)
1330 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1331 TREE_HASH (type
), NO_INSERT
);
1333 /* We should have always inserted the data earlier. */
1334 return TREE_VALUE (*slot
);
1338 /* Represent an entry in @TTypes for either catch actions
1339 or exception filter actions. */
1340 struct ttypes_filter
GTY(())
1346 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1347 (a tree) for a @TTypes type node we are thinking about adding. */
1350 ttypes_filter_eq (const void *pentry
, const void *pdata
)
1352 const struct ttypes_filter
*const entry
1353 = (const struct ttypes_filter
*) pentry
;
1354 const_tree
const data
= (const_tree
) pdata
;
1356 return entry
->t
== data
;
1360 ttypes_filter_hash (const void *pentry
)
1362 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1363 return TREE_HASH (entry
->t
);
1366 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1367 exception specification list we are thinking about adding. */
1368 /* ??? Currently we use the type lists in the order given. Someone
1369 should put these in some canonical order. */
1372 ehspec_filter_eq (const void *pentry
, const void *pdata
)
1374 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1375 const struct ttypes_filter
*data
= (const struct ttypes_filter
*) pdata
;
1377 return type_list_equal (entry
->t
, data
->t
);
1380 /* Hash function for exception specification lists. */
1383 ehspec_filter_hash (const void *pentry
)
1385 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1389 for (list
= entry
->t
; list
; list
= TREE_CHAIN (list
))
1390 h
= (h
<< 5) + (h
>> 27) + TREE_HASH (TREE_VALUE (list
));
1394 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1395 to speed up the search. Return the filter value to be used. */
1398 add_ttypes_entry (htab_t ttypes_hash
, tree type
)
1400 struct ttypes_filter
**slot
, *n
;
1402 slot
= (struct ttypes_filter
**)
1403 htab_find_slot_with_hash (ttypes_hash
, type
, TREE_HASH (type
), INSERT
);
1405 if ((n
= *slot
) == NULL
)
1407 /* Filter value is a 1 based table index. */
1409 n
= XNEW (struct ttypes_filter
);
1411 n
->filter
= VEC_length (tree
, crtl
->eh
.ttype_data
) + 1;
1414 VEC_safe_push (tree
, gc
, crtl
->eh
.ttype_data
, type
);
1420 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1421 to speed up the search. Return the filter value to be used. */
1424 add_ehspec_entry (htab_t ehspec_hash
, htab_t ttypes_hash
, tree list
)
1426 struct ttypes_filter
**slot
, *n
;
1427 struct ttypes_filter dummy
;
1430 slot
= (struct ttypes_filter
**)
1431 htab_find_slot (ehspec_hash
, &dummy
, INSERT
);
1433 if ((n
= *slot
) == NULL
)
1435 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1437 n
= XNEW (struct ttypes_filter
);
1439 n
->filter
= -(VARRAY_ACTIVE_SIZE (crtl
->eh
.ehspec_data
) + 1);
1442 /* Generate a 0 terminated list of filter values. */
1443 for (; list
; list
= TREE_CHAIN (list
))
1445 if (targetm
.arm_eabi_unwinder
)
1446 VARRAY_PUSH_TREE (crtl
->eh
.ehspec_data
, TREE_VALUE (list
));
1449 /* Look up each type in the list and encode its filter
1450 value as a uleb128. */
1451 push_uleb128 (&crtl
->eh
.ehspec_data
,
1452 add_ttypes_entry (ttypes_hash
, TREE_VALUE (list
)));
1455 if (targetm
.arm_eabi_unwinder
)
1456 VARRAY_PUSH_TREE (crtl
->eh
.ehspec_data
, NULL_TREE
);
1458 VARRAY_PUSH_UCHAR (crtl
->eh
.ehspec_data
, 0);
1464 /* Generate the action filter values to be used for CATCH and
1465 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1466 we use lots of landing pads, and so every type or list can share
1467 the same filter value, which saves table space. */
1470 assign_filter_values (void)
1473 htab_t ttypes
, ehspec
;
1475 crtl
->eh
.ttype_data
= VEC_alloc (tree
, gc
, 16);
1476 if (targetm
.arm_eabi_unwinder
)
1477 VARRAY_TREE_INIT (crtl
->eh
.ehspec_data
, 64, "ehspec_data");
1479 VARRAY_UCHAR_INIT (crtl
->eh
.ehspec_data
, 64, "ehspec_data");
1481 ttypes
= htab_create (31, ttypes_filter_hash
, ttypes_filter_eq
, free
);
1482 ehspec
= htab_create (31, ehspec_filter_hash
, ehspec_filter_eq
, free
);
1484 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1486 struct eh_region
*r
;
1488 r
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
1490 /* Mind we don't process a region more than once. */
1491 if (!r
|| r
->region_number
!= i
)
1497 /* Whatever type_list is (NULL or true list), we build a list
1498 of filters for the region. */
1499 r
->u
.eh_catch
.filter_list
= NULL_TREE
;
1501 if (r
->u
.eh_catch
.type_list
!= NULL
)
1503 /* Get a filter value for each of the types caught and store
1504 them in the region's dedicated list. */
1505 tree tp_node
= r
->u
.eh_catch
.type_list
;
1507 for (;tp_node
; tp_node
= TREE_CHAIN (tp_node
))
1509 int flt
= add_ttypes_entry (ttypes
, TREE_VALUE (tp_node
));
1510 tree flt_node
= build_int_cst (NULL_TREE
, flt
);
1512 r
->u
.eh_catch
.filter_list
1513 = tree_cons (NULL_TREE
, flt_node
, r
->u
.eh_catch
.filter_list
);
1518 /* Get a filter value for the NULL list also since it will need
1519 an action record anyway. */
1520 int flt
= add_ttypes_entry (ttypes
, NULL
);
1521 tree flt_node
= build_int_cst (NULL_TREE
, flt
);
1523 r
->u
.eh_catch
.filter_list
1524 = tree_cons (NULL_TREE
, flt_node
, r
->u
.eh_catch
.filter_list
);
1529 case ERT_ALLOWED_EXCEPTIONS
:
1531 = add_ehspec_entry (ehspec
, ttypes
, r
->u
.allowed
.type_list
);
1539 htab_delete (ttypes
);
1540 htab_delete (ehspec
);
1543 /* Emit SEQ into basic block just before INSN (that is assumed to be
1544 first instruction of some existing BB and return the newly
1547 emit_to_new_bb_before (rtx seq
, rtx insn
)
1554 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1555 call), we don't want it to go into newly created landing pad or other EH
1557 for (ei
= ei_start (BLOCK_FOR_INSN (insn
)->preds
); (e
= ei_safe_edge (ei
)); )
1558 if (e
->flags
& EDGE_FALLTHRU
)
1559 force_nonfallthru (e
);
1562 last
= emit_insn_before (seq
, insn
);
1563 if (BARRIER_P (last
))
1564 last
= PREV_INSN (last
);
1565 bb
= create_basic_block (seq
, last
, BLOCK_FOR_INSN (insn
)->prev_bb
);
1566 update_bb_for_insn (bb
);
1567 bb
->flags
|= BB_SUPERBLOCK
;
1571 /* Generate the code to actually handle exceptions, which will follow the
1575 build_post_landing_pads (void)
1579 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1581 struct eh_region
*region
;
1584 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
1585 /* Mind we don't process a region more than once. */
1586 if (!region
|| region
->region_number
!= i
)
1589 switch (region
->type
)
1592 /* ??? Collect the set of all non-overlapping catch handlers
1593 all the way up the chain until blocked by a cleanup. */
1594 /* ??? Outer try regions can share landing pads with inner
1595 try regions if the types are completely non-overlapping,
1596 and there are no intervening cleanups. */
1598 region
->post_landing_pad
= gen_label_rtx ();
1602 emit_label (region
->post_landing_pad
);
1604 /* ??? It is mighty inconvenient to call back into the
1605 switch statement generation code in expand_end_case.
1606 Rapid prototyping sez a sequence of ifs. */
1608 struct eh_region
*c
;
1609 for (c
= region
->u
.eh_try
.eh_catch
; c
; c
= c
->u
.eh_catch
.next_catch
)
1611 if (c
->u
.eh_catch
.type_list
== NULL
)
1612 emit_jump (c
->label
);
1615 /* We need one cmp/jump per type caught. Each type
1616 list entry has a matching entry in the filter list
1617 (see assign_filter_values). */
1618 tree tp_node
= c
->u
.eh_catch
.type_list
;
1619 tree flt_node
= c
->u
.eh_catch
.filter_list
;
1623 emit_cmp_and_jump_insns
1625 GEN_INT (tree_low_cst (TREE_VALUE (flt_node
), 0)),
1627 targetm
.eh_return_filter_mode (), 0, c
->label
);
1629 tp_node
= TREE_CHAIN (tp_node
);
1630 flt_node
= TREE_CHAIN (flt_node
);
1636 /* We delay the generation of the _Unwind_Resume until we generate
1637 landing pads. We emit a marker here so as to get good control
1638 flow data in the meantime. */
1640 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1646 emit_to_new_bb_before (seq
, region
->u
.eh_try
.eh_catch
->label
);
1650 case ERT_ALLOWED_EXCEPTIONS
:
1651 region
->post_landing_pad
= gen_label_rtx ();
1655 emit_label (region
->post_landing_pad
);
1657 emit_cmp_and_jump_insns (crtl
->eh
.filter
,
1658 GEN_INT (region
->u
.allowed
.filter
),
1660 targetm
.eh_return_filter_mode (), 0, region
->label
);
1662 /* We delay the generation of the _Unwind_Resume until we generate
1663 landing pads. We emit a marker here so as to get good control
1664 flow data in the meantime. */
1666 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1672 emit_to_new_bb_before (seq
, region
->label
);
1676 case ERT_MUST_NOT_THROW
:
1677 region
->post_landing_pad
= region
->label
;
1682 /* Nothing to do. */
1691 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1692 _Unwind_Resume otherwise. */
1695 connect_post_landing_pads (void)
1699 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1701 struct eh_region
*region
;
1702 struct eh_region
*outer
;
1706 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
1707 /* Mind we don't process a region more than once. */
1708 if (!region
|| region
->region_number
!= i
)
1711 /* If there is no RESX, or it has been deleted by flow, there's
1712 nothing to fix up. */
1713 if (! region
->resume
|| INSN_DELETED_P (region
->resume
))
1716 /* Search for another landing pad in this function. */
1717 for (outer
= region
->outer
; outer
; outer
= outer
->outer
)
1718 if (outer
->post_landing_pad
)
1726 basic_block src
, dest
;
1728 emit_jump (outer
->post_landing_pad
);
1729 src
= BLOCK_FOR_INSN (region
->resume
);
1730 dest
= BLOCK_FOR_INSN (outer
->post_landing_pad
);
1731 while (EDGE_COUNT (src
->succs
) > 0)
1732 remove_edge (EDGE_SUCC (src
, 0));
1733 e
= make_edge (src
, dest
, 0);
1734 e
->probability
= REG_BR_PROB_BASE
;
1735 e
->count
= src
->count
;
1739 emit_library_call (unwind_resume_libfunc
, LCT_THROW
,
1740 VOIDmode
, 1, crtl
->eh
.exc_ptr
, ptr_mode
);
1742 /* What we just emitted was a throwing libcall, so it got a
1743 barrier automatically added after it. If the last insn in
1744 the libcall sequence isn't the barrier, it's because the
1745 target emits multiple insns for a call, and there are insns
1746 after the actual call insn (which are redundant and would be
1747 optimized away). The barrier is inserted exactly after the
1748 call insn, so let's go get that and delete the insns after
1749 it, because below we need the barrier to be the last insn in
1751 delete_insns_since (NEXT_INSN (last_call_insn ()));
1756 barrier
= emit_insn_before (seq
, region
->resume
);
1757 /* Avoid duplicate barrier. */
1758 gcc_assert (BARRIER_P (barrier
));
1759 delete_insn (barrier
);
1760 delete_insn (region
->resume
);
1762 /* ??? From tree-ssa we can wind up with catch regions whose
1763 label is not instantiated, but whose resx is present. Now
1764 that we've dealt with the resx, kill the region. */
1765 if (region
->label
== NULL
&& region
->type
== ERT_CLEANUP
)
1766 remove_eh_handler (region
);
1772 dw2_build_landing_pads (void)
1776 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1778 struct eh_region
*region
;
1783 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
1784 /* Mind we don't process a region more than once. */
1785 if (!region
|| region
->region_number
!= i
)
1788 if (region
->type
!= ERT_CLEANUP
1789 && region
->type
!= ERT_TRY
1790 && region
->type
!= ERT_ALLOWED_EXCEPTIONS
)
1795 region
->landing_pad
= gen_label_rtx ();
1796 emit_label (region
->landing_pad
);
1798 #ifdef HAVE_exception_receiver
1799 if (HAVE_exception_receiver
)
1800 emit_insn (gen_exception_receiver ());
1803 #ifdef HAVE_nonlocal_goto_receiver
1804 if (HAVE_nonlocal_goto_receiver
)
1805 emit_insn (gen_nonlocal_goto_receiver ());
1810 emit_move_insn (crtl
->eh
.exc_ptr
,
1811 gen_rtx_REG (ptr_mode
, EH_RETURN_DATA_REGNO (0)));
1812 emit_move_insn (crtl
->eh
.filter
,
1813 gen_rtx_REG (targetm
.eh_return_filter_mode (),
1814 EH_RETURN_DATA_REGNO (1)));
1819 bb
= emit_to_new_bb_before (seq
, region
->post_landing_pad
);
1820 e
= make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
1821 e
->count
= bb
->count
;
1822 e
->probability
= REG_BR_PROB_BASE
;
1829 int directly_reachable
;
1832 int call_site_index
;
1836 sjlj_find_directly_reachable_regions (struct sjlj_lp_info
*lp_info
)
1839 bool found_one
= false;
1841 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1843 struct eh_region
*region
;
1844 enum reachable_code rc
;
1848 if (! INSN_P (insn
))
1851 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1852 if (!note
|| INTVAL (XEXP (note
, 0)) <= 0)
1855 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, INTVAL (XEXP (note
, 0)));
1859 type_thrown
= NULL_TREE
;
1860 if (region
->type
== ERT_THROW
)
1862 type_thrown
= region
->u
.eh_throw
.type
;
1863 region
= region
->outer
;
1866 /* Find the first containing region that might handle the exception.
1867 That's the landing pad to which we will transfer control. */
1868 rc
= RNL_NOT_CAUGHT
;
1869 for (; region
; region
= region
->outer
)
1871 rc
= reachable_next_level (region
, type_thrown
, NULL
, false);
1872 if (rc
!= RNL_NOT_CAUGHT
)
1875 if (rc
== RNL_MAYBE_CAUGHT
|| rc
== RNL_CAUGHT
)
1877 lp_info
[region
->region_number
].directly_reachable
= 1;
1886 sjlj_assign_call_site_values (rtx dispatch_label
, struct sjlj_lp_info
*lp_info
)
1891 /* First task: build the action table. */
1893 VARRAY_UCHAR_INIT (crtl
->eh
.action_record_data
, 64, "action_record_data");
1894 ar_hash
= htab_create (31, action_record_hash
, action_record_eq
, free
);
1896 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1897 if (lp_info
[i
].directly_reachable
)
1899 struct eh_region
*r
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
1901 r
->landing_pad
= dispatch_label
;
1902 lp_info
[i
].action_index
= collect_one_action_chain (ar_hash
, r
);
1903 if (lp_info
[i
].action_index
!= -1)
1904 crtl
->uses_eh_lsda
= 1;
1907 htab_delete (ar_hash
);
1909 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1910 landing pad label for the region. For sjlj though, there is one
1911 common landing pad from which we dispatch to the post-landing pads.
1913 A region receives a dispatch index if it is directly reachable
1914 and requires in-function processing. Regions that share post-landing
1915 pads may share dispatch indices. */
1916 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1917 (see build_post_landing_pads) so we don't bother checking for it. */
1920 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1921 if (lp_info
[i
].directly_reachable
)
1922 lp_info
[i
].dispatch_index
= index
++;
1924 /* Finally: assign call-site values. In dwarf2 terms, this would be
1925 the region number assigned by convert_to_eh_region_ranges, but
1926 handles no-action and must-not-throw differently. */
1929 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1930 if (lp_info
[i
].directly_reachable
)
1932 int action
= lp_info
[i
].action_index
;
1934 /* Map must-not-throw to otherwise unused call-site index 0. */
1937 /* Map no-action to otherwise unused call-site index -1. */
1938 else if (action
== -1)
1940 /* Otherwise, look it up in the table. */
1942 index
= add_call_site (GEN_INT (lp_info
[i
].dispatch_index
), action
);
1944 lp_info
[i
].call_site_index
= index
;
1949 sjlj_mark_call_sites (struct sjlj_lp_info
*lp_info
)
1951 int last_call_site
= -2;
1954 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1956 struct eh_region
*region
;
1958 rtx note
, before
, p
;
1960 /* Reset value tracking at extended basic block boundaries. */
1962 last_call_site
= -2;
1964 if (! INSN_P (insn
))
1967 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1969 /* Calls that are known to not throw need not be marked. */
1970 if (note
&& INTVAL (XEXP (note
, 0)) <= 0)
1974 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, INTVAL (XEXP (note
, 0)));
1980 /* Calls (and trapping insns) without notes are outside any
1981 exception handling region in this function. Mark them as
1984 || (flag_non_call_exceptions
1985 && may_trap_p (PATTERN (insn
))))
1986 this_call_site
= -1;
1991 this_call_site
= lp_info
[region
->region_number
].call_site_index
;
1993 if (this_call_site
== last_call_site
)
1996 /* Don't separate a call from its argument loads. */
1999 before
= find_first_parameter_load (insn
, NULL_RTX
);
2002 mem
= adjust_address (crtl
->eh
.sjlj_fc
, TYPE_MODE (integer_type_node
),
2003 sjlj_fc_call_site_ofs
);
2004 emit_move_insn (mem
, GEN_INT (this_call_site
));
2008 emit_insn_before (p
, before
);
2009 last_call_site
= this_call_site
;
2013 /* Construct the SjLj_Function_Context. */
2016 sjlj_emit_function_enter (rtx dispatch_label
)
2018 rtx fn_begin
, fc
, mem
, seq
;
2019 bool fn_begin_outside_block
;
2021 fc
= crtl
->eh
.sjlj_fc
;
2025 /* We're storing this libcall's address into memory instead of
2026 calling it directly. Thus, we must call assemble_external_libcall
2027 here, as we can not depend on emit_library_call to do it for us. */
2028 assemble_external_libcall (eh_personality_libfunc
);
2029 mem
= adjust_address (fc
, Pmode
, sjlj_fc_personality_ofs
);
2030 emit_move_insn (mem
, eh_personality_libfunc
);
2032 mem
= adjust_address (fc
, Pmode
, sjlj_fc_lsda_ofs
);
2033 if (crtl
->uses_eh_lsda
)
2038 ASM_GENERATE_INTERNAL_LABEL (buf
, "LLSDA", current_function_funcdef_no
);
2039 sym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
2040 SYMBOL_REF_FLAGS (sym
) = SYMBOL_FLAG_LOCAL
;
2041 emit_move_insn (mem
, sym
);
2044 emit_move_insn (mem
, const0_rtx
);
2046 #ifdef DONT_USE_BUILTIN_SETJMP
2049 x
= emit_library_call_value (setjmp_libfunc
, NULL_RTX
, LCT_RETURNS_TWICE
,
2050 TYPE_MODE (integer_type_node
), 1,
2051 plus_constant (XEXP (fc
, 0),
2052 sjlj_fc_jbuf_ofs
), Pmode
);
2054 emit_cmp_and_jump_insns (x
, const0_rtx
, NE
, 0,
2055 TYPE_MODE (integer_type_node
), 0, dispatch_label
);
2056 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE
/100);
2059 expand_builtin_setjmp_setup (plus_constant (XEXP (fc
, 0), sjlj_fc_jbuf_ofs
),
2063 emit_library_call (unwind_sjlj_register_libfunc
, LCT_NORMAL
, VOIDmode
,
2064 1, XEXP (fc
, 0), Pmode
);
2069 /* ??? Instead of doing this at the beginning of the function,
2070 do this in a block that is at loop level 0 and dominates all
2071 can_throw_internal instructions. */
2073 fn_begin_outside_block
= true;
2074 for (fn_begin
= get_insns (); ; fn_begin
= NEXT_INSN (fn_begin
))
2075 if (NOTE_P (fn_begin
))
2077 if (NOTE_KIND (fn_begin
) == NOTE_INSN_FUNCTION_BEG
)
2079 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin
))
2080 fn_begin_outside_block
= false;
2083 if (fn_begin_outside_block
)
2084 insert_insn_on_edge (seq
, single_succ_edge (ENTRY_BLOCK_PTR
));
2086 emit_insn_after (seq
, fn_begin
);
2089 /* Call back from expand_function_end to know where we should put
2090 the call to unwind_sjlj_unregister_libfunc if needed. */
2093 sjlj_emit_function_exit_after (rtx after
)
2095 crtl
->eh
.sjlj_exit_after
= after
;
2099 sjlj_emit_function_exit (void)
2107 emit_library_call (unwind_sjlj_unregister_libfunc
, LCT_NORMAL
, VOIDmode
,
2108 1, XEXP (crtl
->eh
.sjlj_fc
, 0), Pmode
);
2113 /* ??? Really this can be done in any block at loop level 0 that
2114 post-dominates all can_throw_internal instructions. This is
2115 the last possible moment. */
2117 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
2118 if (e
->flags
& EDGE_FALLTHRU
)
2124 /* Figure out whether the place we are supposed to insert libcall
2125 is inside the last basic block or after it. In the other case
2126 we need to emit to the edge. */
2127 gcc_assert (e
->src
->next_bb
== EXIT_BLOCK_PTR
);
2128 for (insn
= BB_HEAD (e
->src
); ; insn
= NEXT_INSN (insn
))
2130 if (insn
== crtl
->eh
.sjlj_exit_after
)
2133 insn
= NEXT_INSN (insn
);
2134 emit_insn_after (seq
, insn
);
2137 if (insn
== BB_END (e
->src
))
2140 insert_insn_on_edge (seq
, e
);
2145 sjlj_emit_dispatch_table (rtx dispatch_label
, struct sjlj_lp_info
*lp_info
)
2147 enum machine_mode unwind_word_mode
= targetm
.unwind_word_mode ();
2148 enum machine_mode filter_mode
= targetm
.eh_return_filter_mode ();
2149 int i
, first_reachable
;
2150 rtx mem
, dispatch
, seq
, fc
;
2155 fc
= crtl
->eh
.sjlj_fc
;
2159 emit_label (dispatch_label
);
2161 #ifndef DONT_USE_BUILTIN_SETJMP
2162 expand_builtin_setjmp_receiver (dispatch_label
);
2165 /* Load up dispatch index, exc_ptr and filter values from the
2166 function context. */
2167 mem
= adjust_address (fc
, TYPE_MODE (integer_type_node
),
2168 sjlj_fc_call_site_ofs
);
2169 dispatch
= copy_to_reg (mem
);
2171 mem
= adjust_address (fc
, unwind_word_mode
, sjlj_fc_data_ofs
);
2172 if (unwind_word_mode
!= ptr_mode
)
2174 #ifdef POINTERS_EXTEND_UNSIGNED
2175 mem
= convert_memory_address (ptr_mode
, mem
);
2177 mem
= convert_to_mode (ptr_mode
, mem
, 0);
2180 emit_move_insn (crtl
->eh
.exc_ptr
, mem
);
2182 mem
= adjust_address (fc
, unwind_word_mode
,
2183 sjlj_fc_data_ofs
+ GET_MODE_SIZE (unwind_word_mode
));
2184 if (unwind_word_mode
!= filter_mode
)
2185 mem
= convert_to_mode (filter_mode
, mem
, 0);
2186 emit_move_insn (crtl
->eh
.filter
, mem
);
2188 /* Jump to one of the directly reachable regions. */
2189 /* ??? This really ought to be using a switch statement. */
2191 first_reachable
= 0;
2192 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2194 if (! lp_info
[i
].directly_reachable
)
2197 if (! first_reachable
)
2199 first_reachable
= i
;
2203 emit_cmp_and_jump_insns (dispatch
, GEN_INT (lp_info
[i
].dispatch_index
),
2204 EQ
, NULL_RTX
, TYPE_MODE (integer_type_node
), 0,
2205 ((struct eh_region
*)VEC_index (eh_region
, cfun
->eh
->region_array
, i
))
2206 ->post_landing_pad
);
2212 before
= (((struct eh_region
*)VEC_index (eh_region
, cfun
->eh
->region_array
, first_reachable
))
2213 ->post_landing_pad
);
2215 bb
= emit_to_new_bb_before (seq
, before
);
2216 e
= make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
2217 e
->count
= bb
->count
;
2218 e
->probability
= REG_BR_PROB_BASE
;
2222 sjlj_build_landing_pads (void)
2224 struct sjlj_lp_info
*lp_info
;
2226 lp_info
= XCNEWVEC (struct sjlj_lp_info
, cfun
->eh
->last_region_number
+ 1);
2228 if (sjlj_find_directly_reachable_regions (lp_info
))
2230 rtx dispatch_label
= gen_label_rtx ();
2231 int align
= STACK_SLOT_ALIGNMENT (sjlj_fc_type_node
,
2232 TYPE_MODE (sjlj_fc_type_node
),
2233 TYPE_ALIGN (sjlj_fc_type_node
));
2235 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node
),
2236 int_size_in_bytes (sjlj_fc_type_node
),
2239 sjlj_assign_call_site_values (dispatch_label
, lp_info
);
2240 sjlj_mark_call_sites (lp_info
);
2242 sjlj_emit_function_enter (dispatch_label
);
2243 sjlj_emit_dispatch_table (dispatch_label
, lp_info
);
2244 sjlj_emit_function_exit ();
2251 finish_eh_generation (void)
2255 /* Nothing to do if no regions created. */
2256 if (cfun
->eh
->region_tree
== NULL
)
2259 /* The object here is to provide find_basic_blocks with detailed
2260 information (via reachable_handlers) on how exception control
2261 flows within the function. In this first pass, we can include
2262 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2263 regions, and hope that it will be useful in deleting unreachable
2264 handlers. Subsequently, we will generate landing pads which will
2265 connect many of the handlers, and then type information will not
2266 be effective. Still, this is a win over previous implementations. */
2268 /* These registers are used by the landing pads. Make sure they
2269 have been generated. */
2270 get_exception_pointer ();
2271 get_exception_filter ();
2273 /* Construct the landing pads. */
2275 assign_filter_values ();
2276 build_post_landing_pads ();
2277 connect_post_landing_pads ();
2278 if (USING_SJLJ_EXCEPTIONS
)
2279 sjlj_build_landing_pads ();
2281 dw2_build_landing_pads ();
2283 crtl
->eh
.built_landing_pads
= 1;
2285 /* We've totally changed the CFG. Start over. */
2286 find_exception_handler_labels ();
2287 break_superblocks ();
2288 if (USING_SJLJ_EXCEPTIONS
2289 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2290 || single_succ_edge (ENTRY_BLOCK_PTR
)->insns
.r
)
2291 commit_edge_insertions ();
2297 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
2299 if (e
->flags
& EDGE_EH
)
2308 rtl_make_eh_edge (NULL
, bb
, BB_END (bb
));
2312 /* This section handles removing dead code for flow. */
2314 /* Splice REGION from the region tree and replace it by REPLACE etc. */
2317 remove_eh_handler_and_replace (struct eh_region
*region
,
2318 struct eh_region
*replace
)
2320 struct eh_region
**pp
, **pp_start
, *p
, *outer
, *inner
;
2323 outer
= region
->outer
;
2324 /* For the benefit of efficiently handling REG_EH_REGION notes,
2325 replace this region in the region array with its containing
2326 region. Note that previous region deletions may result in
2327 multiple copies of this region in the array, so we have a
2328 list of alternate numbers by which we are known. */
2330 VEC_replace (eh_region
, cfun
->eh
->region_array
, region
->region_number
,
2337 EXECUTE_IF_SET_IN_BITMAP (region
->aka
, 0, i
, bi
)
2339 VEC_replace (eh_region
, cfun
->eh
->region_array
, i
, replace
);
2346 replace
->aka
= BITMAP_GGC_ALLOC ();
2348 bitmap_ior_into (replace
->aka
, region
->aka
);
2349 bitmap_set_bit (replace
->aka
, region
->region_number
);
2352 if (crtl
->eh
.built_landing_pads
)
2353 lab
= region
->landing_pad
;
2355 lab
= region
->label
;
2357 pp_start
= &outer
->inner
;
2359 pp_start
= &cfun
->eh
->region_tree
;
2360 for (pp
= pp_start
, p
= *pp
; p
!= region
; pp
= &p
->next_peer
, p
= *pp
)
2362 *pp
= region
->next_peer
;
2365 pp_start
= &replace
->inner
;
2367 pp_start
= &cfun
->eh
->region_tree
;
2368 inner
= region
->inner
;
2371 for (p
= inner
; p
->next_peer
; p
= p
->next_peer
)
2375 p
->next_peer
= *pp_start
;
2379 if (region
->type
== ERT_CATCH
)
2381 struct eh_region
*eh_try
, *next
, *prev
;
2383 for (eh_try
= region
->next_peer
;
2384 eh_try
->type
== ERT_CATCH
;
2385 eh_try
= eh_try
->next_peer
)
2387 gcc_assert (eh_try
->type
== ERT_TRY
);
2389 next
= region
->u
.eh_catch
.next_catch
;
2390 prev
= region
->u
.eh_catch
.prev_catch
;
2393 next
->u
.eh_catch
.prev_catch
= prev
;
2395 eh_try
->u
.eh_try
.last_catch
= prev
;
2397 prev
->u
.eh_catch
.next_catch
= next
;
2400 eh_try
->u
.eh_try
.eh_catch
= next
;
2402 remove_eh_handler (eh_try
);
2407 /* Splice REGION from the region tree and replace it by the outer region
2411 remove_eh_handler (struct eh_region
*region
)
2413 remove_eh_handler_and_replace (region
, region
->outer
);
2416 /* Remove EH region R that has turned out to have no code in its handler. */
2419 remove_eh_region (int r
)
2421 struct eh_region
*region
;
2423 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, r
);
2424 remove_eh_handler (region
);
2427 /* Invokes CALLBACK for every exception handler label. Only used by old
2428 loop hackery; should not be used by new code. */
2431 for_each_eh_label (void (*callback
) (rtx
))
2434 for (i
= 0; i
< cfun
->eh
->last_region_number
; i
++)
2436 struct eh_region
*r
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
2437 if (r
&& r
->region_number
== i
&& r
->label
2438 && GET_CODE (r
->label
) == CODE_LABEL
)
2439 (*callback
) (r
->label
);
2443 /* Invoke CALLBACK for every exception region in the current function. */
2446 for_each_eh_region (void (*callback
) (struct eh_region
*))
2448 int i
, n
= cfun
->eh
->last_region_number
;
2449 for (i
= 1; i
<= n
; ++i
)
2451 struct eh_region
*region
;
2453 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, i
);
2455 (*callback
) (region
);
2459 /* This section describes CFG exception edges for flow. */
2461 /* For communicating between calls to reachable_next_level. */
2462 struct reachable_info
2466 void (*callback
) (struct eh_region
*, void *);
2467 void *callback_data
;
2470 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2471 base class of TYPE, is in HANDLED. */
2474 check_handled (tree handled
, tree type
)
2478 /* We can check for exact matches without front-end help. */
2479 if (! lang_eh_type_covers
)
2481 for (t
= handled
; t
; t
= TREE_CHAIN (t
))
2482 if (TREE_VALUE (t
) == type
)
2487 for (t
= handled
; t
; t
= TREE_CHAIN (t
))
2488 if ((*lang_eh_type_covers
) (TREE_VALUE (t
), type
))
2495 /* A subroutine of reachable_next_level. If we are collecting a list
2496 of handlers, add one. After landing pad generation, reference
2497 it instead of the handlers themselves. Further, the handlers are
2498 all wired together, so by referencing one, we've got them all.
2499 Before landing pad generation we reference each handler individually.
2501 LP_REGION contains the landing pad; REGION is the handler. */
2504 add_reachable_handler (struct reachable_info
*info
,
2505 struct eh_region
*lp_region
, struct eh_region
*region
)
2510 if (crtl
->eh
.built_landing_pads
)
2511 info
->callback (lp_region
, info
->callback_data
);
2513 info
->callback (region
, info
->callback_data
);
2516 /* Process one level of exception regions for reachability.
2517 If TYPE_THROWN is non-null, then it is the *exact* type being
2518 propagated. If INFO is non-null, then collect handler labels
2519 and caught/allowed type information between invocations. */
2521 static enum reachable_code
2522 reachable_next_level (struct eh_region
*region
, tree type_thrown
,
2523 struct reachable_info
*info
,
2526 switch (region
->type
)
2529 /* Before landing-pad generation, we model control flow
2530 directly to the individual handlers. In this way we can
2531 see that catch handler types may shadow one another. */
2532 add_reachable_handler (info
, region
, region
);
2533 return RNL_MAYBE_CAUGHT
;
2537 struct eh_region
*c
;
2538 enum reachable_code ret
= RNL_NOT_CAUGHT
;
2540 for (c
= region
->u
.eh_try
.eh_catch
; c
; c
= c
->u
.eh_catch
.next_catch
)
2542 /* A catch-all handler ends the search. */
2543 if (c
->u
.eh_catch
.type_list
== NULL
)
2545 add_reachable_handler (info
, region
, c
);
2551 /* If we have at least one type match, end the search. */
2552 tree tp_node
= c
->u
.eh_catch
.type_list
;
2554 for (; tp_node
; tp_node
= TREE_CHAIN (tp_node
))
2556 tree type
= TREE_VALUE (tp_node
);
2558 if (type
== type_thrown
2559 || (lang_eh_type_covers
2560 && (*lang_eh_type_covers
) (type
, type_thrown
)))
2562 add_reachable_handler (info
, region
, c
);
2567 /* If we have definitive information of a match failure,
2568 the catch won't trigger. */
2569 if (lang_eh_type_covers
)
2570 return RNL_NOT_CAUGHT
;
2573 /* At this point, we either don't know what type is thrown or
2574 don't have front-end assistance to help deciding if it is
2575 covered by one of the types in the list for this region.
2577 We'd then like to add this region to the list of reachable
2578 handlers since it is indeed potentially reachable based on the
2579 information we have.
2581 Actually, this handler is for sure not reachable if all the
2582 types it matches have already been caught. That is, it is only
2583 potentially reachable if at least one of the types it catches
2584 has not been previously caught. */
2587 ret
= RNL_MAYBE_CAUGHT
;
2590 tree tp_node
= c
->u
.eh_catch
.type_list
;
2591 bool maybe_reachable
= false;
2593 /* Compute the potential reachability of this handler and
2594 update the list of types caught at the same time. */
2595 for (; tp_node
; tp_node
= TREE_CHAIN (tp_node
))
2597 tree type
= TREE_VALUE (tp_node
);
2599 if (! check_handled (info
->types_caught
, type
))
2602 = tree_cons (NULL
, type
, info
->types_caught
);
2604 maybe_reachable
= true;
2608 if (maybe_reachable
)
2610 add_reachable_handler (info
, region
, c
);
2612 /* ??? If the catch type is a base class of every allowed
2613 type, then we know we can stop the search. */
2614 ret
= RNL_MAYBE_CAUGHT
;
2622 case ERT_ALLOWED_EXCEPTIONS
:
2623 /* An empty list of types definitely ends the search. */
2624 if (region
->u
.allowed
.type_list
== NULL_TREE
)
2626 add_reachable_handler (info
, region
, region
);
2630 /* Collect a list of lists of allowed types for use in detecting
2631 when a catch may be transformed into a catch-all. */
2633 info
->types_allowed
= tree_cons (NULL_TREE
,
2634 region
->u
.allowed
.type_list
,
2635 info
->types_allowed
);
2637 /* If we have definitive information about the type hierarchy,
2638 then we can tell if the thrown type will pass through the
2640 if (type_thrown
&& lang_eh_type_covers
)
2642 if (check_handled (region
->u
.allowed
.type_list
, type_thrown
))
2643 return RNL_NOT_CAUGHT
;
2646 add_reachable_handler (info
, region
, region
);
2651 add_reachable_handler (info
, region
, region
);
2652 return RNL_MAYBE_CAUGHT
;
2655 /* Catch regions are handled by their controlling try region. */
2656 return RNL_NOT_CAUGHT
;
2658 case ERT_MUST_NOT_THROW
:
2659 /* Here we end our search, since no exceptions may propagate.
2661 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
2662 only via locally handled RESX instructions.
2664 When we inline a function call, we can bring in new handlers. In order
2665 to avoid ERT_MUST_NOT_THROW landing pads from being deleted as unreachable
2666 assume that such handlers exists prior for any inlinable call prior
2667 inlining decisions are fixed. */
2671 add_reachable_handler (info
, region
, region
);
2679 /* Shouldn't see these here. */
2687 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2690 foreach_reachable_handler (int region_number
, bool is_resx
, bool inlinable_call
,
2691 void (*callback
) (struct eh_region
*, void *),
2692 void *callback_data
)
2694 struct reachable_info info
;
2695 struct eh_region
*region
;
2698 memset (&info
, 0, sizeof (info
));
2699 info
.callback
= callback
;
2700 info
.callback_data
= callback_data
;
2702 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, region_number
);
2706 type_thrown
= NULL_TREE
;
2709 /* A RESX leaves a region instead of entering it. Thus the
2710 region itself may have been deleted out from under us. */
2713 region
= region
->outer
;
2715 else if (region
->type
== ERT_THROW
)
2717 type_thrown
= region
->u
.eh_throw
.type
;
2718 region
= region
->outer
;
2723 if (reachable_next_level (region
, type_thrown
, &info
,
2724 inlinable_call
|| is_resx
) >= RNL_CAUGHT
)
2726 /* If we have processed one cleanup, there is no point in
2727 processing any more of them. Each cleanup will have an edge
2728 to the next outer cleanup region, so the flow graph will be
2730 if (region
->type
== ERT_CLEANUP
)
2731 region
= region
->u
.cleanup
.prev_try
;
2733 region
= region
->outer
;
2737 /* Retrieve a list of labels of exception handlers which can be
2738 reached by a given insn. */
2741 arh_to_landing_pad (struct eh_region
*region
, void *data
)
2743 rtx
*p_handlers
= (rtx
*) data
;
2745 *p_handlers
= alloc_INSN_LIST (region
->landing_pad
, NULL_RTX
);
2749 arh_to_label (struct eh_region
*region
, void *data
)
2751 rtx
*p_handlers
= (rtx
*) data
;
2752 *p_handlers
= alloc_INSN_LIST (region
->label
, *p_handlers
);
2756 reachable_handlers (rtx insn
)
2758 bool is_resx
= false;
2759 rtx handlers
= NULL
;
2763 && GET_CODE (PATTERN (insn
)) == RESX
)
2765 region_number
= XINT (PATTERN (insn
), 0);
2770 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
2771 if (!note
|| INTVAL (XEXP (note
, 0)) <= 0)
2773 region_number
= INTVAL (XEXP (note
, 0));
2776 foreach_reachable_handler (region_number
, is_resx
, false,
2777 (crtl
->eh
.built_landing_pads
2778 ? arh_to_landing_pad
2785 /* Determine if the given INSN can throw an exception that is caught
2786 within the function. */
2789 can_throw_internal_1 (int region_number
, bool is_resx
, bool inlinable_call
)
2791 struct eh_region
*region
;
2794 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, region_number
);
2798 type_thrown
= NULL_TREE
;
2800 region
= region
->outer
;
2801 else if (region
->type
== ERT_THROW
)
2803 type_thrown
= region
->u
.eh_throw
.type
;
2804 region
= region
->outer
;
2807 /* If this exception is ignored by each and every containing region,
2808 then control passes straight out. The runtime may handle some
2809 regions, which also do not require processing internally. */
2810 for (; region
; region
= region
->outer
)
2812 enum reachable_code how
= reachable_next_level (region
, type_thrown
, 0,
2813 inlinable_call
|| is_resx
);
2814 if (how
== RNL_BLOCKED
)
2816 if (how
!= RNL_NOT_CAUGHT
)
2824 can_throw_internal (const_rtx insn
)
2828 if (! INSN_P (insn
))
2832 && GET_CODE (PATTERN (insn
)) == RESX
2833 && XINT (PATTERN (insn
), 0) > 0)
2834 return can_throw_internal_1 (XINT (PATTERN (insn
), 0), true, false);
2836 if (NONJUMP_INSN_P (insn
)
2837 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
2838 insn
= XVECEXP (PATTERN (insn
), 0, 0);
2840 /* Every insn that might throw has an EH_REGION note. */
2841 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
2842 if (!note
|| INTVAL (XEXP (note
, 0)) <= 0)
2845 return can_throw_internal_1 (INTVAL (XEXP (note
, 0)), false, false);
2848 /* Determine if the given INSN can throw an exception that is
2849 visible outside the function. */
2852 can_throw_external_1 (int region_number
, bool is_resx
, bool inlinable_call
)
2854 struct eh_region
*region
;
2857 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, region_number
);
2861 type_thrown
= NULL_TREE
;
2863 region
= region
->outer
;
2864 else if (region
->type
== ERT_THROW
)
2866 type_thrown
= region
->u
.eh_throw
.type
;
2867 region
= region
->outer
;
2870 /* If the exception is caught or blocked by any containing region,
2871 then it is not seen by any calling function. */
2872 for (; region
; region
= region
->outer
)
2873 if (reachable_next_level (region
, type_thrown
, NULL
,
2874 inlinable_call
|| is_resx
) >= RNL_CAUGHT
)
2881 can_throw_external (const_rtx insn
)
2885 if (! INSN_P (insn
))
2889 && GET_CODE (PATTERN (insn
)) == RESX
2890 && XINT (PATTERN (insn
), 0) > 0)
2891 return can_throw_external_1 (XINT (PATTERN (insn
), 0), true, false);
2893 if (NONJUMP_INSN_P (insn
)
2894 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
2896 rtx seq
= PATTERN (insn
);
2897 int i
, n
= XVECLEN (seq
, 0);
2899 for (i
= 0; i
< n
; i
++)
2900 if (can_throw_external (XVECEXP (seq
, 0, i
)))
2906 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
2909 /* Calls (and trapping insns) without notes are outside any
2910 exception handling region in this function. We have to
2911 assume it might throw. Given that the front end and middle
2912 ends mark known NOTHROW functions, this isn't so wildly
2914 return (CALL_P (insn
)
2915 || (flag_non_call_exceptions
2916 && may_trap_p (PATTERN (insn
))));
2918 if (INTVAL (XEXP (note
, 0)) <= 0)
2921 return can_throw_external_1 (INTVAL (XEXP (note
, 0)), false, false);
2924 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
2927 set_nothrow_function_flags (void)
2933 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2934 something that can throw an exception. We specifically exempt
2935 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2936 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2939 crtl
->all_throwers_are_sibcalls
= 1;
2941 /* If we don't know that this implementation of the function will
2942 actually be used, then we must not set TREE_NOTHROW, since
2943 callers must not assume that this function does not throw. */
2944 if (TREE_NOTHROW (current_function_decl
))
2947 if (! flag_exceptions
)
2950 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2951 if (can_throw_external (insn
))
2955 if (!CALL_P (insn
) || !SIBLING_CALL_P (insn
))
2957 crtl
->all_throwers_are_sibcalls
= 0;
2962 for (insn
= crtl
->epilogue_delay_list
; insn
;
2963 insn
= XEXP (insn
, 1))
2964 if (can_throw_external (insn
))
2968 if (!CALL_P (insn
) || !SIBLING_CALL_P (insn
))
2970 crtl
->all_throwers_are_sibcalls
= 0;
2975 && (cgraph_function_body_availability (cgraph_node
2976 (current_function_decl
))
2977 >= AVAIL_AVAILABLE
))
2979 TREE_NOTHROW (current_function_decl
) = 1;
2982 fprintf (dump_file
, "Marking function nothrow: %s\n\n",
2983 current_function_name ());
2988 struct rtl_opt_pass pass_set_nothrow_function_flags
=
2992 "nothrow", /* name */
2994 set_nothrow_function_flags
, /* execute */
2997 0, /* static_pass_number */
2999 0, /* properties_required */
3000 0, /* properties_provided */
3001 0, /* properties_destroyed */
3002 0, /* todo_flags_start */
3003 TODO_dump_func
, /* todo_flags_finish */
3008 /* Various hooks for unwind library. */
3010 /* Do any necessary initialization to access arbitrary stack frames.
3011 On the SPARC, this means flushing the register windows. */
3014 expand_builtin_unwind_init (void)
3016 /* Set this so all the registers get saved in our frame; we need to be
3017 able to copy the saved values for any registers from frames we unwind. */
3018 crtl
->saves_all_registers
= 1;
3020 #ifdef SETUP_FRAME_ADDRESSES
3021 SETUP_FRAME_ADDRESSES ();
3026 expand_builtin_eh_return_data_regno (tree exp
)
3028 tree which
= CALL_EXPR_ARG (exp
, 0);
3029 unsigned HOST_WIDE_INT iwhich
;
3031 if (TREE_CODE (which
) != INTEGER_CST
)
3033 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3037 iwhich
= tree_low_cst (which
, 1);
3038 iwhich
= EH_RETURN_DATA_REGNO (iwhich
);
3039 if (iwhich
== INVALID_REGNUM
)
3042 #ifdef DWARF_FRAME_REGNUM
3043 iwhich
= DWARF_FRAME_REGNUM (iwhich
);
3045 iwhich
= DBX_REGISTER_NUMBER (iwhich
);
3048 return GEN_INT (iwhich
);
3051 /* Given a value extracted from the return address register or stack slot,
3052 return the actual address encoded in that value. */
3055 expand_builtin_extract_return_addr (tree addr_tree
)
3057 rtx addr
= expand_expr (addr_tree
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
3059 if (GET_MODE (addr
) != Pmode
3060 && GET_MODE (addr
) != VOIDmode
)
3062 #ifdef POINTERS_EXTEND_UNSIGNED
3063 addr
= convert_memory_address (Pmode
, addr
);
3065 addr
= convert_to_mode (Pmode
, addr
, 0);
3069 /* First mask out any unwanted bits. */
3070 #ifdef MASK_RETURN_ADDR
3071 expand_and (Pmode
, addr
, MASK_RETURN_ADDR
, addr
);
3074 /* Then adjust to find the real return address. */
3075 #if defined (RETURN_ADDR_OFFSET)
3076 addr
= plus_constant (addr
, RETURN_ADDR_OFFSET
);
3082 /* Given an actual address in addr_tree, do any necessary encoding
3083 and return the value to be stored in the return address register or
3084 stack slot so the epilogue will return to that address. */
3087 expand_builtin_frob_return_addr (tree addr_tree
)
3089 rtx addr
= expand_expr (addr_tree
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
3091 addr
= convert_memory_address (Pmode
, addr
);
3093 #ifdef RETURN_ADDR_OFFSET
3094 addr
= force_reg (Pmode
, addr
);
3095 addr
= plus_constant (addr
, -RETURN_ADDR_OFFSET
);
3101 /* Set up the epilogue with the magic bits we'll need to return to the
3102 exception handler. */
3105 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED
,
3110 #ifdef EH_RETURN_STACKADJ_RTX
3111 tmp
= expand_expr (stackadj_tree
, crtl
->eh
.ehr_stackadj
,
3112 VOIDmode
, EXPAND_NORMAL
);
3113 tmp
= convert_memory_address (Pmode
, tmp
);
3114 if (!crtl
->eh
.ehr_stackadj
)
3115 crtl
->eh
.ehr_stackadj
= copy_to_reg (tmp
);
3116 else if (tmp
!= crtl
->eh
.ehr_stackadj
)
3117 emit_move_insn (crtl
->eh
.ehr_stackadj
, tmp
);
3120 tmp
= expand_expr (handler_tree
, crtl
->eh
.ehr_handler
,
3121 VOIDmode
, EXPAND_NORMAL
);
3122 tmp
= convert_memory_address (Pmode
, tmp
);
3123 if (!crtl
->eh
.ehr_handler
)
3124 crtl
->eh
.ehr_handler
= copy_to_reg (tmp
);
3125 else if (tmp
!= crtl
->eh
.ehr_handler
)
3126 emit_move_insn (crtl
->eh
.ehr_handler
, tmp
);
3128 if (!crtl
->eh
.ehr_label
)
3129 crtl
->eh
.ehr_label
= gen_label_rtx ();
3130 emit_jump (crtl
->eh
.ehr_label
);
3134 expand_eh_return (void)
3138 if (! crtl
->eh
.ehr_label
)
3141 crtl
->calls_eh_return
= 1;
3143 #ifdef EH_RETURN_STACKADJ_RTX
3144 emit_move_insn (EH_RETURN_STACKADJ_RTX
, const0_rtx
);
3147 around_label
= gen_label_rtx ();
3148 emit_jump (around_label
);
3150 emit_label (crtl
->eh
.ehr_label
);
3151 clobber_return_register ();
3153 #ifdef EH_RETURN_STACKADJ_RTX
3154 emit_move_insn (EH_RETURN_STACKADJ_RTX
, crtl
->eh
.ehr_stackadj
);
3157 #ifdef HAVE_eh_return
3159 emit_insn (gen_eh_return (crtl
->eh
.ehr_handler
));
3163 #ifdef EH_RETURN_HANDLER_RTX
3164 emit_move_insn (EH_RETURN_HANDLER_RTX
, crtl
->eh
.ehr_handler
);
3166 error ("__builtin_eh_return not supported on this target");
3170 emit_label (around_label
);
3173 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3174 POINTERS_EXTEND_UNSIGNED and return it. */
3177 expand_builtin_extend_pointer (tree addr_tree
)
3179 rtx addr
= expand_expr (addr_tree
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
3182 #ifdef POINTERS_EXTEND_UNSIGNED
3183 extend
= POINTERS_EXTEND_UNSIGNED
;
3185 /* The previous EH code did an unsigned extend by default, so we do this also
3190 return convert_modes (targetm
.unwind_word_mode (), ptr_mode
, addr
, extend
);
3193 /* In the following functions, we represent entries in the action table
3194 as 1-based indices. Special cases are:
3196 0: null action record, non-null landing pad; implies cleanups
3197 -1: null action record, null landing pad; implies no action
3198 -2: no call-site entry; implies must_not_throw
3199 -3: we have yet to process outer regions
3201 Further, no special cases apply to the "next" field of the record.
3202 For next, 0 means end of list. */
3204 struct action_record
3212 action_record_eq (const void *pentry
, const void *pdata
)
3214 const struct action_record
*entry
= (const struct action_record
*) pentry
;
3215 const struct action_record
*data
= (const struct action_record
*) pdata
;
3216 return entry
->filter
== data
->filter
&& entry
->next
== data
->next
;
3220 action_record_hash (const void *pentry
)
3222 const struct action_record
*entry
= (const struct action_record
*) pentry
;
3223 return entry
->next
* 1009 + entry
->filter
;
3227 add_action_record (htab_t ar_hash
, int filter
, int next
)
3229 struct action_record
**slot
, *new_ar
, tmp
;
3231 tmp
.filter
= filter
;
3233 slot
= (struct action_record
**) htab_find_slot (ar_hash
, &tmp
, INSERT
);
3235 if ((new_ar
= *slot
) == NULL
)
3237 new_ar
= XNEW (struct action_record
);
3238 new_ar
->offset
= VARRAY_ACTIVE_SIZE (crtl
->eh
.action_record_data
) + 1;
3239 new_ar
->filter
= filter
;
3240 new_ar
->next
= next
;
3243 /* The filter value goes in untouched. The link to the next
3244 record is a "self-relative" byte offset, or zero to indicate
3245 that there is no next record. So convert the absolute 1 based
3246 indices we've been carrying around into a displacement. */
3248 push_sleb128 (&crtl
->eh
.action_record_data
, filter
);
3250 next
-= VARRAY_ACTIVE_SIZE (crtl
->eh
.action_record_data
) + 1;
3251 push_sleb128 (&crtl
->eh
.action_record_data
, next
);
3254 return new_ar
->offset
;
3258 collect_one_action_chain (htab_t ar_hash
, struct eh_region
*region
)
3260 struct eh_region
*c
;
3263 /* If we've reached the top of the region chain, then we have
3264 no actions, and require no landing pad. */
3268 switch (region
->type
)
3271 /* A cleanup adds a zero filter to the beginning of the chain, but
3272 there are special cases to look out for. If there are *only*
3273 cleanups along a path, then it compresses to a zero action.
3274 Further, if there are multiple cleanups along a path, we only
3275 need to represent one of them, as that is enough to trigger
3276 entry to the landing pad at runtime. */
3277 next
= collect_one_action_chain (ar_hash
, region
->outer
);
3280 for (c
= region
->outer
; c
; c
= c
->outer
)
3281 if (c
->type
== ERT_CLEANUP
)
3283 return add_action_record (ar_hash
, 0, next
);
3286 /* Process the associated catch regions in reverse order.
3287 If there's a catch-all handler, then we don't need to
3288 search outer regions. Use a magic -3 value to record
3289 that we haven't done the outer search. */
3291 for (c
= region
->u
.eh_try
.last_catch
; c
; c
= c
->u
.eh_catch
.prev_catch
)
3293 if (c
->u
.eh_catch
.type_list
== NULL
)
3295 /* Retrieve the filter from the head of the filter list
3296 where we have stored it (see assign_filter_values). */
3298 = TREE_INT_CST_LOW (TREE_VALUE (c
->u
.eh_catch
.filter_list
));
3300 next
= add_action_record (ar_hash
, filter
, 0);
3304 /* Once the outer search is done, trigger an action record for
3305 each filter we have. */
3310 next
= collect_one_action_chain (ar_hash
, region
->outer
);
3312 /* If there is no next action, terminate the chain. */
3315 /* If all outer actions are cleanups or must_not_throw,
3316 we'll have no action record for it, since we had wanted
3317 to encode these states in the call-site record directly.
3318 Add a cleanup action to the chain to catch these. */
3320 next
= add_action_record (ar_hash
, 0, 0);
3323 flt_node
= c
->u
.eh_catch
.filter_list
;
3324 for (; flt_node
; flt_node
= TREE_CHAIN (flt_node
))
3326 int filter
= TREE_INT_CST_LOW (TREE_VALUE (flt_node
));
3327 next
= add_action_record (ar_hash
, filter
, next
);
3333 case ERT_ALLOWED_EXCEPTIONS
:
3334 /* An exception specification adds its filter to the
3335 beginning of the chain. */
3336 next
= collect_one_action_chain (ar_hash
, region
->outer
);
3338 /* If there is no next action, terminate the chain. */
3341 /* If all outer actions are cleanups or must_not_throw,
3342 we'll have no action record for it, since we had wanted
3343 to encode these states in the call-site record directly.
3344 Add a cleanup action to the chain to catch these. */
3346 next
= add_action_record (ar_hash
, 0, 0);
3348 return add_action_record (ar_hash
, region
->u
.allowed
.filter
, next
);
3350 case ERT_MUST_NOT_THROW
:
3351 /* A must-not-throw region with no inner handlers or cleanups
3352 requires no call-site entry. Note that this differs from
3353 the no handler or cleanup case in that we do require an lsda
3354 to be generated. Return a magic -2 value to record this. */
3359 /* CATCH regions are handled in TRY above. THROW regions are
3360 for optimization information only and produce no output. */
3361 return collect_one_action_chain (ar_hash
, region
->outer
);
3369 add_call_site (rtx landing_pad
, int action
)
3371 call_site_record record
;
3373 record
= GGC_NEW (struct call_site_record
);
3374 record
->landing_pad
= landing_pad
;
3375 record
->action
= action
;
3377 VEC_safe_push (call_site_record
, gc
, crtl
->eh
.call_site_record
, record
);
3379 return call_site_base
+ VEC_length (call_site_record
, crtl
->eh
.call_site_record
) - 1;
3382 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3383 The new note numbers will not refer to region numbers, but
3384 instead to call site entries. */
3387 convert_to_eh_region_ranges (void)
3389 rtx insn
, iter
, note
;
3391 int last_action
= -3;
3392 rtx last_action_insn
= NULL_RTX
;
3393 rtx last_landing_pad
= NULL_RTX
;
3394 rtx first_no_action_insn
= NULL_RTX
;
3397 if (USING_SJLJ_EXCEPTIONS
|| cfun
->eh
->region_tree
== NULL
)
3400 VARRAY_UCHAR_INIT (crtl
->eh
.action_record_data
, 64, "action_record_data");
3402 ar_hash
= htab_create (31, action_record_hash
, action_record_eq
, free
);
3404 for (iter
= get_insns (); iter
; iter
= NEXT_INSN (iter
))
3407 struct eh_region
*region
;
3409 rtx this_landing_pad
;
3412 if (NONJUMP_INSN_P (insn
)
3413 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3414 insn
= XVECEXP (PATTERN (insn
), 0, 0);
3416 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
3419 if (! (CALL_P (insn
)
3420 || (flag_non_call_exceptions
3421 && may_trap_p (PATTERN (insn
)))))
3428 if (INTVAL (XEXP (note
, 0)) <= 0)
3430 region
= VEC_index (eh_region
, cfun
->eh
->region_array
, INTVAL (XEXP (note
, 0)));
3431 this_action
= collect_one_action_chain (ar_hash
, region
);
3434 /* Existence of catch handlers, or must-not-throw regions
3435 implies that an lsda is needed (even if empty). */
3436 if (this_action
!= -1)
3437 crtl
->uses_eh_lsda
= 1;
3439 /* Delay creation of region notes for no-action regions
3440 until we're sure that an lsda will be required. */
3441 else if (last_action
== -3)
3443 first_no_action_insn
= iter
;
3447 /* Cleanups and handlers may share action chains but not
3448 landing pads. Collect the landing pad for this region. */
3449 if (this_action
>= 0)
3451 struct eh_region
*o
;
3452 for (o
= region
; ! o
->landing_pad
; o
= o
->outer
)
3454 this_landing_pad
= o
->landing_pad
;
3457 this_landing_pad
= NULL_RTX
;
3459 /* Differing actions or landing pads implies a change in call-site
3460 info, which implies some EH_REGION note should be emitted. */
3461 if (last_action
!= this_action
3462 || last_landing_pad
!= this_landing_pad
)
3464 /* If we'd not seen a previous action (-3) or the previous
3465 action was must-not-throw (-2), then we do not need an
3467 if (last_action
>= -1)
3469 /* If we delayed the creation of the begin, do it now. */
3470 if (first_no_action_insn
)
3472 call_site
= add_call_site (NULL_RTX
, 0);
3473 note
= emit_note_before (NOTE_INSN_EH_REGION_BEG
,
3474 first_no_action_insn
);
3475 NOTE_EH_HANDLER (note
) = call_site
;
3476 first_no_action_insn
= NULL_RTX
;
3479 note
= emit_note_after (NOTE_INSN_EH_REGION_END
,
3481 NOTE_EH_HANDLER (note
) = call_site
;
3484 /* If the new action is must-not-throw, then no region notes
3486 if (this_action
>= -1)
3488 call_site
= add_call_site (this_landing_pad
,
3489 this_action
< 0 ? 0 : this_action
);
3490 note
= emit_note_before (NOTE_INSN_EH_REGION_BEG
, iter
);
3491 NOTE_EH_HANDLER (note
) = call_site
;
3494 last_action
= this_action
;
3495 last_landing_pad
= this_landing_pad
;
3497 last_action_insn
= iter
;
3500 if (last_action
>= -1 && ! first_no_action_insn
)
3502 note
= emit_note_after (NOTE_INSN_EH_REGION_END
, last_action_insn
);
3503 NOTE_EH_HANDLER (note
) = call_site
;
3506 htab_delete (ar_hash
);
3510 struct rtl_opt_pass pass_convert_to_eh_region_ranges
=
3514 "eh_ranges", /* name */
3516 convert_to_eh_region_ranges
, /* execute */
3519 0, /* static_pass_number */
3521 0, /* properties_required */
3522 0, /* properties_provided */
3523 0, /* properties_destroyed */
3524 0, /* todo_flags_start */
3525 TODO_dump_func
, /* todo_flags_finish */
3531 push_uleb128 (varray_type
*data_area
, unsigned int value
)
3535 unsigned char byte
= value
& 0x7f;
3539 VARRAY_PUSH_UCHAR (*data_area
, byte
);
3545 push_sleb128 (varray_type
*data_area
, int value
)
3552 byte
= value
& 0x7f;
3554 more
= ! ((value
== 0 && (byte
& 0x40) == 0)
3555 || (value
== -1 && (byte
& 0x40) != 0));
3558 VARRAY_PUSH_UCHAR (*data_area
, byte
);
3564 #ifndef HAVE_AS_LEB128
3566 dw2_size_of_call_site_table (void)
3568 int n
= VEC_length (call_site_record
, crtl
->eh
.call_site_record
);
3569 int size
= n
* (4 + 4 + 4);
3572 for (i
= 0; i
< n
; ++i
)
3574 struct call_site_record
*cs
= VEC_index (call_site_record
, crtl
->eh
.call_site_record
, i
);
3575 size
+= size_of_uleb128 (cs
->action
);
3582 sjlj_size_of_call_site_table (void)
3584 int n
= VEC_length (call_site_record
, crtl
->eh
.call_site_record
);
3588 for (i
= 0; i
< n
; ++i
)
3590 struct call_site_record
*cs
= VEC_index (call_site_record
, crtl
->eh
.call_site_record
, i
);
3591 size
+= size_of_uleb128 (INTVAL (cs
->landing_pad
));
3592 size
+= size_of_uleb128 (cs
->action
);
3600 dw2_output_call_site_table (void)
3602 int n
= VEC_length (call_site_record
, crtl
->eh
.call_site_record
);
3605 for (i
= 0; i
< n
; ++i
)
3607 struct call_site_record
*cs
= VEC_index (call_site_record
, crtl
->eh
.call_site_record
, i
);
3608 char reg_start_lab
[32];
3609 char reg_end_lab
[32];
3610 char landing_pad_lab
[32];
3612 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab
, "LEHB", call_site_base
+ i
);
3613 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab
, "LEHE", call_site_base
+ i
);
3615 if (cs
->landing_pad
)
3616 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab
, "L",
3617 CODE_LABEL_NUMBER (cs
->landing_pad
));
3619 /* ??? Perhaps use insn length scaling if the assembler supports
3620 generic arithmetic. */
3621 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3622 data4 if the function is small enough. */
3623 #ifdef HAVE_AS_LEB128
3624 dw2_asm_output_delta_uleb128 (reg_start_lab
,
3625 current_function_func_begin_label
,
3626 "region %d start", i
);
3627 dw2_asm_output_delta_uleb128 (reg_end_lab
, reg_start_lab
,
3629 if (cs
->landing_pad
)
3630 dw2_asm_output_delta_uleb128 (landing_pad_lab
,
3631 current_function_func_begin_label
,
3634 dw2_asm_output_data_uleb128 (0, "landing pad");
3636 dw2_asm_output_delta (4, reg_start_lab
,
3637 current_function_func_begin_label
,
3638 "region %d start", i
);
3639 dw2_asm_output_delta (4, reg_end_lab
, reg_start_lab
, "length");
3640 if (cs
->landing_pad
)
3641 dw2_asm_output_delta (4, landing_pad_lab
,
3642 current_function_func_begin_label
,
3645 dw2_asm_output_data (4, 0, "landing pad");
3647 dw2_asm_output_data_uleb128 (cs
->action
, "action");
3650 call_site_base
+= n
;
3654 sjlj_output_call_site_table (void)
3656 int n
= VEC_length (call_site_record
, crtl
->eh
.call_site_record
);
3659 for (i
= 0; i
< n
; ++i
)
3661 struct call_site_record
*cs
= VEC_index (call_site_record
, crtl
->eh
.call_site_record
, i
);
3663 dw2_asm_output_data_uleb128 (INTVAL (cs
->landing_pad
),
3664 "region %d landing pad", i
);
3665 dw2_asm_output_data_uleb128 (cs
->action
, "action");
3668 call_site_base
+= n
;
3671 #ifndef TARGET_UNWIND_INFO
3672 /* Switch to the section that should be used for exception tables. */
3675 switch_to_exception_section (const char * ARG_UNUSED (fnname
))
3679 if (exception_section
)
3680 s
= exception_section
;
3683 /* Compute the section and cache it into exception_section,
3684 unless it depends on the function name. */
3685 if (targetm
.have_named_sections
)
3689 if (EH_TABLES_CAN_BE_READ_ONLY
)
3692 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3693 flags
= ((! flag_pic
3694 || ((tt_format
& 0x70) != DW_EH_PE_absptr
3695 && (tt_format
& 0x70) != DW_EH_PE_aligned
))
3696 ? 0 : SECTION_WRITE
);
3699 flags
= SECTION_WRITE
;
3701 #ifdef HAVE_LD_EH_GC_SECTIONS
3702 if (flag_function_sections
)
3704 char *section_name
= XNEWVEC (char, strlen (fnname
) + 32);
3705 sprintf (section_name
, ".gcc_except_table.%s", fnname
);
3706 s
= get_section (section_name
, flags
, NULL
);
3707 free (section_name
);
3712 = s
= get_section (".gcc_except_table", flags
, NULL
);
3716 = s
= flag_pic
? data_section
: readonly_data_section
;
3719 switch_to_section (s
);
3724 /* Output a reference from an exception table to the type_info object TYPE.
3725 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3729 output_ttype (tree type
, int tt_format
, int tt_format_size
)
3732 bool is_public
= true;
3734 if (type
== NULL_TREE
)
3738 struct varpool_node
*node
;
3740 type
= lookup_type_for_runtime (type
);
3741 value
= expand_expr (type
, NULL_RTX
, VOIDmode
, EXPAND_INITIALIZER
);
3743 /* Let cgraph know that the rtti decl is used. Not all of the
3744 paths below go through assemble_integer, which would take
3745 care of this for us. */
3747 if (TREE_CODE (type
) == ADDR_EXPR
)
3749 type
= TREE_OPERAND (type
, 0);
3750 if (TREE_CODE (type
) == VAR_DECL
)
3752 node
= varpool_node (type
);
3754 varpool_mark_needed_node (node
);
3755 is_public
= TREE_PUBLIC (type
);
3759 gcc_assert (TREE_CODE (type
) == INTEGER_CST
);
3762 /* Allow the target to override the type table entry format. */
3763 if (targetm
.asm_out
.ttype (value
))
3766 if (tt_format
== DW_EH_PE_absptr
|| tt_format
== DW_EH_PE_aligned
)
3767 assemble_integer (value
, tt_format_size
,
3768 tt_format_size
* BITS_PER_UNIT
, 1);
3770 dw2_asm_output_encoded_addr_rtx (tt_format
, value
, is_public
, NULL
);
3774 output_function_exception_table (const char * ARG_UNUSED (fnname
))
3776 int tt_format
, cs_format
, lp_format
, i
, n
;
3777 #ifdef HAVE_AS_LEB128
3778 char ttype_label
[32];
3779 char cs_after_size_label
[32];
3780 char cs_end_label
[32];
3785 int tt_format_size
= 0;
3787 /* Not all functions need anything. */
3788 if (! crtl
->uses_eh_lsda
)
3791 if (eh_personality_libfunc
)
3792 assemble_external_libcall (eh_personality_libfunc
);
3794 #ifdef TARGET_UNWIND_INFO
3795 /* TODO: Move this into target file. */
3796 fputs ("\t.personality\t", asm_out_file
);
3797 output_addr_const (asm_out_file
, eh_personality_libfunc
);
3798 fputs ("\n\t.handlerdata\n", asm_out_file
);
3799 /* Note that varasm still thinks we're in the function's code section.
3800 The ".endp" directive that will immediately follow will take us back. */
3802 switch_to_exception_section (fnname
);
3805 /* If the target wants a label to begin the table, emit it here. */
3806 targetm
.asm_out
.except_table_label (asm_out_file
);
3808 have_tt_data
= (VEC_length (tree
, crtl
->eh
.ttype_data
) > 0
3809 || VARRAY_ACTIVE_SIZE (crtl
->eh
.ehspec_data
) > 0);
3811 /* Indicate the format of the @TType entries. */
3813 tt_format
= DW_EH_PE_omit
;
3816 tt_format
= ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3817 #ifdef HAVE_AS_LEB128
3818 ASM_GENERATE_INTERNAL_LABEL (ttype_label
, "LLSDATT",
3819 current_function_funcdef_no
);
3821 tt_format_size
= size_of_encoded_value (tt_format
);
3823 assemble_align (tt_format_size
* BITS_PER_UNIT
);
3826 targetm
.asm_out
.internal_label (asm_out_file
, "LLSDA",
3827 current_function_funcdef_no
);
3829 /* The LSDA header. */
3831 /* Indicate the format of the landing pad start pointer. An omitted
3832 field implies @LPStart == @Start. */
3833 /* Currently we always put @LPStart == @Start. This field would
3834 be most useful in moving the landing pads completely out of
3835 line to another section, but it could also be used to minimize
3836 the size of uleb128 landing pad offsets. */
3837 lp_format
= DW_EH_PE_omit
;
3838 dw2_asm_output_data (1, lp_format
, "@LPStart format (%s)",
3839 eh_data_format_name (lp_format
));
3841 /* @LPStart pointer would go here. */
3843 dw2_asm_output_data (1, tt_format
, "@TType format (%s)",
3844 eh_data_format_name (tt_format
));
3846 #ifndef HAVE_AS_LEB128
3847 if (USING_SJLJ_EXCEPTIONS
)
3848 call_site_len
= sjlj_size_of_call_site_table ();
3850 call_site_len
= dw2_size_of_call_site_table ();
3853 /* A pc-relative 4-byte displacement to the @TType data. */
3856 #ifdef HAVE_AS_LEB128
3857 char ttype_after_disp_label
[32];
3858 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label
, "LLSDATTD",
3859 current_function_funcdef_no
);
3860 dw2_asm_output_delta_uleb128 (ttype_label
, ttype_after_disp_label
,
3861 "@TType base offset");
3862 ASM_OUTPUT_LABEL (asm_out_file
, ttype_after_disp_label
);
3864 /* Ug. Alignment queers things. */
3865 unsigned int before_disp
, after_disp
, last_disp
, disp
;
3867 before_disp
= 1 + 1;
3868 after_disp
= (1 + size_of_uleb128 (call_site_len
)
3870 + VARRAY_ACTIVE_SIZE (crtl
->eh
.action_record_data
)
3871 + (VEC_length (tree
, crtl
->eh
.ttype_data
)
3877 unsigned int disp_size
, pad
;
3880 disp_size
= size_of_uleb128 (disp
);
3881 pad
= before_disp
+ disp_size
+ after_disp
;
3882 if (pad
% tt_format_size
)
3883 pad
= tt_format_size
- (pad
% tt_format_size
);
3886 disp
= after_disp
+ pad
;
3888 while (disp
!= last_disp
);
3890 dw2_asm_output_data_uleb128 (disp
, "@TType base offset");
3894 /* Indicate the format of the call-site offsets. */
3895 #ifdef HAVE_AS_LEB128
3896 cs_format
= DW_EH_PE_uleb128
;
3898 cs_format
= DW_EH_PE_udata4
;
3900 dw2_asm_output_data (1, cs_format
, "call-site format (%s)",
3901 eh_data_format_name (cs_format
));
3903 #ifdef HAVE_AS_LEB128
3904 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label
, "LLSDACSB",
3905 current_function_funcdef_no
);
3906 ASM_GENERATE_INTERNAL_LABEL (cs_end_label
, "LLSDACSE",
3907 current_function_funcdef_no
);
3908 dw2_asm_output_delta_uleb128 (cs_end_label
, cs_after_size_label
,
3909 "Call-site table length");
3910 ASM_OUTPUT_LABEL (asm_out_file
, cs_after_size_label
);
3911 if (USING_SJLJ_EXCEPTIONS
)
3912 sjlj_output_call_site_table ();
3914 dw2_output_call_site_table ();
3915 ASM_OUTPUT_LABEL (asm_out_file
, cs_end_label
);
3917 dw2_asm_output_data_uleb128 (call_site_len
,"Call-site table length");
3918 if (USING_SJLJ_EXCEPTIONS
)
3919 sjlj_output_call_site_table ();
3921 dw2_output_call_site_table ();
3924 /* ??? Decode and interpret the data for flag_debug_asm. */
3925 n
= VARRAY_ACTIVE_SIZE (crtl
->eh
.action_record_data
);
3926 for (i
= 0; i
< n
; ++i
)
3927 dw2_asm_output_data (1, VARRAY_UCHAR (crtl
->eh
.action_record_data
, i
),
3928 (i
? NULL
: "Action record table"));
3931 assemble_align (tt_format_size
* BITS_PER_UNIT
);
3933 i
= VEC_length (tree
, crtl
->eh
.ttype_data
);
3936 tree type
= VEC_index (tree
, crtl
->eh
.ttype_data
, i
);
3937 output_ttype (type
, tt_format
, tt_format_size
);
3940 #ifdef HAVE_AS_LEB128
3942 ASM_OUTPUT_LABEL (asm_out_file
, ttype_label
);
3945 /* ??? Decode and interpret the data for flag_debug_asm. */
3946 n
= VARRAY_ACTIVE_SIZE (crtl
->eh
.ehspec_data
);
3947 for (i
= 0; i
< n
; ++i
)
3949 if (targetm
.arm_eabi_unwinder
)
3951 tree type
= VARRAY_TREE (crtl
->eh
.ehspec_data
, i
);
3952 output_ttype (type
, tt_format
, tt_format_size
);
3955 dw2_asm_output_data (1, VARRAY_UCHAR (crtl
->eh
.ehspec_data
, i
),
3956 (i
? NULL
: "Exception specification table"));
3959 switch_to_section (current_function_section ());
3963 set_eh_throw_stmt_table (struct function
*fun
, struct htab
*table
)
3965 fun
->eh
->throw_stmt_table
= table
;
3969 get_eh_throw_stmt_table (struct function
*fun
)
3971 return fun
->eh
->throw_stmt_table
;
3974 /* Dump EH information to OUT. */
/* NOTE(review): this chunk is lossy -- the function's return-type line,
   braces, the `switch (i->type)' header, several `case' labels and the
   traversal `do'/`while' scaffolding are missing.  The statements below
   are kept byte-identical; restore structure from the original file.  */
3977 dump_eh_tree (FILE * out
, struct function
*fun
)
3979 struct eh_region
*i
;
/* Printable names indexed by (int) region->type.  */
3981 static const char *const type_name
[] = { "unknown", "cleanup", "try", "catch",
3982 "allowed_exceptions", "must_not_throw",
/* Start at the root of FUN's region tree; the loop below presumably
   walks inner/next_peer depth-first (see the comments at 4073/4076).  */
3986 i
= fun
->eh
->region_tree
;
3990 fprintf (out
, "Eh tree:\n");
/* One line per region: indentation by depth, region number, type name.  */
3993 fprintf (out
, " %*s %i %s", depth
* 2, "",
3994 i
->region_number
, type_name
[(int) i
->type
]);
3997 fprintf (out
, " tree_label:");
3998 print_generic_expr (out
, i
->tree_label
, 0);
4001 fprintf (out
, " label:%i", INSN_UID (i
->label
));
/* Landing pad / post landing pad / resume insns; a NOTE code means the
   insn has been deleted.  */
4004 fprintf (out
, " landing_pad:%i", INSN_UID (i
->landing_pad
));
4005 if (GET_CODE (i
->landing_pad
) == NOTE
)
4006 fprintf (out
, " (deleted)");
4008 if (i
->post_landing_pad
)
4010 fprintf (out
, " post_landing_pad:%i", INSN_UID (i
->post_landing_pad
));
4011 if (GET_CODE (i
->post_landing_pad
) == NOTE
)
4012 fprintf (out
, " (deleted)");
4016 fprintf (out
, " resume:%i", INSN_UID (i
->resume
));
4017 if (GET_CODE (i
->resume
) == NOTE
)
4018 fprintf (out
, " (deleted)");
4020 if (i
->may_contain_throw
)
4021 fprintf (out
, " may_contain_throw");
/* Cleanup region: show the enclosing try region, if recorded.  */
4025 if (i
->u
.cleanup
.prev_try
)
4026 fprintf (out
, " prev try:%i",
4027 i
->u
.cleanup
.prev_try
->region_number
);
/* Try region: list region numbers of its chained catch handlers.  */
4032 struct eh_region
*c
;
4033 fprintf (out
, " catch regions:");
4034 for (c
= i
->u
.eh_try
.eh_catch
; c
; c
= c
->u
.eh_catch
.next_catch
)
4035 fprintf (out
, " %i", c
->region_number
);
/* Catch region: prev/next links in the catch list plus its type list.  */
4040 if (i
->u
.eh_catch
.prev_catch
)
4041 fprintf (out
, " prev: %i",
4042 i
->u
.eh_catch
.prev_catch
->region_number
);
4043 if (i
->u
.eh_catch
.next_catch
)
4044 fprintf (out
, " next %i",
4045 i
->u
.eh_catch
.next_catch
->region_number
);
4046 fprintf (out
, " type:");
4047 print_generic_expr (out
, i
->u
.eh_catch
.type_list
, 0);
4050 case ERT_ALLOWED_EXCEPTIONS
:
4051 fprintf (out
, " filter :%i types:", i
->u
.allowed
.filter
);
4052 print_generic_expr (out
, i
->u
.allowed
.type_list
, 0);
/* Presumably the ERT_THROW case (label missing from chunk): the type
   being thrown -- confirm against the original file.  */
4056 fprintf (out
, " type:");
4057 print_generic_expr (out
, i
->u
.eh_throw
.type
, 0);
4060 case ERT_MUST_NOT_THROW
:
/* Regions with an `aka' bitmap: dump the aliased region numbers.  */
4068 fprintf (out
, " also known as:");
4069 dump_bitmap (out
, i
->aka
);
4072 fprintf (out
, "\n");
4073 /* If there are sub-regions, process them. */
4075 i
= i
->inner
, depth
++;
4076 /* If there are peers, process them. */
4077 else if (i
->next_peer
)
4079 /* Otherwise, step back up the tree to the next peer. */
4089 while (i
->next_peer
== NULL
);
4095 /* Verify EH region invariants. */
/* NOTE(review): lossy chunk -- return-type line, braces, `case' labels
   for cleanup/try/catch and the gcc_unreachable-style scaffolding are
   missing.  The `found |= ...' recursion suggests this returns an
   error-found flag accumulated over nested regions -- confirm.  */
4098 verify_eh_region (struct eh_region
*region
, struct eh_region
*prev_try
)
4103 switch (region
->type
)
/* Cleanup regions must point back at the dominating try region the
   caller passed down as PREV_TRY.  */
4106 if (region
->u
.cleanup
.prev_try
!= prev_try
)
4108 error ("Wrong prev_try pointer in EH region %i",
4109 region
->region_number
);
/* Try regions: the first catch must have no prev_catch back-link.  */
4115 struct eh_region
*c
, *prev
= NULL
;
4116 if (region
->u
.eh_try
.eh_catch
->u
.eh_catch
.prev_catch
)
4118 error ("Try region %i has wrong rh_catch pointer to %i",
4119 region
->region_number
,
4120 region
->u
.eh_try
.eh_catch
->region_number
);
/* Walk the catch chain: each catch shares the try's outer region and
   its prev_catch link matches the previously visited catch.  */
4123 for (c
= region
->u
.eh_try
.eh_catch
; c
; c
= c
->u
.eh_catch
.next_catch
)
4125 if (c
->outer
!= region
->outer
)
4128 ("Catch region %i has different outer region than try region %i",
4129 c
->region_number
, region
->region_number
);
4132 if (c
->u
.eh_catch
.prev_catch
!= prev
)
4134 error ("Catch region %i has corrupted catchlist",
/* After the walk, the last catch visited must be the recorded
   last_catch of the try region.  */
4140 if (prev
!= region
->u
.eh_try
.last_catch
)
4143 ("Try region %i has wrong last_catch pointer to %i instead of %i",
4144 region
->region_number
,
4145 region
->u
.eh_try
.last_catch
->region_number
,
4146 prev
->region_number
);
/* A catch region that heads its catch list must be immediately
   followed (as next_peer) by its try region.  */
4152 if (!region
->u
.eh_catch
.prev_catch
4153 && (!region
->next_peer
|| region
->next_peer
->type
!= ERT_TRY
))
4155 error ("Catch region %i should be followed by try", region
->region_number
);
4159 case ERT_ALLOWED_EXCEPTIONS
:
4160 case ERT_MUST_NOT_THROW
:
/* Recurse into children; a try region becomes the new PREV_TRY, while
   must-not-throw (and allowed-exceptions with an empty type list)
   apparently resets it -- confirm the missing branch bodies.  */
4166 if (region
->type
== ERT_TRY
)
4168 else if (region
->type
== ERT_MUST_NOT_THROW
4169 || (region
->type
== ERT_ALLOWED_EXCEPTIONS
4170 && !region
->u
.allowed
.type_list
))
4172 for (region
= region
->inner
; region
; region
= region
->next_peer
)
4173 found
|= verify_eh_region (region
, prev_try
);
4177 /* Verify invariants on EH datastructures. */
/* NOTE(review): lossy chunk -- return type, braces, several locals
   (depth, count, nvisited, err, j) and the traversal scaffolding are
   missing; statements kept byte-identical.  Aborts via internal_error
   on failure after dumping the tree to stderr.  */
4180 verify_eh_tree (struct function
*fun
)
4182 struct eh_region
*i
, *outer
= NULL
;
/* Nothing to check without a region tree.  */
4189 if (!fun
->eh
->region_tree
)
/* Pass 1: every region_array slot must map back to a region whose
   number is the index, or whose `aka' bitmap contains the index.  */
4191 for (j
= fun
->eh
->last_region_number
; j
> 0; --j
)
4192 if ((i
= VEC_index (eh_region
, fun
->eh
->region_array
, j
)))
4194 if (i
->region_number
== j
)
4196 if (i
->region_number
!= j
&& (!i
->aka
|| !bitmap_bit_p (i
->aka
, j
)))
4198 error ("region_array is corrupted for region %i",
/* Pass 2: walk the tree, checking array back-pointers, outer links,
   may_contain_throw monotonicity, and nesting depth.  */
4203 i
= fun
->eh
->region_tree
;
4207 if (VEC_index (eh_region
, fun
->eh
->region_array
, i
->region_number
) != i
)
4209 error ("region_array is corrupted for region %i", i
->region_number
);
4212 if (i
->outer
!= outer
)
4214 error ("outer block of region %i is wrong", i
->region_number
);
/* A throwing region may not be nested inside a non-throwing one.  */
4217 if (i
->may_contain_throw
&& outer
&& !outer
->may_contain_throw
)
4220 ("region %i may contain throw and is contained in region that may not",
4226 error ("negative nesting depth of region %i", i
->region_number
);
4230 /* If there are sub-regions, process them. */
4232 outer
= i
, i
= i
->inner
, depth
++;
4233 /* If there are peers, process them. */
4234 else if (i
->next_peer
)
4236 /* Otherwise, step back up the tree to the next peer. */
4247 error ("tree list ends on depth %i", depth
+ 1);
/* The number of array entries and visited tree nodes must agree.  */
4250 if (count
!= nvisited
)
4252 error ("array does not match the region tree");
/* Per-region invariants (see verify_eh_region above).  */
4256 for (i
= fun
->eh
->region_tree
; i
; i
= i
->next_peer
)
4257 err
|= verify_eh_region (i
, NULL
);
/* On any error: dump the tree for debugging, then abort.  */
4261 dump_eh_tree (stderr
, fun
);
4262 internal_error ("verify_eh_tree failed");
4268 while (i
->next_peer
== NULL
);
4274 /* Initialize unwind_resume_libfunc. */
4277 default_init_unwind_resume_libfunc (void)
4279 /* The default c++ routines aren't actually c++ specific, so use those. */
4280 unwind_resume_libfunc
=
4281 init_one_libfunc ( USING_SJLJ_EXCEPTIONS
? "_Unwind_SjLj_Resume"
4282 : "_Unwind_Resume");
4287 gate_handle_eh (void)
4289 return doing_eh (0);
4292 /* Complete generation of exception handling code. */
/* NOTE(review): lossy chunk -- the return-type line, braces and the
   trailing return statement of this pass-execute function are missing;
   restore from the original file (pass execute hooks return
   unsigned int).  */
4294 rest_of_handle_eh (void)
4296 finish_eh_generation ();
/* Clean up the CFG afterwards; CLEANUP_NO_INSN_DEL presumably keeps
   dead insns from being deleted here -- confirm flag semantics.  */
4297 cleanup_cfg (CLEANUP_NO_INSN_DEL
);
/* Pass descriptor wiring gate_handle_eh / rest_of_handle_eh into the
   RTL pass pipeline, dumping the function after the pass runs
   (TODO_dump_func).
   NOTE(review): lossy chunk -- several initializer fields (pass kind,
   pass name string, sub/next pointers, braces) are missing here;
   restore them from the original file before compiling.  */
4301 struct rtl_opt_pass pass_rtl_eh
=
4306 gate_handle_eh
, /* gate */
4307 rest_of_handle_eh
, /* execute */
4310 0, /* static_pass_number */
4311 TV_JUMP
, /* tv_id */
4312 0, /* properties_required */
4313 0, /* properties_provided */
4314 0, /* properties_destroyed */
4315 0, /* todo_flags_start */
4316 TODO_dump_func
/* todo_flags_finish */
4320 #include "gt-except.h"