/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
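/* For illustration only (not part of this file's interfaces): in C++
   source the mechanism described above appears as, e.g.,

	try
	  {
	    Widget w;	/* the constructor may throw */
	  }
	catch (const WidgetError &e)
	  {
	    /* control may arrive here from several frames down */
	  }

   where Widget and WidgetError are hypothetical types.  This file
   implements the compiler's side of that lowering: the region tree,
   the landing pads, and the tables the runtime unwinder consumes.  */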
#include "coretypes.h"
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-pass.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
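/* Rough sketch only -- the authoritative layout is the
   SjLj_Function_Context definition in unwind-sjlj.c.  The offsets
   cached above correspond to fields built in init_eh below:

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *__prev;
	  int __call_site;
	  _Unwind_Word __data[4];
	  void *__personality;
	  void *__lsda;
	  ... target-dependent __jbuf array ...
	};
*/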
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *eh_catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) eh_try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) eh_catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for an internal throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) eh_throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  tree tree_label;
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
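/* Illustrative shape of the tree (a sketch; front ends may differ):
   for C++ source such as

	try { try { f (); } catch (A) { } } catch (B) { }

   the outer ERT_TRY region's INNER field points to a chain, linked
   through NEXT_PEER, holding the inner ERT_TRY and the ERT_CATCH
   regions; every region's OUTER field points back up the tree.  */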
typedef struct eh_region *eh_region;

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

DEF_VEC_P(eh_region);
DEF_VEC_ALLOC_P(eh_region, gc);
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  VEC(eh_region,gc) *region_array;

  int last_region_number;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);
/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *, bool);
static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
			      (targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
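/* The offsets cached above are consumed when emitting RTL; e.g.
   sjlj_mark_call_sites below does

	mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			      sjlj_fc_call_site_ofs);

   to address the __call_site field of the frame's context.  */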
void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}
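/* Illustrative usage only (the real callers live in tree-eh.c): a
   try/catch construct is built along the lines of

	struct eh_region *t = gen_eh_region_try (outer);
	struct eh_region *c = gen_eh_region_catch (t, type_or_list);

   while cleanup scopes come in through gen_eh_region_cleanup.  */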
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}
bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
				     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
		 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the
   runtime can unwind through it, or we would eliminate the terminate
   call that the runtime would otherwise perform.  Return TRUE if R
   contains throwing statements or some of the exceptions in inner
   regions can be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statements.

   The function looks O(n^3) at first sight.  In fact it is called at
   most once for every MUST_NOT_THROW in the EH tree from
   remove_unreachable_regions, and because the outer loop walking
   subregions does not dive into MUST_NOT_THROW regions, that loop
   examines every region at most once.  The inner loop performs
   unwinding from the throwing statement the same way as we do during
   CFG construction, so the whole thing is O(n^2) in the size of the
   EH tree, but O(n) in the size of the CFG.  In practice EH trees are
   wide, not deep, so this is not a problem.  */

static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
{
  struct eh_region *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
	return true;
  if (!i)
    return false;
  while (1)
    {
      /* It is pointless to look into MUST_NOT_THROW
	 or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
	{
	  bool found = TEST_BIT (contains_stmt, i->region_number);
	  if (!found && i->aka)
	    EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
	      if (TEST_BIT (contains_stmt, n))
		{
		  found = true;
		  break;
		}
	  /* We have nested region that contains throwing statement.
	     See if resuming might lead up to the resx or we get locally
	     caught sooner.  If we get locally caught sooner, we either
	     know region R is not reachable or it would have direct edge
	     from the EH resx and thus consider region reachable at
	     that point.  */
	  if (found)
	    {
	      struct eh_region *i1 = i;
	      tree type_thrown = NULL_TREE;

	      if (i1->type == ERT_THROW)
		{
		  type_thrown = i1->u.eh_throw.type;
		  i1 = i1->outer;
		}
	      for (; i1 != r; i1 = i1->outer)
		if (reachable_next_level (i1, type_thrown, NULL,
					  false) >= RNL_CAUGHT)
		  return true;
	    }
	}
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      if (i == r)
		return false;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function.
   CONTAINS_STMT is a bitmap of all regions that contain stmts (or NULL).  */

void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region *r;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r)
	continue;
      if (r->region_number == i && !TEST_BIT (reachable, i) && !r->resume)
	{
	  bool kill_it = true;

	  r->tree_label = NULL;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && TEST_BIT (reachable, r->outer->region_number))
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but we need them when inlining functions.

		 Keep them if the outer region is not MUST_NOT_THROW as
		 well and if they contain some statement that might unwind
		 through them.  */
	      if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
		  && (!contains_stmt
		      || can_be_reached_by_runtime (contains_stmt, r)))
		kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* TRY regions are reachable if any of their CATCH regions
		   are reachable.  */
		struct eh_region *c;
		for (c = r->u.eh_try.eh_catch; c;
		     c = c->u.eh_catch.next_catch)
		  if (TEST_BIT (reachable, c->region_number))
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    {
	      if (dump_file)
		fprintf (dump_file, "Removing unreachable eh region %i\n",
			 r->region_number);
	      remove_eh_handler (r);
	    }
	}
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
}
/* Return array mapping LABEL_DECL_UID to region such that region's tree_label
   is identical to label.  */

VEC(int,heap) *
label_to_region_map (void)
{
  VEC(int,heap) * label_to_region = NULL;
  int i;

  VEC_safe_grow_cleared (int, heap, label_to_region,
			 cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
	  && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
	{
	  VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
		       i);
	}
    }
  return label_to_region;
}

/* Return number of EH regions.  */
int
num_eh_regions (void)
{
  return cfun->eh->last_region_number + 1;
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
rtl_remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  sbitmap reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = XCNEWVEC (int, get_max_uid ());
  reachable = sbitmap_alloc (cfun->eh->last_region_number + 1);
  sbitmap_zero (reachable);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    SET_BIT (reachable, uid_region_num[INSN_UID (insn)]);

  remove_unreachable_regions (reachable, NULL);

  sbitmap_free (reachable);
  free (uid_region_num);
}
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels we
     allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
	region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  rtl_remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = GGC_NEW (struct ehl_map_entry);
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (crtl->eh.exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || crtl->eh.built_landing_pads);

  *slot = entry;
}
static void
find_exception_handler_labels (void)
{
  int i;

  if (crtl->eh.exception_handler_label_map)
    htab_empty (crtl->eh.exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      crtl->eh.exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
	continue;
      if (crtl->eh.built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! crtl->eh.built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
	  && region->region_number == i
	  && region->type != ERT_THROW)
	return true;
    }

  return false;
}
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  if (o->aka)
    {
      int i;

      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
	*min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
	*max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
	{
	  o = o->next_peer;
	  duplicate_eh_regions_0 (o, min, max);
	}
    }
}
/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      n->aka = BITMAP_GGC_ALLOC ();
      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
	{
	  bitmap_set_bit (n->aka, i + eh_offset);
	  VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
	}
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
	{
	  old = old->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
	}
    }

  return ret;
}
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
   function and root the tree below OUTER_REGION.  Remap labels using MAP
   callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
		      void *data, int copy_region, int outer_region)
{
  eh_region cur, prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    min_region = 1, max_region = ifun->eh->last_region_number;
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
			 cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
	splice = &outer->inner;
      else
	splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
	for (i = cfun_last_region_number + 1;
	     i <= cfun->eh->last_region_number; i++)
	  {
	    VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	    if (outer->aka == NULL)
	      outer->aka = BITMAP_GGC_ALLOC ();
	    bitmap_set_bit (outer->aka, i);
	  }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
	{
	  cur = cur->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
	}
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try =
	 VEC_index (eh_region, cfun->eh->region_array, outer_region);
	 prev_try && prev_try->type != ERT_TRY; prev_try = prev_try->outer)
      if (prev_try->type == ERT_MUST_NOT_THROW
	  || (prev_try->type == ERT_ALLOWED_EXCEPTIONS
	      && !prev_try->u.allowed.type_list))
	{
	  prev_try = NULL;
	  break;
	}

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH that is toplevel in input function is now
	 in outer EH of output function.  */
      if (cur == NULL)
	{
	  gcc_assert (VEC_index
		      (eh_region, ifun->eh->region_array,
		       i - eh_offset) == NULL);
	  if (outer)
	    {
	      VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	      if (outer->aka == NULL)
		outer->aka = BITMAP_GGC_ALLOC ();
	      bitmap_set_bit (outer->aka, i);
	    }
	  continue;
	}
      if (i != cur->region_number)
	continue;

#define REMAP(REG) \
	(REG) = VEC_index (eh_region, cfun->eh->region_array, \
			   (REG)->region_number + eh_offset)
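      /* A hypothetical example of the arithmetic: if the minimum region
	 number in the incoming tree is 3 and this function previously
	 had 10 regions, then eh_offset is 10 + 1 - 3 = 8, and the old
	 region 5 is found at index 5 + 8 = 13 of the merged array.  */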
      switch (cur->type)
	{
	case ERT_TRY:
	  if (cur->u.eh_try.eh_catch)
	    REMAP (cur->u.eh_try.eh_catch);
	  if (cur->u.eh_try.last_catch)
	    REMAP (cur->u.eh_try.last_catch);
	  break;

	case ERT_CATCH:
	  if (cur->u.eh_catch.next_catch)
	    REMAP (cur->u.eh_catch.next_catch);
	  if (cur->u.eh_catch.prev_catch)
	    REMAP (cur->u.eh_catch.prev_catch);
	  break;

	case ERT_CLEANUP:
	  if (cur->u.cleanup.prev_try)
	    REMAP (cur->u.cleanup.prev_try);
	  else
	    cur->u.cleanup.prev_try = prev_try;
	  break;

	default:
	  break;
	}

#undef REMAP
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}
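/* Illustrative usage only: a body-copying pass such as the tree
   inliner is expected to invoke this along the lines of

	eh_offset = duplicate_eh_regions (callee_cfun, remap_label_fn,
					  data, 0, outer_region);

   where remap_label_fn is a caller-supplied duplicate_eh_regions_map
   callback and the argument names here are hypothetical.  */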
/* Return true if REGION_A is outer to REGION_B in IFUN.  */

bool
eh_region_outer_p (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  do
    {
      if (rp_a == rp_b)
	return true;
      rp_b = rp_b->outer;
    }
  while (rp_b);

  return false;
}
/* Return region number of region that is outer to both REGION_A and
   REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
	{
	  sbitmap_free (b_outer);
	  return rp_a->region_number;
	}
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&crtl->eh.ehspec_data,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
	VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.eh_catch.filter_list = NULL_TREE;

	  if (r->u.eh_catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.eh_catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.eh_catch.filter_list
		    = tree_cons (NULL_TREE, flt_node,
				 r->u.eh_catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.eh_catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
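/* Illustrative only: for a handler catching types A and B, the loop
   above assigns each type a small integer filter (say 1 for A and 2
   for B) and records the list on the region, so that
   build_post_landing_pads can later emit one compare-and-jump per
   filter value against crtl->eh.filter.  */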
/* Emit SEQ into basic block just before INSN (that is assumed to be the
   first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.eh_try.eh_catch; c;
		 c = c->u.eh_catch.next_catch)
	      {
		if (c->u.eh_catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.eh_catch.type_list;
		    tree flt_node = c->u.eh_catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (crtl->eh.filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (crtl->eh.filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0,
				   region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      emit_move_insn (crtl->eh.exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (crtl->eh.filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = VEC_index (eh_region, cfun->eh->region_array,
			  INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.eh_throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL, false);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  crtl->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
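/* Thus a directly reachable must-not-throw region ends up with
   call-site index 0, a region requiring no in-function processing
   with index -1, and every other landing pad with a positive index
   from the call-site table built by add_call_site.  */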
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = VEC_index (eh_region, cfun->eh->region_array,
			      INTVAL (XEXP (note, 0)));
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
    add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
	 libcall is inside the last basic block or after it.  In the
	 other case we need to emit the insns on the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
	{
	  if (insn == crtl->eh.sjlj_exit_after)
	    {
	      if (LABEL_P (insn))
		insn = NEXT_INSN (insn);
	      emit_insn_after (seq, insn);
	      return;
	    }
	  if (insn == BB_END (e->src))
	    break;
	}
      insert_insn_on_edge (seq, e);
    }
}
static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (crtl->eh.exc_ptr, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  emit_move_insn (crtl->eh.filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       ((struct eh_region *)
				VEC_index (eh_region,
					   cfun->eh->region_array, i))
			       ->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = (((struct eh_region *)
	     VEC_index (eh_region, cfun->eh->region_array, first_reachable))
	    ->post_landing_pad);

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer ();
  get_exception_filter ();

  /* Construct the landing pads.  */
  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  crtl->eh.built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS
      /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
    commit_edge_insertions ();

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
      rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}
static hashval_t
ehl_hash (const void *pentry)
{
  const struct ehl_map_entry *const entry
    = (const struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}
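
/* Illustrative note (not in the original source): 0x9e3779b9 is the
   golden ratio scaled to 2^32, the classic multiplier for Fibonacci
   (multiplicative) hashing.  Successive inputs scatter widely:

     1 * 0x9e3779b9 == 0x9e3779b9
     2 * 0x9e3779b9 == 0x3c6ef372  (mod 2^32)
     3 * 0x9e3779b9 == 0xdaa66d2b  (mod 2^32)

   so sequentially allocated CODE_LABEL_NUMBERs do not cluster into
   neighboring hash buckets.  */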
static int
ehl_eq (const void *pentry, const void *pdata)
{
  const struct ehl_map_entry *const entry
    = (const struct ehl_map_entry *) pentry;
  const struct ehl_map_entry *const data
    = (const struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (crtl->eh.exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
	       outer);
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
	{
	  VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	}
    }

  if (outer)
    {
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (crtl->eh.built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *eh_try, *next, *prev;

      for (eh_try = region->next_peer;
	   eh_try->type == ERT_CATCH;
	   eh_try = eh_try->next_peer)
	continue;
      gcc_assert (eh_try->type == ERT_TRY);

      next = region->u.eh_catch.next_catch;
      prev = region->u.eh_catch.prev_catch;

      if (next)
	next->u.eh_catch.prev_catch = prev;
      else
	eh_try->u.eh_try.last_catch = prev;
      if (prev)
	prev->u.eh_catch.next_catch = next;
      else
	{
	  eh_try->u.eh_try.eh_catch = next;
	  if (! next)
	    remove_eh_handler (eh_try);
	}
    }
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (crtl->eh.built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Remove EH region R that has turned out to have no code in its handler.  */

void
remove_eh_region (int r)
{
  struct eh_region *region;

  region = VEC_index (eh_region, cfun->eh->region_array, r);
  remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (crtl->eh.exception_handler_label_map, for_each_eh_label_1,
		 (void *) &callback);
}
static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}
/* Invoke CALLBACK for every exception region in the current function.  */

void
for_each_eh_region (void (*callback) (struct eh_region *))
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region)
	(*callback) (region);
    }
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
};
/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (tree handled, tree type)
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) == type)
	  return 1;
    }
  else
    {
      for (t = handled; t; t = TREE_CHAIN (t))
	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
	  return 1;
    }

  return 0;
}
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (struct reachable_info *info,
		       struct eh_region *lp_region, struct eh_region *region)
{
  if (! info)
    return;

  if (crtl->eh.built_landing_pads)
    info->callback (lp_region, info->callback_data);
  else
    info->callback (region, info->callback_data);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
		      struct reachable_info *info,
		      bool maybe_resx)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    if (c->u.eh_catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.eh_catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.eh_catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;
    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.

	 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
	 only via locally handled RESX instructions.

	 When we inline a function call, we can bring in new handlers.  To
	 avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
	 assume that such handlers exist for any inlinable call until the
	 inlining decisions are fixed.  */
      if (maybe_resx)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  gcc_unreachable ();
}
/* Invoke CALLBACK on each region reachable from REGION_NUMBER.  */

void
foreach_reachable_handler (int region_number, bool is_resx,
			   bool inlinable_call,
			   void (*callback) (struct eh_region *, void *),
			   void *callback_data)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;

  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.eh_throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info,
				inlinable_call || is_resx) >= RNL_CAUGHT)
	break;
      /* If we have processed one cleanup, there is no point in
	 processing any more of them.  Each cleanup will have an edge
	 to the next outer cleanup region, so the flow graph will be
	 accurate.  */
      if (region->type == ERT_CLEANUP)
	region = region->u.cleanup.prev_try;
      else
	region = region->outer;
    }
}
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

static void
arh_to_landing_pad (struct eh_region *region, void *data)
{
  rtx *p_handlers = (rtx *) data;
  if (! *p_handlers)
    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
}

static void
arh_to_label (struct eh_region *region, void *data)
{
  rtx *p_handlers = (rtx *) data;
  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
}
rtx
reachable_handlers (rtx insn)
{
  bool is_resx = false;
  rtx handlers = NULL;
  int region_number;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      region_number = XINT (PATTERN (insn), 0);
      is_resx = true;
    }
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  foreach_reachable_handler (region_number, is_resx, false,
			     (crtl->eh.built_landing_pads
			      ? arh_to_landing_pad
			      : arh_to_label),
			     &handlers);

  return handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.eh_throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0,
						      inlinable_call
						      || is_resx);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
bool
can_throw_internal (const_rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.eh_throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL,
			      inlinable_call || is_resx) >= RNL_CAUGHT)
      return false;

  return true;
}
bool
can_throw_external (const_rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
}
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */

unsigned int
set_nothrow_function_flags (void)
{
  rtx insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  for (insn = crtl->epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }
  if (crtl->nothrow
      && (cgraph_function_body_availability (cgraph_node (current_function_decl))
	  >= AVAIL_AVAILABLE))
    TREE_NOTHROW (current_function_decl) = 1;
  return 0;
}
struct rtl_opt_pass pass_set_nothrow_function_flags =
{
 {
  RTL_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
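
/* Usage sketch (illustrative, not part of this file): unwinder code
   such as libgcc's asks for these registers when installing the
   handler context, e.g.

     __builtin_eh_return_data_regno (0)    -- register holding exc_ptr
     __builtin_eh_return_data_regno (1)    -- register holding filter

   The argument must be an integer constant; on a target without EH
   return data registers the builtin folds to -1, as seen above.  */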
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
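
/* Usage sketch (illustrative): user code canonicalizes a raw return
   address before comparing it against code ranges, e.g.

     void *ra = __builtin_extract_return_addr (__builtin_return_address (0));

   __builtin_frob_return_addr, expanded below, performs the inverse
   encoding when a handler address is stored back into the return
   address slot.  */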
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
void
expand_eh_return (void)
{
  rtx around_label;

  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (crtl->eh.ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this
     also for consistency.  */
  extend = 1;
#endif

  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};
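
/* Worked example (illustrative, not from the original source): for a
   call site inside "try { ... } catch (A) ... catch (B)" wrapped in an
   outer cleanup, collect_one_action_chain below builds records in
   action_record_data (1-based offsets; assume each filter and link
   encodes in a single sleb128 byte; f_A and f_B are whatever
   assign_filter_values picked):

     offset 1:  filter 0,   next  0    the cleanup, end of chain
     offset 3:  filter f_B, next -3    self-relative link to offset 1
     offset 5:  filter f_A, next -3    self-relative link to offset 3

   The call-site record stores 5, so the runtime considers catch (A)
   first, then catch (B), then the cleanup, following the "next"
   displacements until it reads 0.  */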
static int
action_record_eq (const void *pentry, const void *pdata)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}
static hashval_t
action_record_hash (const void *pentry)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}
static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.eh_try.last_catch; c; c = c->u.eh_catch.prev_catch)
	{
	  if (c->u.eh_catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.eh_catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}
static int
add_call_site (rtx landing_pad, int action)
{
  call_site_record record;

  record = GGC_NEW (struct call_site_record);
  record->landing_pad = landing_pad;
  record->action = action;

  VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);

  return call_site_base + VEC_length (call_site_record,
				      crtl->eh.call_site_record) - 1;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return 0;

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    if (! (CALL_P (insn)
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = VEC_index (eh_region, cfun->eh->region_array,
				INTVAL (XEXP (note, 0)));
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
  return 0;
}
struct rtl_opt_pass pass_convert_to_eh_region_ranges =
{
 {
  RTL_PASS,
  "eh_ranges",                          /* name */
  NULL,                                 /* gate */
  convert_to_eh_region_ranges,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
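
/* Worked example (illustrative, not part of the original source):
   these are the standard DWARF LEB128 encodings.  The unsigned value
   624485 is emitted by push_uleb128 as the bytes 0xe5 0x8e 0x26:

     624485 == 0b100110_0001110_1100101

   i.e. seven bits at a time, least-significant group first, with the
   high bit set on every byte except the last.  Signed values use the
   same scheme with sign extension, so push_sleb128 emits -2 as the
   single byte 0x7e, and the action-table link -3 as 0x7d.  */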
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = VEC_index (call_site_record,
					       crtl->eh.call_site_record, i);
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
static int
sjlj_size_of_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = VEC_index (call_site_record,
					       crtl->eh.call_site_record, i);
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = VEC_index (call_site_record,
					       crtl->eh.call_site_record, i);
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
				    current_function_func_begin_label,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab,
				      current_function_func_begin_label,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
			    current_function_func_begin_label,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab,
			      current_function_func_begin_label,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
static void
sjlj_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = VEC_index (call_site_record,
					       crtl->eh.call_site_record, i);

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
#ifndef TARGET_UNWIND_INFO
/* Switch to the section that should be used for exception tables.  */

static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm.have_named_sections)
	{
	  int flags;

	  if (EH_TABLES_CAN_BE_READ_ONLY)
	    {
	      int tt_format =
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	      flags = ((! flag_pic
			|| ((tt_format & 0x70) != DW_EH_PE_absptr
			    && (tt_format & 0x70) != DW_EH_PE_aligned))
		       ? 0 : SECTION_WRITE);
	    }
	  else
	    flags = SECTION_WRITE;

#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections)
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, NULL);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
#endif
/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used
   for the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct varpool_node *node;

      type = lookup_type_for_runtime (type);
      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    {
	      node = varpool_node (type);
	      if (node)
		varpool_mark_needed_node (node);
	      is_public = TREE_PUBLIC (type);
	    }
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
void
output_function_exception_table (const char * ARG_UNUSED (fnname))
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! crtl->uses_eh_lsda)
    return;

  if (eh_personality_libfunc)
    assemble_external_libcall (eh_personality_libfunc);

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  switch_to_exception_section (fnname);
#endif

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.except_table_label (asm_out_file);

  have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
		    + (VEC_length (tree, crtl->eh.ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VEC_length (tree, crtl->eh.ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, crtl->eh.ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
	  output_ttype (type, tt_format, tt_format_size);
	}
      else
	dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
			     (i ? NULL : "Exception specification table"));
    }

  switch_to_section (current_function_section ());
}
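
/* Overall LSDA layout emitted by the function above (illustrative
   summary, not a normative specification):

     @LPStart format byte             DW_EH_PE_omit: @LPStart == @Start
     @TType format byte
     uleb128 @TType base offset       only if have_tt_data
     call-site format byte            uleb128 or udata4
     uleb128 call-site table length
     call-site table                  dw2 or sjlj flavor
     action record table              sleb128 (filter, next) pairs
     padding to tt_format_size
     type table                       read backwards from @TType base
     exception specification table

   Positive filter values index the type table counting backwards from
   its end, which is why the emission loop above walks ttype_data in
   reverse.  */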
void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
/* Dump EH information to OUT.  */
void
dump_eh_tree (FILE * out, struct function *fun)
{
  struct eh_region *i;
  int depth = 0;
  static const char *const type_name[] = { "unknown", "cleanup", "try",
					   "catch", "allowed_exceptions",
					   "must_not_throw", "throw" };

  i = fun->eh->region_tree;
  if (! i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
	       i->region_number, type_name[(int) i->type]);
      if (i->tree_label)
	{
	  fprintf (out, " tree_label:");
	  print_generic_expr (out, i->tree_label, 0);
	}
      switch (i->type)
	{
	case ERT_CLEANUP:
	  if (i->u.cleanup.prev_try)
	    fprintf (out, " prev try:%i",
		     i->u.cleanup.prev_try->region_number);
	  break;

	case ERT_TRY:
	  {
	    struct eh_region *c;
	    fprintf (out, " catch regions:");
	    for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
	      fprintf (out, " %i", c->region_number);
	  }
	  break;

	case ERT_CATCH:
	  if (i->u.eh_catch.prev_catch)
	    fprintf (out, " prev: %i",
		     i->u.eh_catch.prev_catch->region_number);
	  if (i->u.eh_catch.next_catch)
	    fprintf (out, " next %i",
		     i->u.eh_catch.next_catch->region_number);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, "filter :%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list, 0);
	  break;

	case ERT_THROW:
	  fprintf (out, "type:");
	  print_generic_expr (out, i->u.eh_throw.type, 0);
	  break;

	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_UNKNOWN:
	  break;
	}
      if (i->aka)
	{
	  fprintf (out, " also known as:");
	  dump_bitmap (out, i->aka);
	}
      else
	fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
/* Verify some basic invariants on EH datastructures.  Could be extended to
   catch more.  */
void
verify_eh_tree (struct function *fun)
{
  struct eh_region *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  if (!fun->eh->region_tree)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
      {
	count++;
	if (i->region_number == j)
	  continue;
	if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
	  {
	    error ("region_array is corrupted for region %i",
		   i->region_number);
	    err = true;
	  }
      }

  i = fun->eh->region_tree;
  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
	{
	  error ("region_array is corrupted for region %i", i->region_number);
	  err = true;
	}
      if (i->outer != outer)
	{
	  error ("outer block of region %i is wrong", i->region_number);
	  err = true;
	}
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
	{
	  error
	    ("region %i may contain throw and is contained in region that may not",
	     i->region_number);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", i->region_number);
	  err = true;
	}
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
	outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		{
		  if (depth != -1)
		    {
		      error ("tree list ends on depth %i", depth + 1);
		      err = true;
		    }
		  if (count != nvisited)
		    {
		      error ("array does not match the region tree");
		      err = true;
		    }
		  if (err)
		    {
		      dump_eh_tree (stderr, fun);
		      internal_error ("verify_eh_tree failed");
		    }
		  return;
		}
	      outer = i->outer;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default c++ routines aren't actually c++ specific, so use those.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
					    : "_Unwind_Resume");
}
static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static unsigned int
rest_of_handle_eh (void)
{
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}
struct rtl_opt_pass pass_rtl_eh =
{
 {
  RTL_PASS,
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

#include "gt-except.h"