/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
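/* As a rough illustration (not part of the original sources): a front
   end drives this machinery by bracketing the insns of a protected
   construct with paired region calls, e.g. for a cleanup:

       expand_eh_region_start ();
       ... expand the protected body ...
       expand_eh_region_end_cleanup (cleanup_expr);

   and for a try/catch:

       expand_eh_region_start ();
       ... expand the try body ...
       expand_start_all_catch ();
       expand_start_catch (type);
       ... expand the handler ...
       expand_end_catch ();
       expand_end_all_catch ();

   The exact sequence is the front end's responsibility; this is only
   a sketch of the intended pairing.  */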
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;
/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};
static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for the exception.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
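/* For illustration (not from the original sources): a C++ fragment

       try { A a; foo (); } catch (B) { ... }

   yields an ERT_TRY region for the try body, an ERT_CATCH region
   hanging off of it via u.try.catch for the handler, and, nested
   inside the try, an ERT_CLEANUP region for A's destructor.  The
   outer/inner/next_peer links express exactly this containment.  */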
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static struct eh_region *expand_eh_region_end (void);

static rtx get_exception_filter (struct function *);

static void collect_eh_region_array (void);
static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
						struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled (tree, tree);
static void add_reachable_handler (struct reachable_info *,
				   struct eh_region *, struct eh_region *);
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
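/* For reference, the runtime structure that the type built above must
   mirror looks roughly like this (a sketch from memory of
   unwind-sjlj.c, not a verbatim copy; field types are approximate):

       struct SjLj_Function_Context
       {
	 struct SjLj_Function_Context *prev;
	 int call_site;
	 _Unwind_Word data[4];
	 _Unwind_Personality_Fn personality;
	 void *lsda;
	 void *jbuf[];	// jmp_buf or builtin-setjmp buffer
       };

   The sjlj_fc_*_ofs values cached above are the byte offsets of these
   fields, used to address the structure directly from RTL.  */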
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start (void)
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block
	 in it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
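/* Illustrative only: the insn stream produced above has the shape

       <cleanup-protected body>
       NOTE_INSN_EH_REGION_END
       jump around_label
     region->label:
       <handler code, EH data regs saved/restored>
       (resx region_number)	; placeholder, rewritten later
     around_label:
       ...

   so fallthrough execution skips the handler entirely, and the RESX
   marker is resolved once landing pads exist.  */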
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such
   types, (in the case of Java) an ADDR_EXPR which points to the
   runtime type to match, or null if this is a catch-all
   clause.  Providing a type list makes it possible to associate the
   catch region with potentially several exception types, which is
   useful e.g. for Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *t, *c, *l;
  tree type_list, type_node;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Make sure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
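/* Illustrative only: after the calls above, a try with two handlers
   has been laid out roughly as

       <try body>
       jump continue_label
     catch1->label:
       <handler 1>
       jump continue_label
     catch2->label:
       <handler 2>
       jump continue_label
     continue_label:
       ...

   with the dispatch that selects a handler generated later by
   build_post_landing_pads.  */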
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (void)
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_one_fixup_region (struct eh_region *fixup)
{
  struct eh_region *cleanup, *real;
  int j, n;

  n = cfun->eh->last_region_number;
  cleanup = 0;

  for (j = 1; j <= n; ++j)
    {
      cleanup = cfun->eh->region_array[j];
      if (cleanup && cleanup->type == ERT_CLEANUP
	  && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	break;
    }
  if (j > n)
    abort ();

  real = cleanup->outer;
  if (real && real->type == ERT_FIXUP)
    {
      if (!real->u.fixup.resolved)
	resolve_one_fixup_region (real);

      real = real->u.fixup.real_region;
    }

  fixup->u.fixup.real_region = real;
  fixup->u.fixup.resolved = true;
}
static void
resolve_fixup_regions (void)
{
  int i, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];

      if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
	continue;

      resolve_one_fixup_region (fixup);
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      reachable[uid_region_num[INSN_UID (insn)]] = true;

      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	for (i = 0; i < 3; i++)
	  {
	    rtx sub = XEXP (PATTERN (insn), i);
	    for (; sub; sub = NEXT_INSN (sub))
	      reachable[uid_region_num[INSN_UID (sub)]] = true;
	  }
    }

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (uid_region_num);
  free (reachable);
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						REG_NOTES (insn));

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges (void)
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}
static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
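/* push_uleb128 (not shown in this excerpt) appends the standard DWARF
   unsigned LEB128 encoding of its argument: seven value bits per byte,
   with the high bit set on every byte except the last.  A minimal
   standalone sketch of that encoding, for illustration only:  */
#if 0
static void
uleb128_sketch (unsigned char *buf, unsigned int *len, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;	/* More bytes follow.  */
      buf[(*len)++] = byte;
    }
  while (value);
}
#endif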
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
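/* For example (illustrative numbers only): with handlers for types A
   and then B in one function, add_ttypes_entry might assign A filter 1
   and B filter 2; a "catch (A)" region then carries filter_list (1),
   while an exception specification "throw (A, B)" gets a negative
   filter naming the uleb128-encoded list {1, 2, 0} in ehspec_data.  */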
/* Emit SEQ into basic block just before INSN (that is assumed to be the
   first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (e = BLOCK_FOR_INSN (insn)->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
  last = emit_insn_before (seq, insn);
  if (GET_CODE (last) == BARRIER)
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (src->succ)
	    remove_edge (src->succ);
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

      seq = get_insns ();
      end_sequence ();

      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      if (GET_CODE (barrier) != BARRIER)
	abort ();
      delete_insn (barrier);
      delete_insn (region->resume);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      unsigned int j;
      edge e;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (GET_CODE (fn_begin) == NOTE
	&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
	    || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
      break;
  if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
    insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
  else
    {
      rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
      for (; ; fn_begin = NEXT_INSN (fn_begin))
	if ((GET_CODE (fn_begin) == NOTE
	     && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	    || fn_begin == last)
	  break;
      emit_insn_after (seq, fn_begin);
    }
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}
static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert libcall
	 is inside the last basic block or after it.  In the other case
	 we need to emit to edge.  */
      if (e->src->next_bb != EXIT_BLOCK_PTR)
	abort ();
      for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
	if (insn == cfun->eh->sjlj_exit_after)
	  break;
      if (insn)
	insert_insn_on_edge (seq, e);
      else
	{
	  insn = cfun->eh->sjlj_exit_after;
	  if (GET_CODE (insn) == CODE_LABEL)
	    insn = NEXT_INSN (insn);
	  emit_insn_after (seq, insn);
	}
    }
}
static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = cfun->eh->region_array[first_reachable]->post_landing_pad;

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
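
/* For illustration only: the dispatch sequence constructed above is
   roughly equivalent to (a sketch, not emitted literally)

	dispatch_label:
	  dispatch = fc.call_site;
	  if (dispatch == lp_info[i].dispatch_index)
	    goto region_array[i]->post_landing_pad;    (one per region)
	  goto region_array[first_reachable]->post_landing_pad;

   except that the final transfer is the fallthrough edge created just
   above rather than an explicit jump.  */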
static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = xcalloc (cfun->eh->last_region_number + 1,
		     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  FOR_EACH_BB (bb)
    {
      edge e, next;
      bool eh = false;
      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	}
      if (eh)
	make_eh_edge (NULL, bb, BB_END (bb));
    }
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}
static hashval_t
ehl_hash (const void *pentry)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}
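
/* The constant above is Knuth's multiplicative hashing constant:
   (sqrt(5) - 1) / 2 is approximately 0.6180339887, and
   0.6180339887 * 2^32 is approximately 2654435769 == 0x9e3779b9.
   Multiplying by it scatters consecutive label numbers widely
   across the hash space.  */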
static int
ehl_eq (const void *pentry, const void *pdata)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    abort ();

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      int i;
      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
	{ cfun->eh->region_array[i] = outer; });
    }

  if (outer)
    {
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_a_or_b (outer->aka, outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      if (try->type != ERT_TRY)
	abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}
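
/* An illustrative sketch of the splice performed above: given

	outer
	  +- region           (being removed)
	  |    +- inner1 - inner2
	  +- peer

   the children inner1 and inner2 are reparented to `outer' and take
   region's place among outer's children, and region's number (plus
   any aka numbers) become aliases for `outer' in the region array.
   A catch region is additionally unlinked from the catch list of its
   controlling try, removing the try itself when no catches remain.  */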
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
		 (void *) &callback);
}

static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info GTY(())
{
  tree types_caught;
  tree types_allowed;
  rtx handlers;
};
/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (tree handled, tree type)
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) == type)
	  return 1;
    }
  else
    {
      for (t = handled; t; t = TREE_CHAIN (t))
	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
	  return 1;
    }

  return 0;
}
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region,
		       struct eh_region *region)
{
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      if (! info->handlers)
	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
		      struct reachable_info *info)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.try.catch; c; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previous, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
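
/* An example of the walk above (assuming C++-style semantics): if the
   exact thrown type D is known and the front end provides
   lang_eh_type_covers, then for a region such as
   `try { ... } catch (B &) { ... }' where B is a base of D, the
   ERT_TRY case records the handler and answers RNL_CAUGHT, ending
   the outward walk.  Without front-end help the handler can only be
   reported RNL_MAYBE_CAUGHT and the search continues to the
   enclosing region.  */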
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

rtx
reachable_handlers (rtx insn)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
	break;
      /* If we have processed one cleanup, there is no point in
	 processing any more of them.  Each cleanup will have an edge
	 to the next outer cleanup region, so the flow graph will be
	 accurate.  */
      if (region->type == ERT_CLEANUP)
	region = region->u.cleanup.prev_try;
      else
	region = region->outer;
    }

  return info.handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal (rtx insn)
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;

      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub; sub = NEXT_INSN (sub))
	    if (can_throw_internal (sub))
	      return true;
	}

      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external (rtx insn)
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;

      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub; sub = NEXT_INSN (sub))
	    if (can_throw_external (sub))
	      return true;
	}

      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls.  */

void
set_nothrow_function_flags (void)
{
  rtx insn;

  current_function_nothrow = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	current_function_nothrow = 0;

	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
	  {
	    cfun->all_throwers_are_sibcalls = 0;
	    return;
	  }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
	current_function_nothrow = 0;

	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
	  {
	    cfun->all_throwers_are_sibcalls = 0;
	    return;
	  }
      }
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  return convert_modes (word_mode, ptr_mode, addr, extend);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};
static int
action_record_eq (const void *pentry, const void *pdata)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (const void *pentry)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
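
/* A worked example of the encoding above, assuming an initially empty
   action_record_data array: add_action_record (h, 2, 0) creates the
   record at offset 1 and pushes the bytes 0x02 0x00 (filter 2, no
   next record).  A following add_action_record (h, 1, 1) creates a
   record at offset 3; pushing its filter byte 0x01 brings the array
   size to 3, so the link is stored as 1 - (3 + 1) = -3, the single
   sleb128 byte 0x7d.  A consumer recovers the target by adding the
   displacement to the 1-based position of the link byte: 4 + -3 = 1.  */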
static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
static int
add_call_site (rtx landing_pad, int action)
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	if (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    if (! (GET_CODE (insn) == CALL_INSN
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	  }
	else if (INTVAL (XEXP (note, 0)) <= 0)
	  this_action = -1;
	else
	  {
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
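
/* Worked examples of the two encodings above: push_uleb128 of 300
   (0x12c) emits 0xac 0x02 -- the low seven bits 0x2c plus the
   continuation bit, then the remaining bits.  push_sleb128 of -3
   emits the single byte 0x7d, since after one 7-bit group the value
   has shifted to -1 and bit 6 of the group is set, so the sign is
   already represented and no continuation is needed.  */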
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table (void)
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
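
/* The shape of each entry emitted above, shown in the 4-byte
   (!HAVE_AS_LEB128) form; the leb128 form replaces the first three
   fields with uleb128 deltas:

	region start	offset of LEHB<n> from the function start
	length		LEHE<n> - LEHB<n>
	landing pad	offset of the pad from the function start, or 0
	action		1-based uleb128 index into the action table, or 0  */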
static void
sjlj_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
{
  if (targetm.have_named_sections)
    {
      int flags;
#ifdef HAVE_LD_RO_RW_SECTION_MIXING
      int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

      flags = (! flag_pic
	       || ((tt_format & 0x70) != DW_EH_PE_absptr
		   && (tt_format & 0x70) != DW_EH_PE_aligned))
	      ? 0 : SECTION_WRITE;
#else
      flags = SECTION_WRITE;
#endif
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}
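
/* Output the LSDA for the current function.  The layout emitted below
   is, in order: the @LPStart format byte (always omitted here), the
   @TType format byte, the @TType base offset when type data is
   present, the call-site format byte, the call-site table length and
   entries, the action record table, the aligned type table, and
   finally the exception specification table.  */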
void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  targetm.asm_out.exception_section ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	value = const0_rtx;
      else
	{
	  struct cgraph_varpool_node *node;

	  type = lookup_type_for_runtime (type);
	  value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

	  /* Let cgraph know that the rtti decl is used.  Not all of the
	     paths below go through assemble_integer, which would take
	     care of this for us.  */
	  if (TREE_CODE (type) == ADDR_EXPR)
	    {
	      type = TREE_OPERAND (type, 0);
	      node = cgraph_varpool_node (type);
	      if (node)
		cgraph_varpool_mark_needed_node (node);
	    }
	  else if (TREE_CODE (type) != INTEGER_CST)
	    abort ();
	}

      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);
}

#include "gt-except.h"