/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */
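/* Purely illustrative sketch (not part of the original documentation):
   for a C++ fragment such as

     void f ()
     {
       std::string s;            // needs a cleanup (destructor)
       try {
         g ();                   // may throw
       } catch (const Error &) {
         handle ();
       }
     }

   the expanded function ends up with a cleanup region for the
   destructor of `s' enclosing a try region for the `try' block, which
   in turn owns one catch region per handler.  The structures and entry
   points below build and manipulate exactly that tree of regions.  The
   identifiers g, Error and handle above are of course hypothetical.  */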
#include "coretypes.h"
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};
static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       implicitly.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
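/* Illustrative note (not from the original sources): outer, inner and
   next_peer link the regions into a tree.  For two sibling try blocks
   nested inside one cleanup region the tree looks roughly like

        cleanup
          inner -> try#1 -- next_peer --> try#2
                     |                      |
                   inner -> catch ...     inner -> catch ...

   i.e. `inner' points at the first child and `next_peer' chains the
   children of a common parent together, exactly as built by
   expand_eh_region_start below.  */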
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static struct eh_region *expand_eh_region_end (void);

static rtx get_exception_filter (struct function *);

static void collect_eh_region_array (void);
static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
						struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled (tree, tree);
static void add_reachable_handler (struct reachable_info *,
				   struct eh_region *, struct eh_region *);
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start (void)
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block in
	 it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with potentially several exception
   types, which is useful e.g. for Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
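/* For illustration only (not part of GCC's own sources): a rough sketch
   of how a front end is expected to drive the region-building entry
   points above when lowering `try { ... } catch (T) { ... }'.  The
   identifiers body_stmt, handler_stmt and catch_type are hypothetical
   placeholders for whatever trees the front end has built;
   expand_expr_stmt stands in for the generic statement expander.

     expand_eh_region_start ();        // open the region for the try body
     expand_expr_stmt (body_stmt);     // emit the protected code
     expand_start_all_catch ();        // close the try, begin handlers
     expand_start_catch (catch_type);  // one handler; NULL => catch-all
     expand_expr_stmt (handler_stmt);  // emit the handler code
     expand_end_catch ();              // jump to the continue label
     expand_end_all_catch ();          // finish the handler sequence
*/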
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the correct
     region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the correct
     region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (void)
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}
/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */
856 collect_eh_region_array (void)
858 struct eh_region
**array
, *i
;
860 i
= cfun
->eh
->region_tree
;
864 array
= ggc_alloc_cleared ((cfun
->eh
->last_region_number
+ 1)
866 cfun
->eh
->region_array
= array
;
870 array
[i
->region_number
] = i
;
872 /* If there are sub-regions, process them. */
875 /* If there are peers, process them. */
876 else if (i
->next_peer
)
878 /* Otherwise, step back up the tree to the next peer. */
885 } while (i
->next_peer
== NULL
);
892 resolve_one_fixup_region (struct eh_region
*fixup
)
894 struct eh_region
*cleanup
, *real
;
897 n
= cfun
->eh
->last_region_number
;
900 for (j
= 1; j
<= n
; ++j
)
902 cleanup
= cfun
->eh
->region_array
[j
];
903 if (cleanup
&& cleanup
->type
== ERT_CLEANUP
904 && cleanup
->u
.cleanup
.exp
== fixup
->u
.fixup
.cleanup_exp
)
910 real
= cleanup
->outer
;
911 if (real
&& real
->type
== ERT_FIXUP
)
913 if (!real
->u
.fixup
.resolved
)
914 resolve_one_fixup_region (real
);
915 real
= real
->u
.fixup
.real_region
;
918 fixup
->u
.fixup
.real_region
= real
;
919 fixup
->u
.fixup
.resolved
= true;
923 resolve_fixup_regions (void)
925 int i
, n
= cfun
->eh
->last_region_number
;
927 for (i
= 1; i
<= n
; ++i
)
929 struct eh_region
*fixup
= cfun
->eh
->region_array
[i
];
931 if (!fixup
|| fixup
->type
!= ERT_FIXUP
|| fixup
->u
.fixup
.resolved
)
934 resolve_one_fixup_region (fixup
);
938 /* Now that we've discovered what region actually encloses a fixup,
939 we can shuffle pointers and remove them from the tree. */
942 remove_fixup_regions (void)
946 struct eh_region
*fixup
;
948 /* Walk the insn chain and adjust the REG_EH_REGION numbers
949 for instructions referencing fixup regions. This is only
950 strictly necessary for fixup regions with no parent, but
951 doesn't hurt to do it for all regions. */
952 for (insn
= get_insns(); insn
; insn
= NEXT_INSN (insn
))
954 && (note
= find_reg_note (insn
, REG_EH_REGION
, NULL
))
955 && INTVAL (XEXP (note
, 0)) > 0
956 && (fixup
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))])
957 && fixup
->type
== ERT_FIXUP
)
959 if (fixup
->u
.fixup
.real_region
)
960 XEXP (note
, 0) = GEN_INT (fixup
->u
.fixup
.real_region
->region_number
);
962 remove_note (insn
, note
);
965 /* Remove the fixup regions from the tree. */
966 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
968 fixup
= cfun
->eh
->region_array
[i
];
972 /* Allow GC to maybe free some memory. */
973 if (fixup
->type
== ERT_CLEANUP
)
974 fixup
->u
.cleanup
.exp
= NULL_TREE
;
976 if (fixup
->type
!= ERT_FIXUP
)
981 struct eh_region
*parent
, *p
, **pp
;
983 parent
= fixup
->u
.fixup
.real_region
;
	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
987 for (p
= fixup
->inner
; ; p
= p
->next_peer
)
994 /* In the tree of cleanups, only outer-inner ordering matters.
995 So link the children back in anywhere at the correct level. */
999 pp
= &cfun
->eh
->region_tree
;
1002 fixup
->inner
= NULL
;
1005 remove_eh_handler (fixup
);
1009 /* Remove all regions whose labels are not reachable from insns. */
1012 remove_unreachable_regions (rtx insns
)
1014 int i
, *uid_region_num
;
1016 struct eh_region
*r
;
1019 uid_region_num
= xcalloc (get_max_uid (), sizeof(int));
1020 reachable
= xcalloc (cfun
->eh
->last_region_number
+ 1, sizeof(bool));
1022 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1024 r
= cfun
->eh
->region_array
[i
];
1025 if (!r
|| r
->region_number
!= i
)
1030 if (uid_region_num
[INSN_UID (r
->resume
)])
1032 uid_region_num
[INSN_UID (r
->resume
)] = i
;
1036 if (uid_region_num
[INSN_UID (r
->label
)])
1038 uid_region_num
[INSN_UID (r
->label
)] = i
;
1040 if (r
->type
== ERT_TRY
&& r
->u
.try.continue_label
)
1042 if (uid_region_num
[INSN_UID (r
->u
.try.continue_label
)])
1044 uid_region_num
[INSN_UID (r
->u
.try.continue_label
)] = i
;
1048 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1049 reachable
[uid_region_num
[INSN_UID (insn
)]] = true;
1051 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1053 r
= cfun
->eh
->region_array
[i
];
1054 if (r
&& r
->region_number
== i
&& !reachable
[i
])
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
1058 if (r
->type
== ERT_THROW
1060 && reachable
[r
->outer
->region_number
])
1063 remove_eh_handler (r
);
1068 free (uid_region_num
);
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */
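/* Illustrative sketch (not part of the original comment): given

     NOTE_INSN_EH_REGION_BEG 3
       call foo                 -->   call foo   ; REG_EH_REGION note = 3
     NOTE_INSN_EH_REGION_END 3

   the bracketing notes are deleted and each insn that may throw gets a
   REG_EH_REGION note naming the innermost enclosing region.  */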
1075 convert_from_eh_region_ranges_1 (rtx
*pinsns
, int *orig_sp
, int cur
)
1080 for (insn
= *pinsns
; insn
; insn
= next
)
1082 next
= NEXT_INSN (insn
);
1083 if (GET_CODE (insn
) == NOTE
)
1085 int kind
= NOTE_LINE_NUMBER (insn
);
1086 if (kind
== NOTE_INSN_EH_REGION_BEG
1087 || kind
== NOTE_INSN_EH_REGION_END
)
1089 if (kind
== NOTE_INSN_EH_REGION_BEG
)
1091 struct eh_region
*r
;
1094 cur
= NOTE_EH_HANDLER (insn
);
1096 r
= cfun
->eh
->region_array
[cur
];
1097 if (r
->type
== ERT_FIXUP
)
1099 r
= r
->u
.fixup
.real_region
;
1100 cur
= r
? r
->region_number
: 0;
1102 else if (r
->type
== ERT_CATCH
)
1105 cur
= r
? r
->region_number
: 0;
1111 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1112 requires extra care to adjust sequence start. */
1113 if (insn
== *pinsns
)
1119 else if (INSN_P (insn
))
1122 && ! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
	  /* Calls can always potentially throw exceptions, unless
	     they have a REG_EH_REGION note with a value of 0 or less,
	     which should be the only possible kind so far.  */
1126 && (GET_CODE (insn
) == CALL_INSN
1127 /* If we wanted exceptions for non-call insns, then
1128 any may_trap_p instruction could throw. */
1129 || (flag_non_call_exceptions
1130 && GET_CODE (PATTERN (insn
)) != CLOBBER
1131 && GET_CODE (PATTERN (insn
)) != USE
1132 && may_trap_p (PATTERN (insn
)))))
1134 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_EH_REGION
, GEN_INT (cur
),
1138 if (GET_CODE (insn
) == CALL_INSN
1139 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
1141 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 0),
1143 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 1),
1145 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 2),
1156 convert_from_eh_region_ranges (void)
1161 collect_eh_region_array ();
1162 resolve_fixup_regions ();
1164 stack
= xmalloc (sizeof (int) * (cfun
->eh
->last_region_number
+ 1));
1165 insns
= get_insns ();
1166 convert_from_eh_region_ranges_1 (&insns
, stack
, 0);
1169 remove_fixup_regions ();
1170 remove_unreachable_regions (insns
);
1174 add_ehl_entry (rtx label
, struct eh_region
*region
)
1176 struct ehl_map_entry
**slot
, *entry
;
1178 LABEL_PRESERVE_P (label
) = 1;
1180 entry
= ggc_alloc (sizeof (*entry
));
1181 entry
->label
= label
;
1182 entry
->region
= region
;
1184 slot
= (struct ehl_map_entry
**)
1185 htab_find_slot (cfun
->eh
->exception_handler_label_map
, entry
, INSERT
);
1187 /* Before landing pad creation, each exception handler has its own
1188 label. After landing pad creation, the exception handlers may
1189 share landing pads. This is ok, since maybe_remove_eh_handler
1190 only requires the 1-1 mapping before landing pad creation. */
1191 if (*slot
&& !cfun
->eh
->built_landing_pads
)
1198 find_exception_handler_labels (void)
1202 if (cfun
->eh
->exception_handler_label_map
)
1203 htab_empty (cfun
->eh
->exception_handler_label_map
);
1206 /* ??? The expansion factor here (3/2) must be greater than the htab
1207 occupancy factor (4/3) to avoid unnecessary resizing. */
1208 cfun
->eh
->exception_handler_label_map
1209 = htab_create_ggc (cfun
->eh
->last_region_number
* 3 / 2,
1210 ehl_hash
, ehl_eq
, NULL
);
1213 if (cfun
->eh
->region_tree
== NULL
)
1216 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1218 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1221 if (! region
|| region
->region_number
!= i
)
1223 if (cfun
->eh
->built_landing_pads
)
1224 lab
= region
->landing_pad
;
1226 lab
= region
->label
;
1229 add_ehl_entry (lab
, region
);
1232 /* For sjlj exceptions, need the return label to remain live until
1233 after landing pad generation. */
1234 if (USING_SJLJ_EXCEPTIONS
&& ! cfun
->eh
->built_landing_pads
)
1235 add_ehl_entry (return_label
, NULL
);
1239 current_function_has_exception_handlers (void)
1243 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1245 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1247 if (! region
|| region
->region_number
!= i
)
1249 if (region
->type
!= ERT_THROW
)
1256 static struct eh_region
*
1257 duplicate_eh_region_1 (struct eh_region
*o
, struct inline_remap
*map
)
1259 struct eh_region
*n
= ggc_alloc_cleared (sizeof (struct eh_region
));
1261 n
->region_number
= o
->region_number
+ cfun
->eh
->last_region_number
;
1267 case ERT_MUST_NOT_THROW
:
1271 if (o
->u
.try.continue_label
)
1272 n
->u
.try.continue_label
1273 = get_label_from_map (map
,
1274 CODE_LABEL_NUMBER (o
->u
.try.continue_label
));
1278 n
->u
.catch.type_list
= o
->u
.catch.type_list
;
1281 case ERT_ALLOWED_EXCEPTIONS
:
1282 n
->u
.allowed
.type_list
= o
->u
.allowed
.type_list
;
1286 n
->u
.throw.type
= o
->u
.throw.type
;
1293 n
->label
= get_label_from_map (map
, CODE_LABEL_NUMBER (o
->label
));
1296 n
->resume
= map
->insn_map
[INSN_UID (o
->resume
)];
1297 if (n
->resume
== NULL
)
1305 duplicate_eh_region_2 (struct eh_region
*o
, struct eh_region
**n_array
)
1307 struct eh_region
*n
= n_array
[o
->region_number
];
1312 n
->u
.try.catch = n_array
[o
->u
.try.catch->region_number
];
1313 n
->u
.try.last_catch
= n_array
[o
->u
.try.last_catch
->region_number
];
1317 if (o
->u
.catch.next_catch
)
1318 n
->u
.catch.next_catch
= n_array
[o
->u
.catch.next_catch
->region_number
];
1319 if (o
->u
.catch.prev_catch
)
1320 n
->u
.catch.prev_catch
= n_array
[o
->u
.catch.prev_catch
->region_number
];
1328 n
->outer
= n_array
[o
->outer
->region_number
];
1330 n
->inner
= n_array
[o
->inner
->region_number
];
1332 n
->next_peer
= n_array
[o
->next_peer
->region_number
];
1336 duplicate_eh_regions (struct function
*ifun
, struct inline_remap
*map
)
1338 int ifun_last_region_number
= ifun
->eh
->last_region_number
;
1339 struct eh_region
**n_array
, *root
, *cur
;
1342 if (ifun_last_region_number
== 0)
1345 n_array
= xcalloc (ifun_last_region_number
+ 1, sizeof (*n_array
));
1347 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1349 cur
= ifun
->eh
->region_array
[i
];
1350 if (!cur
|| cur
->region_number
!= i
)
1352 n_array
[i
] = duplicate_eh_region_1 (cur
, map
);
1354 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1356 cur
= ifun
->eh
->region_array
[i
];
1357 if (!cur
|| cur
->region_number
!= i
)
1359 duplicate_eh_region_2 (cur
, n_array
);
1362 root
= n_array
[ifun
->eh
->region_tree
->region_number
];
1363 cur
= cfun
->eh
->cur_region
;
1366 struct eh_region
*p
= cur
->inner
;
1369 while (p
->next_peer
)
1371 p
->next_peer
= root
;
1376 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1377 if (n_array
[i
] && n_array
[i
]->outer
== NULL
)
1378 n_array
[i
]->outer
= cur
;
1382 struct eh_region
*p
= cfun
->eh
->region_tree
;
1385 while (p
->next_peer
)
1387 p
->next_peer
= root
;
1390 cfun
->eh
->region_tree
= root
;
1395 i
= cfun
->eh
->last_region_number
;
1396 cfun
->eh
->last_region_number
= i
+ ifun_last_region_number
;
1402 t2r_eq (const void *pentry
, const void *pdata
)
1404 tree entry
= (tree
) pentry
;
1405 tree data
= (tree
) pdata
;
1407 return TREE_PURPOSE (entry
) == data
;
1411 t2r_hash (const void *pentry
)
1413 tree entry
= (tree
) pentry
;
1414 return TYPE_HASH (TREE_PURPOSE (entry
));
1418 add_type_for_runtime (tree type
)
1422 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1423 TYPE_HASH (type
), INSERT
);
1426 tree runtime
= (*lang_eh_runtime_type
) (type
);
1427 *slot
= tree_cons (type
, runtime
, NULL_TREE
);
1432 lookup_type_for_runtime (tree type
)
1436 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1437 TYPE_HASH (type
), NO_INSERT
);
1439 /* We should have always inserted the data earlier. */
1440 return TREE_VALUE (*slot
);
1444 /* Represent an entry in @TTypes for either catch actions
1445 or exception filter actions. */
1446 struct ttypes_filter
GTY(())
1452 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1453 (a tree) for a @TTypes type node we are thinking about adding. */
1456 ttypes_filter_eq (const void *pentry
, const void *pdata
)
1458 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1459 tree data
= (tree
) pdata
;
1461 return entry
->t
== data
;
1465 ttypes_filter_hash (const void *pentry
)
1467 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1468 return TYPE_HASH (entry
->t
);
1471 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1472 exception specification list we are thinking about adding. */
1473 /* ??? Currently we use the type lists in the order given. Someone
1474 should put these in some canonical order. */
1477 ehspec_filter_eq (const void *pentry
, const void *pdata
)
1479 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1480 const struct ttypes_filter
*data
= (const struct ttypes_filter
*) pdata
;
1482 return type_list_equal (entry
->t
, data
->t
);
1485 /* Hash function for exception specification lists. */
1488 ehspec_filter_hash (const void *pentry
)
1490 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1494 for (list
= entry
->t
; list
; list
= TREE_CHAIN (list
))
1495 h
= (h
<< 5) + (h
>> 27) + TYPE_HASH (TREE_VALUE (list
));
1499 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1500 up the search. Return the filter value to be used. */
1503 add_ttypes_entry (htab_t ttypes_hash
, tree type
)
1505 struct ttypes_filter
**slot
, *n
;
1507 slot
= (struct ttypes_filter
**)
1508 htab_find_slot_with_hash (ttypes_hash
, type
, TYPE_HASH (type
), INSERT
);
1510 if ((n
= *slot
) == NULL
)
1512 /* Filter value is a 1 based table index. */
1514 n
= xmalloc (sizeof (*n
));
1516 n
->filter
= VARRAY_ACTIVE_SIZE (cfun
->eh
->ttype_data
) + 1;
1519 VARRAY_PUSH_TREE (cfun
->eh
->ttype_data
, type
);
1525 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1526 to speed up the search. Return the filter value to be used. */
1529 add_ehspec_entry (htab_t ehspec_hash
, htab_t ttypes_hash
, tree list
)
1531 struct ttypes_filter
**slot
, *n
;
1532 struct ttypes_filter dummy
;
1535 slot
= (struct ttypes_filter
**)
1536 htab_find_slot (ehspec_hash
, &dummy
, INSERT
);
1538 if ((n
= *slot
) == NULL
)
1540 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1542 n
= xmalloc (sizeof (*n
));
1544 n
->filter
= -(VARRAY_ACTIVE_SIZE (cfun
->eh
->ehspec_data
) + 1);
      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
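      /* Illustrative aside (not in the original comment): uleb128 stores
	 an unsigned value 7 bits at a time, least significant group
	 first, setting bit 7 of every byte except the last.  E.g. the
	 value 624485 (0x98765) is emitted as the three bytes
	 0xe5 0x8e 0x26.  Filter values here are small, so most entries
	 occupy a single byte.  */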
1549 for (; list
; list
= TREE_CHAIN (list
))
1550 push_uleb128 (&cfun
->eh
->ehspec_data
,
1551 add_ttypes_entry (ttypes_hash
, TREE_VALUE (list
)));
1552 VARRAY_PUSH_UCHAR (cfun
->eh
->ehspec_data
, 0);
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */
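/* For example (illustrative, not from the original sources): if two
   different try blocks each have a handler for the same type T, both
   handlers receive the identical filter value for T from
   add_ttypes_entry, so the @TTypes table carries a single entry for T
   rather than one entry per handler.  */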
1564 assign_filter_values (void)
1567 htab_t ttypes
, ehspec
;
1569 VARRAY_TREE_INIT (cfun
->eh
->ttype_data
, 16, "ttype_data");
1570 VARRAY_UCHAR_INIT (cfun
->eh
->ehspec_data
, 64, "ehspec_data");
1572 ttypes
= htab_create (31, ttypes_filter_hash
, ttypes_filter_eq
, free
);
1573 ehspec
= htab_create (31, ehspec_filter_hash
, ehspec_filter_eq
, free
);
1575 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1577 struct eh_region
*r
= cfun
->eh
->region_array
[i
];
1579 /* Mind we don't process a region more than once. */
1580 if (!r
|| r
->region_number
!= i
)
1586 /* Whatever type_list is (NULL or true list), we build a list
1587 of filters for the region. */
1588 r
->u
.catch.filter_list
= NULL_TREE
;
1590 if (r
->u
.catch.type_list
!= NULL
)
1592 /* Get a filter value for each of the types caught and store
1593 them in the region's dedicated list. */
1594 tree tp_node
= r
->u
.catch.type_list
;
1596 for (;tp_node
; tp_node
= TREE_CHAIN (tp_node
))
1598 int flt
= add_ttypes_entry (ttypes
, TREE_VALUE (tp_node
));
1599 tree flt_node
= build_int_2 (flt
, 0);
1601 r
->u
.catch.filter_list
1602 = tree_cons (NULL_TREE
, flt_node
, r
->u
.catch.filter_list
);
1607 /* Get a filter value for the NULL list also since it will need
1608 an action record anyway. */
1609 int flt
= add_ttypes_entry (ttypes
, NULL
);
1610 tree flt_node
= build_int_2 (flt
, 0);
1612 r
->u
.catch.filter_list
1613 = tree_cons (NULL_TREE
, flt_node
, r
->u
.catch.filter_list
);
1618 case ERT_ALLOWED_EXCEPTIONS
:
1620 = add_ehspec_entry (ehspec
, ttypes
, r
->u
.allowed
.type_list
);
1628 htab_delete (ttypes
);
1629 htab_delete (ehspec
);
1632 /* Generate the code to actually handle exceptions, which will follow the
1636 build_post_landing_pads (void)
1640 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1642 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1645 /* Mind we don't process a region more than once. */
1646 if (!region
|| region
->region_number
!= i
)
1649 switch (region
->type
)
1652 /* ??? Collect the set of all non-overlapping catch handlers
1653 all the way up the chain until blocked by a cleanup. */
1654 /* ??? Outer try regions can share landing pads with inner
1655 try regions if the types are completely non-overlapping,
1656 and there are no intervening cleanups. */
1658 region
->post_landing_pad
= gen_label_rtx ();
1662 emit_label (region
->post_landing_pad
);
1664 /* ??? It is mighty inconvenient to call back into the
1665 switch statement generation code in expand_end_case.
1666 Rapid prototyping sez a sequence of ifs. */
1668 struct eh_region
*c
;
1669 for (c
= region
->u
.try.catch; c
; c
= c
->u
.catch.next_catch
)
1671 if (c
->u
.catch.type_list
== NULL
)
1672 emit_jump (c
->label
);
1675 /* Need for one cmp/jump per type caught. Each type
1676 list entry has a matching entry in the filter list
1677 (see assign_filter_values). */
1678 tree tp_node
= c
->u
.catch.type_list
;
1679 tree flt_node
= c
->u
.catch.filter_list
;
1683 emit_cmp_and_jump_insns
1685 GEN_INT (tree_low_cst (TREE_VALUE (flt_node
), 0)),
1686 EQ
, NULL_RTX
, word_mode
, 0, c
->label
);
1688 tp_node
= TREE_CHAIN (tp_node
);
1689 flt_node
= TREE_CHAIN (flt_node
);
1695 /* We delay the generation of the _Unwind_Resume until we generate
1696 landing pads. We emit a marker here so as to get good control
1697 flow data in the meantime. */
1699 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1705 emit_insn_before (seq
, region
->u
.try.catch->label
);
1708 case ERT_ALLOWED_EXCEPTIONS
:
1709 region
->post_landing_pad
= gen_label_rtx ();
1713 emit_label (region
->post_landing_pad
);
1715 emit_cmp_and_jump_insns (cfun
->eh
->filter
,
1716 GEN_INT (region
->u
.allowed
.filter
),
1717 EQ
, NULL_RTX
, word_mode
, 0, region
->label
);
1719 /* We delay the generation of the _Unwind_Resume until we generate
1720 landing pads. We emit a marker here so as to get good control
1721 flow data in the meantime. */
1723 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1729 emit_insn_before (seq
, region
->label
);
1733 case ERT_MUST_NOT_THROW
:
1734 region
->post_landing_pad
= region
->label
;
1739 /* Nothing to do. */
1748 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1749 _Unwind_Resume otherwise. */
1752 connect_post_landing_pads (void)
1756 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1758 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1759 struct eh_region
*outer
;
1762 /* Mind we don't process a region more than once. */
1763 if (!region
|| region
->region_number
!= i
)
1766 /* If there is no RESX, or it has been deleted by flow, there's
1767 nothing to fix up. */
1768 if (! region
->resume
|| INSN_DELETED_P (region
->resume
))
1771 /* Search for another landing pad in this function. */
1772 for (outer
= region
->outer
; outer
; outer
= outer
->outer
)
1773 if (outer
->post_landing_pad
)
1779 emit_jump (outer
->post_landing_pad
);
1781 emit_library_call (unwind_resume_libfunc
, LCT_THROW
,
1782 VOIDmode
, 1, cfun
->eh
->exc_ptr
, ptr_mode
);
1786 emit_insn_before (seq
, region
->resume
);
1787 delete_insn (region
->resume
);
1793 dw2_build_landing_pads (void)
1798 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1800 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1802 bool clobbers_hard_regs
= false;
1804 /* Mind we don't process a region more than once. */
1805 if (!region
|| region
->region_number
!= i
)
1808 if (region
->type
!= ERT_CLEANUP
1809 && region
->type
!= ERT_TRY
1810 && region
->type
!= ERT_ALLOWED_EXCEPTIONS
)
1815 region
->landing_pad
= gen_label_rtx ();
1816 emit_label (region
->landing_pad
);
1818 #ifdef HAVE_exception_receiver
1819 if (HAVE_exception_receiver
)
1820 emit_insn (gen_exception_receiver ());
1823 #ifdef HAVE_nonlocal_goto_receiver
1824 if (HAVE_nonlocal_goto_receiver
)
1825 emit_insn (gen_nonlocal_goto_receiver ());
1830 /* If the eh_return data registers are call-saved, then we
1831 won't have considered them clobbered from the call that
1832 threw. Kill them now. */
1835 unsigned r
= EH_RETURN_DATA_REGNO (j
);
1836 if (r
== INVALID_REGNUM
)
1838 if (! call_used_regs
[r
])
1840 emit_insn (gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, r
)));
1841 clobbers_hard_regs
= true;
1845 if (clobbers_hard_regs
)
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
1851 emit_insn (gen_rtx_ASM_INPUT (VOIDmode
, ""));
1854 emit_move_insn (cfun
->eh
->exc_ptr
,
1855 gen_rtx_REG (ptr_mode
, EH_RETURN_DATA_REGNO (0)));
1856 emit_move_insn (cfun
->eh
->filter
,
1857 gen_rtx_REG (word_mode
, EH_RETURN_DATA_REGNO (1)));
1862 emit_insn_before (seq
, region
->post_landing_pad
);
1869 int directly_reachable
;
1872 int call_site_index
;
1876 sjlj_find_directly_reachable_regions (struct sjlj_lp_info
*lp_info
)
1879 bool found_one
= false;
1881 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1883 struct eh_region
*region
;
1884 enum reachable_code rc
;
1888 if (! INSN_P (insn
))
1891 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1892 if (!note
|| INTVAL (XEXP (note
, 0)) <= 0)
1895 region
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))];
1897 type_thrown
= NULL_TREE
;
1898 if (region
->type
== ERT_THROW
)
1900 type_thrown
= region
->u
.throw.type
;
1901 region
= region
->outer
;
1904 /* Find the first containing region that might handle the exception.
1905 That's the landing pad to which we will transfer control. */
1906 rc
= RNL_NOT_CAUGHT
;
1907 for (; region
; region
= region
->outer
)
1909 rc
= reachable_next_level (region
, type_thrown
, 0);
1910 if (rc
!= RNL_NOT_CAUGHT
)
1913 if (rc
== RNL_MAYBE_CAUGHT
|| rc
== RNL_CAUGHT
)
1915 lp_info
[region
->region_number
].directly_reachable
= 1;
1924 sjlj_assign_call_site_values (rtx dispatch_label
, struct sjlj_lp_info
*lp_info
)
1929 /* First task: build the action table. */
1931 VARRAY_UCHAR_INIT (cfun
->eh
->action_record_data
, 64, "action_record_data");
1932 ar_hash
= htab_create (31, action_record_hash
, action_record_eq
, free
);
1934 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1935 if (lp_info
[i
].directly_reachable
)
1937 struct eh_region
*r
= cfun
->eh
->region_array
[i
];
1938 r
->landing_pad
= dispatch_label
;
1939 lp_info
[i
].action_index
= collect_one_action_chain (ar_hash
, r
);
1940 if (lp_info
[i
].action_index
!= -1)
1941 cfun
->uses_eh_lsda
= 1;
1944 htab_delete (ar_hash
);
1946 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1947 landing pad label for the region. For sjlj though, there is one
1948 common landing pad from which we dispatch to the post-landing pads.
1950 A region receives a dispatch index if it is directly reachable
1951 and requires in-function processing. Regions that share post-landing
1952 pads may share dispatch indices. */
1953 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1954 (see build_post_landing_pads) so we don't bother checking for it. */
1957 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1958 if (lp_info
[i
].directly_reachable
)
1959 lp_info
[i
].dispatch_index
= index
++;
  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */
1966 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1967 if (lp_info
[i
].directly_reachable
)
1969 int action
= lp_info
[i
].action_index
;
1971 /* Map must-not-throw to otherwise unused call-site index 0. */
1974 /* Map no-action to otherwise unused call-site index -1. */
1975 else if (action
== -1)
1977 /* Otherwise, look it up in the table. */
1979 index
= add_call_site (GEN_INT (lp_info
[i
].dispatch_index
), action
);
1981 lp_info
[i
].call_site_index
= index
;
1986 sjlj_mark_call_sites (struct sjlj_lp_info
*lp_info
)
1988 int last_call_site
= -2;
1991 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1993 struct eh_region
*region
;
1995 rtx note
, before
, p
;
1997 /* Reset value tracking at extended basic block boundaries. */
1998 if (GET_CODE (insn
) == CODE_LABEL
)
1999 last_call_site
= -2;
2001 if (! INSN_P (insn
))
2004 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  Mark them as
	 no action.  */
2010 if (GET_CODE (insn
) == CALL_INSN
2011 || (flag_non_call_exceptions
2012 && may_trap_p (PATTERN (insn
))))
2013 this_call_site
= -1;
2019 /* Calls that are known to not throw need not be marked. */
2020 if (INTVAL (XEXP (note
, 0)) <= 0)
2023 region
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))];
2024 this_call_site
= lp_info
[region
->region_number
].call_site_index
;
2027 if (this_call_site
== last_call_site
)
      /* Don't separate a call from its argument loads.  */
2032 if (GET_CODE (insn
) == CALL_INSN
)
2033 before
= find_first_parameter_load (insn
, NULL_RTX
);
2036 mem
= adjust_address (cfun
->eh
->sjlj_fc
, TYPE_MODE (integer_type_node
),
2037 sjlj_fc_call_site_ofs
);
2038 emit_move_insn (mem
, GEN_INT (this_call_site
));
2042 emit_insn_before (p
, before
);
2043 last_call_site
= this_call_site
;
2047 /* Construct the SjLj_Function_Context. */
2050 sjlj_emit_function_enter (rtx dispatch_label
)
2052 rtx fn_begin
, fc
, mem
, seq
;
2054 fc
= cfun
->eh
->sjlj_fc
;
2058 /* We're storing this libcall's address into memory instead of
2059 calling it directly. Thus, we must call assemble_external_libcall
2060 here, as we can not depend on emit_library_call to do it for us. */
2061 assemble_external_libcall (eh_personality_libfunc
);
2062 mem
= adjust_address (fc
, Pmode
, sjlj_fc_personality_ofs
);
2063 emit_move_insn (mem
, eh_personality_libfunc
);
2065 mem
= adjust_address (fc
, Pmode
, sjlj_fc_lsda_ofs
);
2066 if (cfun
->uses_eh_lsda
)
2071 ASM_GENERATE_INTERNAL_LABEL (buf
, "LLSDA", current_function_funcdef_no
);
2072 sym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
2073 SYMBOL_REF_FLAGS (sym
) = SYMBOL_FLAG_LOCAL
;
2074 emit_move_insn (mem
, sym
);
2077 emit_move_insn (mem
, const0_rtx
);
2079 #ifdef DONT_USE_BUILTIN_SETJMP
2082 x
= emit_library_call_value (setjmp_libfunc
, NULL_RTX
, LCT_RETURNS_TWICE
,
2083 TYPE_MODE (integer_type_node
), 1,
2084 plus_constant (XEXP (fc
, 0),
2085 sjlj_fc_jbuf_ofs
), Pmode
);
2087 note
= emit_note (NOTE_INSN_EXPECTED_VALUE
);
2088 NOTE_EXPECTED_VALUE (note
) = gen_rtx_EQ (VOIDmode
, x
, const0_rtx
);
2090 emit_cmp_and_jump_insns (x
, const0_rtx
, NE
, 0,
2091 TYPE_MODE (integer_type_node
), 0, dispatch_label
);
2094 expand_builtin_setjmp_setup (plus_constant (XEXP (fc
, 0), sjlj_fc_jbuf_ofs
),
2098 emit_library_call (unwind_sjlj_register_libfunc
, LCT_NORMAL
, VOIDmode
,
2099 1, XEXP (fc
, 0), Pmode
);
2104 /* ??? Instead of doing this at the beginning of the function,
2105 do this in a block that is at loop level 0 and dominates all
2106 can_throw_internal instructions. */
2108 for (fn_begin
= get_insns (); ; fn_begin
= NEXT_INSN (fn_begin
))
2109 if (GET_CODE (fn_begin
) == NOTE
2110 && NOTE_LINE_NUMBER (fn_begin
) == NOTE_INSN_FUNCTION_BEG
)
2112 emit_insn_after (seq
, fn_begin
);
2115 /* Call back from expand_function_end to know where we should put
2116 the call to unwind_sjlj_unregister_libfunc if needed. */
2119 sjlj_emit_function_exit_after (rtx after
)
2121 cfun
->eh
->sjlj_exit_after
= after
;
2125 sjlj_emit_function_exit (void)
2131 emit_library_call (unwind_sjlj_unregister_libfunc
, LCT_NORMAL
, VOIDmode
,
2132 1, XEXP (cfun
->eh
->sjlj_fc
, 0), Pmode
);
2137 /* ??? Really this can be done in any block at loop level 0 that
2138 post-dominates all can_throw_internal instructions. This is
2139 the last possible moment. */
2141 emit_insn_after (seq
, cfun
->eh
->sjlj_exit_after
);
2145 sjlj_emit_dispatch_table (rtx dispatch_label
, struct sjlj_lp_info
*lp_info
)
2147 int i
, first_reachable
;
2148 rtx mem
, dispatch
, seq
, fc
;
2150 fc
= cfun
->eh
->sjlj_fc
;
2154 emit_label (dispatch_label
);
2156 #ifndef DONT_USE_BUILTIN_SETJMP
2157 expand_builtin_setjmp_receiver (dispatch_label
);
2160 /* Load up dispatch index, exc_ptr and filter values from the
2161 function context. */
2162 mem
= adjust_address (fc
, TYPE_MODE (integer_type_node
),
2163 sjlj_fc_call_site_ofs
);
2164 dispatch
= copy_to_reg (mem
);
2166 mem
= adjust_address (fc
, word_mode
, sjlj_fc_data_ofs
);
2167 if (word_mode
!= ptr_mode
)
2169 #ifdef POINTERS_EXTEND_UNSIGNED
2170 mem
= convert_memory_address (ptr_mode
, mem
);
2172 mem
= convert_to_mode (ptr_mode
, mem
, 0);
2175 emit_move_insn (cfun
->eh
->exc_ptr
, mem
);
2177 mem
= adjust_address (fc
, word_mode
, sjlj_fc_data_ofs
+ UNITS_PER_WORD
);
2178 emit_move_insn (cfun
->eh
->filter
, mem
);
2180 /* Jump to one of the directly reachable regions. */
2181 /* ??? This really ought to be using a switch statement. */
2183 first_reachable
= 0;
2184 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2186 if (! lp_info
[i
].directly_reachable
)
2189 if (! first_reachable
)
2191 first_reachable
= i
;
2195 emit_cmp_and_jump_insns (dispatch
, GEN_INT (lp_info
[i
].dispatch_index
),
2196 EQ
, NULL_RTX
, TYPE_MODE (integer_type_node
), 0,
2197 cfun
->eh
->region_array
[i
]->post_landing_pad
);
2203 emit_insn_before (seq
, (cfun
->eh
->region_array
[first_reachable
]
2204 ->post_landing_pad
));
2208 sjlj_build_landing_pads (void)
2210 struct sjlj_lp_info
*lp_info
;
2212 lp_info
= xcalloc (cfun
->eh
->last_region_number
+ 1,
2213 sizeof (struct sjlj_lp_info
));
2215 if (sjlj_find_directly_reachable_regions (lp_info
))
2217 rtx dispatch_label
= gen_label_rtx ();
2220 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node
),
2221 int_size_in_bytes (sjlj_fc_type_node
),
2222 TYPE_ALIGN (sjlj_fc_type_node
));
2224 sjlj_assign_call_site_values (dispatch_label
, lp_info
);
2225 sjlj_mark_call_sites (lp_info
);
2227 sjlj_emit_function_enter (dispatch_label
);
2228 sjlj_emit_dispatch_table (dispatch_label
, lp_info
);
2229 sjlj_emit_function_exit ();
2236 finish_eh_generation (void)
2238 /* Nothing to do if no regions created. */
2239 if (cfun
->eh
->region_tree
== NULL
)
2242 /* The object here is to provide find_basic_blocks with detailed
2243 information (via reachable_handlers) on how exception control
2244 flows within the function. In this first pass, we can include
2245 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2246 regions, and hope that it will be useful in deleting unreachable
2247 handlers. Subsequently, we will generate landing pads which will
2248 connect many of the handlers, and then type information will not
2249 be effective. Still, this is a win over previous implementations. */
2251 cleanup_cfg (CLEANUP_PRE_LOOP
| CLEANUP_NO_INSN_DEL
);
2253 /* These registers are used by the landing pads. Make sure they
2254 have been generated. */
2255 get_exception_pointer (cfun
);
2256 get_exception_filter (cfun
);
2258 /* Construct the landing pads. */
2260 assign_filter_values ();
2261 build_post_landing_pads ();
2262 connect_post_landing_pads ();
2263 if (USING_SJLJ_EXCEPTIONS
)
2264 sjlj_build_landing_pads ();
2266 dw2_build_landing_pads ();
2268 cfun
->eh
->built_landing_pads
= 1;
2270 /* We've totally changed the CFG. Start over. */
2271 find_exception_handler_labels ();
2272 rebuild_jump_labels (get_insns ());
2273 find_basic_blocks (get_insns (), max_reg_num (), 0);
2274 cleanup_cfg (CLEANUP_PRE_LOOP
| CLEANUP_NO_INSN_DEL
);
2278 ehl_hash (const void *pentry
)
2280 struct ehl_map_entry
*entry
= (struct ehl_map_entry
*) pentry
;
  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
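  /* Illustrative note (not in the original): 0x9e3779b9 is the 32-bit
     fixed-point form of the golden ratio conjugate,
     floor (2^32 * 0.6180339887...) = 2654435769.  Multiplying the label
     number by it scrambles the bits so that consecutive
     CODE_LABEL_NUMBERs spread across the hash table.  */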
2284 return CODE_LABEL_NUMBER (entry
->label
) * scaled_golden_ratio
;
2288 ehl_eq (const void *pentry
, const void *pdata
)
2290 struct ehl_map_entry
*entry
= (struct ehl_map_entry
*) pentry
;
2291 struct ehl_map_entry
*data
= (struct ehl_map_entry
*) pdata
;
2293 return entry
->label
== data
->label
;
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    abort ();

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      int i;
      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
        { cfun->eh->region_array[i] = outer; });
    }

  if (outer)
    {
      if (!outer->aka)
        outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
        bitmap_a_or_b (outer->aka, outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer; p = p->next_peer)
        p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
           try->type == ERT_CATCH;
           try = try->next_peer)
        continue;
      if (try->type != ERT_TRY)
        abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
        next->u.catch.prev_catch = prev;
      else
        try->u.try.last_catch = prev;
      if (prev)
        prev->u.catch.next_catch = next;
      else
        {
          try->u.try.catch = next;
          if (! next)
            remove_eh_handler (try);
        }
    }
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
                 (void *) &callback);
}

static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info GTY(())
{
  tree types_caught;
  tree types_allowed;
  rtx handlers;
};
/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (tree handled, tree type)
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if (TREE_VALUE (t) == type)
          return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
          return 1;
    }

  return 0;
}
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (struct reachable_info *info,
                       struct eh_region *lp_region, struct eh_region *region)
{
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      if (! info->handlers)
        info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
                      struct reachable_info *info)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
         directly to the individual handlers.  In this way we can
         see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;
    case ERT_TRY:
      {
        struct eh_region *c;
        enum reachable_code ret = RNL_NOT_CAUGHT;

        for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
          {
            /* A catch-all handler ends the search.  */
            if (c->u.catch.type_list == NULL)
              {
                add_reachable_handler (info, region, c);
                return RNL_CAUGHT;
              }

            if (type_thrown)
              {
                /* If we have at least one type match, end the search.  */
                tree tp_node = c->u.catch.type_list;

                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (type == type_thrown
                        || (lang_eh_type_covers
                            && (*lang_eh_type_covers) (type, type_thrown)))
                      {
                        add_reachable_handler (info, region, c);
                        return RNL_CAUGHT;
                      }
                  }

                /* If we have definitive information of a match failure,
                   the catch won't trigger.  */
                if (lang_eh_type_covers)
                  return RNL_NOT_CAUGHT;
              }

            /* At this point, we either don't know what type is thrown or
               don't have front-end assistance to help deciding if it is
               covered by one of the types in the list for this region.

               We'd then like to add this region to the list of reachable
               handlers since it is indeed potentially reachable based on the
               information we have.

               Actually, this handler is for sure not reachable if all the
               types it matches have already been caught.  That is, it is only
               potentially reachable if at least one of the types it catches
               has not been previously caught.  */

            if (! info)
              ret = RNL_MAYBE_CAUGHT;
            else
              {
                tree tp_node = c->u.catch.type_list;
                bool maybe_reachable = false;

                /* Compute the potential reachability of this handler and
                   update the list of types caught at the same time.  */
                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (! check_handled (info->types_caught, type))
                      {
                        info->types_caught
                          = tree_cons (NULL, type, info->types_caught);

                        maybe_reachable = true;
                      }
                  }

                if (maybe_reachable)
                  {
                    add_reachable_handler (info, region, c);

                    /* ??? If the catch type is a base class of every allowed
                       type, then we know we can stop the search.  */
                    ret = RNL_MAYBE_CAUGHT;
                  }
              }
          }

        return ret;
      }
    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }

      /* Collect a list of lists of allowed types for use in detecting
         when a catch may be transformed into a catch-all.  */
      if (info)
        info->types_allowed = tree_cons (NULL_TREE,
                                         region->u.allowed.type_list,
                                         info->types_allowed);

      /* If we have definitive information about the type hierarchy,
         then we can tell if the thrown type will pass through the
         filter.  */
      if (type_thrown && lang_eh_type_covers)
        {
          if (check_handled (region->u.allowed.type_list, type_thrown))
            return RNL_NOT_CAUGHT;
          else
            {
              add_reachable_handler (info, region, region);
              return RNL_CAUGHT;
            }
        }

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
         If we've touched down at some landing pad previous, then the
         explicit function call we generated may be used.  Otherwise
         the call is made by the runtime.  */
      if (info && info->handlers)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }
      else
        return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

rtx
reachable_handlers (rtx insn)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
         region itself may have been deleted out from under us.  */
      if (region == NULL)
        return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
        break;
      /* If we have processed one cleanup, there is no point in
         processing any more of them.  Each cleanup will have an edge
         to the next outer cleanup region, so the flow graph will be
         accurate.  */
      if (region->type == ERT_CLEANUP)
        region = region->u.cleanup.prev_try;
      else
        region = region->outer;
    }

  return info.handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal (rtx insn)
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
        {
          rtx sub = XEXP (PATTERN (insn), i);
          for (; sub ; sub = NEXT_INSN (sub))
            if (can_throw_internal (sub))
              return true;
        }
      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
        return false;
      if (how != RNL_NOT_CAUGHT)
        return true;
    }

  return false;
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external (rtx insn)
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
        {
          rtx sub = XEXP (PATTERN (insn), i);
          for (; sub ; sub = NEXT_INSN (sub))
            if (can_throw_external (sub))
              return true;
        }
      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
         exception handling region in this function.  We have to
         assume it might throw.  Given that the front end and middle
         ends mark known NOTHROW functions, this isn't so wildly
         inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls.  */

void
set_nothrow_function_flags (void)
{
  rtx insn;

  current_function_nothrow = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        current_function_nothrow = 0;

        if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
        current_function_nothrow = 0;

        if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
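
/* Usage sketch (an assumed example, not from this file): the language
   personality routines in libgcc call __builtin_eh_return_data_regno (0)
   and (1) to find the hard registers in which to hand the exception
   pointer and filter value back to the landing pad; the builtin folds to
   the constant -1 when the target defines no EH return data registers.  */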
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
                          tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  return convert_modes (word_mode, ptr_mode, addr, extend);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

         0:     null action record, non-null landing pad; implies cleanups
        -1:     null action record, null landing pad; implies no action
        -2:     no call-site entry; implies must_not_throw
        -3:     we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */
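
/* A worked example (assumed source program, not taken from the testsuite):
   for a call inside a try block with two catches whose filter values are
   1 and 2, nested inside an outer cleanup, collect_one_action_chain builds

        add_action_record (ar_hash, 0, 0)       -> index 1  (the cleanup)
        add_action_record (ar_hash, 2, 1)       -> index 3  (second catch)
        add_action_record (ar_hash, 1, 3)       -> index 5  (first catch)

   and the call site records action 5, so the runtime considers filter 1,
   then filter 2, then the cleanup.  With one-byte sleb128 values each
   record occupies two bytes, which is where the 1-based offsets 1, 3 and
   5 come from.  */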
struct action_record
{
  int offset;
  int filter;
  int next;
};

static int
action_record_eq (const void *pentry, const void *pdata)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (const void *pentry)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
         record is a "self-relative" byte offset, or zero to indicate
         that there is no next record.  So convert the absolute 1 based
         indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
        next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
         there are special cases to look out for.  If there are *only*
         cleanups along a path, then it compresses to a zero action.
         Further, if there are multiple cleanups along a path, we only
         need to represent one of them, as that is enough to trigger
         entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
        return 0;
      for (c = region->outer; c ; c = c->outer)
        if (c->type == ERT_CLEANUP)
          return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
         If there's a catch-all handler, then we don't need to
         search outer regions.  Use a magic -3 value to record
         that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
        {
          if (c->u.catch.type_list == NULL)
            {
              /* Retrieve the filter from the head of the filter list
                 where we have stored it (see assign_filter_values).  */
              int filter
                = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

              next = add_action_record (ar_hash, filter, 0);
            }
          else
            {
              /* Once the outer search is done, trigger an action record for
                 each filter we have.  */
              tree flt_node;

              if (next == -3)
                {
                  next = collect_one_action_chain (ar_hash, region->outer);

                  /* If there is no next action, terminate the chain.  */
                  if (next == -1)
                    next = 0;

                  /* If all outer actions are cleanups or must_not_throw,
                     we'll have no action record for it, since we had wanted
                     to encode these states in the call-site record directly.
                     Add a cleanup action to the chain to catch these.  */
                  else if (next <= 0)
                    next = add_action_record (ar_hash, 0, 0);
                }

              flt_node = c->u.catch.filter_list;
              for (; flt_node; flt_node = TREE_CHAIN (flt_node))
                {
                  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
                  next = add_action_record (ar_hash, filter, next);
                }
            }
        }
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
         beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
                                next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
         requires no call-site entry.  Note that this differs from
         the no handler or cleanup case in that we do require an lsda
         to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
         for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
static int
add_call_site (rtx landing_pad, int action)
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */
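
/* In other words (a summary, not a change in behavior): each call-site
   entry created below pairs a landing pad (or NULL_RTX) with an action
   table index, the NOTE_INSN_EH_REGION_BEG/END notes carry that call-site
   index in NOTE_EH_HANDLER, and adjacent insns that share both the same
   action chain and the same landing pad are merged into a single
   call-site region.  */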
void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        struct eh_region *region;
        int this_action;
        rtx this_landing_pad;

        insn = iter;
        if (GET_CODE (insn) == INSN
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
        if (!note)
          {
            if (! (GET_CODE (insn) == CALL_INSN
                   || (flag_non_call_exceptions
                       && may_trap_p (PATTERN (insn)))))
              continue;
            this_action = -1;
            region = NULL;
          }
        else
          {
            if (INTVAL (XEXP (note, 0)) <= 0)
              continue;
            region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
            this_action = collect_one_action_chain (ar_hash, region);
          }

        /* Existence of catch handlers, or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          cfun->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
            continue;
          }

        /* Cleanups and handlers may share action chains but not
           landing pads.  Collect the landing pad for this region.  */
        if (this_action >= 0)
          {
            struct eh_region *o;
            for (o = region; ! o->landing_pad ; o = o->outer)
              continue;
            this_landing_pad = o->landing_pad;
          }
        else
          this_landing_pad = NULL_RTX;

        /* Differing actions or landing pads implies a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL_RTX;
                  }

                note = emit_note_after (NOTE_INSN_EH_REGION_END,
                                        last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }
        last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
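
/* Worked encodings for reference (computed by hand, not emitted anywhere
   in particular): push_uleb128 with VALUE 624485 pushes the bytes
   0xe5 0x8e 0x26 -- low-order seven-bit groups first, continuation bit set
   on all but the last byte.  push_sleb128 with VALUE -2 pushes the single
   byte 0x7e, since bit 6 of that byte already carries the sign.  */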
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table (void)
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
                                    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                    "length");
      if (cs->landing_pad)
        dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
                                      "landing pad");
      else
        dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
                            "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
                              "landing pad");
      else
        dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
static void
sjlj_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
{
  if (targetm.have_named_sections)
    {
      int flags;
#ifdef HAVE_LD_RO_RW_SECTION_MIXING
      int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

      flags = (! flag_pic
               || ((tt_format & 0x70) != DW_EH_PE_absptr
                   && (tt_format & 0x70) != DW_EH_PE_aligned))
              ? 0 : SECTION_WRITE;
#else
      flags = SECTION_WRITE;
#endif
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}
void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  (*targetm.asm_out.exception_section) ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
                                   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
                                     current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
                                   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
                    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
                       * tt_format_size));

      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
                               current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
                               current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
        value = const0_rtx;
      else
        {
          struct cgraph_varpool_node *node;

          type = lookup_type_for_runtime (type);
          value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

          /* Let cgraph know that the rtti decl is used.  Not all of the
             paths below go through assemble_integer, which would take
             care of this for us.  */
          if (TREE_CODE (type) == ADDR_EXPR)
            {
              type = TREE_OPERAND (type, 0);
              node = cgraph_varpool_node (type);
              if (node)
                cgraph_varpool_mark_needed_node (node);
            }
          else if (TREE_CODE (type) != INTEGER_CST)
            abort ();
        }

      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
        assemble_integer (value, tt_format_size,
                          tt_format_size * BITS_PER_UNIT, 1);
      else
        dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
                         (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);
}
#include "gt-except.h"