/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
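/* A rough sketch of how a front end is expected to drive the expansion
   entry points below (the C++ fragment is purely illustrative; only the
   function names are real entry points in this file):

     try {                       expand_eh_region_start ();
       body;                     ... expand body ...
     }                           expand_start_all_catch ();
     catch (T) {                 expand_start_catch (T);
       handler;                  ... expand handler ...
     }                           expand_end_catch ();
                                 expand_end_all_catch ();

   Cleanup regions are opened the same way and closed with
   expand_eh_region_end_cleanup.  */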
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */

#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
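/* Purely as an illustration (not taken from any particular target): a
   port that exposes its first two argument registers as EH data
   registers might provide something like

     #define EH_RETURN_DATA_REGNO(N)  ((N) < 2 ? (N) : INVALID_REGNUM)

   so that dw2_build_landing_pads below can load the exception pointer
   and filter value from EH_RETURN_DATA_REGNO (0) and (1).  */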
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;
/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static int call_site_base;
static GTY ((param_is (union tree_node))) htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
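/* For instance (purely illustrative), a function body of the form

     try { A; } catch (T) { B; }
     cleanup { C; }

   produces one ERT_TRY region for the try, an ERT_CATCH region for the
   catch clause linked on the try's catch list, and an ERT_CLEANUP
   region for the cleanup; OUTER, INNER and NEXT_PEER chain these into
   a tree rooted at eh_status.region_tree, and REGION_NUMBER indexes
   eh_status.region_array.  */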
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static int t2r_eq                              PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash                      PARAMS ((const PTR));
static void add_type_for_runtime               PARAMS ((tree));
static tree lookup_type_for_runtime            PARAMS ((tree));

static struct eh_region *expand_eh_region_end  PARAMS ((void));

static rtx get_exception_filter                PARAMS ((struct function *));

static void collect_eh_region_array            PARAMS ((void));
static void resolve_fixup_regions              PARAMS ((void));
static void remove_fixup_regions               PARAMS ((void));
static void remove_unreachable_regions         PARAMS ((rtx));
static void convert_from_eh_region_ranges_1    PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
                                                        struct inline_remap *));
static void duplicate_eh_region_2              PARAMS ((struct eh_region *,
                                                        struct eh_region **));
static int ttypes_filter_eq                    PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash            PARAMS ((const PTR));
static int ehspec_filter_eq                    PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash            PARAMS ((const PTR));
static int add_ttypes_entry                    PARAMS ((htab_t, tree));
static int add_ehspec_entry                    PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values               PARAMS ((void));
static void build_post_landing_pads            PARAMS ((void));
static void connect_post_landing_pads          PARAMS ((void));
static void dw2_build_landing_pads             PARAMS ((void));

static bool sjlj_find_directly_reachable_regions
                                               PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
                                               PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
                                               PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter           PARAMS ((rtx));
static void sjlj_emit_function_exit            PARAMS ((void));
static void sjlj_emit_dispatch_table
                                               PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads            PARAMS ((void));

static hashval_t ehl_hash                      PARAMS ((const PTR));
static int ehl_eq                              PARAMS ((const PTR, const PTR));
static void add_ehl_entry                      PARAMS ((rtx,
                                                        struct eh_region *));
static void remove_exception_handler_label     PARAMS ((rtx));
static void remove_eh_handler                  PARAMS ((struct eh_region *));
static int for_each_eh_label_1                 PARAMS ((PTR *, PTR));

struct reachable_info;
/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};
static int check_handled                       PARAMS ((tree, tree));
static void add_reachable_handler
                                               PARAMS ((struct reachable_info *,
                                                        struct eh_region *,
                                                        struct eh_region *));
static enum reachable_code reachable_next_level
                                               PARAMS ((struct eh_region *, tree,
                                                        struct reachable_info *));

static int action_record_eq                    PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash            PARAMS ((const PTR));
static int add_action_record                   PARAMS ((htab_t, int, int));
static int collect_one_action_chain            PARAMS ((htab_t,
                                                        struct eh_region *));
static int add_call_site                       PARAMS ((rtx, int));

static void push_uleb128                       PARAMS ((varray_type *,
                                                        unsigned int));
static void push_sleb128                       PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table         PARAMS ((void));
static int sjlj_size_of_call_site_table        PARAMS ((void));
#endif
static void dw2_output_call_site_table         PARAMS ((void));
static void sjlj_output_call_site_table        PARAMS ((void));
/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}
void
init_eh ()
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
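  /* For reference, the runtime structure being mirrored here looks
     roughly like this (see unwind-sjlj.c for the authoritative
     definition; the field types below are only an approximation):

        struct SjLj_Function_Context
        {
          struct SjLj_Function_Context *prev;
          int call_site;
          _Unwind_Word data[4];
          _Unwind_Personality_Fn personality;
          void *lsda;
          ... jmp_buf or builtin-setjmp buffer ...
        };  */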
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems, a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
         via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
                          / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *)
    ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (ptr_mode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));

  emit_label (around_label);
}
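/* A minimal usage sketch (the calls below are real entry points in this
   file; the surrounding pseudo-code is only illustrative):

     expand_eh_region_start ();
     ... expand the statements being protected ...
     expand_eh_region_end_cleanup (cleanup_expr);

   This produces an ERT_CLEANUP region whose handler expands
   CLEANUP_EXPR and then hands control to the enclosing handler via the
   RESX marker emitted above.  */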
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with potentially several exception
   types, which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
         processing, then register each type against the runtime types map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the correct
     region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the correct
     region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
                             * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
        continue;

      for (j = 1; j <= n; ++j)
        {
          cleanup = cfun->eh->region_array[j];
          if (cleanup->type == ERT_CLEANUP
              && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
            break;
        }

      fixup->u.fixup.real_region = cleanup->outer;
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */
927 remove_fixup_regions ()
931 struct eh_region
*fixup
;
933 /* Walk the insn chain and adjust the REG_EH_REGION numbers
934 for instructions referencing fixup regions. This is only
935 strictly necessary for fixup regions with no parent, but
936 doesn't hurt to do it for all regions. */
937 for (insn
= get_insns(); insn
; insn
= NEXT_INSN (insn
))
939 && (note
= find_reg_note (insn
, REG_EH_REGION
, NULL
))
940 && INTVAL (XEXP (note
, 0)) > 0
941 && (fixup
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))])
942 && fixup
->type
== ERT_FIXUP
)
944 if (fixup
->u
.fixup
.real_region
)
945 XEXP (note
, 0) = GEN_INT (fixup
->u
.fixup
.real_region
->region_number
);
947 remove_note (insn
, note
);
950 /* Remove the fixup regions from the tree. */
951 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
953 fixup
= cfun
->eh
->region_array
[i
];
957 /* Allow GC to maybe free some memory. */
958 if (fixup
->type
== ERT_CLEANUP
)
959 fixup
->u
.cleanup
.exp
= NULL_TREE
;
961 if (fixup
->type
!= ERT_FIXUP
)
966 struct eh_region
*parent
, *p
, **pp
;
968 parent
= fixup
->u
.fixup
.real_region
;
970 /* Fix up the children's parent pointers; find the end of
972 for (p
= fixup
->inner
; ; p
= p
->next_peer
)
979 /* In the tree of cleanups, only outer-inner ordering matters.
980 So link the children back in anywhere at the correct level. */
984 pp
= &cfun
->eh
->region_tree
;
990 remove_eh_handler (fixup
);
994 /* Remove all regions whose labels are not reachable from insns. */
997 remove_unreachable_regions (insns
)
1000 int i
, *uid_region_num
;
1002 struct eh_region
*r
;
1005 uid_region_num
= xcalloc (get_max_uid (), sizeof(int));
1006 reachable
= xcalloc (cfun
->eh
->last_region_number
+ 1, sizeof(bool));
1008 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1010 r
= cfun
->eh
->region_array
[i
];
1011 if (!r
|| r
->region_number
!= i
)
1016 if (uid_region_num
[INSN_UID (r
->resume
)])
1018 uid_region_num
[INSN_UID (r
->resume
)] = i
;
1022 if (uid_region_num
[INSN_UID (r
->label
)])
1024 uid_region_num
[INSN_UID (r
->label
)] = i
;
1026 if (r
->type
== ERT_TRY
&& r
->u
.try.continue_label
)
1028 if (uid_region_num
[INSN_UID (r
->u
.try.continue_label
)])
1030 uid_region_num
[INSN_UID (r
->u
.try.continue_label
)] = i
;
1034 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1035 reachable
[uid_region_num
[INSN_UID (insn
)]] = true;
1037 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1039 r
= cfun
->eh
->region_array
[i
];
1040 if (r
&& r
->region_number
== i
&& !reachable
[i
])
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
1044 if (r
->type
== ERT_THROW
1046 && reachable
[r
->outer
->region_number
])
1049 remove_eh_handler (r
);
1054 free (uid_region_num
);
1057 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1058 can_throw instruction in the region. */
1061 convert_from_eh_region_ranges_1 (pinsns
, orig_sp
, cur
)
1069 for (insn
= *pinsns
; insn
; insn
= next
)
1071 next
= NEXT_INSN (insn
);
1072 if (GET_CODE (insn
) == NOTE
)
1074 int kind
= NOTE_LINE_NUMBER (insn
);
1075 if (kind
== NOTE_INSN_EH_REGION_BEG
1076 || kind
== NOTE_INSN_EH_REGION_END
)
1078 if (kind
== NOTE_INSN_EH_REGION_BEG
)
1080 struct eh_region
*r
;
1083 cur
= NOTE_EH_HANDLER (insn
);
1085 r
= cfun
->eh
->region_array
[cur
];
1086 if (r
->type
== ERT_FIXUP
)
1088 r
= r
->u
.fixup
.real_region
;
1089 cur
= r
? r
->region_number
: 0;
1091 else if (r
->type
== ERT_CATCH
)
1094 cur
= r
? r
->region_number
: 0;
1100 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1101 requires extra care to adjust sequence start. */
1102 if (insn
== *pinsns
)
1108 else if (INSN_P (insn
))
1111 && ! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
1112 /* Calls can always potentially throw exceptions, unless
1113 they have a REG_EH_REGION note with a value of 0 or less.
1114 Which should be the only possible kind so far. */
1115 && (GET_CODE (insn
) == CALL_INSN
1116 /* If we wanted exceptions for non-call insns, then
1117 any may_trap_p instruction could throw. */
1118 || (flag_non_call_exceptions
1119 && GET_CODE (PATTERN (insn
)) != CLOBBER
1120 && GET_CODE (PATTERN (insn
)) != USE
1121 && may_trap_p (PATTERN (insn
)))))
1123 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_EH_REGION
, GEN_INT (cur
),
1127 if (GET_CODE (insn
) == CALL_INSN
1128 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
1130 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 0),
1132 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 1),
1134 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 2),
1145 convert_from_eh_region_ranges ()
1150 collect_eh_region_array ();
1151 resolve_fixup_regions ();
1153 stack
= xmalloc (sizeof (int) * (cfun
->eh
->last_region_number
+ 1));
1154 insns
= get_insns ();
1155 convert_from_eh_region_ranges_1 (&insns
, stack
, 0);
1158 remove_fixup_regions ();
1159 remove_unreachable_regions (insns
);
1163 add_ehl_entry (label
, region
)
1165 struct eh_region
*region
;
1167 struct ehl_map_entry
**slot
, *entry
;
1169 LABEL_PRESERVE_P (label
) = 1;
1171 entry
= (struct ehl_map_entry
*) ggc_alloc (sizeof (*entry
));
1172 entry
->label
= label
;
1173 entry
->region
= region
;
1175 slot
= (struct ehl_map_entry
**)
1176 htab_find_slot (cfun
->eh
->exception_handler_label_map
, entry
, INSERT
);
1178 /* Before landing pad creation, each exception handler has its own
1179 label. After landing pad creation, the exception handlers may
1180 share landing pads. This is ok, since maybe_remove_eh_handler
1181 only requires the 1-1 mapping before landing pad creation. */
1182 if (*slot
&& !cfun
->eh
->built_landing_pads
)
1189 find_exception_handler_labels ()
1193 if (cfun
->eh
->exception_handler_label_map
)
1194 htab_empty (cfun
->eh
->exception_handler_label_map
);
1197 /* ??? The expansion factor here (3/2) must be greater than the htab
1198 occupancy factor (4/3) to avoid unnecessary resizing. */
1199 cfun
->eh
->exception_handler_label_map
1200 = htab_create_ggc (cfun
->eh
->last_region_number
* 3 / 2,
1201 ehl_hash
, ehl_eq
, NULL
);
1204 if (cfun
->eh
->region_tree
== NULL
)
1207 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1209 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1212 if (! region
|| region
->region_number
!= i
)
1214 if (cfun
->eh
->built_landing_pads
)
1215 lab
= region
->landing_pad
;
1217 lab
= region
->label
;
1220 add_ehl_entry (lab
, region
);
1223 /* For sjlj exceptions, need the return label to remain live until
1224 after landing pad generation. */
1225 if (USING_SJLJ_EXCEPTIONS
&& ! cfun
->eh
->built_landing_pads
)
1226 add_ehl_entry (return_label
, NULL
);
1230 current_function_has_exception_handlers ()
1234 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1236 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1238 if (! region
|| region
->region_number
!= i
)
1240 if (region
->type
!= ERT_THROW
)
1247 static struct eh_region
*
1248 duplicate_eh_region_1 (o
, map
)
1249 struct eh_region
*o
;
1250 struct inline_remap
*map
;
1253 = (struct eh_region
*) ggc_alloc_cleared (sizeof (struct eh_region
));
1255 n
->region_number
= o
->region_number
+ cfun
->eh
->last_region_number
;
1261 case ERT_MUST_NOT_THROW
:
1265 if (o
->u
.try.continue_label
)
1266 n
->u
.try.continue_label
1267 = get_label_from_map (map
,
1268 CODE_LABEL_NUMBER (o
->u
.try.continue_label
));
1272 n
->u
.catch.type_list
= o
->u
.catch.type_list
;
1275 case ERT_ALLOWED_EXCEPTIONS
:
1276 n
->u
.allowed
.type_list
= o
->u
.allowed
.type_list
;
1280 n
->u
.throw.type
= o
->u
.throw.type
;
1287 n
->label
= get_label_from_map (map
, CODE_LABEL_NUMBER (o
->label
));
1290 n
->resume
= map
->insn_map
[INSN_UID (o
->resume
)];
1291 if (n
->resume
== NULL
)
1299 duplicate_eh_region_2 (o
, n_array
)
1300 struct eh_region
*o
;
1301 struct eh_region
**n_array
;
1303 struct eh_region
*n
= n_array
[o
->region_number
];
1308 n
->u
.try.catch = n_array
[o
->u
.try.catch->region_number
];
1309 n
->u
.try.last_catch
= n_array
[o
->u
.try.last_catch
->region_number
];
1313 if (o
->u
.catch.next_catch
)
1314 n
->u
.catch.next_catch
= n_array
[o
->u
.catch.next_catch
->region_number
];
1315 if (o
->u
.catch.prev_catch
)
1316 n
->u
.catch.prev_catch
= n_array
[o
->u
.catch.prev_catch
->region_number
];
1324 n
->outer
= n_array
[o
->outer
->region_number
];
1326 n
->inner
= n_array
[o
->inner
->region_number
];
1328 n
->next_peer
= n_array
[o
->next_peer
->region_number
];
1332 duplicate_eh_regions (ifun
, map
)
1333 struct function
*ifun
;
1334 struct inline_remap
*map
;
1336 int ifun_last_region_number
= ifun
->eh
->last_region_number
;
1337 struct eh_region
**n_array
, *root
, *cur
;
1340 if (ifun_last_region_number
== 0)
1343 n_array
= xcalloc (ifun_last_region_number
+ 1, sizeof (*n_array
));
1345 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1347 cur
= ifun
->eh
->region_array
[i
];
1348 if (!cur
|| cur
->region_number
!= i
)
1350 n_array
[i
] = duplicate_eh_region_1 (cur
, map
);
1352 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1354 cur
= ifun
->eh
->region_array
[i
];
1355 if (!cur
|| cur
->region_number
!= i
)
1357 duplicate_eh_region_2 (cur
, n_array
);
1360 root
= n_array
[ifun
->eh
->region_tree
->region_number
];
1361 cur
= cfun
->eh
->cur_region
;
1364 struct eh_region
*p
= cur
->inner
;
1367 while (p
->next_peer
)
1369 p
->next_peer
= root
;
1374 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1375 if (n_array
[i
] && n_array
[i
]->outer
== NULL
)
1376 n_array
[i
]->outer
= cur
;
1380 struct eh_region
*p
= cfun
->eh
->region_tree
;
1383 while (p
->next_peer
)
1385 p
->next_peer
= root
;
1388 cfun
->eh
->region_tree
= root
;
1393 i
= cfun
->eh
->last_region_number
;
1394 cfun
->eh
->last_region_number
= i
+ ifun_last_region_number
;
1400 t2r_eq (pentry
, pdata
)
1404 tree entry
= (tree
) pentry
;
1405 tree data
= (tree
) pdata
;
1407 return TREE_PURPOSE (entry
) == data
;
1414 tree entry
= (tree
) pentry
;
1415 return TYPE_HASH (TREE_PURPOSE (entry
));
1419 add_type_for_runtime (type
)
1424 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1425 TYPE_HASH (type
), INSERT
);
1428 tree runtime
= (*lang_eh_runtime_type
) (type
);
1429 *slot
= tree_cons (type
, runtime
, NULL_TREE
);
1434 lookup_type_for_runtime (type
)
1439 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1440 TYPE_HASH (type
), NO_INSERT
);
1442 /* We should have always inserted the data earlier. */
1443 return TREE_VALUE (*slot
);
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};
1455 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1456 (a tree) for a @TTypes type node we are thinking about adding. */
1459 ttypes_filter_eq (pentry
, pdata
)
1463 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1464 tree data
= (tree
) pdata
;
1466 return entry
->t
== data
;
1470 ttypes_filter_hash (pentry
)
1473 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1474 return TYPE_HASH (entry
->t
);
1477 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1478 exception specification list we are thinking about adding. */
1479 /* ??? Currently we use the type lists in the order given. Someone
1480 should put these in some canonical order. */
1483 ehspec_filter_eq (pentry
, pdata
)
1487 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1488 const struct ttypes_filter
*data
= (const struct ttypes_filter
*) pdata
;
1490 return type_list_equal (entry
->t
, data
->t
);
1493 /* Hash function for exception specification lists. */
1496 ehspec_filter_hash (pentry
)
1499 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1503 for (list
= entry
->t
; list
; list
= TREE_CHAIN (list
))
1504 h
= (h
<< 5) + (h
>> 27) + TYPE_HASH (TREE_VALUE (list
));
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
         value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
        push_uleb128 (&cfun->eh->ehspec_data,
                      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
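/* For example (a sketch, not output from any particular run): for an
   exception specification "throw (A, B)" where A and B were assigned
   ttype filters 1 and 2, the bytes { 1, 2, 0 } are appended to
   ehspec_data; if they start at offset 4, the region's filter value
   becomes -5.  */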
1572 /* Generate the action filter values to be used for CATCH and
1573 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1574 we use lots of landing pads, and so every type or list can share
1575 the same filter value, which saves table space. */
1578 assign_filter_values ()
1581 htab_t ttypes
, ehspec
;
1583 VARRAY_TREE_INIT (cfun
->eh
->ttype_data
, 16, "ttype_data");
1584 VARRAY_UCHAR_INIT (cfun
->eh
->ehspec_data
, 64, "ehspec_data");
1586 ttypes
= htab_create (31, ttypes_filter_hash
, ttypes_filter_eq
, free
);
1587 ehspec
= htab_create (31, ehspec_filter_hash
, ehspec_filter_eq
, free
);
1589 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1591 struct eh_region
*r
= cfun
->eh
->region_array
[i
];
1593 /* Mind we don't process a region more than once. */
1594 if (!r
|| r
->region_number
!= i
)
1600 /* Whatever type_list is (NULL or true list), we build a list
1601 of filters for the region. */
1602 r
->u
.catch.filter_list
= NULL_TREE
;
1604 if (r
->u
.catch.type_list
!= NULL
)
1606 /* Get a filter value for each of the types caught and store
1607 them in the region's dedicated list. */
1608 tree tp_node
= r
->u
.catch.type_list
;
1610 for (;tp_node
; tp_node
= TREE_CHAIN (tp_node
))
1612 int flt
= add_ttypes_entry (ttypes
, TREE_VALUE (tp_node
));
1613 tree flt_node
= build_int_2 (flt
, 0);
1615 r
->u
.catch.filter_list
1616 = tree_cons (NULL_TREE
, flt_node
, r
->u
.catch.filter_list
);
1621 /* Get a filter value for the NULL list also since it will need
1622 an action record anyway. */
1623 int flt
= add_ttypes_entry (ttypes
, NULL
);
1624 tree flt_node
= build_int_2 (flt
, 0);
1626 r
->u
.catch.filter_list
1627 = tree_cons (NULL_TREE
, flt_node
, r
->u
.catch.filter_list
);
1632 case ERT_ALLOWED_EXCEPTIONS
:
1634 = add_ehspec_entry (ehspec
, ttypes
, r
->u
.allowed
.type_list
);
1642 htab_delete (ttypes
);
1643 htab_delete (ehspec
);
1647 build_post_landing_pads ()
1651 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1653 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1656 /* Mind we don't process a region more than once. */
1657 if (!region
|| region
->region_number
!= i
)
1660 switch (region
->type
)
1663 /* ??? Collect the set of all non-overlapping catch handlers
1664 all the way up the chain until blocked by a cleanup. */
1665 /* ??? Outer try regions can share landing pads with inner
1666 try regions if the types are completely non-overlapping,
1667 and there are no intervening cleanups. */
1669 region
->post_landing_pad
= gen_label_rtx ();
1673 emit_label (region
->post_landing_pad
);
1675 /* ??? It is mighty inconvenient to call back into the
1676 switch statement generation code in expand_end_case.
1677 Rapid prototyping sez a sequence of ifs. */
1679 struct eh_region
*c
;
1680 for (c
= region
->u
.try.catch; c
; c
= c
->u
.catch.next_catch
)
1682 /* ??? _Unwind_ForcedUnwind wants no match here. */
1683 if (c
->u
.catch.type_list
== NULL
)
1684 emit_jump (c
->label
);
1687 /* Need for one cmp/jump per type caught. Each type
1688 list entry has a matching entry in the filter list
1689 (see assign_filter_values). */
1690 tree tp_node
= c
->u
.catch.type_list
;
1691 tree flt_node
= c
->u
.catch.filter_list
;
1695 emit_cmp_and_jump_insns
1697 GEN_INT (tree_low_cst (TREE_VALUE (flt_node
), 0)),
1698 EQ
, NULL_RTX
, word_mode
, 0, c
->label
);
1700 tp_node
= TREE_CHAIN (tp_node
);
1701 flt_node
= TREE_CHAIN (flt_node
);
1707 /* We delay the generation of the _Unwind_Resume until we generate
1708 landing pads. We emit a marker here so as to get good control
1709 flow data in the meantime. */
1711 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1717 emit_insn_before (seq
, region
->u
.try.catch->label
);
1720 case ERT_ALLOWED_EXCEPTIONS
:
1721 region
->post_landing_pad
= gen_label_rtx ();
1725 emit_label (region
->post_landing_pad
);
1727 emit_cmp_and_jump_insns (cfun
->eh
->filter
,
1728 GEN_INT (region
->u
.allowed
.filter
),
1729 EQ
, NULL_RTX
, word_mode
, 0, region
->label
);
1731 /* We delay the generation of the _Unwind_Resume until we generate
1732 landing pads. We emit a marker here so as to get good control
1733 flow data in the meantime. */
1735 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1741 emit_insn_before (seq
, region
->label
);
1745 case ERT_MUST_NOT_THROW
:
1746 region
->post_landing_pad
= region
->label
;
1751 /* Nothing to do. */
1760 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1761 _Unwind_Resume otherwise. */
1764 connect_post_landing_pads ()
1768 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1770 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1771 struct eh_region
*outer
;
1774 /* Mind we don't process a region more than once. */
1775 if (!region
|| region
->region_number
!= i
)
1778 /* If there is no RESX, or it has been deleted by flow, there's
1779 nothing to fix up. */
1780 if (! region
->resume
|| INSN_DELETED_P (region
->resume
))
1783 /* Search for another landing pad in this function. */
1784 for (outer
= region
->outer
; outer
; outer
= outer
->outer
)
1785 if (outer
->post_landing_pad
)
1791 emit_jump (outer
->post_landing_pad
);
1793 emit_library_call (unwind_resume_libfunc
, LCT_THROW
,
1794 VOIDmode
, 1, cfun
->eh
->exc_ptr
, ptr_mode
);
1798 emit_insn_before (seq
, region
->resume
);
1799 delete_insn (region
->resume
);
1805 dw2_build_landing_pads ()
1810 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1812 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1814 bool clobbers_hard_regs
= false;
1816 /* Mind we don't process a region more than once. */
1817 if (!region
|| region
->region_number
!= i
)
1820 if (region
->type
!= ERT_CLEANUP
1821 && region
->type
!= ERT_TRY
1822 && region
->type
!= ERT_ALLOWED_EXCEPTIONS
)
1827 region
->landing_pad
= gen_label_rtx ();
1828 emit_label (region
->landing_pad
);
1830 #ifdef HAVE_exception_receiver
1831 if (HAVE_exception_receiver
)
1832 emit_insn (gen_exception_receiver ());
1835 #ifdef HAVE_nonlocal_goto_receiver
1836 if (HAVE_nonlocal_goto_receiver
)
1837 emit_insn (gen_nonlocal_goto_receiver ());
1842 /* If the eh_return data registers are call-saved, then we
1843 won't have considered them clobbered from the call that
1844 threw. Kill them now. */
1847 unsigned r
= EH_RETURN_DATA_REGNO (j
);
1848 if (r
== INVALID_REGNUM
)
1850 if (! call_used_regs
[r
])
1852 emit_insn (gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, r
)));
1853 clobbers_hard_regs
= true;
      if (clobbers_hard_regs)
	/* @@@ This is a kludge.  Not all machine descriptions define a
	   blockage insn, but we must not allow the code we just generated
	   to be reordered by scheduling.  So emit an ASM_INPUT to act as
	   blockage insn.  */
	emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
1866 emit_move_insn (cfun
->eh
->exc_ptr
,
1867 gen_rtx_REG (ptr_mode
, EH_RETURN_DATA_REGNO (0)));
1868 emit_move_insn (cfun
->eh
->filter
,
1869 gen_rtx_REG (word_mode
, EH_RETURN_DATA_REGNO (1)));
1874 emit_insn_before (seq
, region
->post_landing_pad
);
1881 int directly_reachable
;
1884 int call_site_index
;
1888 sjlj_find_directly_reachable_regions (lp_info
)
1889 struct sjlj_lp_info
*lp_info
;
1892 bool found_one
= false;
1894 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1896 struct eh_region
*region
;
1897 enum reachable_code rc
;
1901 if (! INSN_P (insn
))
1904 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1905 if (!note
|| INTVAL (XEXP (note
, 0)) <= 0)
1908 region
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))];
1910 type_thrown
= NULL_TREE
;
1911 if (region
->type
== ERT_THROW
)
1913 type_thrown
= region
->u
.throw.type
;
1914 region
= region
->outer
;
1917 /* Find the first containing region that might handle the exception.
1918 That's the landing pad to which we will transfer control. */
1919 rc
= RNL_NOT_CAUGHT
;
1920 for (; region
; region
= region
->outer
)
1922 rc
= reachable_next_level (region
, type_thrown
, 0);
1923 if (rc
!= RNL_NOT_CAUGHT
)
1926 if (rc
== RNL_MAYBE_CAUGHT
|| rc
== RNL_CAUGHT
)
1928 lp_info
[region
->region_number
].directly_reachable
= 1;
1937 sjlj_assign_call_site_values (dispatch_label
, lp_info
)
1939 struct sjlj_lp_info
*lp_info
;
1944 /* First task: build the action table. */
1946 VARRAY_UCHAR_INIT (cfun
->eh
->action_record_data
, 64, "action_record_data");
1947 ar_hash
= htab_create (31, action_record_hash
, action_record_eq
, free
);
1949 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1950 if (lp_info
[i
].directly_reachable
)
1952 struct eh_region
*r
= cfun
->eh
->region_array
[i
];
1953 r
->landing_pad
= dispatch_label
;
1954 lp_info
[i
].action_index
= collect_one_action_chain (ar_hash
, r
);
1955 if (lp_info
[i
].action_index
!= -1)
1956 cfun
->uses_eh_lsda
= 1;
1959 htab_delete (ar_hash
);
1961 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1962 landing pad label for the region. For sjlj though, there is one
1963 common landing pad from which we dispatch to the post-landing pads.
1965 A region receives a dispatch index if it is directly reachable
1966 and requires in-function processing. Regions that share post-landing
1967 pads may share dispatch indices. */
1968 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1969 (see build_post_landing_pads) so we don't bother checking for it. */
1972 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1973 if (lp_info
[i
].directly_reachable
)
1974 lp_info
[i
].dispatch_index
= index
++;
  /* Finally: assign call-site values.  In dwarf2 terms, this would be
1977 the region number assigned by convert_to_eh_region_ranges, but
1978 handles no-action and must-not-throw differently. */
1981 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1982 if (lp_info
[i
].directly_reachable
)
1984 int action
= lp_info
[i
].action_index
;
1986 /* Map must-not-throw to otherwise unused call-site index 0. */
1989 /* Map no-action to otherwise unused call-site index -1. */
1990 else if (action
== -1)
1992 /* Otherwise, look it up in the table. */
1994 index
= add_call_site (GEN_INT (lp_info
[i
].dispatch_index
), action
);
1996 lp_info
[i
].call_site_index
= index
;
2001 sjlj_mark_call_sites (lp_info
)
2002 struct sjlj_lp_info
*lp_info
;
2004 int last_call_site
= -2;
2007 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2009 struct eh_region
*region
;
2011 rtx note
, before
, p
;
2013 /* Reset value tracking at extended basic block boundaries. */
2014 if (GET_CODE (insn
) == CODE_LABEL
)
2015 last_call_site
= -2;
2017 if (! INSN_P (insn
))
2020 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
2026 if (GET_CODE (insn
) == CALL_INSN
2027 || (flag_non_call_exceptions
2028 && may_trap_p (PATTERN (insn
))))
2029 this_call_site
= -1;
2035 /* Calls that are known to not throw need not be marked. */
2036 if (INTVAL (XEXP (note
, 0)) <= 0)
2039 region
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))];
2040 this_call_site
= lp_info
[region
->region_number
].call_site_index
;
2043 if (this_call_site
== last_call_site
)
      /* Don't separate a call from its argument loads.  */
2048 if (GET_CODE (insn
) == CALL_INSN
)
2049 before
= find_first_parameter_load (insn
, NULL_RTX
);
2052 mem
= adjust_address (cfun
->eh
->sjlj_fc
, TYPE_MODE (integer_type_node
),
2053 sjlj_fc_call_site_ofs
);
2054 emit_move_insn (mem
, GEN_INT (this_call_site
));
2058 emit_insn_before (p
, before
);
2059 last_call_site
= this_call_site
;
2063 /* Construct the SjLj_Function_Context. */
2066 sjlj_emit_function_enter (dispatch_label
)
2069 rtx fn_begin
, fc
, mem
, seq
;
2071 fc
= cfun
->eh
->sjlj_fc
;
  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
2078 assemble_external_libcall (eh_personality_libfunc
);
2079 mem
= adjust_address (fc
, Pmode
, sjlj_fc_personality_ofs
);
2080 emit_move_insn (mem
, eh_personality_libfunc
);
2082 mem
= adjust_address (fc
, Pmode
, sjlj_fc_lsda_ofs
);
2083 if (cfun
->uses_eh_lsda
)
2086 ASM_GENERATE_INTERNAL_LABEL (buf
, "LLSDA", current_function_funcdef_no
);
2087 emit_move_insn (mem
, gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
)));
2090 emit_move_insn (mem
, const0_rtx
);
2092 #ifdef DONT_USE_BUILTIN_SETJMP
2095 x
= emit_library_call_value (setjmp_libfunc
, NULL_RTX
, LCT_RETURNS_TWICE
,
2096 TYPE_MODE (integer_type_node
), 1,
2097 plus_constant (XEXP (fc
, 0),
2098 sjlj_fc_jbuf_ofs
), Pmode
);
2100 note
= emit_note (NULL
, NOTE_INSN_EXPECTED_VALUE
);
2101 NOTE_EXPECTED_VALUE (note
) = gen_rtx_EQ (VOIDmode
, x
, const0_rtx
);
2103 emit_cmp_and_jump_insns (x
, const0_rtx
, NE
, 0,
2104 TYPE_MODE (integer_type_node
), 0, dispatch_label
);
2107 expand_builtin_setjmp_setup (plus_constant (XEXP (fc
, 0), sjlj_fc_jbuf_ofs
),
2111 emit_library_call (unwind_sjlj_register_libfunc
, LCT_NORMAL
, VOIDmode
,
2112 1, XEXP (fc
, 0), Pmode
);
2117 /* ??? Instead of doing this at the beginning of the function,
2118 do this in a block that is at loop level 0 and dominates all
2119 can_throw_internal instructions. */
2121 for (fn_begin
= get_insns (); ; fn_begin
= NEXT_INSN (fn_begin
))
2122 if (GET_CODE (fn_begin
) == NOTE
2123 && NOTE_LINE_NUMBER (fn_begin
) == NOTE_INSN_FUNCTION_BEG
)
2125 emit_insn_after (seq
, fn_begin
);
2128 /* Call back from expand_function_end to know where we should put
2129 the call to unwind_sjlj_unregister_libfunc if needed. */
2132 sjlj_emit_function_exit_after (after
)
2135 cfun
->eh
->sjlj_exit_after
= after
;
2139 sjlj_emit_function_exit ()
2145 emit_library_call (unwind_sjlj_unregister_libfunc
, LCT_NORMAL
, VOIDmode
,
2146 1, XEXP (cfun
->eh
->sjlj_fc
, 0), Pmode
);
2151 /* ??? Really this can be done in any block at loop level 0 that
2152 post-dominates all can_throw_internal instructions. This is
2153 the last possible moment. */
2155 emit_insn_after (seq
, cfun
->eh
->sjlj_exit_after
);
2159 sjlj_emit_dispatch_table (dispatch_label
, lp_info
)
2161 struct sjlj_lp_info
*lp_info
;
2163 int i
, first_reachable
;
2164 rtx mem
, dispatch
, seq
, fc
;
2166 fc
= cfun
->eh
->sjlj_fc
;
2170 emit_label (dispatch_label
);
2172 #ifndef DONT_USE_BUILTIN_SETJMP
2173 expand_builtin_setjmp_receiver (dispatch_label
);
2176 /* Load up dispatch index, exc_ptr and filter values from the
2177 function context. */
2178 mem
= adjust_address (fc
, TYPE_MODE (integer_type_node
),
2179 sjlj_fc_call_site_ofs
);
2180 dispatch
= copy_to_reg (mem
);
2182 mem
= adjust_address (fc
, word_mode
, sjlj_fc_data_ofs
);
2183 if (word_mode
!= Pmode
)
2185 #ifdef POINTERS_EXTEND_UNSIGNED
2186 mem
= convert_memory_address (Pmode
, mem
);
2188 mem
= convert_to_mode (Pmode
, mem
, 0);
2191 emit_move_insn (cfun
->eh
->exc_ptr
, mem
);
2193 mem
= adjust_address (fc
, word_mode
, sjlj_fc_data_ofs
+ UNITS_PER_WORD
);
2194 emit_move_insn (cfun
->eh
->filter
, mem
);
2196 /* Jump to one of the directly reachable regions. */
2197 /* ??? This really ought to be using a switch statement. */
2199 first_reachable
= 0;
2200 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2202 if (! lp_info
[i
].directly_reachable
)
2205 if (! first_reachable
)
2207 first_reachable
= i
;
2211 emit_cmp_and_jump_insns (dispatch
, GEN_INT (lp_info
[i
].dispatch_index
),
2212 EQ
, NULL_RTX
, TYPE_MODE (integer_type_node
), 0,
2213 cfun
->eh
->region_array
[i
]->post_landing_pad
);
2219 emit_insn_before (seq
, (cfun
->eh
->region_array
[first_reachable
]
2220 ->post_landing_pad
));
2224 sjlj_build_landing_pads ()
2226 struct sjlj_lp_info
*lp_info
;
2228 lp_info
= (struct sjlj_lp_info
*) xcalloc (cfun
->eh
->last_region_number
+ 1,
2229 sizeof (struct sjlj_lp_info
));
2231 if (sjlj_find_directly_reachable_regions (lp_info
))
2233 rtx dispatch_label
= gen_label_rtx ();
2236 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node
),
2237 int_size_in_bytes (sjlj_fc_type_node
),
2238 TYPE_ALIGN (sjlj_fc_type_node
));
2240 sjlj_assign_call_site_values (dispatch_label
, lp_info
);
2241 sjlj_mark_call_sites (lp_info
);
2243 sjlj_emit_function_enter (dispatch_label
);
2244 sjlj_emit_dispatch_table (dispatch_label
, lp_info
);
2245 sjlj_emit_function_exit ();
2252 finish_eh_generation ()
2254 /* Nothing to do if no regions created. */
2255 if (cfun
->eh
->region_tree
== NULL
)
2258 /* The object here is to provide find_basic_blocks with detailed
2259 information (via reachable_handlers) on how exception control
2260 flows within the function. In this first pass, we can include
2261 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2262 regions, and hope that it will be useful in deleting unreachable
2263 handlers. Subsequently, we will generate landing pads which will
2264 connect many of the handlers, and then type information will not
2265 be effective. Still, this is a win over previous implementations. */
2267 cleanup_cfg (CLEANUP_PRE_LOOP
| CLEANUP_NO_INSN_DEL
);
2269 /* These registers are used by the landing pads. Make sure they
2270 have been generated. */
2271 get_exception_pointer (cfun
);
2272 get_exception_filter (cfun
);
2274 /* Construct the landing pads. */
2276 assign_filter_values ();
2277 build_post_landing_pads ();
2278 connect_post_landing_pads ();
2279 if (USING_SJLJ_EXCEPTIONS
)
2280 sjlj_build_landing_pads ();
2282 dw2_build_landing_pads ();
2284 cfun
->eh
->built_landing_pads
= 1;
2286 /* We've totally changed the CFG. Start over. */
2287 find_exception_handler_labels ();
2288 rebuild_jump_labels (get_insns ());
2289 find_basic_blocks (get_insns (), max_reg_num (), 0);
2290 cleanup_cfg (CLEANUP_PRE_LOOP
| CLEANUP_NO_INSN_DEL
);
static hashval_t
ehl_hash (pentry)
     const PTR pentry;
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}

static int
ehl_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
2315 /* This section handles removing dead code for flow. */
2317 /* Remove LABEL from exception_handler_label_map. */
2320 remove_exception_handler_label (label
)
2323 struct ehl_map_entry
**slot
, tmp
;
2325 /* If exception_handler_label_map was not built yet,
2326 there is nothing to do. */
2327 if (cfun
->eh
->exception_handler_label_map
== NULL
)
2331 slot
= (struct ehl_map_entry
**)
2332 htab_find_slot (cfun
->eh
->exception_handler_label_map
, &tmp
, NO_INSERT
);
2336 htab_clear_slot (cfun
->eh
->exception_handler_label_map
, (void **) slot
);
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (region)
     struct eh_region *region;
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;
  int i;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
				{ cfun->eh->region_array[i] = outer; });
    }

  if (outer)
    {
      if (! outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_a_or_b (outer->aka, outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      if (try->type != ERT_TRY)
	abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}
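/* Editor's sketch, for illustration only: given a try region whose catch
   chain is A <-> B <-> C (u.try.catch == A, u.try.last_catch == C),
   removing the ERT_CATCH region B just relinks A->next_catch to C and
   C->prev_catch to A.  Removing the last remaining catch leaves the try
   with no handlers at all, which is why the code above recurses and
   removes the ERT_TRY region itself in that case.  */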
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (label)
     rtx label;
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (callback)
     void (*callback) PARAMS ((rtx));
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
		 (void *) callback);
}

static int
for_each_eh_label_1 (pentry, data)
     void **pentry;
     void *data;
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;

  (*callback) (entry->label);
  return 1;
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info GTY(())
{
  tree types_caught;
  tree types_allowed;
  rtx handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (handled, type)
     tree handled, type;
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) == type)
	  return 1;
    }
  else
    {
      for (t = handled; t; t = TREE_CHAIN (t))
	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
	  return 1;
    }

  return 0;
}
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (info, lp_region, region)
     struct reachable_info *info;
     struct eh_region *lp_region;
     struct eh_region *region;
{
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      if (! info->handlers)
	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (region, type_thrown, info)
     struct eh_region *region;
     tree type_thrown;
     struct reachable_info *info;
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.try.catch; c; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    /* ??? _Unwind_ForcedUnwind will want outer cleanups
	       to be run as well.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previous, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
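/* Editor's sketch, for illustration only: suppose a call sits inside
   try { ... } catch (Base) nested within an outer cleanup, and the exact
   type thrown is known to be Derived.  Walking outward, the try level
   returns RNL_CAUGHT if the front end's lang_eh_type_covers says Base
   covers Derived (stopping the walk), RNL_NOT_CAUGHT if it definitely
   does not, and RNL_MAYBE_CAUGHT when no front-end help is available;
   only in the latter two cases does the walk continue to the outer
   cleanup, since callers stop as soon as a level returns >= RNL_CAUGHT.  */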
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

rtx
reachable_handlers (insn)
     rtx insn;
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
      break;

  return info.handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;
  int i;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      /* A CALL_PLACEHOLDER carries alternate insn sequences; check
	 each of them.  */
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub; sub = NEXT_INSN (sub))
	    if (can_throw_internal (sub))
	      return true;
	}
      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;
  int i;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub; sub = NEXT_INSN (sub))
	    if (can_throw_external (sub))
	      return true;
	}
      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls.  */

void
set_nothrow_function_flags ()
{
  rtx insn;

  current_function_nothrow = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	current_function_nothrow = 0;

	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
	  {
	    cfun->all_throwers_are_sibcalls = 0;
	    return;
	  }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
	current_function_nothrow = 0;

	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
	  {
	    cfun->all_throwers_are_sibcalls = 0;
	    return;
	  }
      }
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (arglist)
     tree arglist;
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (stackadj_tree, handler_tree)
     tree stackadj_tree, handler_tree;
{
  rtx stackadj, handler;

  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (stackadj) != Pmode)
    stackadj = convert_memory_address (Pmode, stackadj);

  if (GET_MODE (handler) != Pmode)
    handler = convert_memory_address (Pmode, handler);
#endif

  if (! cfun->eh->ehr_label)
    {
      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
      cfun->eh->ehr_handler = copy_to_reg (handler);
      cfun->eh->ehr_label = gen_label_rtx ();
    }
  else
    {
      if (stackadj != cfun->eh->ehr_stackadj)
	emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
      if (handler != cfun->eh->ehr_handler)
	emit_move_insn (cfun->eh->ehr_handler, handler);
    }

  emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return ()
{
  rtx sa, ra, around_label;

  if (! cfun->eh->ehr_label)
    return;

  sa = EH_RETURN_STACKADJ_RTX;
  if (! sa)
    {
      error ("__builtin_eh_return not supported on this target");
      return;
    }

  current_function_calls_eh_return = 1;

  around_label = gen_label_rtx ();
  emit_move_insn (sa, const0_rtx);
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
  else
#endif
    {
      ra = EH_RETURN_HANDLER_RTX;
      if (! ra)
	{
	  error ("__builtin_eh_return not supported on this target");
	  ra = gen_reg_rtx (Pmode);
	}

      emit_move_insn (sa, cfun->eh->ehr_stackadj);
      emit_move_insn (ra, cfun->eh->ehr_handler);
    }

  emit_label (around_label);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */
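/* Editor's note, for illustration only: a call covered solely by cleanup
   regions ends up with action 0 (landing pad, but no action record); a
   call covered by no EH region at all gets -1 and may never produce a
   region note if no lsda turns out to be needed; a call inside a
   must-not-throw region gets -2 and is omitted from the call-site table
   entirely; -3 never reaches the output and merely marks "outer regions
   not yet examined" while a chain is being collected.  */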
struct action_record
{
  int offset;
  int filter;
  int next;
};

static int
action_record_eq (pentry, pdata)
     const void *pentry;
     const void *pdata;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (pentry)
     const void *pentry;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
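/* Editor's sketch, for illustration only: if the first record added has
   filter 1 and no successor, it occupies bytes 0-1 of action_record_data
   and is known by the 1-based offset 1.  A second record with filter 2
   chaining to it then gets offset 3; its filter byte lands at index 2,
   and the stored link becomes 1 - 4 = -3, i.e. the runtime finds the
   previous record three bytes before the link byte it just read.  */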
static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      {
	/* Process the associated catch regions in reverse order.
	   If there's a catch-all handler, then we don't need to
	   search outer regions.  Use a magic -3 value to record
	   that we haven't done the outer search.  */
	next = -3;
	for (c = region->u.try.last_catch; c; c = c->u.catch.prev_catch)
	  {
	    if (c->u.catch.type_list == NULL)
	      {
		/* Retrieve the filter from the head of the filter list
		   where we have stored it (see assign_filter_values).  */
		int filter
		  = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

		next = add_action_record (ar_hash, filter, 0);
	      }
	    else
	      {
		/* Once the outer search is done, trigger an action record for
		   each filter we have.  */
		tree flt_node;

		if (next == -3)
		  {
		    next = collect_one_action_chain (ar_hash, region->outer);

		    /* If there is no next action, terminate the chain.  */
		    if (next == -1)
		      next = 0;
		    /* If all outer actions are cleanups or must_not_throw,
		       we'll have no action record for it, since we had wanted
		       to encode these states in the call-site record directly.
		       Add a cleanup action to the chain to catch these.  */
		    else if (next <= 0)
		      next = add_action_record (ar_hash, 0, 0);
		  }

		flt_node = c->u.catch.filter_list;
		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		  {
		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		    next = add_action_record (ar_hash, filter, next);
		  }
	      }
	  }
	return next;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
				next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
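/* Editor's sketch, for illustration only: for a call in
   "cleanup { try { call; } catch (A) catch (B) }", assuming
   assign_filter_values gave A filter 1 and B filter 2, the chain built
   here is the record for filter 1, linking to the record for filter 2,
   linking to a zero-filter cleanup record that terminates the list; the
   call site stores the 1-based offset of the filter-1 record.  Had there
   been only the cleanup, the chain would have compressed to the special
   value 0 instead.  */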
static int
add_call_site (landing_pad, action)
     rtx landing_pad;
     int action;
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = (struct call_site_record *)
	ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	if (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    if (! (GET_CODE (insn) == CALL_INSN
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      this_action = -1;
	    else
	      {
		region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
		this_action = collect_one_action_chain (ar_hash, region);
	      }
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
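/* Editor's sketch, for illustration only: after this pass, a throwing call
   is bracketed roughly as

	NOTE_INSN_EH_REGION_BEG   (NOTE_EH_HANDLER == call-site index N)
	  call_insn ...
	NOTE_INSN_EH_REGION_END   (NOTE_EH_HANDLER == N)

   and consecutive insns that share both the action chain and the landing
   pad are folded into a single bracket, so the note number now names an
   entry in the call-site table rather than an EH region number.  */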
static void
push_uleb128 (data_area, value)
     varray_type *data_area;
     unsigned int value;
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (data_area, value)
     varray_type *data_area;
     int value;
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
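/* Editor's note, worked examples for illustration only (they match the
   encodings given in the DWARF specification): push_uleb128 turns 624485
   into the bytes 0xe5 0x8e 0x26 -- seven value bits per byte, least
   significant group first, high bit set on every byte but the last.
   push_sleb128 turns -624485 into 0x9b 0xf1 0x59; its loop stops once the
   remaining value is pure sign extension and the sign bit (0x40) of the
   last byte emitted agrees with it.  */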
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table ()
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
static void
sjlj_output_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
void
output_function_exception_table ()
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  (*targetm.asm_out.exception_section) ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA",
			     current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	type = integer_zero_node;
      else
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);
}

#include "gt-except.h"