/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */
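/* As a concrete illustration of the mechanism described above, a
   hedged C++ sketch (hypothetical names, not part of this file):

	void open_file (const char *name)
	{
	  if (!exists (name))
	    throw file_error (name);	// signal the exceptional event
	}

	void caller ()
	{
	  try {
	    open_file ("data");
	  } catch (const file_error &e) {
	    // control is transferred here, possibly from several
	    // frames down the call stack
	  }
	}
*/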
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
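/* For illustration only (a hedged sketch, not quoted from any real
   target's headers): a target that returns exception data in its first
   two general registers might define, among its target macros,

	#define EH_RETURN_DATA_REGNO(N)  ((N) < 2 ? (N) : INVALID_REGNUM)

   which is the shape dw2_build_landing_pads below relies on when it
   enumerates registers until INVALID_REGNUM is returned.  */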
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;
/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
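/* For reference, a hedged sketch of the layout these offsets describe,
   as assembled by init_eh below (the authoritative definition lives in
   unwind-sjlj.c; member types here are approximations):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;	// __prev
	  int call_site;			// __call_site
	  unsigned long data[4];		// __data
	  void *personality;			// __personality
	  void *lsda;				// __lsda
	  void *jbuf[];				// __jbuf
	};
*/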
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static int t2r_eq			PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash		PARAMS ((const PTR));
static void add_type_for_runtime	PARAMS ((tree));
static tree lookup_type_for_runtime	PARAMS ((tree));

static struct eh_region *expand_eh_region_end	PARAMS ((void));

static rtx get_exception_filter	PARAMS ((struct function *));

static void collect_eh_region_array	PARAMS ((void));
static void resolve_fixup_regions	PARAMS ((void));
static void remove_fixup_regions	PARAMS ((void));
static void remove_unreachable_regions	PARAMS ((rtx));
static void convert_from_eh_region_ranges_1	PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1	PARAMS ((struct eh_region *,
						 struct inline_remap *));
static void duplicate_eh_region_2	PARAMS ((struct eh_region *,
						 struct eh_region **));
static int ttypes_filter_eq		PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash	PARAMS ((const PTR));
static int ehspec_filter_eq		PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash	PARAMS ((const PTR));
static int add_ttypes_entry		PARAMS ((htab_t, tree));
static int add_ehspec_entry		PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values	PARAMS ((void));
static void build_post_landing_pads	PARAMS ((void));
static void connect_post_landing_pads	PARAMS ((void));
static void dw2_build_landing_pads	PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter	PARAMS ((rtx));
static void sjlj_emit_function_exit	PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads	PARAMS ((void));

static hashval_t ehl_hash		PARAMS ((const PTR));
static int ehl_eq			PARAMS ((const PTR, const PTR));
static void add_ehl_entry		PARAMS ((rtx, struct eh_region *));
static void remove_exception_handler_label	PARAMS ((rtx));
static void remove_eh_handler		PARAMS ((struct eh_region *));
static int for_each_eh_label_1		PARAMS ((PTR *, PTR));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled		PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq		PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash	PARAMS ((const PTR));
static int add_action_record		PARAMS ((htab_t, int, int));
static int collect_one_action_chain	PARAMS ((htab_t, struct eh_region *));
static int add_call_site		PARAMS ((rtx, int));

static void push_uleb128		PARAMS ((varray_type *, unsigned int));
static void push_sleb128		PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table	PARAMS ((void));
static int sjlj_size_of_call_site_table	PARAMS ((void));
#endif
static void dw2_output_call_site_table	PARAMS ((void));
static void sjlj_output_call_site_table	PARAMS ((void));
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh ()
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *)
    ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
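/* Taken together with expand_eh_region_end below, the emitted stream for
   one region has this shape (an illustrative sketch, not literal RTL):

	NOTE_INSN_EH_REGION_BEG		;; NOTE_EH_HANDLER == n
	  ... insns belonging to region n ...
	NOTE_INSN_EH_REGION_END		;; NOTE_EH_HANDLER == n

   convert_from_eh_region_ranges later lowers these note pairs into
   per-insn REG_EH_REGION notes and deletes the region notes.  */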
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (ptr_mode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with potentially several exception
   types, which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}
/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (insns)
     rtx insns;
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						REG_NOTES (insn));

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}
void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (label, region)
     rtx label;
     struct eh_region *region;
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = (struct ehl_map_entry *) ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}
void
find_exception_handler_labels ()
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
bool
current_function_has_exception_handlers ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}
static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}
1498 ehspec_filter_hash (pentry
)
1501 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1505 for (list
= entry
->t
; list
; list
= TREE_CHAIN (list
))
1506 h
= (h
<< 5) + (h
>> 27) + TYPE_HASH (TREE_VALUE (list
));
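/* Note the mixing step above: for a 32-bit hashval_t,
   (h << 5) + (h >> 27) is H rotated left by five bits (the shifted
   halves cannot carry into each other), so every entry in the list
   keeps influencing the final hash value.  */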
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* ??? _Unwind_ForcedUnwind wants no match here.  */
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
static void
dw2_build_landing_pads ()
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insn_before (seq, region->post_landing_pad);
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (lp_info)
     struct sjlj_lp_info *lp_info;
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (lp_info)
     struct sjlj_lp_info *lp_info;
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (dispatch_label)
     rtx dispatch_label;
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (GET_CODE (fn_begin) == NOTE
	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
      break;
  emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (after)
     rtx after;
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit ()
{
  rtx seq;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  emit_insn_after (seq, cfun->eh->sjlj_exit_after);
}
static void
sjlj_emit_dispatch_table (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != Pmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (Pmode, mem);
#else
      mem = convert_to_mode (Pmode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
			  ->post_landing_pad));
}
static void
sjlj_build_landing_pads ()
{
  struct sjlj_lp_info *lp_info;

  lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
					     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
void
finish_eh_generation ()
{
  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}
static hashval_t
ehl_hash (pentry)
     const PTR pentry;
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}
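/* The multiplier above is Knuth's multiplicative hashing constant:
   0x9e3779b9 == floor (2^32 / phi).  Multiplying by it scatters
   consecutive label numbers across the whole hash space, which
   the consecutive integers themselves would not do.  */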
static int
ehl_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (label)
     rtx label;
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    abort ();

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (region)
     struct eh_region *region;
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      int i;
      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
	{ cfun->eh->region_array[i] = outer; });
    }

  if (outer)
    {
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_a_or_b (outer->aka, outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      if (try->type != ERT_TRY)
	abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (label)
     rtx label;
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (callback)
     void (*callback) PARAMS ((rtx));
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
		 (void *) callback);
}

static int
for_each_eh_label_1 (pentry, data)
     PTR *pentry;
     PTR data;
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;

  (*callback) (entry->label);
  return 1;
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info GTY(())
{
  tree types_caught;
  tree types_allowed;
  rtx handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (handled, type)
     tree handled, type;
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) == type)
	  return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
	  return 1;
    }

  return 0;
}
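/* For illustration only (a hedged sketch, not part of the original
   file): a front end supplies lang_eh_type_covers to answer "does
   handler type A catch thrown type B".  A toy predicate over a
   hypothetical single-inheritance base link shows the shape of such
   a hook:  */
#if 0
struct toy_type
{
  const char *name;
  struct toy_type *base;	/* single inheritance only */
};

/* Return nonzero if A is B or a base class of B.  */
static int
toy_eh_type_covers (struct toy_type *a, struct toy_type *b)
{
  for (; b; b = b->base)
    if (a == b)
      return 1;
  return 0;
}
#endif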
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (info, lp_region, region)
     struct reachable_info *info;
     struct eh_region *lp_region;
     struct eh_region *region;
{
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      if (! info->handlers)
	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (region, type_thrown, info)
     struct eh_region *region;
     tree type_thrown;
     struct reachable_info *info;
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    /* ??? _Unwind_ForcedUnwind will want outer cleanups
	       to be run as well.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previously, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
  return RNL_NOT_CAUGHT;
}
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

rtx
reachable_handlers (insn)
     rtx insn;
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
      break;

  return info.handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_internal (sub))
	      return true;
	}
      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_external (sub))
	      return true;
	}
      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls.  */

void
set_nothrow_function_flags ()
{
  rtx insn;

  current_function_nothrow = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	current_function_nothrow = 0;

	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
	  {
	    cfun->all_throwers_are_sibcalls = 0;
	    return;
	  }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
	current_function_nothrow = 0;

	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
	  {
	    cfun->all_throwers_are_sibcalls = 0;
	    return;
	  }
      }
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (arglist)
     tree arglist;
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
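/* For illustration only (not part of the original file): an EH runtime
   queries the data register numbers with constant arguments, in the
   style of libgcc's unwinder code, e.g.

     _Unwind_SetGR (context, __builtin_eh_return_data_regno (0), ...);
     _Unwind_SetGR (context, __builtin_eh_return_data_regno (1), ...);

   which is why the INTEGER_CST check above rejects anything that
   cannot be folded at compile time.  */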
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
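/* For illustration only (a hedged sketch with hypothetical values, not
   taken from any real port): a target whose call instruction stores a
   tagged address of the call itself might define

     #define MASK_RETURN_ADDR  GEN_INT (~(HOST_WIDE_INT) 1)
     #define RETURN_ADDR_OFFSET  4

   so that extract masks off the tag bit and then skips the 4-byte call
   to reach the true return point; frob (below) applies the inverse
   offset before the value is stored back.  */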
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (stackadj_tree, handler_tree)
     tree stackadj_tree, handler_tree;
{
  rtx stackadj, handler;

  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (stackadj) != Pmode)
    stackadj = convert_memory_address (Pmode, stackadj);

  if (GET_MODE (handler) != Pmode)
    handler = convert_memory_address (Pmode, handler);
#endif

  if (! cfun->eh->ehr_label)
    {
      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
      cfun->eh->ehr_handler = copy_to_reg (handler);
      cfun->eh->ehr_label = gen_label_rtx ();
    }
  else
    {
      if (stackadj != cfun->eh->ehr_stackadj)
	emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
      if (handler != cfun->eh->ehr_handler)
	emit_move_insn (cfun->eh->ehr_handler, handler);
    }

  emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return ()
{
  rtx sa, ra, around_label;

  if (! cfun->eh->ehr_label)
    return;

  sa = EH_RETURN_STACKADJ_RTX;
  if (! sa)
    {
      error ("__builtin_eh_return not supported on this target");
      return;
    }

  current_function_calls_eh_return = 1;

  around_label = gen_label_rtx ();
  emit_move_insn (sa, const0_rtx);
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
  else
#endif
    {
      ra = EH_RETURN_HANDLER_RTX;
      if (! ra)
	{
	  error ("__builtin_eh_return not supported on this target");
	  ra = gen_reg_rtx (Pmode);
	}

      emit_move_insn (sa, cfun->eh->ehr_stackadj);
      emit_move_insn (ra, cfun->eh->ehr_handler);
    }

  emit_label (around_label);
}
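/* For illustration only (not part of the original file): the builtin
   expanded above is what an unwinder's context-install step emits, in
   the style of libgcc's uw_install_context:

     __builtin_eh_return (offset, handler);

   where OFFSET is the stack adjustment and HANDLER the landing pad
   address; control then reaches the ehr_label code generated here.  */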
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

static int
action_record_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (pentry)
     const PTR pentry;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
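/* For illustration only (a worked example, not part of the original
   file): suppose record A (filter 2, next 0) is emitted first, and
   record B (filter 1) then chains to it.  A occupies bytes 1-2 of
   action_record_data (sleb128 0x02, 0x00), so A's 1-based index is 1.
   When B is added, its filter byte lands at index 3 and its "next"
   byte at index 4; the stored displacement is 1 - 4 = -3 (sleb128
   0x7d), which lets the runtime step from B's next field back to A.  */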
static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
				next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
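/* For illustration only (not part of the original file): for a try
   with two catches (filters 1 and 2) and no enclosing regions, the
   ERT_TRY case walks the catches in reverse.  The outer search yields
   -1 (no action), which terminates the chain (next = 0); the filter-2
   catch then produces record (2, 0) and the filter-1 catch chains a
   record (1, ->2) onto it, so the call site's action index names the
   filter-1 record and the runtime tries filter 1, then filter 2.  */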
static int
add_call_site (landing_pad, action)
     rtx landing_pad;
     int action;
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = (struct call_site_record *)
	ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	if (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    if (! (GET_CODE (insn) == CALL_INSN
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
static void
push_uleb128 (data_area, value)
     varray_type *data_area;
     unsigned int value;
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (data_area, value)
     varray_type *data_area;
     int value;
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
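/* For illustration only (not part of the original file): a standalone
   sketch of the same encodings.  624485 encodes as the uleb128 bytes
   0xe5 0x8e 0x26, and -2 encodes as the single sleb128 byte 0x7e.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int uvalue = 624485;
  int svalue = -2, more;
  unsigned char byte;

  do
    {
      byte = uvalue & 0x7f;
      uvalue >>= 7;
      if (uvalue)
	byte |= 0x80;
      printf ("%#x ", byte);	/* prints 0xe5 0x8e 0x26 */
    }
  while (uvalue);
  printf ("\n");

  do
    {
      byte = svalue & 0x7f;
      svalue >>= 7;	/* arithmetic shift assumed, as in push_sleb128 */
      more = ! ((svalue == 0 && (byte & 0x40) == 0)
		|| (svalue == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      printf ("%#x ", byte);	/* prints 0x7e */
    }
  while (more);
  printf ("\n");
  return 0;
}
#endif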
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table ()
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
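/* For illustration only (a hedged sketch, not part of the original
   file): with HAVE_AS_LEB128, one call-site row comes out roughly as

	.uleb128 .LEHB0-.LFB1	# region 0 start
	.uleb128 .LEHE0-.LEHB0	# length
	.uleb128 .L5-.LFB1	# landing pad
	.uleb128 0x1		# action

   where the exact label names and comment text depend on the target's
   label prefixes and on the dw2_asm_output_* routines.  */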
static void
sjlj_output_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section ()
{
  if (targetm.have_named_sections)
    {
      int flags;
#ifdef HAVE_LD_RO_RW_SECTION_MIXING
      int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

      flags = (! flag_pic
	       || ((tt_format & 0x70) != DW_EH_PE_absptr
		   && (tt_format & 0x70) != DW_EH_PE_aligned))
	      ? 0 : SECTION_WRITE;
#else
      flags = SECTION_WRITE;
#endif
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}
void
output_function_exception_table ()
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  (*targetm.asm_out.exception_section) ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
				     current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	type = integer_zero_node;
      else
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);
}

#include "gt-except.h"