/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this. ]  */
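/* As a rough sketch of the intended usage (not a complete reference),
   a front end lowering a C++ try/catch statement is expected to pair
   the entry points defined below roughly like this:

	expand_eh_region_start ();
	  ... expand the body of the try block ...
	expand_start_all_catch ();
	  expand_start_catch (type1);
	    ... expand the handler for type1 ...
	  expand_end_catch ();
	  expand_start_catch (NULL_TREE);	-- a catch-all clause
	    ... expand the catch-all handler ...
	  expand_end_catch ();
	expand_end_all_catch ();

   Cleanups (destructors) instead pair expand_eh_region_start with
   expand_eh_region_end_cleanup.  The exact sequence a front end emits
   is up to the front end.  */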
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */

  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */

  /* Each region does exactly one thing.  */
    ERT_ALLOWED_EXCEPTIONS,

  /* Holds the action to perform based on the preceding type.  */

    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;

    /* The list through the catch handlers, the type object
       matched, and a pointer to the generated code.  */
      struct eh_region *next_catch;
      struct eh_region *prev_catch;

    /* A tree_list of allowed types.  */

    /* The type given by a call to "throw foo();", or discovered
       otherwise.  */

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
      struct eh_region *real_region;

  /* Entry point for this region's handler before landing pads are built.  */

  /* Entry point for this region's handler from the runtime eh library.  */

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
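/* For illustration only: given nested source regions

	try {			-- region A
	  try { ... }		-- region B
	  catch (...) { ... }
	}
	catch (...) { ... }

   the intent is that A->inner points to B, B->outer points back to A,
   and regions at the same depth are chained through next_peer.  (This
   is a sketch of the invariants implied by the fields above, not text
   taken from elsewhere.)  */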
/* Used to save exception status for each function.  */

  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record *call_site_data;
  int call_site_data_used;
  int call_site_data_size;
static void mark_eh_region			PARAMS ((struct eh_region *));

static int t2r_eq				PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash			PARAMS ((const PTR));
static int t2r_mark_1				PARAMS ((PTR *, PTR));
static void t2r_mark				PARAMS ((PTR));
static void add_type_for_runtime		PARAMS ((tree));
static tree lookup_type_for_runtime		PARAMS ((tree));

static struct eh_region *expand_eh_region_end	PARAMS ((void));

static rtx get_exception_filter		PARAMS ((struct function *));

static void collect_eh_region_array		PARAMS ((void));
static void resolve_fixup_regions		PARAMS ((void));
static void remove_fixup_regions		PARAMS ((void));
static void convert_from_eh_region_ranges_1	PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1	PARAMS ((struct eh_region *,
						 struct inline_remap *));
static void duplicate_eh_region_2		PARAMS ((struct eh_region *,
						 struct eh_region **));
static int ttypes_filter_eq			PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash		PARAMS ((const PTR));
static int ehspec_filter_eq			PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash		PARAMS ((const PTR));
static int add_ttypes_entry			PARAMS ((htab_t, tree));
static int add_ehspec_entry			PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values		PARAMS ((void));
static void build_post_landing_pads		PARAMS ((void));
static void connect_post_landing_pads		PARAMS ((void));
static void dw2_build_landing_pads		PARAMS ((void));

static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter		PARAMS ((rtx));
static void sjlj_emit_function_exit		PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads		PARAMS ((void));

static void remove_exception_handler_label	PARAMS ((rtx));
static void remove_eh_handler			PARAMS ((struct eh_region *));

struct reachable_info;

/* The return value of reachable_next_level.  */
  /* The given exception is not processed by the given region.  */
  /* The given exception may need processing by the given region.  */
  /* The given exception is completely processed by the given region.  */
  /* The given exception is completely processed by the runtime.  */

static int check_handled			PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq			PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash		PARAMS ((const PTR));
static int add_action_record			PARAMS ((htab_t, int, int));
static int collect_one_action_chain		PARAMS ((htab_t,
						 struct eh_region *));
static int add_call_site			PARAMS ((rtx, int));

static void push_uleb128			PARAMS ((varray_type *,
						 unsigned int));
static void push_sleb128			PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table		PARAMS ((void));
static int sjlj_size_of_call_site_table	PARAMS ((void));
#endif
static void dw2_output_call_site_table		PARAMS ((void));
static void sjlj_output_call_site_table	PARAMS ((void));
/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

  if (! flag_exceptions)
      static int warned = 0;
      if (! warned && do_warn)
	  error ("exception handling disabled, use -fexceptions to enable");

  ggc_add_rtx_root (&exception_handler_labels, 1);

  if (! flag_exceptions)

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
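/* For reference, the record laid out above corresponds to a runtime
   structure along these lines (a sketch only; the authoritative
   definition lives in unwind-sjlj.c and may differ in types, names,
   and padding):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;	-- __prev
	  int call_site;			-- __call_site
	  word data[4];				-- __data
	  personality_routine personality;	-- __personality
	  void *lsda;				-- __lsda
	  jmp_buf-like storage jbuf;		-- __jbuf
	};
*/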
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
/* Mark EH for GC.  */

mark_eh_region (region)
     struct eh_region *region;

  switch (region->type)

      ggc_mark_tree (region->u.cleanup.exp);

      ggc_mark_rtx (region->u.try.continue_label);

      ggc_mark_tree (region->u.catch.type);

    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);

    case ERT_MUST_NOT_THROW:

      ggc_mark_tree (region->u.throw.type);

      ggc_mark_tree (region->u.fixup.cleanup_exp);

  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
     struct eh_status *eh;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
      for (i = eh->last_region_number; i > 0; --i)
	  struct eh_region *r = eh->region_array[i];
	  if (r && r->region_number == i)
  else if (eh->region_tree)
      struct eh_region *r = eh->region_tree;
	  else if (r->next_peer)
	} while (r->next_peer == NULL);

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
  struct eh_status *eh = f->eh;

  if (eh->region_array)
      for (i = eh->last_region_number; i > 0; --i)
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
      free (eh->region_array);
  else if (eh->region_tree)
      struct eh_region *next, *r = eh->region_tree;
	  else if (r->next_peer)
	} while (r->next_peer == NULL);

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

expand_eh_region_start ()
  struct eh_region *new_region;
  struct eh_region *cur_region;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
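/* After a matched start/end pair, the emitted insn stream contains, in
   sketch form (with N standing for the region number):

	NOTE_INSN_EH_REGION_BEG		(NOTE_EH_HANDLER == N)
	  ... insns belonging to region N ...
	NOTE_INSN_EH_REGION_END		(NOTE_EH_HANDLER == N)

   convert_from_eh_region_ranges later turns these range notes into
   per-insn REG_EH_REGION notes.  */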
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
  struct eh_region *cur_region = cfun->eh->cur_region;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  cfun->eh->cur_region = cur_region->outer;
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

expand_eh_region_end_cleanup (handler)
  struct eh_region *region;
  tree protect_cleanup_actions;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));

  emit_label (around_label);
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */
752 expand_start_all_catch ()
754 struct eh_region
*region
;
759 region
= expand_eh_region_end ();
760 region
->type
= ERT_TRY
;
761 region
->u
.try.prev_try
= cfun
->eh
->try_region
;
762 region
->u
.try.continue_label
= gen_label_rtx ();
764 cfun
->eh
->try_region
= region
;
766 emit_jump (region
->u
.try.continue_label
);
769 /* Begin a catch clause. TYPE is the type caught, or null if this is
770 a catch-all clause. */
773 expand_start_catch (type
)
776 struct eh_region
*t
, *c
, *l
;
782 add_type_for_runtime (type
);
783 expand_eh_region_start ();
785 t
= cfun
->eh
->try_region
;
786 c
= cfun
->eh
->cur_region
;
788 c
->u
.catch.type
= type
;
789 c
->label
= gen_label_rtx ();
791 l
= t
->u
.try.last_catch
;
792 c
->u
.catch.prev_catch
= l
;
794 l
->u
.catch.next_catch
= c
;
797 t
->u
.try.last_catch
= c
;
799 emit_label (c
->label
);
802 /* End a catch clause. Control will resume after the try/catch block. */
807 struct eh_region
*try_region
, *catch_region
;
812 catch_region
= expand_eh_region_end ();
813 try_region
= cfun
->eh
->try_region
;
815 emit_jump (try_region
->u
.try.continue_label
);
818 /* End a sequence of catch handlers for a try block. */
821 expand_end_all_catch ()
823 struct eh_region
*try_region
;
828 try_region
= cfun
->eh
->try_region
;
829 cfun
->eh
->try_region
= try_region
->u
.try.prev_try
;
831 emit_label (try_region
->u
.try.continue_label
);
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.  */
839 expand_eh_region_end_allowed (allowed
, failure
)
840 tree allowed
, failure
;
842 struct eh_region
*region
;
848 region
= expand_eh_region_end ();
849 region
->type
= ERT_ALLOWED_EXCEPTIONS
;
850 region
->u
.allowed
.type_list
= allowed
;
851 region
->label
= gen_label_rtx ();
853 for (; allowed
; allowed
= TREE_CHAIN (allowed
))
854 add_type_for_runtime (TREE_VALUE (allowed
));
856 /* We must emit the call to FAILURE here, so that if this function
857 throws a different exception, that it will be processed by the
860 /* If there are any pending stack adjustments, we must emit them
861 before we branch -- otherwise, we won't know how much adjustment
862 is required later. */
863 do_pending_stack_adjust ();
864 around_label
= gen_label_rtx ();
865 emit_jump (around_label
);
867 emit_label (region
->label
);
868 expand_expr (failure
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
869 /* We must adjust the stack before we reach the AROUND_LABEL because
870 the call to FAILURE does not occur on all paths to the
872 do_pending_stack_adjust ();
  emit_label (around_label);
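/* For example (an illustrative sketch only, not lifted from a front
   end): for a C++ function declared `void f () throw (A, B)', the
   front end would end the region covering the function body with
   roughly

	expand_eh_region_end_allowed (allowed_list, failure_expr);

   where ALLOWED_LIST is a TREE_LIST holding the types A and B and
   FAILURE_EXPR is an expression that ultimately reaches something
   like __cxa_call_unexpected.  */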
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */
886 expand_eh_region_end_must_not_throw (failure
)
889 struct eh_region
*region
;
895 region
= expand_eh_region_end ();
896 region
->type
= ERT_MUST_NOT_THROW
;
897 region
->label
= gen_label_rtx ();
899 /* We must emit the call to FAILURE here, so that if this function
900 throws a different exception, that it will be processed by the
903 around_label
= gen_label_rtx ();
904 emit_jump (around_label
);
906 emit_label (region
->label
);
907 expand_expr (failure
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
909 emit_label (around_label
);
912 /* End an exception region for a throw. No handling goes on here,
913 but it's the easiest way for the front-end to indicate what type
917 expand_eh_region_end_throw (type
)
920 struct eh_region
*region
;
925 region
= expand_eh_region_end ();
926 region
->type
= ERT_THROW
;
927 region
->u
.throw.type
= type
;
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */
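/* A hypothetical illustration of the situation described above:

	{
	  C obj;
	  if (cond)
	    goto out;
	  ...
	}
      out:;

   The destructor call that the goto expands for OBJ is placed in a
   fixup region, so that the cleanup region guarding OBJ's lifetime is
   not considered to enclose it; otherwise OBJ could be destroyed a
   second time.  */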
941 expand_eh_region_end_fixup (handler
)
944 struct eh_region
*fixup
;
949 fixup
= expand_eh_region_end ();
950 fixup
->type
= ERT_FIXUP
;
951 fixup
->u
.fixup
.cleanup_exp
= handler
;
954 /* Return an rtl expression for a pointer to the exception object
958 get_exception_pointer (fun
)
959 struct function
*fun
;
961 rtx exc_ptr
= fun
->eh
->exc_ptr
;
962 if (fun
== cfun
&& ! exc_ptr
)
964 exc_ptr
= gen_reg_rtx (Pmode
);
965 fun
->eh
->exc_ptr
= exc_ptr
;
970 /* Return an rtl expression for the exception dispatch filter
974 get_exception_filter (fun
)
975 struct function
*fun
;
977 rtx filter
= fun
->eh
->filter
;
978 if (fun
== cfun
&& ! filter
)
980 filter
= gen_reg_rtx (word_mode
);
981 fun
->eh
->filter
= filter
;
986 /* Begin a region that will contain entries created with
987 add_partial_entry. */
990 begin_protect_partials ()
992 /* Push room for a new list. */
993 cfun
->eh
->protect_list
994 = tree_cons (NULL_TREE
, NULL_TREE
, cfun
->eh
->protect_list
);
997 /* Start a new exception region for a region of code that has a
998 cleanup action and push the HANDLER for the region onto
999 protect_list. All of the regions created with add_partial_entry
1000 will be ended when end_protect_partials is invoked. */
1003 add_partial_entry (handler
)
1006 expand_eh_region_start ();
1008 /* ??? This comment was old before the most recent rewrite. We
1009 really ought to fix the callers at some point. */
1010 /* For backwards compatibility, we allow callers to omit calls to
1011 begin_protect_partials for the outermost region. So, we must
1012 explicitly do so here. */
1013 if (!cfun
->eh
->protect_list
)
1014 begin_protect_partials ();
1016 /* Add this entry to the front of the list. */
1017 TREE_VALUE (cfun
->eh
->protect_list
)
1018 = tree_cons (NULL_TREE
, handler
, TREE_VALUE (cfun
->eh
->protect_list
));
1021 /* End all the pending exception regions on protect_list. */
1024 end_protect_partials ()
1028 /* ??? This comment was old before the most recent rewrite. We
1029 really ought to fix the callers at some point. */
1030 /* For backwards compatibility, we allow callers to omit the call to
1031 begin_protect_partials for the outermost region. So,
1032 PROTECT_LIST may be NULL. */
1033 if (!cfun
->eh
->protect_list
)
1036 /* Pop the topmost entry. */
1037 t
= TREE_VALUE (cfun
->eh
->protect_list
);
1038 cfun
->eh
->protect_list
= TREE_CHAIN (cfun
->eh
->protect_list
);
  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
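/* In outline (a sketch of the intended pairing, not a verbatim example
   from a front end), a caller protects a group of partially constructed
   objects like so:

	begin_protect_partials ();
	  ... expand construction of member 1 ...
	  add_partial_entry (cleanup-for-member-1);
	  ... expand construction of member 2 ...
	  add_partial_entry (cleanup-for-member-2);
	  ...
	end_protect_partials ();

   end_protect_partials then closes one cleanup region per entry, so an
   exception raised part-way through runs only the cleanups for the
   members already constructed.  */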
1046 /* This section is for the exception handling specific optimization pass. */
1048 /* Random access the exception region tree. It's just as simple to
1049 collect the regions this way as in expand_eh_region_start, but
1050 without having to realloc memory. */
1053 collect_eh_region_array ()
1055 struct eh_region
**array
, *i
;
1057 i
= cfun
->eh
->region_tree
;
1061 array
= xcalloc (cfun
->eh
->last_region_number
+ 1, sizeof (*array
));
1062 cfun
->eh
->region_array
= array
;
1066 array
[i
->region_number
] = i
;
1068 /* If there are sub-regions, process them. */
1071 /* If there are peers, process them. */
1072 else if (i
->next_peer
)
1074 /* Otherwise, step back up the tree to the next peer. */
1081 } while (i
->next_peer
== NULL
);
1088 resolve_fixup_regions ()
1090 int i
, j
, n
= cfun
->eh
->last_region_number
;
1092 for (i
= 1; i
<= n
; ++i
)
1094 struct eh_region
*fixup
= cfun
->eh
->region_array
[i
];
1095 struct eh_region
*cleanup
;
1097 if (! fixup
|| fixup
->type
!= ERT_FIXUP
)
1100 for (j
= 1; j
<= n
; ++j
)
1102 cleanup
= cfun
->eh
->region_array
[j
];
1103 if (cleanup
->type
== ERT_CLEANUP
1104 && cleanup
->u
.cleanup
.exp
== fixup
->u
.fixup
.cleanup_exp
)
1110 fixup
->u
.fixup
.real_region
= cleanup
->outer
;
1114 /* Now that we've discovered what region actually encloses a fixup,
1115 we can shuffle pointers and remove them from the tree. */
1118 remove_fixup_regions ()
1122 struct eh_region
*fixup
;
1124 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1125 for instructions referencing fixup regions. This is only
1126 strictly necessary for fixup regions with no parent, but
1127 doesn't hurt to do it for all regions. */
1128 for (insn
= get_insns(); insn
; insn
= NEXT_INSN (insn
))
1130 && (note
= find_reg_note (insn
, REG_EH_REGION
, NULL
))
1131 && INTVAL (XEXP (note
, 0)) > 0
1132 && (fixup
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))])
1133 && fixup
->type
== ERT_FIXUP
)
1135 if (fixup
->u
.fixup
.real_region
)
1136 XEXP (note
, 1) = GEN_INT (fixup
->u
.fixup
.real_region
->region_number
);
1138 remove_note (insn
, note
);
1141 /* Remove the fixup regions from the tree. */
1142 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1144 fixup
= cfun
->eh
->region_array
[i
];
1148 /* Allow GC to maybe free some memory. */
1149 if (fixup
->type
== ERT_CLEANUP
)
1150 fixup
->u
.cleanup
.exp
= NULL_TREE
;
1152 if (fixup
->type
!= ERT_FIXUP
)
1157 struct eh_region
*parent
, *p
, **pp
;
1159 parent
= fixup
->u
.fixup
.real_region
;
1161 /* Fix up the children's parent pointers; find the end of
1163 for (p
= fixup
->inner
; ; p
= p
->next_peer
)
1170 /* In the tree of cleanups, only outer-inner ordering matters.
1171 So link the children back in anywhere at the correct level. */
1173 pp
= &parent
->inner
;
1175 pp
= &cfun
->eh
->region_tree
;
1178 fixup
->inner
= NULL
;
1181 remove_eh_handler (fixup
);
1185 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1186 can_throw instruction in the region. */
1189 convert_from_eh_region_ranges_1 (pinsns
, orig_sp
, cur
)
1197 for (insn
= *pinsns
; insn
; insn
= next
)
1199 next
= NEXT_INSN (insn
);
1200 if (GET_CODE (insn
) == NOTE
)
1202 int kind
= NOTE_LINE_NUMBER (insn
);
1203 if (kind
== NOTE_INSN_EH_REGION_BEG
1204 || kind
== NOTE_INSN_EH_REGION_END
)
1206 if (kind
== NOTE_INSN_EH_REGION_BEG
)
1208 struct eh_region
*r
;
1211 cur
= NOTE_EH_HANDLER (insn
);
1213 r
= cfun
->eh
->region_array
[cur
];
1214 if (r
->type
== ERT_FIXUP
)
1216 r
= r
->u
.fixup
.real_region
;
1217 cur
= r
? r
->region_number
: 0;
1219 else if (r
->type
== ERT_CATCH
)
1222 cur
= r
? r
->region_number
: 0;
1228 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1229 requires extra care to adjust sequence start. */
1230 if (insn
== *pinsns
)
1236 else if (INSN_P (insn
))
1239 && ! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
1240 /* Calls can always potentially throw exceptions, unless
1241 they have a REG_EH_REGION note with a value of 0 or less.
1242 Which should be the only possible kind so far. */
1243 && (GET_CODE (insn
) == CALL_INSN
1244 /* If we wanted exceptions for non-call insns, then
1245 any may_trap_p instruction could throw. */
1246 || (flag_non_call_exceptions
1247 && may_trap_p (PATTERN (insn
)))))
1249 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_EH_REGION
, GEN_INT (cur
),
1253 if (GET_CODE (insn
) == CALL_INSN
1254 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
1256 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 0),
1258 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 1),
1260 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 2),
1271 convert_from_eh_region_ranges ()
1276 collect_eh_region_array ();
1277 resolve_fixup_regions ();
1279 stack
= xmalloc (sizeof (int) * (cfun
->eh
->last_region_number
+ 1));
1280 insns
= get_insns ();
1281 convert_from_eh_region_ranges_1 (&insns
, stack
, 0);
1284 remove_fixup_regions ();
1288 find_exception_handler_labels ()
1290 rtx list
= NULL_RTX
;
1293 free_EXPR_LIST_list (&exception_handler_labels
);
1295 if (cfun
->eh
->region_tree
== NULL
)
1298 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1300 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1305 if (cfun
->eh
->built_landing_pads
)
1306 lab
= region
->landing_pad
;
1308 lab
= region
->label
;
1311 list
= alloc_EXPR_LIST (0, lab
, list
);
1314 /* For sjlj exceptions, need the return label to remain live until
1315 after landing pad generation. */
1316 if (USING_SJLJ_EXCEPTIONS
&& ! cfun
->eh
->built_landing_pads
)
1317 list
= alloc_EXPR_LIST (0, return_label
, list
);
1319 exception_handler_labels
= list
;
1323 static struct eh_region
*
1324 duplicate_eh_region_1 (o
, map
)
1325 struct eh_region
*o
;
1326 struct inline_remap
*map
;
1329 = (struct eh_region
*) xcalloc (1, sizeof (struct eh_region
));
1331 n
->region_number
= o
->region_number
+ cfun
->eh
->last_region_number
;
1337 case ERT_MUST_NOT_THROW
:
1341 if (o
->u
.try.continue_label
)
1342 n
->u
.try.continue_label
1343 = get_label_from_map (map
,
1344 CODE_LABEL_NUMBER (o
->u
.try.continue_label
));
1348 n
->u
.catch.type
= o
->u
.catch.type
;
1351 case ERT_ALLOWED_EXCEPTIONS
:
1352 n
->u
.allowed
.type_list
= o
->u
.allowed
.type_list
;
1356 n
->u
.throw.type
= o
->u
.throw.type
;
1363 n
->label
= get_label_from_map (map
, CODE_LABEL_NUMBER (o
->label
));
1366 n
->resume
= map
->insn_map
[INSN_UID (o
->resume
)];
1367 if (n
->resume
== NULL
)
1375 duplicate_eh_region_2 (o
, n_array
)
1376 struct eh_region
*o
;
1377 struct eh_region
**n_array
;
1379 struct eh_region
*n
= n_array
[o
->region_number
];
1384 n
->u
.try.catch = n_array
[o
->u
.try.catch->region_number
];
1385 n
->u
.try.last_catch
= n_array
[o
->u
.try.last_catch
->region_number
];
1389 if (o
->u
.catch.next_catch
)
1390 n
->u
.catch.next_catch
= n_array
[o
->u
.catch.next_catch
->region_number
];
1391 if (o
->u
.catch.prev_catch
)
1392 n
->u
.catch.prev_catch
= n_array
[o
->u
.catch.prev_catch
->region_number
];
1400 n
->outer
= n_array
[o
->outer
->region_number
];
1402 n
->inner
= n_array
[o
->inner
->region_number
];
1404 n
->next_peer
= n_array
[o
->next_peer
->region_number
];
1408 duplicate_eh_regions (ifun
, map
)
1409 struct function
*ifun
;
1410 struct inline_remap
*map
;
1412 int ifun_last_region_number
= ifun
->eh
->last_region_number
;
1413 struct eh_region
**n_array
, *root
, *cur
;
1416 if (ifun_last_region_number
== 0)
1419 n_array
= xcalloc (ifun_last_region_number
+ 1, sizeof (*n_array
));
1421 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1423 cur
= ifun
->eh
->region_array
[i
];
1424 if (!cur
|| cur
->region_number
!= i
)
1426 n_array
[i
] = duplicate_eh_region_1 (cur
, map
);
1428 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1430 cur
= ifun
->eh
->region_array
[i
];
1431 if (!cur
|| cur
->region_number
!= i
)
1433 duplicate_eh_region_2 (cur
, n_array
);
1436 root
= n_array
[ifun
->eh
->region_tree
->region_number
];
1437 cur
= cfun
->eh
->cur_region
;
1440 struct eh_region
*p
= cur
->inner
;
1443 while (p
->next_peer
)
1445 p
->next_peer
= root
;
1450 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1451 if (n_array
[i
]->outer
== NULL
)
1452 n_array
[i
]->outer
= cur
;
1456 struct eh_region
*p
= cfun
->eh
->region_tree
;
1459 while (p
->next_peer
)
1461 p
->next_peer
= root
;
1464 cfun
->eh
->region_tree
= root
;
1469 i
= cfun
->eh
->last_region_number
;
1470 cfun
->eh
->last_region_number
= i
+ ifun_last_region_number
;
1475 /* ??? Move from tree.c to tree.h. */
1476 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
1479 t2r_eq (pentry
, pdata
)
1483 tree entry
= (tree
) pentry
;
1484 tree data
= (tree
) pdata
;
1486 return TREE_PURPOSE (entry
) == data
;
1493 tree entry
= (tree
) pentry
;
1494 return TYPE_HASH (TREE_PURPOSE (entry
));
1498 t2r_mark_1 (slot
, data
)
1500 PTR data ATTRIBUTE_UNUSED
;
1502 tree contents
= (tree
) *slot
;
1503 ggc_mark_tree (contents
);
1511 htab_traverse (*(htab_t
*)addr
, t2r_mark_1
, NULL
);
1515 add_type_for_runtime (type
)
1520 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1521 TYPE_HASH (type
), INSERT
);
1524 tree runtime
= (*lang_eh_runtime_type
) (type
);
1525 *slot
= tree_cons (type
, runtime
, NULL_TREE
);
1530 lookup_type_for_runtime (type
)
1535 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1536 TYPE_HASH (type
), NO_INSERT
);
  /* We should have always inserted the data earlier.  */
1539 return TREE_VALUE (*slot
);
1543 /* Represent an entry in @TTypes for either catch actions
1544 or exception filter actions. */
1545 struct ttypes_filter
1551 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1552 (a tree) for a @TTypes type node we are thinking about adding. */
1555 ttypes_filter_eq (pentry
, pdata
)
1559 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1560 tree data
= (tree
) pdata
;
1562 return entry
->t
== data
;
1566 ttypes_filter_hash (pentry
)
1569 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1570 return TYPE_HASH (entry
->t
);
1573 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1574 exception specification list we are thinking about adding. */
1575 /* ??? Currently we use the type lists in the order given. Someone
1576 should put these in some canonical order. */
1579 ehspec_filter_eq (pentry
, pdata
)
1583 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1584 const struct ttypes_filter
*data
= (const struct ttypes_filter
*) pdata
;
1586 return type_list_equal (entry
->t
, data
->t
);
1589 /* Hash function for exception specification lists. */
1592 ehspec_filter_hash (pentry
)
1595 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1599 for (list
= entry
->t
; list
; list
= TREE_CHAIN (list
))
1600 h
= (h
<< 5) + (h
>> 27) + TYPE_HASH (TREE_VALUE (list
));
1604 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1605 up the search. Return the filter value to be used. */
1608 add_ttypes_entry (ttypes_hash
, type
)
1612 struct ttypes_filter
**slot
, *n
;
1614 slot
= (struct ttypes_filter
**)
1615 htab_find_slot_with_hash (ttypes_hash
, type
, TYPE_HASH (type
), INSERT
);
1617 if ((n
= *slot
) == NULL
)
1619 /* Filter value is a 1 based table index. */
1621 n
= (struct ttypes_filter
*) xmalloc (sizeof (*n
));
1623 n
->filter
= VARRAY_ACTIVE_SIZE (cfun
->eh
->ttype_data
) + 1;
1626 VARRAY_PUSH_TREE (cfun
->eh
->ttype_data
, type
);
1632 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1633 to speed up the search. Return the filter value to be used. */
1636 add_ehspec_entry (ehspec_hash
, ttypes_hash
, list
)
1641 struct ttypes_filter
**slot
, *n
;
1642 struct ttypes_filter dummy
;
1645 slot
= (struct ttypes_filter
**)
1646 htab_find_slot (ehspec_hash
, &dummy
, INSERT
);
1648 if ((n
= *slot
) == NULL
)
1650 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1652 n
= (struct ttypes_filter
*) xmalloc (sizeof (*n
));
1654 n
->filter
= -(VARRAY_ACTIVE_SIZE (cfun
->eh
->ehspec_data
) + 1);
      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
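/* For reference, "uleb128" here is the usual unsigned LEB128
   variable-length encoding; a sketch of what push_uleb128 (defined
   further down in this file) has to do to append one value:

	do {
	  unsigned char byte = value & 0x7f;
	  value >>= 7;
	  if (value)
	    byte |= 0x80;		-- more bytes follow
	  VARRAY_PUSH_UCHAR (*data_area, byte);
	} while (value);
*/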
1668 /* Generate the action filter values to be used for CATCH and
1669 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1670 we use lots of landing pads, and so every type or list can share
1671 the same filter value, which saves table space. */
1674 assign_filter_values ()
1677 htab_t ttypes
, ehspec
;
1679 VARRAY_TREE_INIT (cfun
->eh
->ttype_data
, 16, "ttype_data");
1680 VARRAY_UCHAR_INIT (cfun
->eh
->ehspec_data
, 64, "ehspec_data");
1682 ttypes
= htab_create (31, ttypes_filter_hash
, ttypes_filter_eq
, free
);
1683 ehspec
= htab_create (31, ehspec_filter_hash
, ehspec_filter_eq
, free
);
1685 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1687 struct eh_region
*r
= cfun
->eh
->region_array
[i
];
1689 /* Mind we don't process a region more than once. */
1690 if (!r
|| r
->region_number
!= i
)
1696 r
->u
.catch.filter
= add_ttypes_entry (ttypes
, r
->u
.catch.type
);
1699 case ERT_ALLOWED_EXCEPTIONS
:
1701 = add_ehspec_entry (ehspec
, ttypes
, r
->u
.allowed
.type_list
);
1709 htab_delete (ttypes
);
1710 htab_delete (ehspec
);
1714 build_post_landing_pads ()
1718 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1720 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1723 /* Mind we don't process a region more than once. */
1724 if (!region
|| region
->region_number
!= i
)
1727 switch (region
->type
)
	/* ??? Collect the set of all non-overlapping catch handlers
	     all the way up the chain until blocked by a cleanup.  */
	/* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */
1736 region
->post_landing_pad
= gen_label_rtx ();
1740 emit_label (region
->post_landing_pad
);
1742 /* ??? It is mighty inconvenient to call back into the
1743 switch statement generation code in expand_end_case.
1744 Rapid prototyping sez a sequence of ifs. */
1746 struct eh_region
*c
;
1747 for (c
= region
->u
.try.catch; c
; c
= c
->u
.catch.next_catch
)
1749 /* ??? _Unwind_ForcedUnwind wants no match here. */
1750 if (c
->u
.catch.type
== NULL
)
1751 emit_jump (c
->label
);
1753 emit_cmp_and_jump_insns (cfun
->eh
->filter
,
1754 GEN_INT (c
->u
.catch.filter
),
1755 EQ
, NULL_RTX
, word_mode
,
1760 /* We delay the generation of the _Unwind_Resume until we generate
1761 landing pads. We emit a marker here so as to get good control
1762 flow data in the meantime. */
1764 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1770 emit_insns_before (seq
, region
->u
.try.catch->label
);
1773 case ERT_ALLOWED_EXCEPTIONS
:
1774 region
->post_landing_pad
= gen_label_rtx ();
1778 emit_label (region
->post_landing_pad
);
1780 emit_cmp_and_jump_insns (cfun
->eh
->filter
,
1781 GEN_INT (region
->u
.allowed
.filter
),
1782 EQ
, NULL_RTX
, word_mode
, 0, 0,
1785 /* We delay the generation of the _Unwind_Resume until we generate
1786 landing pads. We emit a marker here so as to get good control
1787 flow data in the meantime. */
1789 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1795 emit_insns_before (seq
, region
->label
);
1799 case ERT_MUST_NOT_THROW
:
1800 region
->post_landing_pad
= region
->label
;
1805 /* Nothing to do. */
1814 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1815 _Unwind_Resume otherwise. */
1818 connect_post_landing_pads ()
1822 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1824 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1825 struct eh_region
*outer
;
1828 /* Mind we don't process a region more than once. */
1829 if (!region
|| region
->region_number
!= i
)
1832 /* If there is no RESX, or it has been deleted by flow, there's
1833 nothing to fix up. */
1834 if (! region
->resume
|| INSN_DELETED_P (region
->resume
))
1837 /* Search for another landing pad in this function. */
1838 for (outer
= region
->outer
; outer
; outer
= outer
->outer
)
1839 if (outer
->post_landing_pad
)
1845 emit_jump (outer
->post_landing_pad
);
1847 emit_library_call (unwind_resume_libfunc
, LCT_THROW
,
1848 VOIDmode
, 1, cfun
->eh
->exc_ptr
, Pmode
);
1852 emit_insns_before (seq
, region
->resume
);
1854 /* Leave the RESX to be deleted by flow. */
1860 dw2_build_landing_pads ()
1864 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1866 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1869 /* Mind we don't process a region more than once. */
1870 if (!region
|| region
->region_number
!= i
)
1873 if (region
->type
!= ERT_CLEANUP
1874 && region
->type
!= ERT_TRY
1875 && region
->type
!= ERT_ALLOWED_EXCEPTIONS
)
1880 region
->landing_pad
= gen_label_rtx ();
1881 emit_label (region
->landing_pad
);
1883 #ifdef HAVE_exception_receiver
1884 if (HAVE_exception_receiver
)
1885 emit_insn (gen_exception_receiver ());
1888 #ifdef HAVE_nonlocal_goto_receiver
1889 if (HAVE_nonlocal_goto_receiver
)
1890 emit_insn (gen_nonlocal_goto_receiver ());
1895 /* If the eh_return data registers are call-saved, then we
1896 won't have considered them clobbered from the call that
1897 threw. Kill them now. */
1900 unsigned r
= EH_RETURN_DATA_REGNO (j
);
1901 if (r
== INVALID_REGNUM
)
1903 if (! call_used_regs
[r
])
1904 emit_insn (gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, r
)));
1907 emit_move_insn (cfun
->eh
->exc_ptr
,
1908 gen_rtx_REG (Pmode
, EH_RETURN_DATA_REGNO (0)));
1909 emit_move_insn (cfun
->eh
->filter
,
1910 gen_rtx_REG (word_mode
, EH_RETURN_DATA_REGNO (1)));
1915 emit_insns_before (seq
, region
->post_landing_pad
);
1922 int directly_reachable
;
1925 int call_site_index
;
1929 sjlj_find_directly_reachable_regions (lp_info
)
1930 struct sjlj_lp_info
*lp_info
;
1933 bool found_one
= false;
1935 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1937 struct eh_region
*region
;
1941 if (! INSN_P (insn
))
1944 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1945 if (!note
|| INTVAL (XEXP (note
, 0)) <= 0)
1948 region
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))];
1950 type_thrown
= NULL_TREE
;
1951 if (region
->type
== ERT_THROW
)
1953 type_thrown
= region
->u
.throw.type
;
1954 region
= region
->outer
;
1957 /* Find the first containing region that might handle the exception.
1958 That's the landing pad to which we will transfer control. */
1959 for (; region
; region
= region
->outer
)
1960 if (reachable_next_level (region
, type_thrown
, 0) != RNL_NOT_CAUGHT
)
1965 lp_info
[region
->region_number
].directly_reachable
= 1;
1974 sjlj_assign_call_site_values (dispatch_label
, lp_info
)
1976 struct sjlj_lp_info
*lp_info
;
1981 /* First task: build the action table. */
1983 VARRAY_UCHAR_INIT (cfun
->eh
->action_record_data
, 64, "action_record_data");
1984 ar_hash
= htab_create (31, action_record_hash
, action_record_eq
, free
);
1986 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1987 if (lp_info
[i
].directly_reachable
)
1989 struct eh_region
*r
= cfun
->eh
->region_array
[i
];
1990 r
->landing_pad
= dispatch_label
;
1991 lp_info
[i
].action_index
= collect_one_action_chain (ar_hash
, r
);
1992 if (lp_info
[i
].action_index
!= -1)
1993 cfun
->uses_eh_lsda
= 1;
1996 htab_delete (ar_hash
);
  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */
2009 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2010 if (lp_info
[i
].directly_reachable
2011 && lp_info
[i
].action_index
>= 0)
2012 lp_info
[i
].dispatch_index
= index
++;
  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */
2019 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2020 if (lp_info
[i
].directly_reachable
)
2022 int action
= lp_info
[i
].action_index
;
2024 /* Map must-not-throw to otherwise unused call-site index 0. */
2027 /* Map no-action to otherwise unused call-site index -1. */
2028 else if (action
== -1)
2030 /* Otherwise, look it up in the table. */
2032 index
= add_call_site (GEN_INT (lp_info
[i
].dispatch_index
), action
);
2034 lp_info
[i
].call_site_index
= index
;
2039 sjlj_mark_call_sites (lp_info
)
2040 struct sjlj_lp_info
*lp_info
;
2042 int last_call_site
= -2;
2045 mem
= change_address (cfun
->eh
->sjlj_fc
, TYPE_MODE (integer_type_node
),
2046 plus_constant (XEXP (cfun
->eh
->sjlj_fc
, 0),
2047 sjlj_fc_call_site_ofs
));
2049 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2051 struct eh_region
*region
;
2053 rtx note
, before
, p
;
2055 /* Reset value tracking at extended basic block boundaries. */
2056 if (GET_CODE (insn
) == CODE_LABEL
)
2057 last_call_site
= -2;
2059 if (! INSN_P (insn
))
2062 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
2065 /* Calls (and trapping insns) without notes are outside any
2066 exception handling region in this function. Mark them as
2068 if (GET_CODE (insn
) == CALL_INSN
2069 || (flag_non_call_exceptions
2070 && may_trap_p (PATTERN (insn
))))
2071 this_call_site
= -1;
2077 /* Calls that are known to not throw need not be marked. */
2078 if (INTVAL (XEXP (note
, 0)) <= 0)
2081 region
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))];
2082 this_call_site
= lp_info
[region
->region_number
].call_site_index
;
2085 if (this_call_site
== last_call_site
)
      /* Don't separate a call from its argument loads.  */
2090 if (GET_CODE (insn
) == CALL_INSN
)
2092 HARD_REG_SET parm_regs
;
2095 /* Since different machines initialize their parameter registers
2096 in different orders, assume nothing. Collect the set of all
2097 parameter registers. */
2098 CLEAR_HARD_REG_SET (parm_regs
);
2100 for (p
= CALL_INSN_FUNCTION_USAGE (insn
); p
; p
= XEXP (p
, 1))
2101 if (GET_CODE (XEXP (p
, 0)) == USE
2102 && GET_CODE (XEXP (XEXP (p
, 0), 0)) == REG
)
2104 if (REGNO (XEXP (XEXP (p
, 0), 0)) >= FIRST_PSEUDO_REGISTER
)
2107 /* We only care about registers which can hold function
2109 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p
, 0), 0))))
2112 SET_HARD_REG_BIT (parm_regs
, REGNO (XEXP (XEXP (p
, 0), 0)));
2116 /* Search backward for the first set of a register in this set. */
2119 before
= PREV_INSN (before
);
2121 /* Given that we've done no other optimizations yet,
2122 the arguments should be immediately available. */
2123 if (GET_CODE (before
) == CODE_LABEL
)
2126 p
= single_set (before
);
2127 if (p
&& GET_CODE (SET_DEST (p
)) == REG
2128 && REGNO (SET_DEST (p
)) < FIRST_PSEUDO_REGISTER
2129 && TEST_HARD_REG_BIT (parm_regs
, REGNO (SET_DEST (p
))))
2131 CLEAR_HARD_REG_BIT (parm_regs
, REGNO (SET_DEST (p
)));
2138 emit_move_insn (mem
, GEN_INT (this_call_site
));
2142 emit_insns_before (p
, before
);
2143 last_call_site
= this_call_site
;
2147 /* Construct the SjLj_Function_Context. */
2150 sjlj_emit_function_enter (dispatch_label
)
2153 rtx fn_begin
, fc
, mem
, seq
;
2155 fc
= cfun
->eh
->sjlj_fc
;
2159 /* We're storing this libcall's address into memory instead of
2160 calling it directly. Thus, we must call assemble_external_libcall
2161 here, as we can not depend on emit_library_call to do it for us. */
2162 assemble_external_libcall (eh_personality_libfunc
);
2163 mem
= change_address (fc
, Pmode
,
2164 plus_constant (XEXP (fc
, 0), sjlj_fc_personality_ofs
));
2165 emit_move_insn (mem
, eh_personality_libfunc
);
2167 mem
= change_address (fc
, Pmode
,
2168 plus_constant (XEXP (fc
, 0), sjlj_fc_lsda_ofs
));
2169 if (cfun
->uses_eh_lsda
)
2172 ASM_GENERATE_INTERNAL_LABEL (buf
, "LLSDA", sjlj_funcdef_number
);
2173 emit_move_insn (mem
, gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
)));
2176 emit_move_insn (mem
, const0_rtx
);
2178 #ifdef DONT_USE_BUILTIN_SETJMP
2181 x
= emit_library_call_value (setjmp_libfunc
, NULL_RTX
, LCT_NORMAL
,
2182 TYPE_MODE (integer_type_node
), 1,
2183 plus_constant (XEXP (fc
, 0),
2184 sjlj_fc_jbuf_ofs
), Pmode
);
2186 note
= emit_note (NULL
, NOTE_INSN_EXPECTED_VALUE
);
2187 NOTE_EXPECTED_VALUE (note
) = gen_rtx_EQ (VOIDmode
, x
, const0_rtx
);
2189 emit_cmp_and_jump_insns (x
, const0_rtx
, NE
, 0,
2190 TYPE_MODE (integer_type_node
), 0, 0,
2194 expand_builtin_setjmp_setup (plus_constant (XEXP (fc
, 0), sjlj_fc_jbuf_ofs
),
2198 emit_library_call (unwind_sjlj_register_libfunc
, LCT_NORMAL
, VOIDmode
,
2199 1, XEXP (fc
, 0), Pmode
);
2204 /* ??? Instead of doing this at the beginning of the function,
2205 do this in a block that is at loop level 0 and dominates all
2206 can_throw_internal instructions. */
2208 for (fn_begin
= get_insns (); ; fn_begin
= NEXT_INSN (fn_begin
))
2209 if (GET_CODE (fn_begin
) == NOTE
2210 && NOTE_LINE_NUMBER (fn_begin
) == NOTE_INSN_FUNCTION_BEG
)
2212 emit_insns_after (seq
, fn_begin
);
2215 /* Call back from expand_function_end to know where we should put
2216 the call to unwind_sjlj_unregister_libfunc if needed. */
2219 sjlj_emit_function_exit_after (after
)
2222 cfun
->eh
->sjlj_exit_after
= after
;
2226 sjlj_emit_function_exit ()
2232 emit_library_call (unwind_sjlj_unregister_libfunc
, LCT_NORMAL
, VOIDmode
,
2233 1, XEXP (cfun
->eh
->sjlj_fc
, 0), Pmode
);
2238 /* ??? Really this can be done in any block at loop level 0 that
2239 post-dominates all can_throw_internal instructions. This is
2240 the last possible moment. */
2242 emit_insns_after (seq
, cfun
->eh
->sjlj_exit_after
);
2246 sjlj_emit_dispatch_table (dispatch_label
, lp_info
)
2248 struct sjlj_lp_info
*lp_info
;
2250 int i
, first_reachable
;
2251 rtx mem
, dispatch
, seq
, fc
;
2253 fc
= cfun
->eh
->sjlj_fc
;
2257 emit_label (dispatch_label
);
2259 #ifndef DONT_USE_BUILTIN_SETJMP
2260 expand_builtin_setjmp_receiver (dispatch_label
);
2263 /* Load up dispatch index, exc_ptr and filter values from the
2264 function context. */
2265 mem
= change_address (fc
, TYPE_MODE (integer_type_node
),
2266 plus_constant (XEXP (fc
, 0), sjlj_fc_call_site_ofs
));
2267 dispatch
= copy_to_reg (mem
);
2269 mem
= change_address (fc
, word_mode
,
2270 plus_constant (XEXP (fc
, 0), sjlj_fc_data_ofs
));
2271 if (word_mode
!= Pmode
)
2273 #ifdef POINTERS_EXTEND_UNSIGNED
2274 mem
= convert_memory_address (Pmode
, mem
);
2276 mem
= convert_to_mode (Pmode
, mem
, 0);
2279 emit_move_insn (cfun
->eh
->exc_ptr
, mem
);
2281 mem
= change_address (fc
, word_mode
,
2282 plus_constant (XEXP (fc
, 0),
2283 sjlj_fc_data_ofs
+ UNITS_PER_WORD
));
2284 emit_move_insn (cfun
->eh
->filter
, mem
);
2286 /* Jump to one of the directly reachable regions. */
2287 /* ??? This really ought to be using a switch statement. */
2289 first_reachable
= 0;
2290 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2292 if (! lp_info
[i
].directly_reachable
2293 || lp_info
[i
].action_index
< 0)
2296 if (! first_reachable
)
2298 first_reachable
= i
;
2302 emit_cmp_and_jump_insns (dispatch
,
2303 GEN_INT (lp_info
[i
].dispatch_index
), EQ
,
2304 NULL_RTX
, TYPE_MODE (integer_type_node
), 0, 0,
2305 cfun
->eh
->region_array
[i
]->post_landing_pad
);
2311 emit_insns_before (seq
, (cfun
->eh
->region_array
[first_reachable
]
2312 ->post_landing_pad
));
2316 sjlj_build_landing_pads ()
2318 struct sjlj_lp_info
*lp_info
;
2320 lp_info
= (struct sjlj_lp_info
*) xcalloc (cfun
->eh
->last_region_number
+ 1,
2321 sizeof (struct sjlj_lp_info
));
2323 if (sjlj_find_directly_reachable_regions (lp_info
))
2325 rtx dispatch_label
= gen_label_rtx ();
2328 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node
),
2329 int_size_in_bytes (sjlj_fc_type_node
),
2330 TYPE_ALIGN (sjlj_fc_type_node
));
2332 sjlj_assign_call_site_values (dispatch_label
, lp_info
);
2333 sjlj_mark_call_sites (lp_info
);
2335 sjlj_emit_function_enter (dispatch_label
);
2336 sjlj_emit_dispatch_table (dispatch_label
, lp_info
);
2337 sjlj_emit_function_exit ();
2344 finish_eh_generation ()
2346 /* Nothing to do if no regions created. */
2347 if (cfun
->eh
->region_tree
== NULL
)
2350 /* The object here is to provide find_basic_blocks with detailed
2351 information (via reachable_handlers) on how exception control
2352 flows within the function. In this first pass, we can include
2353 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2354 regions, and hope that it will be useful in deleting unreachable
2355 handlers. Subsequently, we will generate landing pads which will
2356 connect many of the handlers, and then type information will not
2357 be effective. Still, this is a win over previous implementations. */
2359 jump_optimize_minimal (get_insns ());
2360 find_basic_blocks (get_insns (), max_reg_num (), 0);
2363 /* These registers are used by the landing pads. Make sure they
2364 have been generated. */
2365 get_exception_pointer (cfun
);
2366 get_exception_filter (cfun
);
2368 /* Construct the landing pads. */
2370 assign_filter_values ();
2371 build_post_landing_pads ();
2372 connect_post_landing_pads ();
2373 if (USING_SJLJ_EXCEPTIONS
)
2374 sjlj_build_landing_pads ();
2376 dw2_build_landing_pads ();
2378 cfun
->eh
->built_landing_pads
= 1;
2380 /* We've totally changed the CFG. Start over. */
2381 find_exception_handler_labels ();
2382 jump_optimize_minimal (get_insns ());
2383 find_basic_blocks (get_insns (), max_reg_num (), 0);
2387 /* This section handles removing dead code for flow. */
2389 /* Remove LABEL from the exception_handler_labels list. */
2392 remove_exception_handler_label (label
)
2397 for (pl
= &exception_handler_labels
, l
= *pl
;
2398 XEXP (l
, 0) != label
;
2399 pl
= &XEXP (l
, 1), l
= *pl
)
2403 free_EXPR_LIST_node (l
);
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (region)
     struct eh_region *region;
{
  struct eh_region **pp, *p;
  rtx lab;
  int i;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have to
     search the whole thing.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (cfun->eh->region_array[i] == region)
      cfun->eh->region_array[i] = region->outer;

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (region->outer)
    pp = &region->outer->inner;
  else
    pp = &cfun->eh->region_tree;
  for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  if (region->inner)
    {
      for (p = region->inner; p->next_peer; p = p->next_peer)
        p->outer = region->outer;
      p->next_peer = region->next_peer;
      p->outer = region->outer;
      *pp = region->inner;
    }
  else
    *pp = region->next_peer;

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
           try->type == ERT_CATCH;
           try = try->next_peer)
        continue;
      if (try->type != ERT_TRY)
        abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
        next->u.catch.prev_catch = prev;
      else
        try->u.try.last_catch = prev;
      if (prev)
        prev->u.catch.next_catch = next;
      else
        {
          try->u.try.catch = next;
          if (! next)
            remove_eh_handler (try);
        }
    }
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (label)
     rtx label;
{
  int i;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->label == label)
        {
          /* Flow will want to remove MUST_NOT_THROW regions as unreachable
             because there is no path to the fallback call to terminate.
             But the region continues to affect call-site data until there
             are no more contained calls, which we don't see here.  */
          if (region->type == ERT_MUST_NOT_THROW)
            {
              remove_exception_handler_label (region->label);
              region->label = NULL_RTX;
            }
          else
            remove_eh_handler (region);
          break;
        }
    }
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  rtx handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (handled, type)
     tree handled, type;
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t; t = TREE_CHAIN (t))
        if (TREE_VALUE (t) == type)
          return 1;
    }
  else
    {
      for (t = handled; t; t = TREE_CHAIN (t))
        if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
          return 1;
    }

  return 0;
}

/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (info, lp_region, region)
     struct reachable_info *info;
     struct eh_region *lp_region;
     struct eh_region *region;
{
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      if (! info->handlers)
        info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (region, type_thrown, info)
     struct eh_region *region;
     tree type_thrown;
     struct reachable_info *info;
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
         directly to the individual handlers.  In this way we can
         see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
        struct eh_region *c;
        enum reachable_code ret = RNL_NOT_CAUGHT;

        for (c = region->u.try.catch; c; c = c->u.catch.next_catch)
          {
            /* A catch-all handler ends the search.  */
            /* ??? _Unwind_ForcedUnwind will want outer cleanups
               to be run as well.  */
            if (c->u.catch.type == NULL)
              {
                add_reachable_handler (info, region, c);
                return RNL_CAUGHT;
              }

            if (type_thrown)
              {
                /* If we have a type match, end the search.  */
                if (c->u.catch.type == type_thrown
                    || (lang_eh_type_covers
                        && (*lang_eh_type_covers) (c->u.catch.type,
                                                   type_thrown)))
                  {
                    add_reachable_handler (info, region, c);
                    return RNL_CAUGHT;
                  }

                /* If we have definitive information of a match failure,
                   the catch won't trigger.  */
                if (lang_eh_type_covers)
                  return RNL_NOT_CAUGHT;
              }

            if (! info)
              ret = RNL_MAYBE_CAUGHT;

            /* A type must not have been previously caught.  */
            else if (! check_handled (info->types_caught, c->u.catch.type))
              {
                add_reachable_handler (info, region, c);
                info->types_caught = tree_cons (NULL, c->u.catch.type,
                                                info->types_caught);

                /* ??? If the catch type is a base class of every allowed
                   type, then we know we can stop the search.  */
                ret = RNL_MAYBE_CAUGHT;
              }
          }

        return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }

      /* Collect a list of lists of allowed types for use in detecting
         when a catch may be transformed into a catch-all.  */
      if (info)
        info->types_allowed = tree_cons (NULL_TREE,
                                         region->u.allowed.type_list,
                                         info->types_allowed);

      /* If we have definitive information about the type hierarchy,
         then we can tell if the thrown type will pass through the
         filter.  */
      if (type_thrown && lang_eh_type_covers)
        {
          if (check_handled (region->u.allowed.type_list, type_thrown))
            return RNL_NOT_CAUGHT;
          else
            {
              add_reachable_handler (info, region, region);
              return RNL_CAUGHT;
            }
        }

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
         If we've touched down at some landing pad previous, then the
         explicit function call we generated may be used.  Otherwise
         the call is made by the runtime.  */
      if (info && info->handlers)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }
      return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
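/* An illustrative walk (a sketch, not generated output): for an insn inside

       try { ... } catch (A) { ... }

   nested within a cleanup region, walking outward with a thrown type the
   front end cannot relate to A yields RNL_MAYBE_CAUGHT for the try (A might
   match) and RNL_MAYBE_CAUGHT for the cleanup, so the search continues.  An
   enclosing catch-all or empty exception specification yields RNL_CAUGHT and
   stops it, while an ERT_MUST_NOT_THROW region yields RNL_BLOCKED to say
   nothing propagates past it.  */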
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

rtx
reachable_handlers (insn)
     rtx insn;
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }
  else if (GET_CODE (insn) == JUMP_INSN
           && GET_CODE (PATTERN (insn)) == RESX)
    region = region->outer;

  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
      break;

  return info.handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

int
can_throw_internal (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return 0;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
        {
          rtx sub = XEXP (PATTERN (insn), i);
          for (; sub; sub = NEXT_INSN (sub))
            if (can_throw_internal (sub))
              return 1;
        }
      return 0;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return 0;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
        return 0;
      if (how != RNL_NOT_CAUGHT)
        return 1;
    }

  return 0;
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

int
can_throw_external (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return 0;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
        {
          rtx sub = XEXP (PATTERN (insn), i);
          for (; sub; sub = NEXT_INSN (sub))
            if (can_throw_external (sub))
              return 1;
        }
      return 0;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
         exception handling region in this function.  We have to
         assume it might throw.  Given that the front end and middle
         ends mark known NOTHROW functions, this isn't so wildly
         inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return 0;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return 0;

  return 1;
}
/* True if nothing in this function can throw outside this function.  */

int
nothrow_function_p ()
{
  rtx insn;

  if (! flag_exceptions)
    return 1;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      return 0;
  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      return 0;

  return 1;
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
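/* A minimal usage sketch (illustrative only, not part of this file): a
   runtime that wants to inspect call-saved registers spilled into its own
   frame might write

       __builtin_unwind_init ();
       fp = __builtin_frame_address (0);

   so that all call-saved registers are forced into the frame before the
   frame address is taken; `fp' is a placeholder name.  */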
rtx
expand_builtin_eh_return_data_regno (arglist)
     tree arglist;
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
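/* Usage sketch (illustrative): the unwind runtime calls this builtin with
   small constant arguments, e.g.

       __builtin_eh_return_data_regno (0)

   to learn which hard register carries the first piece of exception data,
   or -1 if the target provides none.  */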
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (stackadj_tree, handler_tree)
     tree stackadj_tree, handler_tree;
{
  rtx stackadj, handler;

  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);

  if (! cfun->eh->ehr_label)
    {
      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
      cfun->eh->ehr_handler = copy_to_reg (handler);
      cfun->eh->ehr_label = gen_label_rtx ();
    }
  else
    {
      if (stackadj != cfun->eh->ehr_stackadj)
        emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
      if (handler != cfun->eh->ehr_handler)
        emit_move_insn (cfun->eh->ehr_handler, handler);
    }

  emit_jump (cfun->eh->ehr_label);
}
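/* Usage sketch (illustrative): the unwinder's "install context" step
   ultimately expands to something like

       __builtin_eh_return (offset, handler);

   where `offset' is the stack adjustment and `handler' the landing pad
   address; both names are placeholders here.  The epilogue then resumes
   execution at HANDLER rather than at the normal return address.  */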
void
expand_eh_return ()
{
  rtx sa, ra, around_label;

  if (! cfun->eh->ehr_label)
    return;

  sa = EH_RETURN_STACKADJ_RTX;
  if (! sa)
    {
      error ("__builtin_eh_return not supported on this target");
      return;
    }

  current_function_calls_eh_return = 1;

  around_label = gen_label_rtx ();
  emit_move_insn (sa, const0_rtx);
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
  else
#endif
    {
      rtx handler;

      ra = EH_RETURN_HANDLER_RTX;
      if (! ra)
        {
          error ("__builtin_eh_return not supported on this target");
          ra = gen_reg_rtx (Pmode);
        }

      emit_move_insn (sa, cfun->eh->ehr_stackadj);

      handler = cfun->eh->ehr_handler;
      if (GET_MODE (ra) != Pmode)
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          handler = convert_memory_address (GET_MODE (ra), handler);
#else
          handler = convert_to_mode (GET_MODE (ra), handler, 0);
#endif
        }
      emit_move_insn (ra, handler);
    }

  emit_label (around_label);
}
struct action_record
{
  int offset;
  int filter;
  int next;
};

static int
action_record_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (pentry)
     const PTR pentry;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
         record is a "self-relative" byte offset, or zero to indicate
         that there is no next record.  So convert the absolute 1 based
         indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
        next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
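/* Worked example (values chosen purely for illustration): adding a record
   with filter 1 and no chained record pushes the bytes {0x01, 0x00} and
   yields offset 1.  Adding a second record with filter 3 chained to the
   first starts at offset 3; after its filter byte is pushed the array holds
   3 bytes, so the link becomes 1 - (3 + 1) = -3, encoded as the single
   sleb128 byte 0x7d, a self-relative reference back to the first record.  */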
static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
         there are special cases to look out for.  If there are *only*
         cleanups along a path, then it compresses to a zero action.
         Further, if there are multiple cleanups along a path, we only
         need to represent one of them, as that is enough to trigger
         entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
        return 0;
      for (c = region->outer; c; c = c->outer)
        if (c->type == ERT_CLEANUP)
          return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
         If there's a catch-all handler, then we don't need to
         search outer regions.  Use a magic -3 value to record
         that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c; c = c->u.catch.prev_catch)
        {
          if (c->u.catch.type == NULL)
            next = add_action_record (ar_hash, c->u.catch.filter, 0);
          else
            {
              if (next == -3)
                {
                  next = collect_one_action_chain (ar_hash, region->outer);
                  if (next < 0)
                    next = 0;
                }
              next = add_action_record (ar_hash, c->u.catch.filter, next);
            }
        }
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
         beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
                                next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
         requires no call-site entry.  Note that this differs from
         the no handler or cleanup case in that we do require an lsda
         to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
         for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
static int
add_call_site (landing_pad, action)
     rtx landing_pad;
     int action;
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = (struct call_site_record *)
        xrealloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        struct eh_region *region;
        int this_action;
        rtx this_landing_pad;

        insn = iter;
        if (GET_CODE (insn) == INSN
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
        if (!note)
          {
            if (! (GET_CODE (insn) == CALL_INSN
                   || (flag_non_call_exceptions
                       && may_trap_p (PATTERN (insn)))))
              continue;
            this_action = -1;
          }
        else
          {
            if (INTVAL (XEXP (note, 0)) <= 0)
              continue;
            region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
            this_action = collect_one_action_chain (ar_hash, region);
          }

        /* Existence of catch handlers, or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          cfun->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
          }

        /* Cleanups and handlers may share action chains but not
           landing pads.  Collect the landing pad for this region.  */
        if (this_action >= 0)
          {
            struct eh_region *o;
            for (o = region; ! o->landing_pad; o = o->outer)
              continue;
            this_landing_pad = o->landing_pad;
          }
        else
          this_landing_pad = NULL_RTX;

        /* Differing actions or landing pads implies a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL_RTX;
                  }

                note = emit_note_after (NOTE_INSN_EH_REGION_END,
                                        last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }
        last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
static void
push_uleb128 (data_area, value)
     varray_type *data_area;
     unsigned int value;
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (data_area, value)
     varray_type *data_area;
     int value;
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
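/* Encoding examples for reference (standard LEB128 arithmetic, shown here
   only as illustration): unsigned 624485 encodes as the bytes 0xe5 0x8e
   0x26; signed -128 encodes as 0x80 0x7f; small values such as 2 and -2
   fit in the single bytes 0x02 and 0x7e.  */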
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table ()
{
  const char *function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
                                    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
                                      "landing pad");
      else
        dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
                            "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
                              "landing pad");
      else
        dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
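/* For reference (a summary of the code above, not a normative format
   description): each call-site record carries four fields -- the region
   start (offset of the LEHB label from the function start), the region
   length (LEHE - LEHB), the landing pad offset (or 0 when there is none),
   and the uleb128 action, which is 0 when there is no action record or
   otherwise the 1-based byte offset of the first action record as set up
   by add_call_site above.  */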
static void
sjlj_output_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
void
output_function_exception_table ()
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int funcdef_number;
  int tt_format_size;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

  funcdef_number = (USING_SJLJ_EXCEPTIONS
                    ? sjlj_funcdef_number
                    : current_funcdef_number);

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  exception_section ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
#endif
    }
  tt_format_size = size_of_encoded_value (tt_format);

  assemble_eh_align (tt_format_size * BITS_PER_UNIT);

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));
#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
                                   funcdef_number);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
                    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
                       * tt_format_size));

      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
                               funcdef_number);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
                               funcdef_number);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_eh_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);

      if (type == NULL_TREE)
        type = integer_zero_node;
      else
        type = lookup_type_for_runtime (type);

      dw2_asm_output_encoded_addr_rtx (tt_format,
                                       expand_expr (type, NULL_RTX, VOIDmode,
                                                    EXPAND_INITIALIZER),
                                       NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
                         (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);

  if (USING_SJLJ_EXCEPTIONS)
    sjlj_funcdef_number += 1;
}
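/* For reference, a sketch of what the function above emits (derived from
   the code, not a normative description of the format):

       1 byte    @LPStart format (DW_EH_PE_omit here, so @LPStart == @Start)
       1 byte    @TType format
       uleb128   @TType base offset            (only when @TType data exists)
       1 byte    call-site table format
       uleb128   call-site table length
                 call-site records
                 action record table
                 @TType data (type table), suitably aligned
                 exception specification table  */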