/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
#include "coretypes.h"
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */

#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
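/* (A target with DWARF-2 style exception handling normally supplies these
   macros itself; a hypothetical definition, not taken from any particular
   port, might look like
   "#define EH_RETURN_DATA_REGNO(N) ((N) < 2 ? (N) : INVALID_REGNUM)",
   mapping the first couple of indices to the hard registers in which the
   unwinder hands the exception pointer and filter value to a landing pad.
   The fallbacks above merely disable the feature for sjlj targets.)  */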
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;
/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       otherwise.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry)))
    exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static int t2r_eq PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash PARAMS ((const PTR));
static void add_type_for_runtime PARAMS ((tree));
static tree lookup_type_for_runtime PARAMS ((tree));

static struct eh_region *expand_eh_region_end PARAMS ((void));

static rtx get_exception_filter PARAMS ((struct function *));

static void collect_eh_region_array PARAMS ((void));
static void resolve_fixup_regions PARAMS ((void));
static void remove_fixup_regions PARAMS ((void));
static void remove_unreachable_regions PARAMS ((rtx));
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1
  PARAMS ((struct eh_region *, struct inline_remap *));
static void duplicate_eh_region_2
  PARAMS ((struct eh_region *, struct eh_region **));

static int ttypes_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash PARAMS ((const PTR));
static int ehspec_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash PARAMS ((const PTR));
static int add_ttypes_entry PARAMS ((htab_t, tree));
static int add_ehspec_entry PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values PARAMS ((void));
static void build_post_landing_pads PARAMS ((void));
static void connect_post_landing_pads PARAMS ((void));
static void dw2_build_landing_pads PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
  PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
  PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
  PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter PARAMS ((rtx));
static void sjlj_emit_function_exit PARAMS ((void));
static void sjlj_emit_dispatch_table
  PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads PARAMS ((void));

static hashval_t ehl_hash PARAMS ((const PTR));
static int ehl_eq PARAMS ((const PTR, const PTR));
static void add_ehl_entry PARAMS ((rtx, struct eh_region *));
static void remove_exception_handler_label PARAMS ((rtx));
static void remove_eh_handler PARAMS ((struct eh_region *));
static int for_each_eh_label_1 PARAMS ((PTR *, PTR));
struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};
static int check_handled PARAMS ((tree, tree));
static void add_reachable_handler
  PARAMS ((struct reachable_info *, struct eh_region *,
	   struct eh_region *));
static enum reachable_code reachable_next_level
  PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash PARAMS ((const PTR));
static int add_action_record PARAMS ((htab_t, int, int));
static int collect_one_action_chain PARAMS ((htab_t, struct eh_region *));
static int add_call_site PARAMS ((rtx, int));

static void push_uleb128 PARAMS ((varray_type *, unsigned int));
static void push_sleb128 PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table PARAMS ((void));
static int sjlj_size_of_call_site_table PARAMS ((void));
#endif
static void dw2_output_call_site_table PARAMS ((void));
static void sjlj_output_call_site_table PARAMS ((void));
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
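/* (Callers conventionally guard exception-specific expansion with
   something like "if (! doing_eh (1)) return;" -- a sketch of the
   convention, not a reference to any particular call site.)  */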
389 if (! flag_exceptions
)
392 type_to_runtime_map
= htab_create_ggc (31, t2r_hash
, t2r_eq
, NULL
);
394 /* Create the SjLj_Function_Context structure. This should match
395 the definition in unwind-sjlj.c. */
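  /* For orientation, the runtime-side layout being mirrored here is
     roughly the following (a sketch of the unwind-sjlj.c structure of
     this era, not a verbatim copy):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;
	  int call_site;
	  _Unwind_Word data[4];
	  _Unwind_Personality_Fn personality;
	  void *lsda;
	  ... jbuf, whose size is target-dependent ...
	};

     The fields built below (__prev, __call_site, __data, __personality,
     __lsda, __jbuf) correspond one-to-one.  */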
396 if (USING_SJLJ_EXCEPTIONS
)
398 tree f_jbuf
, f_per
, f_lsda
, f_prev
, f_cs
, f_data
, tmp
;
400 sjlj_fc_type_node
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
402 f_prev
= build_decl (FIELD_DECL
, get_identifier ("__prev"),
403 build_pointer_type (sjlj_fc_type_node
));
404 DECL_FIELD_CONTEXT (f_prev
) = sjlj_fc_type_node
;
406 f_cs
= build_decl (FIELD_DECL
, get_identifier ("__call_site"),
408 DECL_FIELD_CONTEXT (f_cs
) = sjlj_fc_type_node
;
410 tmp
= build_index_type (build_int_2 (4 - 1, 0));
411 tmp
= build_array_type ((*lang_hooks
.types
.type_for_mode
) (word_mode
, 1),
413 f_data
= build_decl (FIELD_DECL
, get_identifier ("__data"), tmp
);
414 DECL_FIELD_CONTEXT (f_data
) = sjlj_fc_type_node
;
416 f_per
= build_decl (FIELD_DECL
, get_identifier ("__personality"),
418 DECL_FIELD_CONTEXT (f_per
) = sjlj_fc_type_node
;
420 f_lsda
= build_decl (FIELD_DECL
, get_identifier ("__lsda"),
422 DECL_FIELD_CONTEXT (f_lsda
) = sjlj_fc_type_node
;
424 #ifdef DONT_USE_BUILTIN_SETJMP
426 tmp
= build_int_2 (JMP_BUF_SIZE
- 1, 0);
428 /* Should be large enough for most systems, if it is not,
429 JMP_BUF_SIZE should be defined with the proper value. It will
430 also tend to be larger than necessary for most systems, a more
431 optimal port will define JMP_BUF_SIZE. */
432 tmp
= build_int_2 (FIRST_PSEUDO_REGISTER
+ 2 - 1, 0);
435 /* This is 2 for builtin_setjmp, plus whatever the target requires
436 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
437 tmp
= build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
))
438 / GET_MODE_SIZE (Pmode
)) + 2 - 1, 0);
440 tmp
= build_index_type (tmp
);
441 tmp
= build_array_type (ptr_type_node
, tmp
);
442 f_jbuf
= build_decl (FIELD_DECL
, get_identifier ("__jbuf"), tmp
);
443 #ifdef DONT_USE_BUILTIN_SETJMP
444 /* We don't know what the alignment requirements of the
445 runtime's jmp_buf has. Overestimate. */
446 DECL_ALIGN (f_jbuf
) = BIGGEST_ALIGNMENT
;
447 DECL_USER_ALIGN (f_jbuf
) = 1;
449 DECL_FIELD_CONTEXT (f_jbuf
) = sjlj_fc_type_node
;
451 TYPE_FIELDS (sjlj_fc_type_node
) = f_prev
;
452 TREE_CHAIN (f_prev
) = f_cs
;
453 TREE_CHAIN (f_cs
) = f_data
;
454 TREE_CHAIN (f_data
) = f_per
;
455 TREE_CHAIN (f_per
) = f_lsda
;
456 TREE_CHAIN (f_lsda
) = f_jbuf
;
458 layout_type (sjlj_fc_type_node
);
460 /* Cache the interesting field offsets so that we have
461 easy access from rtl. */
462 sjlj_fc_call_site_ofs
463 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs
), 1)
464 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs
), 1) / BITS_PER_UNIT
);
466 = (tree_low_cst (DECL_FIELD_OFFSET (f_data
), 1)
467 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data
), 1) / BITS_PER_UNIT
);
468 sjlj_fc_personality_ofs
469 = (tree_low_cst (DECL_FIELD_OFFSET (f_per
), 1)
470 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per
), 1) / BITS_PER_UNIT
);
472 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda
), 1)
473 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda
), 1) / BITS_PER_UNIT
);
475 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf
), 1)
476 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf
), 1) / BITS_PER_UNIT
);
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *)
    ggc_alloc_cleared (sizeof (struct eh_status));
}
487 /* Start an exception handling region. All instructions emitted
488 after this point are considered to be part of the region until
489 expand_eh_region_end is invoked. */
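/* (A front end typically brackets a protected construct like

	expand_eh_region_start ();
	... expand the guarded statements ...
	expand_eh_region_end_cleanup (cleanup_expr);

   so that everything emitted in between belongs to the new region.  This
   is a sketch of the calling convention, not a particular caller.)  */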
492 expand_eh_region_start ()
494 struct eh_region
*new_region
;
495 struct eh_region
*cur_region
;
501 /* Insert a new blank region as a leaf in the tree. */
502 new_region
= (struct eh_region
*) ggc_alloc_cleared (sizeof (*new_region
));
503 cur_region
= cfun
->eh
->cur_region
;
504 new_region
->outer
= cur_region
;
507 new_region
->next_peer
= cur_region
->inner
;
508 cur_region
->inner
= new_region
;
512 new_region
->next_peer
= cfun
->eh
->region_tree
;
513 cfun
->eh
->region_tree
= new_region
;
515 cfun
->eh
->cur_region
= new_region
;
517 /* Create a note marking the start of this region. */
518 new_region
->region_number
= ++cfun
->eh
->last_region_number
;
519 note
= emit_note (NULL
, NOTE_INSN_EH_REGION_BEG
);
520 NOTE_EH_HANDLER (note
) = new_region
->region_number
;
523 /* Common code to end a region. Returns the region just ended. */
525 static struct eh_region
*
526 expand_eh_region_end ()
528 struct eh_region
*cur_region
= cfun
->eh
->cur_region
;
531 /* Create a note marking the end of this region. */
532 note
= emit_note (NULL
, NOTE_INSN_EH_REGION_END
);
533 NOTE_EH_HANDLER (note
) = cur_region
->region_number
;
536 cfun
->eh
->cur_region
= cur_region
->outer
;
541 /* End an exception handling region for a cleanup. HANDLER is an
542 expression to expand for the cleanup. */
545 expand_eh_region_end_cleanup (handler
)
548 struct eh_region
*region
;
549 tree protect_cleanup_actions
;
556 region
= expand_eh_region_end ();
557 region
->type
= ERT_CLEANUP
;
558 region
->label
= gen_label_rtx ();
559 region
->u
.cleanup
.exp
= handler
;
560 region
->u
.cleanup
.prev_try
= cfun
->eh
->try_region
;
562 around_label
= gen_label_rtx ();
563 emit_jump (around_label
);
565 emit_label (region
->label
);
567 if (flag_non_call_exceptions
568 || flag_forced_unwind_exceptions
569 || region
->may_contain_throw
)
571 /* Give the language a chance to specify an action to be taken if an
572 exception is thrown that would propagate out of the HANDLER. */
573 protect_cleanup_actions
574 = (lang_protect_cleanup_actions
575 ? (*lang_protect_cleanup_actions
) ()
578 if (protect_cleanup_actions
)
579 expand_eh_region_start ();
581 /* In case this cleanup involves an inline destructor with a try block in
582 it, we need to save the EH return data registers around it. */
583 data_save
[0] = gen_reg_rtx (ptr_mode
);
584 emit_move_insn (data_save
[0], get_exception_pointer (cfun
));
585 data_save
[1] = gen_reg_rtx (word_mode
);
586 emit_move_insn (data_save
[1], get_exception_filter (cfun
));
588 expand_expr (handler
, const0_rtx
, VOIDmode
, 0);
590 emit_move_insn (cfun
->eh
->exc_ptr
, data_save
[0]);
591 emit_move_insn (cfun
->eh
->filter
, data_save
[1]);
593 if (protect_cleanup_actions
)
594 expand_eh_region_end_must_not_throw (protect_cleanup_actions
);
596 /* We need any stack adjustment complete before the around_label. */
597 do_pending_stack_adjust ();
600 /* We delay the generation of the _Unwind_Resume until we generate
601 landing pads. We emit a marker here so as to get good control
602 flow data in the meantime. */
604 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
607 emit_label (around_label
);
610 /* End an exception handling region for a try block, and prepares
611 for subsequent calls to expand_start_catch. */
614 expand_start_all_catch ()
616 struct eh_region
*region
;
621 region
= expand_eh_region_end ();
622 region
->type
= ERT_TRY
;
623 region
->u
.try.prev_try
= cfun
->eh
->try_region
;
624 region
->u
.try.continue_label
= gen_label_rtx ();
626 cfun
->eh
->try_region
= region
;
628 emit_jump (region
->u
.try.continue_label
);
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with potentially several
   exception types, which is useful e.g. for Ada.  */
637 expand_start_catch (type_or_list
)
640 struct eh_region
*t
, *c
, *l
;
646 type_list
= type_or_list
;
650 /* Ensure to always end up with a type list to normalize further
651 processing, then register each type against the runtime types
655 if (TREE_CODE (type_or_list
) != TREE_LIST
)
656 type_list
= tree_cons (NULL_TREE
, type_or_list
, NULL_TREE
);
658 type_node
= type_list
;
659 for (; type_node
; type_node
= TREE_CHAIN (type_node
))
660 add_type_for_runtime (TREE_VALUE (type_node
));
663 expand_eh_region_start ();
665 t
= cfun
->eh
->try_region
;
666 c
= cfun
->eh
->cur_region
;
668 c
->u
.catch.type_list
= type_list
;
669 c
->label
= gen_label_rtx ();
671 l
= t
->u
.try.last_catch
;
672 c
->u
.catch.prev_catch
= l
;
674 l
->u
.catch.next_catch
= c
;
677 t
->u
.try.last_catch
= c
;
679 emit_label (c
->label
);
682 /* End a catch clause. Control will resume after the try/catch block. */
687 struct eh_region
*try_region
;
692 expand_eh_region_end ();
693 try_region
= cfun
->eh
->try_region
;
695 emit_jump (try_region
->u
.try.continue_label
);
698 /* End a sequence of catch handlers for a try block. */
701 expand_end_all_catch ()
703 struct eh_region
*try_region
;
708 try_region
= cfun
->eh
->try_region
;
709 cfun
->eh
->try_region
= try_region
->u
.try.prev_try
;
711 emit_label (try_region
->u
.try.continue_label
);
714 /* End an exception region for an exception type filter. ALLOWED is a
715 TREE_LIST of types to be matched by the runtime. FAILURE is an
716 expression to invoke if a mismatch occurs.
718 ??? We could use these semantics for calls to rethrow, too; if we can
719 see the surrounding catch clause, we know that the exception we're
720 rethrowing satisfies the "filter" of the catch type. */
723 expand_eh_region_end_allowed (allowed
, failure
)
724 tree allowed
, failure
;
726 struct eh_region
*region
;
732 region
= expand_eh_region_end ();
733 region
->type
= ERT_ALLOWED_EXCEPTIONS
;
734 region
->u
.allowed
.type_list
= allowed
;
735 region
->label
= gen_label_rtx ();
737 for (; allowed
; allowed
= TREE_CHAIN (allowed
))
738 add_type_for_runtime (TREE_VALUE (allowed
));
740 /* We must emit the call to FAILURE here, so that if this function
741 throws a different exception, that it will be processed by the
744 around_label
= gen_label_rtx ();
745 emit_jump (around_label
);
747 emit_label (region
->label
);
748 expand_expr (failure
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
749 /* We must adjust the stack before we reach the AROUND_LABEL because
750 the call to FAILURE does not occur on all paths to the
752 do_pending_stack_adjust ();
754 emit_label (around_label
);
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */
766 expand_eh_region_end_must_not_throw (failure
)
769 struct eh_region
*region
;
775 region
= expand_eh_region_end ();
776 region
->type
= ERT_MUST_NOT_THROW
;
777 region
->label
= gen_label_rtx ();
779 /* We must emit the call to FAILURE here, so that if this function
780 throws a different exception, that it will be processed by the
783 around_label
= gen_label_rtx ();
784 emit_jump (around_label
);
786 emit_label (region
->label
);
787 expand_expr (failure
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
789 emit_label (around_label
);
792 /* End an exception region for a throw. No handling goes on here,
793 but it's the easiest way for the front-end to indicate what type
797 expand_eh_region_end_throw (type
)
800 struct eh_region
*region
;
805 region
= expand_eh_region_end ();
806 region
->type
= ERT_THROW
;
807 region
->u
.throw.type
= type
;
810 /* End a fixup region. Within this region the cleanups for the immediately
811 enclosing region are _not_ run. This is used for goto cleanup to avoid
812 destroying an object twice.
814 This would be an extraordinarily simple prospect, were it not for the
815 fact that we don't actually know what the immediately enclosing region
816 is. This surprising fact is because expand_cleanups is currently
817 generating a sequence that it will insert somewhere else. We collect
818 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
821 expand_eh_region_end_fixup (handler
)
824 struct eh_region
*fixup
;
829 fixup
= expand_eh_region_end ();
830 fixup
->type
= ERT_FIXUP
;
831 fixup
->u
.fixup
.cleanup_exp
= handler
;
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw ()
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}
/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
882 /* This section is for the exception handling specific optimization pass. */
884 /* Random access the exception region tree. It's just as simple to
885 collect the regions this way as in expand_eh_region_start, but
886 without having to realloc memory. */
889 collect_eh_region_array ()
891 struct eh_region
**array
, *i
;
893 i
= cfun
->eh
->region_tree
;
897 array
= ggc_alloc_cleared ((cfun
->eh
->last_region_number
+ 1)
899 cfun
->eh
->region_array
= array
;
903 array
[i
->region_number
] = i
;
905 /* If there are sub-regions, process them. */
908 /* If there are peers, process them. */
909 else if (i
->next_peer
)
911 /* Otherwise, step back up the tree to the next peer. */
918 } while (i
->next_peer
== NULL
);
925 resolve_fixup_regions ()
927 int i
, j
, n
= cfun
->eh
->last_region_number
;
929 for (i
= 1; i
<= n
; ++i
)
931 struct eh_region
*fixup
= cfun
->eh
->region_array
[i
];
932 struct eh_region
*cleanup
= 0;
934 if (! fixup
|| fixup
->type
!= ERT_FIXUP
)
937 for (j
= 1; j
<= n
; ++j
)
939 cleanup
= cfun
->eh
->region_array
[j
];
940 if (cleanup
&& cleanup
->type
== ERT_CLEANUP
941 && cleanup
->u
.cleanup
.exp
== fixup
->u
.fixup
.cleanup_exp
)
947 fixup
->u
.fixup
.real_region
= cleanup
->outer
;
951 /* Now that we've discovered what region actually encloses a fixup,
952 we can shuffle pointers and remove them from the tree. */
955 remove_fixup_regions ()
959 struct eh_region
*fixup
;
961 /* Walk the insn chain and adjust the REG_EH_REGION numbers
962 for instructions referencing fixup regions. This is only
963 strictly necessary for fixup regions with no parent, but
964 doesn't hurt to do it for all regions. */
965 for (insn
= get_insns(); insn
; insn
= NEXT_INSN (insn
))
967 && (note
= find_reg_note (insn
, REG_EH_REGION
, NULL
))
968 && INTVAL (XEXP (note
, 0)) > 0
969 && (fixup
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))])
970 && fixup
->type
== ERT_FIXUP
)
972 if (fixup
->u
.fixup
.real_region
)
973 XEXP (note
, 0) = GEN_INT (fixup
->u
.fixup
.real_region
->region_number
);
975 remove_note (insn
, note
);
978 /* Remove the fixup regions from the tree. */
979 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
981 fixup
= cfun
->eh
->region_array
[i
];
985 /* Allow GC to maybe free some memory. */
986 if (fixup
->type
== ERT_CLEANUP
)
987 fixup
->u
.cleanup
.exp
= NULL_TREE
;
989 if (fixup
->type
!= ERT_FIXUP
)
994 struct eh_region
*parent
, *p
, **pp
;
996 parent
= fixup
->u
.fixup
.real_region
;
998 /* Fix up the children's parent pointers; find the end of
1000 for (p
= fixup
->inner
; ; p
= p
->next_peer
)
1007 /* In the tree of cleanups, only outer-inner ordering matters.
1008 So link the children back in anywhere at the correct level. */
1010 pp
= &parent
->inner
;
1012 pp
= &cfun
->eh
->region_tree
;
1015 fixup
->inner
= NULL
;
1018 remove_eh_handler (fixup
);
1022 /* Remove all regions whose labels are not reachable from insns. */
1025 remove_unreachable_regions (insns
)
1028 int i
, *uid_region_num
;
1030 struct eh_region
*r
;
1033 uid_region_num
= xcalloc (get_max_uid (), sizeof(int));
1034 reachable
= xcalloc (cfun
->eh
->last_region_number
+ 1, sizeof(bool));
1036 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1038 r
= cfun
->eh
->region_array
[i
];
1039 if (!r
|| r
->region_number
!= i
)
1044 if (uid_region_num
[INSN_UID (r
->resume
)])
1046 uid_region_num
[INSN_UID (r
->resume
)] = i
;
1050 if (uid_region_num
[INSN_UID (r
->label
)])
1052 uid_region_num
[INSN_UID (r
->label
)] = i
;
1054 if (r
->type
== ERT_TRY
&& r
->u
.try.continue_label
)
1056 if (uid_region_num
[INSN_UID (r
->u
.try.continue_label
)])
1058 uid_region_num
[INSN_UID (r
->u
.try.continue_label
)] = i
;
1062 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1063 reachable
[uid_region_num
[INSN_UID (insn
)]] = true;
1065 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1067 r
= cfun
->eh
->region_array
[i
];
1068 if (r
&& r
->region_number
== i
&& !reachable
[i
])
1070 /* Don't remove ERT_THROW regions if their outer region
1072 if (r
->type
== ERT_THROW
1074 && reachable
[r
->outer
->region_number
])
1077 remove_eh_handler (r
);
1082 free (uid_region_num
);
1085 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1086 can_throw instruction in the region. */
1089 convert_from_eh_region_ranges_1 (pinsns
, orig_sp
, cur
)
1097 for (insn
= *pinsns
; insn
; insn
= next
)
1099 next
= NEXT_INSN (insn
);
1100 if (GET_CODE (insn
) == NOTE
)
1102 int kind
= NOTE_LINE_NUMBER (insn
);
1103 if (kind
== NOTE_INSN_EH_REGION_BEG
1104 || kind
== NOTE_INSN_EH_REGION_END
)
1106 if (kind
== NOTE_INSN_EH_REGION_BEG
)
1108 struct eh_region
*r
;
1111 cur
= NOTE_EH_HANDLER (insn
);
1113 r
= cfun
->eh
->region_array
[cur
];
1114 if (r
->type
== ERT_FIXUP
)
1116 r
= r
->u
.fixup
.real_region
;
1117 cur
= r
? r
->region_number
: 0;
1119 else if (r
->type
== ERT_CATCH
)
1122 cur
= r
? r
->region_number
: 0;
1128 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1129 requires extra care to adjust sequence start. */
1130 if (insn
== *pinsns
)
1136 else if (INSN_P (insn
))
1142 /* An existing region note may be present to suppress
1143 exception handling. Anything with a note value of -1
1144 cannot throw an exception of any kind. A note value
1145 of 0 means that "normal" exceptions are suppressed,
1146 but not necessarily "forced unwind" exceptions. */
1147 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1150 if (flag_forced_unwind_exceptions
1151 && INTVAL (XEXP (note
, 0)) >= 0)
1152 XEXP (note
, 0) = GEN_INT (cur
);
1156 /* Calls can always potentially throw exceptions; if we wanted
1157 exceptions for non-call insns, then any may_trap_p
1158 instruction can throw. */
1159 if (GET_CODE (insn
) != CALL_INSN
1160 && (!flag_non_call_exceptions
1161 || GET_CODE (PATTERN (insn
)) == CLOBBER
1162 || GET_CODE (PATTERN (insn
)) == USE
1163 || !may_trap_p (PATTERN (insn
))))
1166 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_EH_REGION
,
1174 if (GET_CODE (insn
) == CALL_INSN
1175 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
1177 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 0),
1179 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 1),
1181 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn
), 2),
1192 convert_from_eh_region_ranges ()
1197 collect_eh_region_array ();
1198 resolve_fixup_regions ();
1200 stack
= xmalloc (sizeof (int) * (cfun
->eh
->last_region_number
+ 1));
1201 insns
= get_insns ();
1202 convert_from_eh_region_ranges_1 (&insns
, stack
, 0);
1205 remove_fixup_regions ();
1206 remove_unreachable_regions (insns
);
1210 add_ehl_entry (label
, region
)
1212 struct eh_region
*region
;
1214 struct ehl_map_entry
**slot
, *entry
;
1216 LABEL_PRESERVE_P (label
) = 1;
1218 entry
= (struct ehl_map_entry
*) ggc_alloc (sizeof (*entry
));
1219 entry
->label
= label
;
1220 entry
->region
= region
;
1222 slot
= (struct ehl_map_entry
**)
1223 htab_find_slot (cfun
->eh
->exception_handler_label_map
, entry
, INSERT
);
1225 /* Before landing pad creation, each exception handler has its own
1226 label. After landing pad creation, the exception handlers may
1227 share landing pads. This is ok, since maybe_remove_eh_handler
1228 only requires the 1-1 mapping before landing pad creation. */
1229 if (*slot
&& !cfun
->eh
->built_landing_pads
)
1236 find_exception_handler_labels ()
1240 if (cfun
->eh
->exception_handler_label_map
)
1241 htab_empty (cfun
->eh
->exception_handler_label_map
);
1244 /* ??? The expansion factor here (3/2) must be greater than the htab
1245 occupancy factor (4/3) to avoid unnecessary resizing. */
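  /* (Concretely: with N regions the table is created with about 3N/2
     slots, and -- assuming the libiberty hash table grows once it is
     3/4 full -- it would only resize after more than 9N/8 insertions,
     which the at-most-N handler labels never reach.)  */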
1246 cfun
->eh
->exception_handler_label_map
1247 = htab_create_ggc (cfun
->eh
->last_region_number
* 3 / 2,
1248 ehl_hash
, ehl_eq
, NULL
);
1251 if (cfun
->eh
->region_tree
== NULL
)
1254 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1256 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1259 if (! region
|| region
->region_number
!= i
)
1261 if (cfun
->eh
->built_landing_pads
)
1262 lab
= region
->landing_pad
;
1264 lab
= region
->label
;
1267 add_ehl_entry (lab
, region
);
1270 /* For sjlj exceptions, need the return label to remain live until
1271 after landing pad generation. */
1272 if (USING_SJLJ_EXCEPTIONS
&& ! cfun
->eh
->built_landing_pads
)
1273 add_ehl_entry (return_label
, NULL
);
1277 current_function_has_exception_handlers ()
1281 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1283 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1285 if (! region
|| region
->region_number
!= i
)
1287 if (region
->type
!= ERT_THROW
)
1294 static struct eh_region
*
1295 duplicate_eh_region_1 (o
, map
)
1296 struct eh_region
*o
;
1297 struct inline_remap
*map
;
1300 = (struct eh_region
*) ggc_alloc_cleared (sizeof (struct eh_region
));
1302 n
->region_number
= o
->region_number
+ cfun
->eh
->last_region_number
;
1308 case ERT_MUST_NOT_THROW
:
1312 if (o
->u
.try.continue_label
)
1313 n
->u
.try.continue_label
1314 = get_label_from_map (map
,
1315 CODE_LABEL_NUMBER (o
->u
.try.continue_label
));
1319 n
->u
.catch.type_list
= o
->u
.catch.type_list
;
1322 case ERT_ALLOWED_EXCEPTIONS
:
1323 n
->u
.allowed
.type_list
= o
->u
.allowed
.type_list
;
1327 n
->u
.throw.type
= o
->u
.throw.type
;
1334 n
->label
= get_label_from_map (map
, CODE_LABEL_NUMBER (o
->label
));
1337 n
->resume
= map
->insn_map
[INSN_UID (o
->resume
)];
1338 if (n
->resume
== NULL
)
1346 duplicate_eh_region_2 (o
, n_array
)
1347 struct eh_region
*o
;
1348 struct eh_region
**n_array
;
1350 struct eh_region
*n
= n_array
[o
->region_number
];
1355 n
->u
.try.catch = n_array
[o
->u
.try.catch->region_number
];
1356 n
->u
.try.last_catch
= n_array
[o
->u
.try.last_catch
->region_number
];
1360 if (o
->u
.catch.next_catch
)
1361 n
->u
.catch.next_catch
= n_array
[o
->u
.catch.next_catch
->region_number
];
1362 if (o
->u
.catch.prev_catch
)
1363 n
->u
.catch.prev_catch
= n_array
[o
->u
.catch.prev_catch
->region_number
];
1371 n
->outer
= n_array
[o
->outer
->region_number
];
1373 n
->inner
= n_array
[o
->inner
->region_number
];
1375 n
->next_peer
= n_array
[o
->next_peer
->region_number
];
1379 duplicate_eh_regions (ifun
, map
)
1380 struct function
*ifun
;
1381 struct inline_remap
*map
;
1383 int ifun_last_region_number
= ifun
->eh
->last_region_number
;
1384 struct eh_region
**n_array
, *root
, *cur
;
1387 if (ifun_last_region_number
== 0)
1390 n_array
= xcalloc (ifun_last_region_number
+ 1, sizeof (*n_array
));
1392 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1394 cur
= ifun
->eh
->region_array
[i
];
1395 if (!cur
|| cur
->region_number
!= i
)
1397 n_array
[i
] = duplicate_eh_region_1 (cur
, map
);
1399 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1401 cur
= ifun
->eh
->region_array
[i
];
1402 if (!cur
|| cur
->region_number
!= i
)
1404 duplicate_eh_region_2 (cur
, n_array
);
1407 root
= n_array
[ifun
->eh
->region_tree
->region_number
];
1408 cur
= cfun
->eh
->cur_region
;
1411 struct eh_region
*p
= cur
->inner
;
1414 while (p
->next_peer
)
1416 p
->next_peer
= root
;
1421 for (i
= 1; i
<= ifun_last_region_number
; ++i
)
1422 if (n_array
[i
] && n_array
[i
]->outer
== NULL
)
1423 n_array
[i
]->outer
= cur
;
1427 struct eh_region
*p
= cfun
->eh
->region_tree
;
1430 while (p
->next_peer
)
1432 p
->next_peer
= root
;
1435 cfun
->eh
->region_tree
= root
;
1440 i
= cfun
->eh
->last_region_number
;
1441 cfun
->eh
->last_region_number
= i
+ ifun_last_region_number
;
1447 t2r_eq (pentry
, pdata
)
1451 tree entry
= (tree
) pentry
;
1452 tree data
= (tree
) pdata
;
1454 return TREE_PURPOSE (entry
) == data
;
1461 tree entry
= (tree
) pentry
;
1462 return TYPE_HASH (TREE_PURPOSE (entry
));
1466 add_type_for_runtime (type
)
1471 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1472 TYPE_HASH (type
), INSERT
);
1475 tree runtime
= (*lang_eh_runtime_type
) (type
);
1476 *slot
= tree_cons (type
, runtime
, NULL_TREE
);
1481 lookup_type_for_runtime (type
)
1486 slot
= (tree
*) htab_find_slot_with_hash (type_to_runtime_map
, type
,
1487 TYPE_HASH (type
), NO_INSERT
);
1489 /* We should have always inserted the data earlier. */
1490 return TREE_VALUE (*slot
);
1494 /* Represent an entry in @TTypes for either catch actions
1495 or exception filter actions. */
1496 struct ttypes_filter
GTY(())
1502 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1503 (a tree) for a @TTypes type node we are thinking about adding. */
1506 ttypes_filter_eq (pentry
, pdata
)
1510 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1511 tree data
= (tree
) pdata
;
1513 return entry
->t
== data
;
1517 ttypes_filter_hash (pentry
)
1520 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1521 return TYPE_HASH (entry
->t
);
1524 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1525 exception specification list we are thinking about adding. */
1526 /* ??? Currently we use the type lists in the order given. Someone
1527 should put these in some canonical order. */
1530 ehspec_filter_eq (pentry
, pdata
)
1534 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1535 const struct ttypes_filter
*data
= (const struct ttypes_filter
*) pdata
;
1537 return type_list_equal (entry
->t
, data
->t
);
1540 /* Hash function for exception specification lists. */
1543 ehspec_filter_hash (pentry
)
1546 const struct ttypes_filter
*entry
= (const struct ttypes_filter
*) pentry
;
1550 for (list
= entry
->t
; list
; list
= TREE_CHAIN (list
))
1551 h
= (h
<< 5) + (h
>> 27) + TYPE_HASH (TREE_VALUE (list
));
1555 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1556 up the search. Return the filter value to be used. */
1559 add_ttypes_entry (ttypes_hash
, type
)
1563 struct ttypes_filter
**slot
, *n
;
1565 slot
= (struct ttypes_filter
**)
1566 htab_find_slot_with_hash (ttypes_hash
, type
, TYPE_HASH (type
), INSERT
);
1568 if ((n
= *slot
) == NULL
)
1570 /* Filter value is a 1 based table index. */
1572 n
= (struct ttypes_filter
*) xmalloc (sizeof (*n
));
1574 n
->filter
= VARRAY_ACTIVE_SIZE (cfun
->eh
->ttype_data
) + 1;
1577 VARRAY_PUSH_TREE (cfun
->eh
->ttype_data
, type
);
1583 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1584 to speed up the search. Return the filter value to be used. */
1587 add_ehspec_entry (ehspec_hash
, ttypes_hash
, list
)
1592 struct ttypes_filter
**slot
, *n
;
1593 struct ttypes_filter dummy
;
1596 slot
= (struct ttypes_filter
**)
1597 htab_find_slot (ehspec_hash
, &dummy
, INSERT
);
1599 if ((n
= *slot
) == NULL
)
1601 /* Filter value is a -1 based byte index into a uleb128 buffer. */
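      /* (For instance, the first specification recorded starts at offset 0
	 in ehspec_data and so gets filter value -1; one recorded when 5
	 bytes are already in the buffer gets -6.)  */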
1603 n
= (struct ttypes_filter
*) xmalloc (sizeof (*n
));
1605 n
->filter
= -(VARRAY_ACTIVE_SIZE (cfun
->eh
->ehspec_data
) + 1);
1608 /* Look up each type in the list and encode its filter
1609 value as a uleb128. Terminate the list with 0. */
1610 for (; list
; list
= TREE_CHAIN (list
))
1611 push_uleb128 (&cfun
->eh
->ehspec_data
,
1612 add_ttypes_entry (ttypes_hash
, TREE_VALUE (list
)));
1613 VARRAY_PUSH_UCHAR (cfun
->eh
->ehspec_data
, 0);
1619 /* Generate the action filter values to be used for CATCH and
1620 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1621 we use lots of landing pads, and so every type or list can share
1622 the same filter value, which saves table space. */
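/* (For example, if two distinct catch regions in the function both name
   the same type T, add_ttypes_entry hands back the same index for T both
   times, so T occupies a single @TTypes slot and both regions' action
   records refer to it.)  */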
1625 assign_filter_values ()
1628 htab_t ttypes
, ehspec
;
1630 VARRAY_TREE_INIT (cfun
->eh
->ttype_data
, 16, "ttype_data");
1631 VARRAY_UCHAR_INIT (cfun
->eh
->ehspec_data
, 64, "ehspec_data");
1633 ttypes
= htab_create (31, ttypes_filter_hash
, ttypes_filter_eq
, free
);
1634 ehspec
= htab_create (31, ehspec_filter_hash
, ehspec_filter_eq
, free
);
1636 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1638 struct eh_region
*r
= cfun
->eh
->region_array
[i
];
1640 /* Mind we don't process a region more than once. */
1641 if (!r
|| r
->region_number
!= i
)
1647 /* Whatever type_list is (NULL or true list), we build a list
1648 of filters for the region. */
1649 r
->u
.catch.filter_list
= NULL_TREE
;
1651 if (r
->u
.catch.type_list
!= NULL
)
1653 /* Get a filter value for each of the types caught and store
1654 them in the region's dedicated list. */
1655 tree tp_node
= r
->u
.catch.type_list
;
1657 for (;tp_node
; tp_node
= TREE_CHAIN (tp_node
))
1659 int flt
= add_ttypes_entry (ttypes
, TREE_VALUE (tp_node
));
1660 tree flt_node
= build_int_2 (flt
, 0);
1662 r
->u
.catch.filter_list
1663 = tree_cons (NULL_TREE
, flt_node
, r
->u
.catch.filter_list
);
1668 /* Get a filter value for the NULL list also since it will need
1669 an action record anyway. */
1670 int flt
= add_ttypes_entry (ttypes
, NULL
);
1671 tree flt_node
= build_int_2 (flt
, 0);
1673 r
->u
.catch.filter_list
1674 = tree_cons (NULL_TREE
, flt_node
, r
->u
.catch.filter_list
);
1679 case ERT_ALLOWED_EXCEPTIONS
:
1681 = add_ehspec_entry (ehspec
, ttypes
, r
->u
.allowed
.type_list
);
1689 htab_delete (ttypes
);
1690 htab_delete (ehspec
);
1694 build_post_landing_pads ()
1698 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1700 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1703 /* Mind we don't process a region more than once. */
1704 if (!region
|| region
->region_number
!= i
)
1707 switch (region
->type
)
1710 /* ??? Collect the set of all non-overlapping catch handlers
1711 all the way up the chain until blocked by a cleanup. */
1712 /* ??? Outer try regions can share landing pads with inner
1713 try regions if the types are completely non-overlapping,
1714 and there are no intervening cleanups. */
1716 region
->post_landing_pad
= gen_label_rtx ();
1720 emit_label (region
->post_landing_pad
);
1722 /* ??? It is mighty inconvenient to call back into the
1723 switch statement generation code in expand_end_case.
1724 Rapid prototyping sez a sequence of ifs. */
1726 struct eh_region
*c
;
1727 for (c
= region
->u
.try.catch; c
; c
= c
->u
.catch.next_catch
)
1729 if (c
->u
.catch.type_list
== NULL
)
1731 if (flag_forced_unwind_exceptions
)
1732 emit_cmp_and_jump_insns
1733 (cfun
->eh
->filter
, const0_rtx
, GT
, NULL_RTX
,
1734 word_mode
, 0, c
->label
);
1736 emit_jump (c
->label
);
1740 /* Need for one cmp/jump per type caught. Each type
1741 list entry has a matching entry in the filter list
1742 (see assign_filter_values). */
1743 tree tp_node
= c
->u
.catch.type_list
;
1744 tree flt_node
= c
->u
.catch.filter_list
;
1748 emit_cmp_and_jump_insns
1750 GEN_INT (tree_low_cst (TREE_VALUE (flt_node
), 0)),
1751 EQ
, NULL_RTX
, word_mode
, 0, c
->label
);
1753 tp_node
= TREE_CHAIN (tp_node
);
1754 flt_node
= TREE_CHAIN (flt_node
);
1760 /* We delay the generation of the _Unwind_Resume until we generate
1761 landing pads. We emit a marker here so as to get good control
1762 flow data in the meantime. */
1764 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1770 emit_insn_before (seq
, region
->u
.try.catch->label
);
1773 case ERT_ALLOWED_EXCEPTIONS
:
1774 region
->post_landing_pad
= gen_label_rtx ();
1778 emit_label (region
->post_landing_pad
);
1780 emit_cmp_and_jump_insns (cfun
->eh
->filter
,
1781 GEN_INT (region
->u
.allowed
.filter
),
1782 EQ
, NULL_RTX
, word_mode
, 0, region
->label
);
1784 /* We delay the generation of the _Unwind_Resume until we generate
1785 landing pads. We emit a marker here so as to get good control
1786 flow data in the meantime. */
1788 = emit_jump_insn (gen_rtx_RESX (VOIDmode
, region
->region_number
));
1794 emit_insn_before (seq
, region
->label
);
1798 region
->post_landing_pad
= region
->label
;
1801 case ERT_MUST_NOT_THROW
:
1802 /* See maybe_remove_eh_handler about removing region->label. */
1803 if (flag_forced_unwind_exceptions
&& region
->label
)
1805 region
->post_landing_pad
= gen_label_rtx ();
1809 emit_label (region
->post_landing_pad
);
1810 emit_cmp_and_jump_insns (cfun
->eh
->filter
, const0_rtx
, GT
,
1811 NULL_RTX
, word_mode
, 0, region
->label
);
1814 = emit_jump_insn (gen_rtx_RESX (VOIDmode
,
1815 region
->region_number
));
1821 emit_insn_before (seq
, region
->label
);
1824 region
->post_landing_pad
= region
->label
;
1829 /* Nothing to do. */
1838 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1839 _Unwind_Resume otherwise. */
1842 connect_post_landing_pads ()
1846 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1848 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1849 struct eh_region
*outer
;
1852 /* Mind we don't process a region more than once. */
1853 if (!region
|| region
->region_number
!= i
)
1856 /* If there is no RESX, or it has been deleted by flow, there's
1857 nothing to fix up. */
1858 if (! region
->resume
|| INSN_DELETED_P (region
->resume
))
1861 /* Search for another landing pad in this function. */
1862 for (outer
= region
->outer
; outer
; outer
= outer
->outer
)
1863 if (outer
->post_landing_pad
)
1869 emit_jump (outer
->post_landing_pad
);
1871 emit_library_call (unwind_resume_libfunc
, LCT_THROW
,
1872 VOIDmode
, 1, cfun
->eh
->exc_ptr
, ptr_mode
);
1876 emit_insn_before (seq
, region
->resume
);
1877 delete_insn (region
->resume
);
1883 dw2_build_landing_pads ()
1888 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
1890 struct eh_region
*region
= cfun
->eh
->region_array
[i
];
1892 bool clobbers_hard_regs
= false;
1894 /* Mind we don't process a region more than once. */
1895 if (!region
|| region
->region_number
!= i
)
1898 if (region
->type
!= ERT_CLEANUP
1899 && region
->type
!= ERT_TRY
1900 && region
->type
!= ERT_ALLOWED_EXCEPTIONS
)
1905 region
->landing_pad
= gen_label_rtx ();
1906 emit_label (region
->landing_pad
);
1908 #ifdef HAVE_exception_receiver
1909 if (HAVE_exception_receiver
)
1910 emit_insn (gen_exception_receiver ());
1913 #ifdef HAVE_nonlocal_goto_receiver
1914 if (HAVE_nonlocal_goto_receiver
)
1915 emit_insn (gen_nonlocal_goto_receiver ());
1920 /* If the eh_return data registers are call-saved, then we
1921 won't have considered them clobbered from the call that
1922 threw. Kill them now. */
1925 unsigned r
= EH_RETURN_DATA_REGNO (j
);
1926 if (r
== INVALID_REGNUM
)
1928 if (! call_used_regs
[r
])
1930 emit_insn (gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, r
)));
1931 clobbers_hard_regs
= true;
1935 if (clobbers_hard_regs
)
1937 /* @@@ This is a kludge. Not all machine descriptions define a
1938 blockage insn, but we must not allow the code we just generated
1939 to be reordered by scheduling. So emit an ASM_INPUT to act as
1941 emit_insn (gen_rtx_ASM_INPUT (VOIDmode
, ""));
1944 emit_move_insn (cfun
->eh
->exc_ptr
,
1945 gen_rtx_REG (ptr_mode
, EH_RETURN_DATA_REGNO (0)));
1946 emit_move_insn (cfun
->eh
->filter
,
1947 gen_rtx_REG (word_mode
, EH_RETURN_DATA_REGNO (1)));
1952 emit_insn_before (seq
, region
->post_landing_pad
);
1959 int directly_reachable
;
1962 int call_site_index
;
1966 sjlj_find_directly_reachable_regions (lp_info
)
1967 struct sjlj_lp_info
*lp_info
;
1970 bool found_one
= false;
1972 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1974 struct eh_region
*region
;
1975 enum reachable_code rc
;
1979 if (! INSN_P (insn
))
1982 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1983 if (!note
|| INTVAL (XEXP (note
, 0)) <= 0)
1986 region
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))];
1988 type_thrown
= NULL_TREE
;
1989 if (region
->type
== ERT_THROW
)
1991 type_thrown
= region
->u
.throw.type
;
1992 region
= region
->outer
;
1995 /* Find the first containing region that might handle the exception.
1996 That's the landing pad to which we will transfer control. */
1997 rc
= RNL_NOT_CAUGHT
;
1998 for (; region
; region
= region
->outer
)
2000 rc
= reachable_next_level (region
, type_thrown
, 0);
2001 if (rc
!= RNL_NOT_CAUGHT
)
2005 /* Forced unwind exceptions aren't blocked. */
2006 if (flag_forced_unwind_exceptions
&& rc
== RNL_BLOCKED
)
2008 struct eh_region
*r
;
2009 for (r
= region
->outer
; r
; r
= r
->outer
)
2010 if (r
->type
== ERT_CLEANUP
)
2012 rc
= RNL_MAYBE_CAUGHT
;
2013 if (! region
->label
)
2019 if (rc
== RNL_MAYBE_CAUGHT
|| rc
== RNL_CAUGHT
)
2021 lp_info
[region
->region_number
].directly_reachable
= 1;
2030 sjlj_assign_call_site_values (dispatch_label
, lp_info
)
2032 struct sjlj_lp_info
*lp_info
;
2037 /* First task: build the action table. */
2039 VARRAY_UCHAR_INIT (cfun
->eh
->action_record_data
, 64, "action_record_data");
2040 ar_hash
= htab_create (31, action_record_hash
, action_record_eq
, free
);
2042 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2043 if (lp_info
[i
].directly_reachable
)
2045 struct eh_region
*r
= cfun
->eh
->region_array
[i
];
2046 r
->landing_pad
= dispatch_label
;
2047 lp_info
[i
].action_index
= collect_one_action_chain (ar_hash
, r
);
2048 if (lp_info
[i
].action_index
!= -1)
2049 cfun
->uses_eh_lsda
= 1;
2052 htab_delete (ar_hash
);
2054 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2055 landing pad label for the region. For sjlj though, there is one
2056 common landing pad from which we dispatch to the post-landing pads.
2058 A region receives a dispatch index if it is directly reachable
2059 and requires in-function processing. Regions that share post-landing
2060 pads may share dispatch indices. */
2061 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2062 (see build_post_landing_pads) so we don't bother checking for it. */
2065 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2066 if (lp_info
[i
].directly_reachable
)
2067 lp_info
[i
].dispatch_index
= index
++;
2069 /* Finally: assign call-site values. If dwarf2 terms, this would be
2070 the region number assigned by convert_to_eh_region_ranges, but
2071 handles no-action and must-not-throw differently. */
2074 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2075 if (lp_info
[i
].directly_reachable
)
2077 int action
= lp_info
[i
].action_index
;
2079 /* Map must-not-throw to otherwise unused call-site index 0. */
2082 /* Map no-action to otherwise unused call-site index -1. */
2083 else if (action
== -1)
2085 /* Otherwise, look it up in the table. */
2087 index
= add_call_site (GEN_INT (lp_info
[i
].dispatch_index
), action
);
2089 lp_info
[i
].call_site_index
= index
;
2094 sjlj_mark_call_sites (lp_info
)
2095 struct sjlj_lp_info
*lp_info
;
2097 int last_call_site
= -2;
2100 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2102 struct eh_region
*region
;
2104 rtx note
, before
, p
;
2106 /* Reset value tracking at extended basic block boundaries. */
2107 if (GET_CODE (insn
) == CODE_LABEL
)
2108 last_call_site
= -2;
2110 if (! INSN_P (insn
))
2113 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
2116 /* Calls (and trapping insns) without notes are outside any
2117 exception handling region in this function. Mark them as
2119 if (GET_CODE (insn
) == CALL_INSN
2120 || (flag_non_call_exceptions
2121 && may_trap_p (PATTERN (insn
))))
2122 this_call_site
= -1;
2128 /* Calls that are known to not throw need not be marked. */
2129 if (INTVAL (XEXP (note
, 0)) <= 0)
2132 region
= cfun
->eh
->region_array
[INTVAL (XEXP (note
, 0))];
2133 this_call_site
= lp_info
[region
->region_number
].call_site_index
;
2136 if (this_call_site
== last_call_site
)
      /* Don't separate a call from its argument loads.  */
2141 if (GET_CODE (insn
) == CALL_INSN
)
2142 before
= find_first_parameter_load (insn
, NULL_RTX
);
2145 mem
= adjust_address (cfun
->eh
->sjlj_fc
, TYPE_MODE (integer_type_node
),
2146 sjlj_fc_call_site_ofs
);
2147 emit_move_insn (mem
, GEN_INT (this_call_site
));
2151 emit_insn_before (p
, before
);
2152 last_call_site
= this_call_site
;
2156 /* Construct the SjLj_Function_Context. */
2159 sjlj_emit_function_enter (dispatch_label
)
2162 rtx fn_begin
, fc
, mem
, seq
;
2164 fc
= cfun
->eh
->sjlj_fc
;
2168 /* We're storing this libcall's address into memory instead of
2169 calling it directly. Thus, we must call assemble_external_libcall
2170 here, as we can not depend on emit_library_call to do it for us. */
2171 assemble_external_libcall (eh_personality_libfunc
);
2172 mem
= adjust_address (fc
, Pmode
, sjlj_fc_personality_ofs
);
2173 emit_move_insn (mem
, eh_personality_libfunc
);
2175 mem
= adjust_address (fc
, Pmode
, sjlj_fc_lsda_ofs
);
2176 if (cfun
->uses_eh_lsda
)
2179 ASM_GENERATE_INTERNAL_LABEL (buf
, "LLSDA", current_function_funcdef_no
);
2180 emit_move_insn (mem
, gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
)));
2183 emit_move_insn (mem
, const0_rtx
);
2185 #ifdef DONT_USE_BUILTIN_SETJMP
2188 x
= emit_library_call_value (setjmp_libfunc
, NULL_RTX
, LCT_RETURNS_TWICE
,
2189 TYPE_MODE (integer_type_node
), 1,
2190 plus_constant (XEXP (fc
, 0),
2191 sjlj_fc_jbuf_ofs
), Pmode
);
2193 note
= emit_note (NULL
, NOTE_INSN_EXPECTED_VALUE
);
2194 NOTE_EXPECTED_VALUE (note
) = gen_rtx_EQ (VOIDmode
, x
, const0_rtx
);
2196 emit_cmp_and_jump_insns (x
, const0_rtx
, NE
, 0,
2197 TYPE_MODE (integer_type_node
), 0, dispatch_label
);
2200 expand_builtin_setjmp_setup (plus_constant (XEXP (fc
, 0), sjlj_fc_jbuf_ofs
),
2204 emit_library_call (unwind_sjlj_register_libfunc
, LCT_NORMAL
, VOIDmode
,
2205 1, XEXP (fc
, 0), Pmode
);
2210 /* ??? Instead of doing this at the beginning of the function,
2211 do this in a block that is at loop level 0 and dominates all
2212 can_throw_internal instructions. */
2214 for (fn_begin
= get_insns (); ; fn_begin
= NEXT_INSN (fn_begin
))
2215 if (GET_CODE (fn_begin
) == NOTE
2216 && NOTE_LINE_NUMBER (fn_begin
) == NOTE_INSN_FUNCTION_BEG
)
2218 emit_insn_after (seq
, fn_begin
);
2221 /* Call back from expand_function_end to know where we should put
2222 the call to unwind_sjlj_unregister_libfunc if needed. */
2225 sjlj_emit_function_exit_after (after
)
2228 cfun
->eh
->sjlj_exit_after
= after
;
2232 sjlj_emit_function_exit ()
2238 emit_library_call (unwind_sjlj_unregister_libfunc
, LCT_NORMAL
, VOIDmode
,
2239 1, XEXP (cfun
->eh
->sjlj_fc
, 0), Pmode
);
2244 /* ??? Really this can be done in any block at loop level 0 that
2245 post-dominates all can_throw_internal instructions. This is
2246 the last possible moment. */
2248 emit_insn_after (seq
, cfun
->eh
->sjlj_exit_after
);
2252 sjlj_emit_dispatch_table (dispatch_label
, lp_info
)
2254 struct sjlj_lp_info
*lp_info
;
2256 int i
, first_reachable
;
2257 rtx mem
, dispatch
, seq
, fc
;
2259 fc
= cfun
->eh
->sjlj_fc
;
2263 emit_label (dispatch_label
);
2265 #ifndef DONT_USE_BUILTIN_SETJMP
2266 expand_builtin_setjmp_receiver (dispatch_label
);
2269 /* Load up dispatch index, exc_ptr and filter values from the
2270 function context. */
2271 mem
= adjust_address (fc
, TYPE_MODE (integer_type_node
),
2272 sjlj_fc_call_site_ofs
);
2273 dispatch
= copy_to_reg (mem
);
2275 mem
= adjust_address (fc
, word_mode
, sjlj_fc_data_ofs
);
2276 if (word_mode
!= Pmode
)
2278 #ifdef POINTERS_EXTEND_UNSIGNED
2279 mem
= convert_memory_address (Pmode
, mem
);
2281 mem
= convert_to_mode (Pmode
, mem
, 0);
2284 emit_move_insn (cfun
->eh
->exc_ptr
, mem
);
2286 mem
= adjust_address (fc
, word_mode
, sjlj_fc_data_ofs
+ UNITS_PER_WORD
);
2287 emit_move_insn (cfun
->eh
->filter
, mem
);
2289 /* Jump to one of the directly reachable regions. */
2290 /* ??? This really ought to be using a switch statement. */
2292 first_reachable
= 0;
2293 for (i
= cfun
->eh
->last_region_number
; i
> 0; --i
)
2295 if (! lp_info
[i
].directly_reachable
)
2298 if (! first_reachable
)
2300 first_reachable
= i
;
2304 emit_cmp_and_jump_insns (dispatch
, GEN_INT (lp_info
[i
].dispatch_index
),
2305 EQ
, NULL_RTX
, TYPE_MODE (integer_type_node
), 0,
2306 cfun
->eh
->region_array
[i
]->post_landing_pad
);
2312 emit_insn_before (seq
, (cfun
->eh
->region_array
[first_reachable
]
2313 ->post_landing_pad
));
2317 sjlj_build_landing_pads ()
2319 struct sjlj_lp_info
*lp_info
;
2321 lp_info
= (struct sjlj_lp_info
*) xcalloc (cfun
->eh
->last_region_number
+ 1,
2322 sizeof (struct sjlj_lp_info
));
2324 if (sjlj_find_directly_reachable_regions (lp_info
))
2326 rtx dispatch_label
= gen_label_rtx ();
2329 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node
),
2330 int_size_in_bytes (sjlj_fc_type_node
),
2331 TYPE_ALIGN (sjlj_fc_type_node
));
2333 sjlj_assign_call_site_values (dispatch_label
, lp_info
);
2334 sjlj_mark_call_sites (lp_info
);
2336 sjlj_emit_function_enter (dispatch_label
);
2337 sjlj_emit_dispatch_table (dispatch_label
, lp_info
);
2338 sjlj_emit_function_exit ();
void
finish_eh_generation ()
{
  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}
static hashval_t
ehl_hash (pentry)
     const PTR pentry;
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}
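/* Editorial note (added, not in the original source): this is the classic
   multiplicative (Fibonacci) hash.  Multiplying by 0x9e3779b9, the
   golden-ratio fraction scaled to 32 bits, spreads consecutive label
   numbers across the whole hash range instead of leaving them clustered;
   e.g. label numbers 3 and 4 hash to 3 * 0x9e3779b9 == 0xdaa66d2b and
   4 * 0x9e3779b9 == 0x78dde6e4 (both reduced mod 2^32).  */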
static int
ehl_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (label)
     rtx label;
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    abort ();

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (region)
     struct eh_region *region;
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      int i;
      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
        { cfun->eh->region_array[i] = outer; });
    }

  if (outer)
    {
      if (!outer->aka)
        outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
        bitmap_a_or_b (outer->aka, outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer; p = p->next_peer)
        p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
           try->type == ERT_CATCH;
           try = try->next_peer)
        continue;
      if (try->type != ERT_TRY)
        abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
        next->u.catch.prev_catch = prev;
      else
        try->u.try.last_catch = prev;
      if (prev)
        prev->u.catch.next_catch = next;
      else
        {
          try->u.try.catch = next;
          if (! next)
            remove_eh_handler (try);
        }
    }
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (label)
     rtx label;
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (callback)
     void (*callback) PARAMS ((rtx));
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
                 (void *) callback);
}

static int
for_each_eh_label_1 (pentry, data)
     PTR *pentry;
     PTR data;
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;

  (*callback) (entry->label);
  return 1;
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info GTY(())
{
  tree types_caught;
  tree types_allowed;
  rtx handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (handled, type)
     tree handled, type;
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if (TREE_VALUE (t) == type)
          return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
          return 1;
    }

  return 0;
}
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (info, lp_region, region)
     struct reachable_info *info;
     struct eh_region *lp_region;
     struct eh_region *region;
{
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      if (! info->handlers)
        info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (region, type_thrown, info)
     struct eh_region *region;
     tree type_thrown;
     struct reachable_info *info;
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
         directly to the individual handlers.  In this way we can
         see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
        struct eh_region *c;
        enum reachable_code ret = RNL_NOT_CAUGHT;

        for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
          {
            /* A catch-all handler ends the search.  */
            if (c->u.catch.type_list == NULL)
              {
                add_reachable_handler (info, region, c);
                return RNL_CAUGHT;
              }

            if (type_thrown)
              {
                /* If we have at least one type match, end the search.  */
                tree tp_node = c->u.catch.type_list;

                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (type == type_thrown
                        || (lang_eh_type_covers
                            && (*lang_eh_type_covers) (type, type_thrown)))
                      {
                        add_reachable_handler (info, region, c);
                        return RNL_CAUGHT;
                      }
                  }

                /* If we have definitive information of a match failure,
                   the catch won't trigger.  */
                if (lang_eh_type_covers)
                  return RNL_NOT_CAUGHT;
              }

            /* At this point, we either don't know what type is thrown or
               don't have front-end assistance to help deciding if it is
               covered by one of the types in the list for this region.

               We'd then like to add this region to the list of reachable
               handlers since it is indeed potentially reachable based on the
               information we have.

               Actually, this handler is for sure not reachable if all the
               types it matches have already been caught.  That is, it is only
               potentially reachable if at least one of the types it catches
               has not been previously caught.  */

            if (! info)
              ret = RNL_MAYBE_CAUGHT;
            else
              {
                tree tp_node = c->u.catch.type_list;
                bool maybe_reachable = false;

                /* Compute the potential reachability of this handler and
                   update the list of types caught at the same time.  */
                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (! check_handled (info->types_caught, type))
                      {
                        info->types_caught
                          = tree_cons (NULL, type, info->types_caught);

                        maybe_reachable = true;
                      }
                  }

                if (maybe_reachable)
                  {
                    add_reachable_handler (info, region, c);

                    /* ??? If the catch type is a base class of every allowed
                       type, then we know we can stop the search.  */
                    ret = RNL_MAYBE_CAUGHT;
                  }
              }
          }

        return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }

      /* Collect a list of lists of allowed types for use in detecting
         when a catch may be transformed into a catch-all.  */
      if (info)
        info->types_allowed = tree_cons (NULL_TREE,
                                         region->u.allowed.type_list,
                                         info->types_allowed);

      /* If we have definitive information about the type hierarchy,
         then we can tell if the thrown type will pass through the
         filter.  */
      if (type_thrown && lang_eh_type_covers)
        {
          if (check_handled (region->u.allowed.type_list, type_thrown))
            return RNL_NOT_CAUGHT;
          else
            {
              add_reachable_handler (info, region, region);
              return RNL_CAUGHT;
            }
        }

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
         If we've touched down at some landing pad previous, then the
         explicit function call we generated may be used.  Otherwise
         the call is made by the runtime.  */
      if (info && info->handlers)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }
      else
        return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

rtx
reachable_handlers (insn)
     rtx insn;
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
         region itself may have been deleted out from under us.  */
      if (region == NULL)
        return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
        {
          /* Forced unwind exceptions are neither BLOCKED nor CAUGHT.
             Make sure the cleanup regions are reachable.  */
          if (flag_forced_unwind_exceptions)
            {
              while ((region = region->outer) != NULL)
                if (region->type == ERT_CLEANUP)
                  {
                    add_reachable_handler (&info, region, region);
                    break;
                  }
            }
          break;
        }

      /* If we have processed one cleanup, there is no point in
         processing any more of them.  Each cleanup will have an edge
         to the next outer cleanup region, so the flow graph will be
         accurate.  */
      if (region->type == ERT_CLEANUP)
        region = region->u.cleanup.prev_try;
      else
        region = region->outer;
    }

  return info.handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
        {
          rtx sub = XEXP (PATTERN (insn), i);
          for (; sub ; sub = NEXT_INSN (sub))
            if (can_throw_internal (sub))
              return true;
        }
      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
        return false;
      if (how != RNL_NOT_CAUGHT)
        return true;
    }

  return false;
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
        {
          rtx sub = XEXP (PATTERN (insn), i);
          for (; sub ; sub = NEXT_INSN (sub))
            if (can_throw_external (sub))
              return true;
        }
      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
         exception handling region in this function.  We have to
         assume it might throw.  Given that the front end and middle
         ends mark known NOTHROW functions, this isn't so wildly
         inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  /* Forced unwind exceptions are not catchable.  */
  if (flag_forced_unwind_exceptions && GET_CODE (insn) == CALL_INSN)
    return true;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls.  */

void
set_nothrow_function_flags ()
{
  rtx insn;

  current_function_nothrow = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        current_function_nothrow = 0;

        if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
        current_function_nothrow = 0;

        if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (arglist)
     tree arglist;
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (stackadj_tree, handler_tree)
     tree stackadj_tree, handler_tree;
{
  rtx stackadj, handler;

  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (stackadj) != Pmode)
    stackadj = convert_memory_address (Pmode, stackadj);

  if (GET_MODE (handler) != Pmode)
    handler = convert_memory_address (Pmode, handler);
#endif

  if (! cfun->eh->ehr_label)
    {
      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
      cfun->eh->ehr_handler = copy_to_reg (handler);
      cfun->eh->ehr_label = gen_label_rtx ();
    }
  else
    {
      if (stackadj != cfun->eh->ehr_stackadj)
        emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
      if (handler != cfun->eh->ehr_handler)
        emit_move_insn (cfun->eh->ehr_handler, handler);
    }

  emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return ()
{
  rtx sa, ra, around_label;

  if (! cfun->eh->ehr_label)
    return;

  sa = EH_RETURN_STACKADJ_RTX;
  if (! sa)
    {
      error ("__builtin_eh_return not supported on this target");
      return;
    }

  current_function_calls_eh_return = 1;

  around_label = gen_label_rtx ();
  emit_move_insn (sa, const0_rtx);
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
  else
#endif
    {
      ra = EH_RETURN_HANDLER_RTX;
      if (! ra)
        {
          error ("__builtin_eh_return not supported on this target");
          ra = gen_reg_rtx (Pmode);
        }

      emit_move_insn (sa, cfun->eh->ehr_stackadj);
      emit_move_insn (ra, cfun->eh->ehr_handler);
    }

  emit_label (around_label);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

         0:     null action record, non-null landing pad; implies cleanups
        -1:     null action record, null landing pad; implies no action
        -2:     no call-site entry; implies must_not_throw
        -3:     we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */
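/* Editorial illustration (assumed example, added by the editor, not from
   the original source): for a call inside "try { ... } catch (A) { ... }"
   that is itself wrapped in a cleanup region, collect_one_action_chain
   below ends up with two records: record 1 = { filter 0, next 0 } for the
   cleanup and record 2 = { filter-of-A, next = record 1 } for the catch.
   The call-site entry stores index 2 and the runtime walks 2 -> 1 when
   matching the thrown type.  The special values 0/-1/-2/-3 listed above
   never appear in the encoded table itself; they only steer how the
   call-site entries get emitted.  */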
struct action_record
{
  int offset;
  int filter;
  int next;
};
static int
action_record_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (pentry)
     const PTR pentry;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
         record is a "self-relative" byte offset, or zero to indicate
         that there is no next record.  So convert the absolute 1 based
         indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
        next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
         there are special cases to look out for.  If there are *only*
         cleanups along a path, then it compresses to a zero action.
         Further, if there are multiple cleanups along a path, we only
         need to represent one of them, as that is enough to trigger
         entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
        return 0;
      for (c = region->outer; c ; c = c->outer)
        if (c->type == ERT_CLEANUP)
          return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
         If there's a catch-all handler, then we don't need to
         search outer regions.  Use a magic -3 value to record
         that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
        {
          if (c->u.catch.type_list == NULL)
            {
              int filter;

              /* Forced exceptions run cleanups, always.  Record them if
                 they exist.  */
              next = 0;
              if (flag_forced_unwind_exceptions)
                {
                  struct eh_region *r;
                  for (r = c->outer; r ; r = r->outer)
                    if (r->type == ERT_CLEANUP)
                      {
                        next = add_action_record (ar_hash, 0, 0);
                        break;
                      }
                }

              /* Retrieve the filter from the head of the filter list
                 where we have stored it (see assign_filter_values).  */
              filter = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
              next = add_action_record (ar_hash, filter, next);
            }
          else
            {
              /* Once the outer search is done, trigger an action record for
                 each filter we have.  */
              tree flt_node;

              if (next == -3)
                {
                  next = collect_one_action_chain (ar_hash, region->outer);

                  /* If there is no next action, terminate the chain.  */
                  if (next == -1)
                    next = 0;
                  /* If all outer actions are cleanups or must_not_throw,
                     we'll have no action record for it, since we had wanted
                     to encode these states in the call-site record directly.
                     Add a cleanup action to the chain to catch these.  */
                  else if (next <= 0)
                    next = add_action_record (ar_hash, 0, 0);
                }

              flt_node = c->u.catch.filter_list;
              for (; flt_node ; flt_node = TREE_CHAIN (flt_node))
                {
                  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
                  next = add_action_record (ar_hash, filter, next);
                }
            }
        }
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
         beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
                                next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
         requires no call-site entry.  Note that this differs from
         the no handler or cleanup case in that we do require an lsda
         to be generated.  Return a magic -2 value to record this.  */
      if (flag_forced_unwind_exceptions)
        {
          struct eh_region *r;
          for (r = region->outer; r ; r = r->outer)
            if (r->type == ERT_CLEANUP)
              return add_action_record (ar_hash, 0, 0);
        }
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
         for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
static int
add_call_site (landing_pad, action)
     rtx landing_pad;
     int action;
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = (struct call_site_record *)
        ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */
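/* Editorial illustration (assumed example, added by the editor, not from
   the original source): after this pass NOTE_EH_HANDLER no longer holds
   an EH region number.  If two adjacent insns carry REG_EH_REGION notes
   for regions 4 and 7 but both regions resolve to the same action chain
   and the same landing pad, they end up inside a single
   NOTE_INSN_EH_REGION_BEG/END pair whose handler number is the shared
   call-site index, and that index is what the LSDA call-site table is
   later built from.  */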
void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        struct eh_region *region;
        int this_action;
        rtx this_landing_pad;

        insn = iter;
        if (GET_CODE (insn) == INSN
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
        if (!note)
          {
            if (! (GET_CODE (insn) == CALL_INSN
                   || (flag_non_call_exceptions
                       && may_trap_p (PATTERN (insn)))))
              continue;
            this_action = -1;
          }
        else if (INTVAL (XEXP (note, 0)) <= 0)
          this_action = -1;
        else
          {
            region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
            this_action = collect_one_action_chain (ar_hash, region);
          }

        /* Existence of catch handlers, or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          cfun->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
          }

        /* Cleanups and handlers may share action chains but not
           landing pads.  Collect the landing pad for this region.  */
        if (this_action >= 0)
          {
            struct eh_region *o;
            for (o = region; ! o->landing_pad ; o = o->outer)
              continue;
            this_landing_pad = o->landing_pad;
          }
        else
          this_landing_pad = NULL_RTX;

        /* Differing actions or landing pads implies a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL_RTX;
                  }

                note = emit_note_after (NOTE_INSN_EH_REGION_END,
                                        last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }
        last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
static void
push_uleb128 (data_area, value)
     varray_type *data_area;
     unsigned int value;
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (data_area, value)
     varray_type *data_area;
     int value;
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
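/* Editorial worked example (added by the editor, not in the original
   source), using the standard DWARF LEB128 encodings produced by the two
   routines above:

        push_uleb128 (..., 624485)  emits the bytes 0xe5 0x8e 0x26
          (624485 == 0x26<<14 | 0x0e<<7 | 0x65; every byte except the
           last has the 0x80 continuation bit set),
        push_sleb128 (..., -2)      emits the single byte 0x7e
          (the sign bit 0x40 is already set, so no continuation byte
           is needed).  */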
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table ()
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
                                    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                    "length");
      if (cs->landing_pad)
        dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
                                      "landing pad");
      else
        dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
                            "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
                              "landing pad");
      else
        dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
static void
sjlj_output_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section ()
{
  if (targetm.have_named_sections)
    {
      int flags;
#ifdef HAVE_LD_RO_RW_SECTION_MIXING
      int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

      flags = (! flag_pic
               || ((tt_format & 0x70) != DW_EH_PE_absptr
                   && (tt_format & 0x70) != DW_EH_PE_aligned))
              ? 0 : SECTION_WRITE;
#else
      flags = SECTION_WRITE;
#endif
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}
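/* Editorial summary (added by the editor, not in the original source) of
   the LSDA layout that output_function_exception_table below emits for a
   function needing exception data:

        1 byte    @LPStart format (always DW_EH_PE_omit here, so no @LPStart)
        1 byte    @TType format
        uleb128   @TType base offset (only when there is @TType data)
        1 byte    call-site entry format
        uleb128   call-site table length
                  call-site table (dw2 or sjlj flavour)
                  action record table
                  @TType entries (type infos), suitably aligned
                  exception specification table

   The language's personality routine in the runtime parses this layout
   while unwinding through the function.  */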
void
output_function_exception_table ()
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  (*targetm.asm_out.exception_section) ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
                                   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
                                     current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
                                   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
                    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
                       * tt_format_size));

      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
                               current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
                               current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
        type = integer_zero_node;
      else
        type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
        assemble_integer (value, tt_format_size,
                          tt_format_size * BITS_PER_UNIT, 1);
      else
        dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
                         (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);
}

#include "gt-except.h"