/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
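
/* For illustration only (an assumed example, not part of the original
   sources): the C++ construction-failure case described above looks
   like this from the user's point of view:

	struct File
	{
	  File (const char *name)
	    { if (! open_file (name)) throw OpenError (); }
	};

	void f ()
	{
	  try
	    {
	      File log ("log.txt");	// may throw several frames down
	    }
	  catch (OpenError &)
	    {
	      // control is "thrown" to here; no global error state
	    }
	}

   (open_file and OpenError are hypothetical names.)  */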
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static unsigned int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for the region.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};
/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static void mark_eh_region			PARAMS ((struct eh_region *));

static int t2r_eq				PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash			PARAMS ((const PTR));
static int t2r_mark_1				PARAMS ((PTR *, PTR));
static void t2r_mark				PARAMS ((PTR));
static void add_type_for_runtime		PARAMS ((tree));
static tree lookup_type_for_runtime		PARAMS ((tree));

static struct eh_region *expand_eh_region_end	PARAMS ((void));

static rtx get_exception_filter		PARAMS ((struct function *));

static void collect_eh_region_array		PARAMS ((void));
static void resolve_fixup_regions		PARAMS ((void));
static void remove_fixup_regions		PARAMS ((void));
static void convert_from_eh_region_ranges_1	PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1	PARAMS ((struct eh_region *,
						     struct inline_remap *));
static void duplicate_eh_region_2		PARAMS ((struct eh_region *,
						     struct eh_region **));
static int ttypes_filter_eq			PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash		PARAMS ((const PTR));
static int ehspec_filter_eq			PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash		PARAMS ((const PTR));
static int add_ttypes_entry			PARAMS ((htab_t, tree));
static int add_ehspec_entry			PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values		PARAMS ((void));
static void build_post_landing_pads		PARAMS ((void));
static void connect_post_landing_pads		PARAMS ((void));
static void dw2_build_landing_pads		PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter		PARAMS ((rtx));
static void sjlj_emit_function_exit		PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads		PARAMS ((void));

static void remove_exception_handler_label	PARAMS ((rtx));
static void remove_eh_handler			PARAMS ((struct eh_region *));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled			PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq			PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash		PARAMS ((const PTR));
static int add_action_record			PARAMS ((htab_t, int, int));
static int collect_one_action_chain		PARAMS ((htab_t,
						     struct eh_region *));
static int add_call_site			PARAMS ((rtx, int));

static void push_uleb128			PARAMS ((varray_type *,
						     unsigned int));
static void push_sleb128			PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table		PARAMS ((void));
static int sjlj_size_of_call_site_table	PARAMS ((void));
#endif
static void dw2_output_call_site_table		PARAMS ((void));
static void sjlj_output_call_site_table	PARAMS ((void));
/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh ()
{
  ggc_add_rtx_root (&exception_handler_labels, 1);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
/* Mark EH for GC.  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_UNKNOWN:
      /* This can happen if a nested function is inside the body of a region
	 and we do a GC as part of processing it.  */
      break;
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type_list);
      ggc_mark_tree (region->u.catch.filter_list);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}
void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}
void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	  else
	    {
	      do {
		next = r->outer;
		free (r);
		r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with potentially several
   exception types, which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
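
/* For illustration only (an assumed example): in C++ terms a fixup
   region arises for code such as

	{
	  T obj;
	  if (cond)
	    goto out;		// obj's destructor must run exactly once
	}
     out:;

   The goto's cleanup sequence runs obj's destructor; the fixup region
   wrapped around that sequence prevents the enclosing cleanup region
   from running the destructor a second time.  */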
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (Pmode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}
/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}
/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.

   ??? The only difference between this purpose and that of
   expand_decl_cleanup is that in this case, we only want the cleanup to
   run if an exception is thrown.  This should also be handled using
   binding levels.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit calls to
     begin_protect_partials for the outermost region.  So, we must
     explicitly do so here.  */
  if (!cfun->eh->protect_list)
    begin_protect_partials ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}
/* End all the pending exception regions on protect_list.  */

void
end_protect_partials ()
{
  tree t;

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit the call to
     begin_protect_partials for the outermost region.  So,
     PROTECT_LIST may be NULL.  */
  if (!cfun->eh->protect_list)
    return;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}
void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
}
void
find_exception_handler_labels ()
{
  rtx list = NULL_RTX;
  int i;

  free_EXPR_LIST_list (&exception_handler_labels);

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;

      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	list = alloc_EXPR_LIST (0, lab, list);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    list = alloc_EXPR_LIST (0, return_label, list);

  exception_handler_labels = list;
}
static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static int
t2r_mark_1 (slot, data)
     PTR *slot;
     PTR data ATTRIBUTE_UNUSED;
{
  tree contents = (tree) *slot;
  ggc_mark_tree (contents);
  return 1;
}

static void
t2r_mark (addr)
     PTR addr;
{
  htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
}

static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
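
/* For illustration only (an assumed example): a specification
   "throw (A, B)" whose types received ttype filters 1 and 2 is encoded
   into ehspec_data as the bytes

	0x01 0x02 0x00

   i.e. each filter as a uleb128 followed by a terminating zero.  Filter
   values of 128 or more span multiple bytes; 300 encodes as 0xac 0x02.  */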
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* ??? _Unwind_ForcedUnwind wants no match here.  */
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      emit_insns_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
static void
dw2_build_landing_pads ()
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insns_before (seq, region->post_landing_pad);
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (lp_info)
     struct sjlj_lp_info *lp_info;
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (lp_info)
     struct sjlj_lp_info *lp_info;
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insns_before (p, before);
      last_call_site = this_call_site;
    }
}
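
/* For illustration only (an assumed sketch of the emitted code): given
   two calls landing in different regions, the loop above produces

	fc.__call_site = 1;	// emitted before the call's argument loads
	call foo
	fc.__call_site = 2;
	call bar

   so that, after the runtime longjmps back into this function, the
   dispatch code can tell which post-landing pad to select.  */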
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (dispatch_label)
     rtx dispatch_label;
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
      emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (GET_CODE (fn_begin) == NOTE
	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
      break;
  emit_insns_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (after)
     rtx after;
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit ()
{
  rtx seq;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  emit_insns_after (seq, cfun->eh->sjlj_exit_after);
}
static void
sjlj_emit_dispatch_table (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != Pmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (Pmode, mem);
#else
      mem = convert_to_mode (Pmode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
			   ->post_landing_pad));
}
static void
sjlj_build_landing_pads ()
{
  struct sjlj_lp_info *lp_info;

  lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
					     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
void
finish_eh_generation ()
{
  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP);

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP);
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from the exception_handler_labels list.  */

static void
remove_exception_handler_label (label)
     rtx label;
{
  rtx *pl, l;

  for (pl = &exception_handler_labels, l = *pl;
       XEXP (l, 0) != label;
       pl = &XEXP (l, 1), l = *pl)
    continue;

  *pl = XEXP (l, 1);
  free_EXPR_LIST_node (l);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (region)
     struct eh_region *region;
{
  struct eh_region **pp, *p;
  rtx lab;
  int i;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have to
     search the whole thing.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (cfun->eh->region_array[i] == region)
      cfun->eh->region_array[i] = region->outer;

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (region->outer)
    pp = &region->outer->inner;
  else
    pp = &cfun->eh->region_tree;
  for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  if (region->inner)
    {
      for (p = region->inner; p->next_peer; p = p->next_peer)
        p->outer = region->outer;
      p->next_peer = region->next_peer;
      p->outer = region->outer;
      *pp = region->inner;
    }
  else
    *pp = region->next_peer;

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
           try->type == ERT_CATCH;
           try = try->next_peer)
        continue;
      if (try->type != ERT_TRY)
        abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
        next->u.catch.prev_catch = prev;
      else
        try->u.try.last_catch = prev;
      if (prev)
        prev->u.catch.next_catch = next;
      else
        {
          try->u.try.catch = next;
          if (! next)
            remove_eh_handler (try);
        }
    }

  free (region);
}
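/* A small before/after picture of the splice performed above, with
   invented region names.  Removing B from the tree

        A
        |- B
        |  |- D
        |  `- E
        `- C

   relinks B's children in B's place among A's children:

        A
        |- D
        |- E
        `- C

   Every child of B gets its outer pointer redirected to A, and the
   last child's next_peer takes over B's old next_peer link, which is
   exactly what the inner/next_peer manipulation above does.  */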
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (label)
     rtx label;
{
  int i;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->label == label)
        {
          /* Flow will want to remove MUST_NOT_THROW regions as unreachable
             because there is no path to the fallback call to terminate.
             But the region continues to affect call-site data until there
             are no more contained calls, which we don't see here.  */
          if (region->type == ERT_MUST_NOT_THROW)
            {
              remove_exception_handler_label (region->label);
              region->label = NULL_RTX;
            }
          else
            remove_eh_handler (region);
          break;
        }
    }
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  rtx handlers;
};
/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (handled, type)
     tree handled, type;
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if (TREE_VALUE (t) == type)
          return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
          return 1;
    }

  return 0;
}
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (info, lp_region, region)
     struct reachable_info *info;
     struct eh_region *lp_region;
     struct eh_region *region;
{
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      if (! info->handlers)
        info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (region, type_thrown, info)
     struct eh_region *region;
     tree type_thrown;
     struct reachable_info *info;
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
         directly to the individual handlers.  In this way we can
         see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
        struct eh_region *c;
        enum reachable_code ret = RNL_NOT_CAUGHT;

        for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
          {
            /* A catch-all handler ends the search.  */
            /* ??? _Unwind_ForcedUnwind will want outer cleanups
               to be run as well.  */
            if (c->u.catch.type_list == NULL)
              {
                add_reachable_handler (info, region, c);
                return RNL_CAUGHT;
              }

            if (type_thrown)
              {
                /* If we have at least one type match, end the search.  */
                tree tp_node = c->u.catch.type_list;

                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (type == type_thrown
                        || (lang_eh_type_covers
                            && (*lang_eh_type_covers) (type, type_thrown)))
                      {
                        add_reachable_handler (info, region, c);
                        return RNL_CAUGHT;
                      }
                  }

                /* If we have definitive information of a match failure,
                   the catch won't trigger.  */
                if (lang_eh_type_covers)
                  return RNL_NOT_CAUGHT;
              }

            /* At this point, we either don't know what type is thrown or
               don't have front-end assistance to help deciding if it is
               covered by one of the types in the list for this region.

               We'd then like to add this region to the list of reachable
               handlers since it is indeed potentially reachable based on the
               information we have.

               Actually, this handler is for sure not reachable if all the
               types it matches have already been caught.  That is, it is only
               potentially reachable if at least one of the types it catches
               has not been previously caught.  */

            if (! info)
              ret = RNL_MAYBE_CAUGHT;
            else
              {
                tree tp_node = c->u.catch.type_list;
                bool maybe_reachable = false;

                /* Compute the potential reachability of this handler and
                   update the list of types caught at the same time.  */
                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (! check_handled (info->types_caught, type))
                      {
                        info->types_caught
                          = tree_cons (NULL, type, info->types_caught);

                        maybe_reachable = true;
                      }
                  }

                if (maybe_reachable)
                  {
                    add_reachable_handler (info, region, c);

                    /* ??? If the catch type is a base class of every allowed
                       type, then we know we can stop the search.  */
                    ret = RNL_MAYBE_CAUGHT;
                  }
              }
          }

        return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }

      /* Collect a list of lists of allowed types for use in detecting
         when a catch may be transformed into a catch-all.  */
      if (info)
        info->types_allowed = tree_cons (NULL_TREE,
                                         region->u.allowed.type_list,
                                         info->types_allowed);

      /* If we have definitive information about the type hierarchy,
         then we can tell if the thrown type will pass through the
         filter.  */
      if (type_thrown && lang_eh_type_covers)
        {
          if (check_handled (region->u.allowed.type_list, type_thrown))
            return RNL_NOT_CAUGHT;
          else
            {
              add_reachable_handler (info, region, region);
              return RNL_CAUGHT;
            }
        }

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
         If we've touched down at some landing pad previous, then the
         explicit function call we generated may be used.  Otherwise
         the call is made by the runtime.  */
      if (info && info->handlers)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }
      else
        return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

rtx
reachable_handlers (insn)
     rtx insn;
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
         region itself may have been deleted out from under us.  */
      if (region == NULL)
        return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
      break;

  return info.handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;

      for (i = 0; i < 3; ++i)
        {
          rtx sub = XEXP (PATTERN (insn), i);
          for (; sub ; sub = NEXT_INSN (sub))
            if (can_throw_internal (sub))
              return true;
        }

      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
        return false;
      if (how != RNL_NOT_CAUGHT)
        return true;
    }

  return false;
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;

      for (i = 0; i < 3; ++i)
        {
          rtx sub = XEXP (PATTERN (insn), i);
          for (; sub ; sub = NEXT_INSN (sub))
            if (can_throw_external (sub))
              return true;
        }

      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
         exception handling region in this function.  We have to
         assume it might throw.  Given that the front end and middle
         ends mark known NOTHROW functions, this isn't so wildly
         inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
/* True if nothing in this function can throw outside this function.  */

bool
nothrow_function_p ()
{
  rtx insn;

  if (! flag_exceptions)
    return true;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      return false;
  for (insn = current_function_epilogue_delay_list; insn ;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      return false;

  return true;
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (arglist)
     tree arglist;
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
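/* Illustration: the two functions above are inverses.  On a
   hypothetical target defining RETURN_ADDR_OFFSET as 8 (and no
   MASK_RETURN_ADDR), a stored slot value V decodes via extract to
   the real address V + 8, and frobbing an address A yields the slot
   value A - 8; thus frob (extract (V)) == V, which is what the
   epilogue relies on when it returns through the slot.  */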
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (stackadj_tree, handler_tree)
     tree stackadj_tree, handler_tree;
{
  rtx stackadj, handler;

  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (stackadj) != Pmode)
    stackadj = convert_memory_address (Pmode, stackadj);

  if (GET_MODE (handler) != Pmode)
    handler = convert_memory_address (Pmode, handler);
#endif

  if (! cfun->eh->ehr_label)
    {
      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
      cfun->eh->ehr_handler = copy_to_reg (handler);
      cfun->eh->ehr_label = gen_label_rtx ();
    }
  else
    {
      if (stackadj != cfun->eh->ehr_stackadj)
        emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
      if (handler != cfun->eh->ehr_handler)
        emit_move_insn (cfun->eh->ehr_handler, handler);
    }

  emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return ()
{
  rtx sa, ra, around_label;

  if (! cfun->eh->ehr_label)
    return;

  sa = EH_RETURN_STACKADJ_RTX;
  if (! sa)
    {
      error ("__builtin_eh_return not supported on this target");
      return;
    }

  current_function_calls_eh_return = 1;

  around_label = gen_label_rtx ();
  emit_move_insn (sa, const0_rtx);
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
  else
#endif
    {
      ra = EH_RETURN_HANDLER_RTX;
      if (! ra)
        {
          error ("__builtin_eh_return not supported on this target");
          ra = gen_reg_rtx (Pmode);
        }

      emit_move_insn (sa, cfun->eh->ehr_stackadj);
      emit_move_insn (ra, cfun->eh->ehr_handler);
    }

  emit_label (around_label);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

         0:     null action record, non-null landing pad; implies cleanups
        -1:     null action record, null landing pad; implies no action
        -2:     no call-site entry; implies must_not_throw
        -3:     we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};
static int
action_record_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}
static hashval_t
action_record_hash (pentry)
     const PTR pentry;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
         record is a "self-relative" byte offset, or zero to indicate
         that there is no next record.  So convert the absolute 1-based
         indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
        next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
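/* A worked example of the encoding above, with invented filter
   values.  Adding a record with filter 1 and no next record finds
   the varray empty, so it returns offset 1 and pushes sleb128 1,
   sleb128 0 (two bytes).  Adding a second record with filter 2
   chained to the first then returns offset 3; after its filter byte
   is pushed the varray holds three bytes, so the absolute index 1
   becomes the self-relative displacement 1 - (3 + 1) = -3.  That is,
   byte 4, where the second record's next field lands, minus 3 is
   byte 1, the start of the first record.  */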
static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
         there are special cases to look out for.  If there are *only*
         cleanups along a path, then it compresses to a zero action.
         Further, if there are multiple cleanups along a path, we only
         need to represent one of them, as that is enough to trigger
         entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
        return 0;
      for (c = region->outer; c ; c = c->outer)
        if (c->type == ERT_CLEANUP)
          return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
         If there's a catch-all handler, then we don't need to
         search outer regions.  Use a magic -3 value to record
         that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
        {
          if (c->u.catch.type_list == NULL)
            {
              /* Retrieve the filter from the head of the filter list
                 where we have stored it (see assign_filter_values).  */
              int filter
                = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

              next = add_action_record (ar_hash, filter, 0);
            }
          else
            {
              /* Once the outer search is done, trigger an action record for
                 each filter we have.  */
              tree flt_node;

              if (next == -3)
                {
                  next = collect_one_action_chain (ar_hash, region->outer);

                  /* If there is no next action, terminate the chain.  */
                  if (next == -1)
                    next = 0;
                  /* If all outer actions are cleanups or must_not_throw,
                     we'll have no action record for it, since we had wanted
                     to encode these states in the call-site record directly.
                     Add a cleanup action to the chain to catch these.  */
                  else if (next <= 0)
                    next = add_action_record (ar_hash, 0, 0);
                }

              flt_node = c->u.catch.filter_list;
              for (; flt_node; flt_node = TREE_CHAIN (flt_node))
                {
                  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
                  next = add_action_record (ar_hash, filter, next);
                }
            }
        }
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
         beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
                                next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
         requires no call-site entry.  Note that this differs from
         the no handler or cleanup case in that we do require an lsda
         to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
         for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
static int
add_call_site (landing_pad, action)
     rtx landing_pad;
     int action;
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = (struct call_site_record *)
        xrealloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
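/* Illustration only, not compiled into the compiler: the growth
   policy used by add_call_site above, as a self-contained helper.
   Doubling the capacity (starting at 64 slots) keeps each append
   amortized constant time; all names below are invented for the
   example.  */
#if 0
#include <stdlib.h>

struct entry { void *landing_pad; int action; };
struct table { struct entry *data; int used, size; };

static int
table_add (struct table *t, void *landing_pad, int action)
{
  /* Grow by doubling, exactly as add_call_site does.  */
  if (t->used >= t->size)
    {
      t->size = t->size ? t->size * 2 : 64;
      t->data = (struct entry *) realloc (t->data,
                                          sizeof (struct entry) * t->size);
    }
  t->data[t->used].landing_pad = landing_pad;
  t->data[t->used].action = action;
  return t->used++;
}
#endif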
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        struct eh_region *region;
        int this_action;
        rtx this_landing_pad;

        insn = iter;
        if (GET_CODE (insn) == INSN
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
        if (!note)
          {
            if (! (GET_CODE (insn) == CALL_INSN
                   || (flag_non_call_exceptions
                       && may_trap_p (PATTERN (insn)))))
              continue;
            this_action = -1;
          }
        else if (INTVAL (XEXP (note, 0)) <= 0)
          this_action = -1;
        else
          {
            region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
            this_action = collect_one_action_chain (ar_hash, region);
          }

        /* Existence of catch handlers, or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          cfun->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
          }

        /* Cleanups and handlers may share action chains but not
           landing pads.  Collect the landing pad for this region.  */
        if (this_action >= 0)
          {
            struct eh_region *o;
            for (o = region; ! o->landing_pad ; o = o->outer)
              continue;
            this_landing_pad = o->landing_pad;
          }
        else
          this_landing_pad = NULL_RTX;

        /* Differing actions or landing pads implies a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL_RTX;
                  }

                note = emit_note_after (NOTE_INSN_EH_REGION_END,
                                        last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }
        last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
static void
push_uleb128 (data_area, value)
     varray_type *data_area;
     unsigned int value;
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}
static void
push_sleb128 (data_area, value)
     varray_type *data_area;
     int value;
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
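/* Illustration only, not compiled into the compiler: standalone
   versions of the two encoders above, writing into a caller-supplied
   buffer instead of a varray, with the standard worked examples:
   624485 encodes unsigned as 0xe5 0x8e 0x26, and -2 encodes signed
   as the single byte 0x7e.  */
#if 0
static int
encode_uleb128 (unsigned int value, unsigned char *buf)
{
  int n = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;           /* More bytes follow.  */
      buf[n++] = byte;
    }
  while (value);
  return n;
}

static int
encode_sleb128 (int value, unsigned char *buf)
{
  int n = 0, more;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      /* Done once the remaining bits are all sign bits and the sign
         bit of this byte agrees with them.  */
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      buf[n++] = byte;
    }
  while (more);
  return n;
}
#endif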
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table ()
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
                                    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                    "length");
      if (cs->landing_pad)
        dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
                                      "landing pad");
      else
        dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
                            "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
                              "landing pad");
      else
        dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
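/* The shape of one emitted record, for a hypothetical region 0 with
   a landing pad, in the 4-byte variant (no HAVE_AS_LEB128).  The
   directive spellings vary by assembler and are shown only to make
   the field order concrete:

        .4byte  .LEHB0-<fnstart>        region 0 start
        .4byte  .LEHE0-.LEHB0           length
        .4byte  .L<pad>-<fnstart>       landing pad (or 0 if none)
        .uleb128 <action>               action

   With HAVE_AS_LEB128, the three fixed-size deltas become uleb128
   deltas instead.  */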
static void
sjlj_output_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
void
output_function_exception_table ()
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int funcdef_number;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

  funcdef_number = (USING_SJLJ_EXCEPTIONS
                    ? sjlj_funcdef_number
                    : current_funcdef_number);

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  (*targetm.asm_out.exception_section) ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
                                   funcdef_number);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
                    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
                       * tt_format_size));

      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }
  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
                               funcdef_number);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
                               funcdef_number);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
        type = integer_zero_node;
      else
        type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
        assemble_integer (value, tt_format_size,
                          tt_format_size * BITS_PER_UNIT, 1);
      else
        dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
                         (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);

  if (USING_SJLJ_EXCEPTIONS)
    sjlj_funcdef_number += 1;
}