1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
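/* Illustrative sketch (not part of this file): for C++ source such as

       void f ()
       {
         try { g (); }
         catch (int) { handle (); }
       }

   the front end brackets the try body with expand_eh_region_start and
   expand_start_all_catch, brackets the handler with expand_start_catch
   and expand_end_catch, and closes the whole construct with
   expand_end_all_catch.  The machinery below records those brackets as
   a tree of eh_region structures and later emits the landing pads and
   dispatch code that transfer control when g throws.  */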
50 #include "config.h"
51 #include "system.h"
52 #include "rtl.h"
53 #include "tree.h"
54 #include "flags.h"
55 #include "function.h"
56 #include "expr.h"
57 #include "insn-config.h"
58 #include "except.h"
59 #include "integrate.h"
60 #include "hard-reg-set.h"
61 #include "basic-block.h"
62 #include "output.h"
63 #include "dwarf2asm.h"
64 #include "dwarf2out.h"
65 #include "dwarf2.h"
66 #include "toplev.h"
67 #include "hashtab.h"
68 #include "intl.h"
69 #include "ggc.h"
70 #include "tm_p.h"
73 /* Provide defaults for stuff that may not be defined when using
74 sjlj exceptions. */
75 #ifndef EH_RETURN_STACKADJ_RTX
76 #define EH_RETURN_STACKADJ_RTX 0
77 #endif
78 #ifndef EH_RETURN_HANDLER_RTX
79 #define EH_RETURN_HANDLER_RTX 0
80 #endif
81 #ifndef EH_RETURN_DATA_REGNO
82 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
83 #endif
86 /* Nonzero means enable synchronous exceptions for non-call instructions. */
87 int flag_non_call_exceptions;
89 /* Protect cleanup actions with must-not-throw regions, with a call
90 to the given failure handler. */
91 tree (*lang_protect_cleanup_actions) PARAMS ((void));
93 /* Return true if type A catches type B. */
94 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
96 /* Map a type to a runtime object to match type. */
97 tree (*lang_eh_runtime_type) PARAMS ((tree));
99 /* A list of labels used for exception handlers. */
100 rtx exception_handler_labels;
102 static int call_site_base;
103 static int sjlj_funcdef_number;
104 static htab_t type_to_runtime_map;
106 /* Describe the SjLj_Function_Context structure. */
107 static tree sjlj_fc_type_node;
108 static int sjlj_fc_call_site_ofs;
109 static int sjlj_fc_data_ofs;
110 static int sjlj_fc_personality_ofs;
111 static int sjlj_fc_lsda_ofs;
112 static int sjlj_fc_jbuf_ofs;
114 /* Describes one exception region. */
115 struct eh_region
117 /* The immediately surrounding region. */
118 struct eh_region *outer;
120 /* The list of immediately contained regions. */
121 struct eh_region *inner;
122 struct eh_region *next_peer;
124 /* An identifier for this region. */
125 int region_number;
127 /* Each region does exactly one thing. */
128 enum eh_region_type
130 ERT_CLEANUP = 1,
131 ERT_TRY,
132 ERT_CATCH,
133 ERT_ALLOWED_EXCEPTIONS,
134 ERT_MUST_NOT_THROW,
135 ERT_THROW,
136 ERT_FIXUP
137 } type;
139 /* Holds the action to perform based on the preceding type. */
140 union {
141 /* A list of catch blocks, a surrounding try block,
142 and the label for continuing after a catch. */
143 struct {
144 struct eh_region *catch;
145 struct eh_region *last_catch;
146 struct eh_region *prev_try;
147 rtx continue_label;
148 } try;
150 /* The list through the catch handlers, the type object
151 matched, and a pointer to the generated code. */
152 struct {
153 struct eh_region *next_catch;
154 struct eh_region *prev_catch;
155 tree type;
156 int filter;
157 } catch;
159 /* A tree_list of allowed types. */
160 struct {
161 tree type_list;
162 int filter;
163 } allowed;
165 /* The type given by a call to "throw foo();", or discovered
166 for a throw. */
167 struct {
168 tree type;
169 } throw;
171 /* Retain the cleanup expression even after expansion so that
172 we can match up fixup regions. */
173 struct {
174 tree exp;
175 } cleanup;
177 /* The real region (by expression and by pointer) that fixup code
178 should live in. */
179 struct {
180 tree cleanup_exp;
181 struct eh_region *real_region;
182 } fixup;
183 } u;
185 /* Entry point for this region's handler before landing pads are built. */
186 rtx label;
188 /* Entry point for this region's handler from the runtime eh library. */
189 rtx landing_pad;
191 /* Entry point for this region's handler from an inner region. */
192 rtx post_landing_pad;
194 /* The RESX insn for handing off control to the next outermost handler,
195 if appropriate. */
196 rtx resume;
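/* A rough picture (illustrative only): nested constructs produce a
   tree of regions.  OUTER points at the enclosing region, INNER at
   the first contained region, and NEXT_PEER chains siblings:

       cleanup-1                 cleanup-1.outer == NULL
         try-2                   try-2.outer == cleanup-1
         cleanup-3               cleanup-3 == try-2.next_peer

   During unwinding, control from the runtime arrives at LANDING_PAD,
   falls into the filter dispatch at POST_LANDING_PAD, and finally
   reaches LABEL, the handler code itself.  */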
199 /* Used to save exception status for each function. */
200 struct eh_status
202 /* The tree of all regions for this function. */
203 struct eh_region *region_tree;
205 /* The same information as an indexable array. */
206 struct eh_region **region_array;
208 /* The most recently open region. */
209 struct eh_region *cur_region;
211 /* This is the region for which we are processing catch blocks. */
212 struct eh_region *try_region;
214 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
215 node is itself a TREE_CHAINed list of the handlers for regions
216 that are not yet closed; each handler on such a list ends one
217 pending region when end_protect_partials is invoked. */
218 tree protect_list;
220 rtx filter;
221 rtx exc_ptr;
223 int built_landing_pads;
224 int last_region_number;
226 varray_type ttype_data;
227 varray_type ehspec_data;
228 varray_type action_record_data;
230 struct call_site_record
232 rtx landing_pad;
233 int action;
234 } *call_site_data;
235 int call_site_data_used;
236 int call_site_data_size;
238 rtx ehr_stackadj;
239 rtx ehr_handler;
240 rtx ehr_label;
242 rtx sjlj_fc;
243 rtx sjlj_exit_after;
247 static void mark_eh_region PARAMS ((struct eh_region *));
249 static int t2r_eq PARAMS ((const PTR,
250 const PTR));
251 static hashval_t t2r_hash PARAMS ((const PTR));
252 static int t2r_mark_1 PARAMS ((PTR *, PTR));
253 static void t2r_mark PARAMS ((PTR));
254 static void add_type_for_runtime PARAMS ((tree));
255 static tree lookup_type_for_runtime PARAMS ((tree));
257 static struct eh_region *expand_eh_region_end PARAMS ((void));
259 static rtx get_exception_filter PARAMS ((struct function *));
261 static void collect_eh_region_array PARAMS ((void));
262 static void resolve_fixup_regions PARAMS ((void));
263 static void remove_fixup_regions PARAMS ((void));
264 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
266 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
267 struct inline_remap *));
268 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
269 struct eh_region **));
270 static int ttypes_filter_eq PARAMS ((const PTR,
271 const PTR));
272 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
273 static int ehspec_filter_eq PARAMS ((const PTR,
274 const PTR));
275 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
276 static int add_ttypes_entry PARAMS ((htab_t, tree));
277 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
278 tree));
279 static void assign_filter_values PARAMS ((void));
280 static void build_post_landing_pads PARAMS ((void));
281 static void connect_post_landing_pads PARAMS ((void));
282 static void dw2_build_landing_pads PARAMS ((void));
284 struct sjlj_lp_info;
285 static bool sjlj_find_directly_reachable_regions
286 PARAMS ((struct sjlj_lp_info *));
287 static void sjlj_assign_call_site_values
288 PARAMS ((rtx, struct sjlj_lp_info *));
289 static void sjlj_mark_call_sites
290 PARAMS ((struct sjlj_lp_info *));
291 static void sjlj_emit_function_enter PARAMS ((rtx));
292 static void sjlj_emit_function_exit PARAMS ((void));
293 static void sjlj_emit_dispatch_table
294 PARAMS ((rtx, struct sjlj_lp_info *));
295 static void sjlj_build_landing_pads PARAMS ((void));
297 static void remove_exception_handler_label PARAMS ((rtx));
298 static void remove_eh_handler PARAMS ((struct eh_region *));
300 struct reachable_info;
302 /* The return value of reachable_next_level. */
303 enum reachable_code
305 /* The given exception is not processed by the given region. */
306 RNL_NOT_CAUGHT,
307 /* The given exception may need processing by the given region. */
308 RNL_MAYBE_CAUGHT,
309 /* The given exception is completely processed by the given region. */
310 RNL_CAUGHT,
311 /* The given exception is completely processed by the runtime. */
312 RNL_BLOCKED
315 static int check_handled PARAMS ((tree, tree));
316 static void add_reachable_handler
317 PARAMS ((struct reachable_info *, struct eh_region *,
318 struct eh_region *));
319 static enum reachable_code reachable_next_level
320 PARAMS ((struct eh_region *, tree, struct reachable_info *));
322 static int action_record_eq PARAMS ((const PTR,
323 const PTR));
324 static hashval_t action_record_hash PARAMS ((const PTR));
325 static int add_action_record PARAMS ((htab_t, int, int));
326 static int collect_one_action_chain PARAMS ((htab_t,
327 struct eh_region *));
328 static int add_call_site PARAMS ((rtx, int));
330 static void push_uleb128 PARAMS ((varray_type *,
331 unsigned int));
332 static void push_sleb128 PARAMS ((varray_type *, int));
333 #ifndef HAVE_AS_LEB128
334 static int dw2_size_of_call_site_table PARAMS ((void));
335 static int sjlj_size_of_call_site_table PARAMS ((void));
336 #endif
337 static void dw2_output_call_site_table PARAMS ((void));
338 static void sjlj_output_call_site_table PARAMS ((void));
341 /* Routine to see if exception handling is turned on.
342 DO_WARN is non-zero if we want to inform the user that exception
343 handling is turned off.
345 This is used to ensure that -fexceptions has been specified if the
346 compiler tries to use any exception-specific functions. */
349 doing_eh (do_warn)
350 int do_warn;
352 if (! flag_exceptions)
354 static int warned = 0;
355 if (! warned && do_warn)
357 error ("exception handling disabled, use -fexceptions to enable");
358 warned = 1;
360 return 0;
362 return 1;
366 void
367 init_eh ()
369 ggc_add_rtx_root (&exception_handler_labels, 1);
371 if (! flag_exceptions)
372 return;
374 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
375 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
377 /* Create the SjLj_Function_Context structure. This should match
378 the definition in unwind-sjlj.c. */
379 if (USING_SJLJ_EXCEPTIONS)
381 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
383 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
384 ggc_add_tree_root (&sjlj_fc_type_node, 1);
386 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
387 build_pointer_type (sjlj_fc_type_node));
388 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
390 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
391 integer_type_node);
392 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
394 tmp = build_index_type (build_int_2 (4 - 1, 0));
395 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
396 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
397 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
399 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
400 ptr_type_node);
401 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
403 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
404 ptr_type_node);
405 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
407 #ifdef DONT_USE_BUILTIN_SETJMP
408 #ifdef JMP_BUF_SIZE
409 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
410 #else
411 /* This should be large enough for most systems; if it is not,
412 JMP_BUF_SIZE should be defined with the proper value. It will
413 also tend to be larger than necessary for most systems; a more
414 optimal port will define JMP_BUF_SIZE. */
415 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
416 #endif
417 #else
418 /* This is 2 for builtin_setjmp, plus whatever the target requires
419 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
420 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
421 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
422 #endif
423 tmp = build_index_type (tmp);
424 tmp = build_array_type (ptr_type_node, tmp);
425 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
426 #ifdef DONT_USE_BUILTIN_SETJMP
427 /* We don't know what alignment requirements the runtime's
428 jmp_buf has. Overestimate. */
429 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
430 DECL_USER_ALIGN (f_jbuf) = 1;
431 #endif
432 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
434 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
435 TREE_CHAIN (f_prev) = f_cs;
436 TREE_CHAIN (f_cs) = f_data;
437 TREE_CHAIN (f_data) = f_per;
438 TREE_CHAIN (f_per) = f_lsda;
439 TREE_CHAIN (f_lsda) = f_jbuf;
441 layout_type (sjlj_fc_type_node);
443 /* Cache the interesting field offsets so that we have
444 easy access from rtl. */
445 sjlj_fc_call_site_ofs
446 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
447 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
448 sjlj_fc_data_ofs
449 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
450 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
451 sjlj_fc_personality_ofs
452 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
453 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
454 sjlj_fc_lsda_ofs
455 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
456 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
457 sjlj_fc_jbuf_ofs
458 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
459 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
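/* For reference, the type laid out above corresponds roughly to this
   C structure (a sketch only; the authoritative definition lives in
   unwind-sjlj.c, and the data and jbuf sizes are target-dependent):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;
         int call_site;
         unsigned long data[4];        -- word_mode entries
         void *personality;
         void *lsda;
         void *jbuf[N];                -- N chosen above
       };
*/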
463 void
464 init_eh_for_function ()
466 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
469 /* Mark EH for GC. */
471 static void
472 mark_eh_region (region)
473 struct eh_region *region;
475 if (! region)
476 return;
478 switch (region->type)
480 case ERT_CLEANUP:
481 ggc_mark_tree (region->u.cleanup.exp);
482 break;
483 case ERT_TRY:
484 ggc_mark_rtx (region->u.try.continue_label);
485 break;
486 case ERT_CATCH:
487 ggc_mark_tree (region->u.catch.type);
488 break;
489 case ERT_ALLOWED_EXCEPTIONS:
490 ggc_mark_tree (region->u.allowed.type_list);
491 break;
492 case ERT_MUST_NOT_THROW:
493 break;
494 case ERT_THROW:
495 ggc_mark_tree (region->u.throw.type);
496 break;
497 case ERT_FIXUP:
498 ggc_mark_tree (region->u.fixup.cleanup_exp);
499 break;
500 default:
501 abort ();
504 ggc_mark_rtx (region->label);
505 ggc_mark_rtx (region->resume);
506 ggc_mark_rtx (region->landing_pad);
507 ggc_mark_rtx (region->post_landing_pad);
510 void
511 mark_eh_status (eh)
512 struct eh_status *eh;
514 int i;
516 if (eh == 0)
517 return;
519 /* If we've called collect_eh_region_array, use it. Otherwise walk
520 the tree non-recursively. */
521 if (eh->region_array)
523 for (i = eh->last_region_number; i > 0; --i)
525 struct eh_region *r = eh->region_array[i];
526 if (r && r->region_number == i)
527 mark_eh_region (r);
530 else if (eh->region_tree)
532 struct eh_region *r = eh->region_tree;
533 while (1)
535 mark_eh_region (r);
536 if (r->inner)
537 r = r->inner;
538 else if (r->next_peer)
539 r = r->next_peer;
540 else
542 do {
543 r = r->outer;
544 if (r == NULL)
545 goto tree_done;
546 } while (r->next_peer == NULL);
547 r = r->next_peer;
550 tree_done:;
553 ggc_mark_tree (eh->protect_list);
554 ggc_mark_rtx (eh->filter);
555 ggc_mark_rtx (eh->exc_ptr);
556 ggc_mark_tree_varray (eh->ttype_data);
558 if (eh->call_site_data)
560 for (i = eh->call_site_data_used - 1; i >= 0; --i)
561 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
564 ggc_mark_rtx (eh->ehr_stackadj);
565 ggc_mark_rtx (eh->ehr_handler);
566 ggc_mark_rtx (eh->ehr_label);
568 ggc_mark_rtx (eh->sjlj_fc);
569 ggc_mark_rtx (eh->sjlj_exit_after);
572 void
573 free_eh_status (f)
574 struct function *f;
576 struct eh_status *eh = f->eh;
578 if (eh->region_array)
580 int i;
581 for (i = eh->last_region_number; i > 0; --i)
583 struct eh_region *r = eh->region_array[i];
584 /* Mind we don't free a region struct more than once. */
585 if (r && r->region_number == i)
586 free (r);
588 free (eh->region_array);
590 else if (eh->region_tree)
592 struct eh_region *next, *r = eh->region_tree;
593 while (1)
595 if (r->inner)
596 r = r->inner;
597 else if (r->next_peer)
599 next = r->next_peer;
600 free (r);
601 r = next;
603 else
605 do {
606 next = r->outer;
607 free (r);
608 r = next;
609 if (r == NULL)
610 goto tree_done;
611 } while (r->next_peer == NULL);
612 next = r->next_peer;
613 free (r);
614 r = next;
617 tree_done:;
620 VARRAY_FREE (eh->ttype_data);
621 VARRAY_FREE (eh->ehspec_data);
622 VARRAY_FREE (eh->action_record_data);
623 if (eh->call_site_data)
624 free (eh->call_site_data);
626 free (eh);
627 f->eh = NULL;
631 /* Start an exception handling region. All instructions emitted
632 after this point are considered to be part of the region until
633 expand_eh_region_end is invoked. */
635 void
636 expand_eh_region_start ()
638 struct eh_region *new_region;
639 struct eh_region *cur_region;
640 rtx note;
642 if (! doing_eh (0))
643 return;
645 /* Insert a new blank region as a leaf in the tree. */
646 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
647 cur_region = cfun->eh->cur_region;
648 new_region->outer = cur_region;
649 if (cur_region)
651 new_region->next_peer = cur_region->inner;
652 cur_region->inner = new_region;
654 else
656 new_region->next_peer = cfun->eh->region_tree;
657 cfun->eh->region_tree = new_region;
659 cfun->eh->cur_region = new_region;
661 /* Create a note marking the start of this region. */
662 new_region->region_number = ++cfun->eh->last_region_number;
663 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
664 NOTE_EH_HANDLER (note) = new_region->region_number;
667 /* Common code to end a region. Returns the region just ended. */
669 static struct eh_region *
670 expand_eh_region_end ()
672 struct eh_region *cur_region = cfun->eh->cur_region;
673 rtx note;
675 /* Create a note marking the end of this region. */
676 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
677 NOTE_EH_HANDLER (note) = cur_region->region_number;
679 /* Pop. */
680 cfun->eh->cur_region = cur_region->outer;
682 return cur_region;
685 /* End an exception handling region for a cleanup. HANDLER is an
686 expression to expand for the cleanup. */
688 void
689 expand_eh_region_end_cleanup (handler)
690 tree handler;
692 struct eh_region *region;
693 tree protect_cleanup_actions;
694 rtx around_label;
695 rtx data_save[2];
697 if (! doing_eh (0))
698 return;
700 region = expand_eh_region_end ();
701 region->type = ERT_CLEANUP;
702 region->label = gen_label_rtx ();
703 region->u.cleanup.exp = handler;
705 around_label = gen_label_rtx ();
706 emit_jump (around_label);
708 emit_label (region->label);
710 /* Give the language a chance to specify an action to be taken if an
711 exception is thrown that would propagate out of the HANDLER. */
712 protect_cleanup_actions
713 = (lang_protect_cleanup_actions
714 ? (*lang_protect_cleanup_actions) ()
715 : NULL_TREE);
717 if (protect_cleanup_actions)
718 expand_eh_region_start ();
720 /* In case this cleanup involves an inline destructor with a try block in
721 it, we need to save the EH return data registers around it. */
722 data_save[0] = gen_reg_rtx (Pmode);
723 emit_move_insn (data_save[0], get_exception_pointer (cfun));
724 data_save[1] = gen_reg_rtx (word_mode);
725 emit_move_insn (data_save[1], get_exception_filter (cfun));
727 expand_expr (handler, const0_rtx, VOIDmode, 0);
729 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
730 emit_move_insn (cfun->eh->filter, data_save[1]);
732 if (protect_cleanup_actions)
733 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
735 /* We need any stack adjustment complete before the around_label. */
736 do_pending_stack_adjust ();
738 /* We delay the generation of the _Unwind_Resume until we generate
739 landing pads. We emit a marker here so as to get good control
740 flow data in the meantime. */
741 region->resume
742 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
743 emit_barrier ();
745 emit_label (around_label);
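/* Usage sketch (hypothetical front-end code, not from this file): a
   cleanup region is always a bracketed pair,

       expand_eh_region_start ();
       ... expand the protected statements ...
       expand_eh_region_end_cleanup (cleanup_expr);

   where cleanup_expr is the tree to expand if an exception unwinds
   through the protected statements.  */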
748 /* End an exception handling region for a try block, and prepare
749 for subsequent calls to expand_start_catch. */
751 void
752 expand_start_all_catch ()
754 struct eh_region *region;
756 if (! doing_eh (1))
757 return;
759 region = expand_eh_region_end ();
760 region->type = ERT_TRY;
761 region->u.try.prev_try = cfun->eh->try_region;
762 region->u.try.continue_label = gen_label_rtx ();
764 cfun->eh->try_region = region;
766 emit_jump (region->u.try.continue_label);
769 /* Begin a catch clause. TYPE is the type caught, or null if this is
770 a catch-all clause. */
772 void
773 expand_start_catch (type)
774 tree type;
776 struct eh_region *t, *c, *l;
778 if (! doing_eh (0))
779 return;
781 if (type)
782 add_type_for_runtime (type);
783 expand_eh_region_start ();
785 t = cfun->eh->try_region;
786 c = cfun->eh->cur_region;
787 c->type = ERT_CATCH;
788 c->u.catch.type = type;
789 c->label = gen_label_rtx ();
791 l = t->u.try.last_catch;
792 c->u.catch.prev_catch = l;
793 if (l)
794 l->u.catch.next_catch = c;
795 else
796 t->u.try.catch = c;
797 t->u.try.last_catch = c;
799 emit_label (c->label);
802 /* End a catch clause. Control will resume after the try/catch block. */
804 void
805 expand_end_catch ()
807 struct eh_region *try_region, *catch_region;
809 if (! doing_eh (0))
810 return;
812 catch_region = expand_eh_region_end ();
813 try_region = cfun->eh->try_region;
815 emit_jump (try_region->u.try.continue_label);
818 /* End a sequence of catch handlers for a try block. */
820 void
821 expand_end_all_catch ()
823 struct eh_region *try_region;
825 if (! doing_eh (0))
826 return;
828 try_region = cfun->eh->try_region;
829 cfun->eh->try_region = try_region->u.try.prev_try;
831 emit_label (try_region->u.try.continue_label);
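/* Putting the routines above together, a front end expands
   "try { body } catch (T) { handler }" with a call sequence like this
   (illustrative only):

       expand_eh_region_start ();
       ... expand body ...
       expand_start_all_catch ();
       expand_start_catch (T);
       ... expand handler ...
       expand_end_catch ();
       expand_end_all_catch ();
*/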
834 /* End an exception region for an exception type filter. ALLOWED is a
835 TREE_LIST of types to be matched by the runtime. FAILURE is an
836 expression to invoke if a mismatch occurs. */
838 void
839 expand_eh_region_end_allowed (allowed, failure)
840 tree allowed, failure;
842 struct eh_region *region;
843 rtx around_label;
845 if (! doing_eh (0))
846 return;
848 region = expand_eh_region_end ();
849 region->type = ERT_ALLOWED_EXCEPTIONS;
850 region->u.allowed.type_list = allowed;
851 region->label = gen_label_rtx ();
853 for (; allowed ; allowed = TREE_CHAIN (allowed))
854 add_type_for_runtime (TREE_VALUE (allowed));
856 /* We must emit the call to FAILURE here, so that if this function
857 throws a different exception, it will be processed by the
858 correct region. */
860 /* If there are any pending stack adjustments, we must emit them
861 before we branch -- otherwise, we won't know how much adjustment
862 is required later. */
863 do_pending_stack_adjust ();
864 around_label = gen_label_rtx ();
865 emit_jump (around_label);
867 emit_label (region->label);
868 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
869 /* We must adjust the stack before we reach the AROUND_LABEL because
870 the call to FAILURE does not occur on all paths to the
871 AROUND_LABEL. */
872 do_pending_stack_adjust ();
874 emit_label (around_label);
877 /* End an exception region for a must-not-throw filter. FAILURE is an
878 expression to invoke if an uncaught exception propagates this far.
880 This is conceptually identical to expand_eh_region_end_allowed with
881 an empty allowed list (if you passed "std::terminate" instead of
882 "__cxa_call_unexpected"), but they are represented differently in
883 the C++ LSDA. */
885 void
886 expand_eh_region_end_must_not_throw (failure)
887 tree failure;
889 struct eh_region *region;
890 rtx around_label;
892 if (! doing_eh (0))
893 return;
895 region = expand_eh_region_end ();
896 region->type = ERT_MUST_NOT_THROW;
897 region->label = gen_label_rtx ();
899 /* We must emit the call to FAILURE here, so that if this function
900 throws a different exception, it will be processed by the
901 correct region. */
903 around_label = gen_label_rtx ();
904 emit_jump (around_label);
906 emit_label (region->label);
907 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
909 emit_label (around_label);
912 /* End an exception region for a throw. No handling goes on here,
913 but it's the easiest way for the front-end to indicate what type
914 is being thrown. */
916 void
917 expand_eh_region_end_throw (type)
918 tree type;
920 struct eh_region *region;
922 if (! doing_eh (0))
923 return;
925 region = expand_eh_region_end ();
926 region->type = ERT_THROW;
927 region->u.throw.type = type;
930 /* End a fixup region. Within this region the cleanups for the immediately
931 enclosing region are _not_ run. This is used for goto cleanup to avoid
932 destroying an object twice.
934 This would be an extraordinarily simple prospect, were it not for the
935 fact that we don't actually know what the immediately enclosing region
936 is. This is because expand_cleanups is currently
937 generating a sequence that it will insert somewhere else. We collect
938 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
940 void
941 expand_eh_region_end_fixup (handler)
942 tree handler;
944 struct eh_region *fixup;
946 if (! doing_eh (0))
947 return;
949 fixup = expand_eh_region_end ();
950 fixup->type = ERT_FIXUP;
951 fixup->u.fixup.cleanup_exp = handler;
954 /* Return an rtl expression for a pointer to the exception object
955 within a handler. */
958 get_exception_pointer (fun)
959 struct function *fun;
961 rtx exc_ptr = fun->eh->exc_ptr;
962 if (fun == cfun && ! exc_ptr)
964 exc_ptr = gen_reg_rtx (Pmode);
965 fun->eh->exc_ptr = exc_ptr;
967 return exc_ptr;
970 /* Return an rtl expression for the exception dispatch filter
971 within a handler. */
973 static rtx
974 get_exception_filter (fun)
975 struct function *fun;
977 rtx filter = fun->eh->filter;
978 if (fun == cfun && ! filter)
980 filter = gen_reg_rtx (word_mode);
981 fun->eh->filter = filter;
983 return filter;
986 /* Begin a region that will contain entries created with
987 add_partial_entry. */
989 void
990 begin_protect_partials ()
992 /* Push room for a new list. */
993 cfun->eh->protect_list
994 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
997 /* Start a new exception region for a region of code that has a
998 cleanup action and push the HANDLER for the region onto
999 protect_list. All of the regions created with add_partial_entry
1000 will be ended when end_protect_partials is invoked. */
1002 void
1003 add_partial_entry (handler)
1004 tree handler;
1006 expand_eh_region_start ();
1008 /* ??? This comment was old before the most recent rewrite. We
1009 really ought to fix the callers at some point. */
1010 /* For backwards compatibility, we allow callers to omit calls to
1011 begin_protect_partials for the outermost region. So, we must
1012 explicitly do so here. */
1013 if (!cfun->eh->protect_list)
1014 begin_protect_partials ();
1016 /* Add this entry to the front of the list. */
1017 TREE_VALUE (cfun->eh->protect_list)
1018 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1021 /* End all the pending exception regions on protect_list. */
1023 void
1024 end_protect_partials ()
1026 tree t;
1028 /* ??? This comment was old before the most recent rewrite. We
1029 really ought to fix the callers at some point. */
1030 /* For backwards compatibility, we allow callers to omit the call to
1031 begin_protect_partials for the outermost region. So,
1032 PROTECT_LIST may be NULL. */
1033 if (!cfun->eh->protect_list)
1034 return;
1036 /* Pop the topmost entry. */
1037 t = TREE_VALUE (cfun->eh->protect_list);
1038 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1040 /* End all the exception regions. */
1041 for (; t; t = TREE_CHAIN (t))
1042 expand_eh_region_end_cleanup (TREE_VALUE (t));
1046 /* This section is for the exception handling specific optimization pass. */
1048 /* Provide random access into the exception region tree. It is just
1049 as simple to collect the regions this way as in
1050 expand_eh_region_start, but without having to realloc memory. */
1052 static void
1053 collect_eh_region_array ()
1055 struct eh_region **array, *i;
1057 i = cfun->eh->region_tree;
1058 if (! i)
1059 return;
1061 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1062 cfun->eh->region_array = array;
1064 while (1)
1066 array[i->region_number] = i;
1068 /* If there are sub-regions, process them. */
1069 if (i->inner)
1070 i = i->inner;
1071 /* If there are peers, process them. */
1072 else if (i->next_peer)
1073 i = i->next_peer;
1074 /* Otherwise, step back up the tree to the next peer. */
1075 else
1077 do {
1078 i = i->outer;
1079 if (i == NULL)
1080 return;
1081 } while (i->next_peer == NULL);
1082 i = i->next_peer;
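/* The loop above (like the walks in mark_eh_status and free_eh_status)
   visits every region without recursion or an explicit stack: descend
   via INNER while possible, else step to NEXT_PEER, else climb OUTER
   until a peer exists.  A minimal standalone rendering of the same
   idiom, for reference:

       for (r = root; r != NULL; )
         {
           visit (r);
           if (r->inner)
             r = r->inner;
           else
             {
               while (r != NULL && r->next_peer == NULL)
                 r = r->outer;
               if (r != NULL)
                 r = r->next_peer;
             }
         }
*/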
1087 static void
1088 resolve_fixup_regions ()
1090 int i, j, n = cfun->eh->last_region_number;
1092 for (i = 1; i <= n; ++i)
1094 struct eh_region *fixup = cfun->eh->region_array[i];
1095 struct eh_region *cleanup;
1097 if (! fixup || fixup->type != ERT_FIXUP)
1098 continue;
1100 for (j = 1; j <= n; ++j)
1102 cleanup = cfun->eh->region_array[j];
1103 if (cleanup->type == ERT_CLEANUP
1104 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1105 break;
1107 if (j > n)
1108 abort ();
1110 fixup->u.fixup.real_region = cleanup->outer;
1114 /* Now that we've discovered what region actually encloses a fixup,
1115 we can shuffle pointers and remove them from the tree. */
1117 static void
1118 remove_fixup_regions ()
1120 int i;
1121 rtx insn, note;
1122 struct eh_region *fixup;
1124 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1125 for instructions referencing fixup regions. This is only
1126 strictly necessary for fixup regions with no parent, but
1127 doesn't hurt to do it for all regions. */
1128 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1129 if (INSN_P (insn)
1130 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1131 && INTVAL (XEXP (note, 0)) > 0
1132 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1133 && fixup->type == ERT_FIXUP)
1135 if (fixup->u.fixup.real_region)
1136 XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
1137 else
1138 remove_note (insn, note);
1141 /* Remove the fixup regions from the tree. */
1142 for (i = cfun->eh->last_region_number; i > 0; --i)
1144 fixup = cfun->eh->region_array[i];
1145 if (! fixup)
1146 continue;
1148 /* Allow GC to maybe free some memory. */
1149 if (fixup->type == ERT_CLEANUP)
1150 fixup->u.cleanup.exp = NULL_TREE;
1152 if (fixup->type != ERT_FIXUP)
1153 continue;
1155 if (fixup->inner)
1157 struct eh_region *parent, *p, **pp;
1159 parent = fixup->u.fixup.real_region;
1161 /* Fix up the children's parent pointers; find the end of
1162 the list. */
1163 for (p = fixup->inner; ; p = p->next_peer)
1165 p->outer = parent;
1166 if (! p->next_peer)
1167 break;
1170 /* In the tree of cleanups, only outer-inner ordering matters.
1171 So link the children back in anywhere at the correct level. */
1172 if (parent)
1173 pp = &parent->inner;
1174 else
1175 pp = &cfun->eh->region_tree;
1176 p->next_peer = *pp;
1177 *pp = fixup->inner;
1178 fixup->inner = NULL;
1181 remove_eh_handler (fixup);
1185 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1186 can_throw instruction in the region. */
1188 static void
1189 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1190 rtx *pinsns;
1191 int *orig_sp;
1192 int cur;
1194 int *sp = orig_sp;
1195 rtx insn, next;
1197 for (insn = *pinsns; insn ; insn = next)
1199 next = NEXT_INSN (insn);
1200 if (GET_CODE (insn) == NOTE)
1202 int kind = NOTE_LINE_NUMBER (insn);
1203 if (kind == NOTE_INSN_EH_REGION_BEG
1204 || kind == NOTE_INSN_EH_REGION_END)
1206 if (kind == NOTE_INSN_EH_REGION_BEG)
1208 struct eh_region *r;
1210 *sp++ = cur;
1211 cur = NOTE_EH_HANDLER (insn);
1213 r = cfun->eh->region_array[cur];
1214 if (r->type == ERT_FIXUP)
1216 r = r->u.fixup.real_region;
1217 cur = r ? r->region_number : 0;
1219 else if (r->type == ERT_CATCH)
1221 r = r->outer;
1222 cur = r ? r->region_number : 0;
1225 else
1226 cur = *--sp;
1228 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1229 requires extra care to adjust sequence start. */
1230 if (insn == *pinsns)
1231 *pinsns = next;
1232 remove_insn (insn);
1233 continue;
1236 else if (INSN_P (insn))
1238 if (cur > 0
1239 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1240 /* Calls can always potentially throw exceptions, unless
1241 they have a REG_EH_REGION note with a value of 0 or less.
1242 Those should be the only possible kind so far. */
1243 && (GET_CODE (insn) == CALL_INSN
1244 /* If we wanted exceptions for non-call insns, then
1245 any may_trap_p instruction could throw. */
1246 || (flag_non_call_exceptions
1247 && may_trap_p (PATTERN (insn)))))
1249 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1250 REG_NOTES (insn));
1253 if (GET_CODE (insn) == CALL_INSN
1254 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1256 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1257 sp, cur);
1258 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1259 sp, cur);
1260 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1261 sp, cur);
1266 if (sp != orig_sp)
1267 abort ();
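/* A worked example of the conversion above (illustrative): given the
   insn stream

       NOTE_INSN_EH_REGION_BEG 1
         call foo
       NOTE_INSN_EH_REGION_END 1
       call bar

   both notes are removed, "call foo" gains a REG_EH_REGION note with
   value 1, and "call bar", lying outside every region, gets no note.
   With -fnon-call-exceptions, may_trap_p insns inside the region are
   annotated the same way as the call.  */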
1270 void
1271 convert_from_eh_region_ranges ()
1273 int *stack;
1274 rtx insns;
1276 collect_eh_region_array ();
1277 resolve_fixup_regions ();
1279 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1280 insns = get_insns ();
1281 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1282 free (stack);
1284 remove_fixup_regions ();
1287 void
1288 find_exception_handler_labels ()
1290 rtx list = NULL_RTX;
1291 int i;
1293 free_EXPR_LIST_list (&exception_handler_labels);
1295 if (cfun->eh->region_tree == NULL)
1296 return;
1298 for (i = cfun->eh->last_region_number; i > 0; --i)
1300 struct eh_region *region = cfun->eh->region_array[i];
1301 rtx lab;
1303 if (! region)
1304 continue;
1305 if (cfun->eh->built_landing_pads)
1306 lab = region->landing_pad;
1307 else
1308 lab = region->label;
1310 if (lab)
1311 list = alloc_EXPR_LIST (0, lab, list);
1314 /* For sjlj exceptions, need the return label to remain live until
1315 after landing pad generation. */
1316 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1317 list = alloc_EXPR_LIST (0, return_label, list);
1319 exception_handler_labels = list;
1323 static struct eh_region *
1324 duplicate_eh_region_1 (o, map)
1325 struct eh_region *o;
1326 struct inline_remap *map;
1328 struct eh_region *n
1329 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1331 n->region_number = o->region_number + cfun->eh->last_region_number;
1332 n->type = o->type;
1334 switch (n->type)
1336 case ERT_CLEANUP:
1337 case ERT_MUST_NOT_THROW:
1338 break;
1340 case ERT_TRY:
1341 if (o->u.try.continue_label)
1342 n->u.try.continue_label
1343 = get_label_from_map (map,
1344 CODE_LABEL_NUMBER (o->u.try.continue_label));
1345 break;
1347 case ERT_CATCH:
1348 n->u.catch.type = o->u.catch.type;
1349 break;
1351 case ERT_ALLOWED_EXCEPTIONS:
1352 n->u.allowed.type_list = o->u.allowed.type_list;
1353 break;
1355 case ERT_THROW:
1356 n->u.throw.type = o->u.throw.type;
1358 default:
1359 abort ();
1362 if (o->label)
1363 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1364 if (o->resume)
1366 n->resume = map->insn_map[INSN_UID (o->resume)];
1367 if (n->resume == NULL)
1368 abort ();
1371 return n;
1374 static void
1375 duplicate_eh_region_2 (o, n_array)
1376 struct eh_region *o;
1377 struct eh_region **n_array;
1379 struct eh_region *n = n_array[o->region_number];
1381 switch (n->type)
1383 case ERT_TRY:
1384 n->u.try.catch = n_array[o->u.try.catch->region_number];
1385 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1386 break;
1388 case ERT_CATCH:
1389 if (o->u.catch.next_catch)
1390 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1391 if (o->u.catch.prev_catch)
1392 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1393 break;
1395 default:
1396 break;
1399 if (o->outer)
1400 n->outer = n_array[o->outer->region_number];
1401 if (o->inner)
1402 n->inner = n_array[o->inner->region_number];
1403 if (o->next_peer)
1404 n->next_peer = n_array[o->next_peer->region_number];
1408 duplicate_eh_regions (ifun, map)
1409 struct function *ifun;
1410 struct inline_remap *map;
1412 int ifun_last_region_number = ifun->eh->last_region_number;
1413 struct eh_region **n_array, *root, *cur;
1414 int i;
1416 if (ifun_last_region_number == 0)
1417 return 0;
1419 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1421 for (i = 1; i <= ifun_last_region_number; ++i)
1423 cur = ifun->eh->region_array[i];
1424 if (!cur || cur->region_number != i)
1425 continue;
1426 n_array[i] = duplicate_eh_region_1 (cur, map);
1428 for (i = 1; i <= ifun_last_region_number; ++i)
1430 cur = ifun->eh->region_array[i];
1431 if (!cur || cur->region_number != i)
1432 continue;
1433 duplicate_eh_region_2 (cur, n_array);
1436 root = n_array[ifun->eh->region_tree->region_number];
1437 cur = cfun->eh->cur_region;
1438 if (cur)
1440 struct eh_region *p = cur->inner;
1441 if (p)
1443 while (p->next_peer)
1444 p = p->next_peer;
1445 p->next_peer = root;
1447 else
1448 cur->inner = root;
1450 for (i = 1; i <= ifun_last_region_number; ++i)
1451 if (n_array[i] && n_array[i]->outer == NULL)
1452 n_array[i]->outer = cur;
1454 else
1456 struct eh_region *p = cfun->eh->region_tree;
1457 if (p)
1459 while (p->next_peer)
1460 p = p->next_peer;
1461 p->next_peer = root;
1463 else
1464 cfun->eh->region_tree = root;
1467 free (n_array);
1469 i = cfun->eh->last_region_number;
1470 cfun->eh->last_region_number = i + ifun_last_region_number;
1471 return i;
1475 /* ??? Move from tree.c to tree.h. */
1476 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
1478 static int
1479 t2r_eq (pentry, pdata)
1480 const PTR pentry;
1481 const PTR pdata;
1483 tree entry = (tree) pentry;
1484 tree data = (tree) pdata;
1486 return TREE_PURPOSE (entry) == data;
1489 static hashval_t
1490 t2r_hash (pentry)
1491 const PTR pentry;
1493 tree entry = (tree) pentry;
1494 return TYPE_HASH (TREE_PURPOSE (entry));
1497 static int
1498 t2r_mark_1 (slot, data)
1499 PTR *slot;
1500 PTR data ATTRIBUTE_UNUSED;
1502 tree contents = (tree) *slot;
1503 ggc_mark_tree (contents);
1504 return 1;
1507 static void
1508 t2r_mark (addr)
1509 PTR addr;
1511 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1514 static void
1515 add_type_for_runtime (type)
1516 tree type;
1518 tree *slot;
1520 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1521 TYPE_HASH (type), INSERT);
1522 if (*slot == NULL)
1524 tree runtime = (*lang_eh_runtime_type) (type);
1525 *slot = tree_cons (type, runtime, NULL_TREE);
1529 static tree
1530 lookup_type_for_runtime (type)
1531 tree type;
1533 tree *slot;
1535 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1536 TYPE_HASH (type), NO_INSERT);
1538 /* We should have always inserted the data earlier. */
1539 return TREE_VALUE (*slot);
1543 /* Represent an entry in @TTypes for either catch actions
1544 or exception filter actions. */
1545 struct ttypes_filter
1547 tree t;
1548 int filter;
1551 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1552 (a tree) for a @TTypes type node we are thinking about adding. */
1554 static int
1555 ttypes_filter_eq (pentry, pdata)
1556 const PTR pentry;
1557 const PTR pdata;
1559 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1560 tree data = (tree) pdata;
1562 return entry->t == data;
1565 static hashval_t
1566 ttypes_filter_hash (pentry)
1567 const PTR pentry;
1569 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1570 return TYPE_HASH (entry->t);
1573 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1574 exception specification list we are thinking about adding. */
1575 /* ??? Currently we use the type lists in the order given. Someone
1576 should put these in some canonical order. */
1578 static int
1579 ehspec_filter_eq (pentry, pdata)
1580 const PTR pentry;
1581 const PTR pdata;
1583 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1584 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1586 return type_list_equal (entry->t, data->t);
1589 /* Hash function for exception specification lists. */
1591 static hashval_t
1592 ehspec_filter_hash (pentry)
1593 const PTR pentry;
1595 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1596 hashval_t h = 0;
1597 tree list;
1599 for (list = entry->t; list ; list = TREE_CHAIN (list))
1600 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1601 return h;
1604 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1605 up the search. Return the filter value to be used. */
1607 static int
1608 add_ttypes_entry (ttypes_hash, type)
1609 htab_t ttypes_hash;
1610 tree type;
1612 struct ttypes_filter **slot, *n;
1614 slot = (struct ttypes_filter **)
1615 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1617 if ((n = *slot) == NULL)
1619 /* Filter value is a 1-based table index. */
1621 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1622 n->t = type;
1623 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1624 *slot = n;
1626 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1629 return n->filter;
1632 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1633 to speed up the search. Return the filter value to be used. */
1635 static int
1636 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1637 htab_t ehspec_hash;
1638 htab_t ttypes_hash;
1639 tree list;
1641 struct ttypes_filter **slot, *n;
1642 struct ttypes_filter dummy;
1644 dummy.t = list;
1645 slot = (struct ttypes_filter **)
1646 htab_find_slot (ehspec_hash, &dummy, INSERT);
1648 if ((n = *slot) == NULL)
1650 /* Filter value is a -1-based byte index into a uleb128 buffer. */
1652 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1653 n->t = list;
1654 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1655 *slot = n;
1657 /* Look up each type in the list and encode its filter
1658 value as a uleb128. Terminate the list with 0. */
1659 for (; list ; list = TREE_CHAIN (list))
1660 push_uleb128 (&cfun->eh->ehspec_data,
1661 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1662 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1665 return n->filter;
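/* A small worked example (hypothetical types): for a function declared
   "throw (A, B)", add_ehspec_entry pushes onto ehspec_data

       uleb128 (filter of A)     e.g. 0x01
       uleb128 (filter of B)     e.g. 0x02
       0                         list terminator

   and the region's filter becomes the negated 1-based byte index of
   the first entry, here -1.  Catch filters, by contrast, are the
   positive 1-based table indices returned by add_ttypes_entry.  */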
1668 /* Generate the action filter values to be used for CATCH and
1669 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1670 we use lots of landing pads, and so every type or list can share
1671 the same filter value, which saves table space. */
1673 static void
1674 assign_filter_values ()
1676 int i;
1677 htab_t ttypes, ehspec;
1679 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1680 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1682 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1683 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1685 for (i = cfun->eh->last_region_number; i > 0; --i)
1687 struct eh_region *r = cfun->eh->region_array[i];
1689 /* Mind we don't process a region more than once. */
1690 if (!r || r->region_number != i)
1691 continue;
1693 switch (r->type)
1695 case ERT_CATCH:
1696 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1697 break;
1699 case ERT_ALLOWED_EXCEPTIONS:
1700 r->u.allowed.filter
1701 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1702 break;
1704 default:
1705 break;
1709 htab_delete (ttypes);
1710 htab_delete (ehspec);
1713 static void
1714 build_post_landing_pads ()
1716 int i;
1718 for (i = cfun->eh->last_region_number; i > 0; --i)
1720 struct eh_region *region = cfun->eh->region_array[i];
1721 rtx seq;
1723 /* Mind we don't process a region more than once. */
1724 if (!region || region->region_number != i)
1725 continue;
1727 switch (region->type)
1729 case ERT_TRY:
1730 /* ??? Collect the set of all non-overlapping catch handlers
1731 all the way up the chain until blocked by a cleanup. */
1732 /* ??? Outer try regions can share landing pads with inner
1733 try regions if the types are completely non-overlapping,
1734 and there are no intervening cleanups. */
1736 region->post_landing_pad = gen_label_rtx ();
1738 start_sequence ();
1740 emit_label (region->post_landing_pad);
1742 /* ??? It is mighty inconvenient to call back into the
1743 switch statement generation code in expand_end_case.
1744 Rapid prototyping sez a sequence of ifs. */
1746 struct eh_region *c;
1747 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1749 /* ??? _Unwind_ForcedUnwind wants no match here. */
1750 if (c->u.catch.type == NULL)
1751 emit_jump (c->label);
1752 else
1753 emit_cmp_and_jump_insns (cfun->eh->filter,
1754 GEN_INT (c->u.catch.filter),
1755 EQ, NULL_RTX, word_mode,
1756 0, 0, c->label);
1760 /* We delay the generation of the _Unwind_Resume until we generate
1761 landing pads. We emit a marker here so as to get good control
1762 flow data in the meantime. */
1763 region->resume
1764 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1765 emit_barrier ();
1767 seq = get_insns ();
1768 end_sequence ();
1770 emit_insns_before (seq, region->u.try.catch->label);
1771 break;
1773 case ERT_ALLOWED_EXCEPTIONS:
1774 region->post_landing_pad = gen_label_rtx ();
1776 start_sequence ();
1778 emit_label (region->post_landing_pad);
1780 emit_cmp_and_jump_insns (cfun->eh->filter,
1781 GEN_INT (region->u.allowed.filter),
1782 EQ, NULL_RTX, word_mode, 0, 0,
1783 region->label);
1785 /* We delay the generation of the _Unwind_Resume until we generate
1786 landing pads. We emit a marker here so as to get good control
1787 flow data in the meantime. */
1788 region->resume
1789 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1790 emit_barrier ();
1792 seq = get_insns ();
1793 end_sequence ();
1795 emit_insns_before (seq, region->label);
1796 break;
1798 case ERT_CLEANUP:
1799 case ERT_MUST_NOT_THROW:
1800 region->post_landing_pad = region->label;
1801 break;
1803 case ERT_CATCH:
1804 case ERT_THROW:
1805 /* Nothing to do. */
1806 break;
1808 default:
1809 abort ();
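/* For an ERT_TRY region, the sequence built above amounts to this
   pseudo-C (a sketch; FILTER_n are the values chosen by
   assign_filter_values):

       post_landing_pad:
         if (filter == FILTER_1) goto catch_1;
         if (filter == FILTER_2) goto catch_2;
         ...
         RESX;   -- resume unwinding in the enclosing region

   A catch-all handler (null type) becomes an unconditional jump.  */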
1814 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1815 _Unwind_Resume otherwise. */
1817 static void
1818 connect_post_landing_pads ()
1820 int i;
1822 for (i = cfun->eh->last_region_number; i > 0; --i)
1824 struct eh_region *region = cfun->eh->region_array[i];
1825 struct eh_region *outer;
1826 rtx seq;
1828 /* Mind we don't process a region more than once. */
1829 if (!region || region->region_number != i)
1830 continue;
1832 /* If there is no RESX, or it has been deleted by flow, there's
1833 nothing to fix up. */
1834 if (! region->resume || INSN_DELETED_P (region->resume))
1835 continue;
1837 /* Search for another landing pad in this function. */
1838 for (outer = region->outer; outer ; outer = outer->outer)
1839 if (outer->post_landing_pad)
1840 break;
1842 start_sequence ();
1844 if (outer)
1845 emit_jump (outer->post_landing_pad);
1846 else
1847 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1848 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1850 seq = get_insns ();
1851 end_sequence ();
1852 emit_insns_before (seq, region->resume);
1854 /* Leave the RESX to be deleted by flow. */
1859 static void
1860 dw2_build_landing_pads ()
1862 int i, j;
1864 for (i = cfun->eh->last_region_number; i > 0; --i)
1866 struct eh_region *region = cfun->eh->region_array[i];
1867 rtx seq;
1868 bool clobbers_hard_regs = false;
1870 /* Mind we don't process a region more than once. */
1871 if (!region || region->region_number != i)
1872 continue;
1874 if (region->type != ERT_CLEANUP
1875 && region->type != ERT_TRY
1876 && region->type != ERT_ALLOWED_EXCEPTIONS)
1877 continue;
1879 start_sequence ();
1881 region->landing_pad = gen_label_rtx ();
1882 emit_label (region->landing_pad);
1884 #ifdef HAVE_exception_receiver
1885 if (HAVE_exception_receiver)
1886 emit_insn (gen_exception_receiver ());
1887 else
1888 #endif
1889 #ifdef HAVE_nonlocal_goto_receiver
1890 if (HAVE_nonlocal_goto_receiver)
1891 emit_insn (gen_nonlocal_goto_receiver ());
1892 else
1893 #endif
1894 { /* Nothing */ }
1896 /* If the eh_return data registers are call-saved, then we
1897 won't have considered them clobbered from the call that
1898 threw. Kill them now. */
1899 for (j = 0; ; ++j)
1901 unsigned r = EH_RETURN_DATA_REGNO (j);
1902 if (r == INVALID_REGNUM)
1903 break;
1904 if (! call_used_regs[r])
1906 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1907 clobbers_hard_regs = true;
1911 if (clobbers_hard_regs)
1913 /* @@@ This is a kludge. Not all machine descriptions define a
1914 blockage insn, but we must not allow the code we just generated
1915 to be reordered by scheduling. So emit an ASM_INPUT to act as
1916 a blockage insn. */
1917 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
1920 emit_move_insn (cfun->eh->exc_ptr,
1921 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1922 emit_move_insn (cfun->eh->filter,
1923 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
1925 seq = get_insns ();
1926 end_sequence ();
1928 emit_insns_before (seq, region->post_landing_pad);
1933 struct sjlj_lp_info
1935 int directly_reachable;
1936 int action_index;
1937 int dispatch_index;
1938 int call_site_index;
1941 static bool
1942 sjlj_find_directly_reachable_regions (lp_info)
1943 struct sjlj_lp_info *lp_info;
1945 rtx insn;
1946 bool found_one = false;
1948 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1950 struct eh_region *region;
1951 tree type_thrown;
1952 rtx note;
1954 if (! INSN_P (insn))
1955 continue;
1957 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1958 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1959 continue;
1961 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1963 type_thrown = NULL_TREE;
1964 if (region->type == ERT_THROW)
1966 type_thrown = region->u.throw.type;
1967 region = region->outer;
1970 /* Find the first containing region that might handle the exception.
1971 That's the landing pad to which we will transfer control. */
1972 for (; region; region = region->outer)
1973 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1974 break;
1976 if (region)
1978 lp_info[region->region_number].directly_reachable = 1;
1979 found_one = true;
1983 return found_one;
1986 static void
1987 sjlj_assign_call_site_values (dispatch_label, lp_info)
1988 rtx dispatch_label;
1989 struct sjlj_lp_info *lp_info;
1991 htab_t ar_hash;
1992 int i, index;
1994 /* First task: build the action table. */
1996 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1997 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1999 for (i = cfun->eh->last_region_number; i > 0; --i)
2000 if (lp_info[i].directly_reachable)
2002 struct eh_region *r = cfun->eh->region_array[i];
2003 r->landing_pad = dispatch_label;
2004 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2005 if (lp_info[i].action_index != -1)
2006 cfun->uses_eh_lsda = 1;
2009 htab_delete (ar_hash);
2011 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2012 landing pad label for the region. For sjlj though, there is one
2013 common landing pad from which we dispatch to the post-landing pads.
2015 A region receives a dispatch index if it is directly reachable
2016 and requires in-function processing. Regions that share post-landing
2017 pads may share dispatch indices. */
2018 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2019 (see build_post_landing_pads) so we don't bother checking for it. */
2021 index = 0;
2022 for (i = cfun->eh->last_region_number; i > 0; --i)
2023 if (lp_info[i].directly_reachable
2024 && lp_info[i].action_index >= 0)
2025 lp_info[i].dispatch_index = index++;
2027 /* Finally: assign call-site values. In dwarf2 terms, this would be
2028 the region number assigned by convert_to_eh_region_ranges, but
2029 handles no-action and must-not-throw differently. */
2031 call_site_base = 1;
2032 for (i = cfun->eh->last_region_number; i > 0; --i)
2033 if (lp_info[i].directly_reachable)
2035 int action = lp_info[i].action_index;
2037 /* Map must-not-throw to otherwise unused call-site index 0. */
2038 if (action == -2)
2039 index = 0;
2040 /* Map no-action to otherwise unused call-site index -1. */
2041 else if (action == -1)
2042 index = -1;
2043 /* Otherwise, look it up in the table. */
2044 else
2045 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2047 lp_info[i].call_site_index = index;
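/* Illustrative summary of the mapping just computed, per directly
   reachable region:

       action_index == -2  (must-not-throw)  ->  call_site_index  0
       action_index == -1  (no action)       ->  call_site_index -1
       action_index >=  0                    ->  index from add_call_site

   The dispatch_index is a separate small dense integer used to select
   among post-landing pads in sjlj_emit_dispatch_table.  */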
2051 static void
2052 sjlj_mark_call_sites (lp_info)
2053 struct sjlj_lp_info *lp_info;
2055 int last_call_site = -2;
2056 rtx insn, mem;
2058 mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2059 plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
2060 sjlj_fc_call_site_ofs));
2062 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2064 struct eh_region *region;
2065 int this_call_site;
2066 rtx note, before, p;
2068 /* Reset value tracking at extended basic block boundaries. */
2069 if (GET_CODE (insn) == CODE_LABEL)
2070 last_call_site = -2;
2072 if (! INSN_P (insn))
2073 continue;
2075 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2076 if (!note)
2078 /* Calls (and trapping insns) without notes are outside any
2079 exception handling region in this function. Mark them as
2080 no action. */
2081 if (GET_CODE (insn) == CALL_INSN
2082 || (flag_non_call_exceptions
2083 && may_trap_p (PATTERN (insn))))
2084 this_call_site = -1;
2085 else
2086 continue;
2088 else
2090 /* Calls that are known to not throw need not be marked. */
2091 if (INTVAL (XEXP (note, 0)) <= 0)
2092 continue;
2094 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2095 this_call_site = lp_info[region->region_number].call_site_index;
2098 if (this_call_site == last_call_site)
2099 continue;
2101 /* Don't separate a call from its argument loads. */
2102 before = insn;
2103 if (GET_CODE (insn) == CALL_INSN)
2105 HARD_REG_SET parm_regs;
2106 int nparm_regs;
2108 /* Since different machines initialize their parameter registers
2109 in different orders, assume nothing. Collect the set of all
2110 parameter registers. */
2111 CLEAR_HARD_REG_SET (parm_regs);
2112 nparm_regs = 0;
2113 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
2114 if (GET_CODE (XEXP (p, 0)) == USE
2115 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
2117 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
2118 abort ();
2120 /* We only care about registers which can hold function
2121 arguments. */
2122 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
2123 continue;
2125 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
2126 nparm_regs++;
2129 /* Search backward for the first set of a register in this set. */
2130 while (nparm_regs)
2132 before = PREV_INSN (before);
2134 /* Given that we've done no other optimizations yet,
2135 the arguments should be immediately available. */
2136 if (GET_CODE (before) == CODE_LABEL)
2137 abort ();
2139 p = single_set (before);
2140 if (p && GET_CODE (SET_DEST (p)) == REG
2141 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
2142 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
2144 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
2145 nparm_regs--;
2150 start_sequence ();
2151 emit_move_insn (mem, GEN_INT (this_call_site));
2152 p = get_insns ();
2153 end_sequence ();
2155 emit_insns_before (p, before);
2156 last_call_site = this_call_site;
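/* So, for a call whose argument loads look something like

     r4 = x
     r5 = y
     call foo

   the store of the call-site value into fc.call_site is emitted
   before the "r4 = x" load, never between the loads and the call.
   (The register names here are purely illustrative.)  */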
2160 /* Construct the SjLj_Function_Context. */
2162 static void
2163 sjlj_emit_function_enter (dispatch_label)
2164 rtx dispatch_label;
2166 rtx fn_begin, fc, mem, seq;
2168 fc = cfun->eh->sjlj_fc;
2170 start_sequence ();
2172 /* We're storing this libcall's address into memory instead of
2173 calling it directly. Thus, we must call assemble_external_libcall
2174 here, as we cannot depend on emit_library_call to do it for us. */
2175 assemble_external_libcall (eh_personality_libfunc);
2176 mem = change_address (fc, Pmode,
2177 plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
2178 emit_move_insn (mem, eh_personality_libfunc);
2180 mem = change_address (fc, Pmode,
2181 plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
2182 if (cfun->uses_eh_lsda)
2184 char buf[20];
2185 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2186 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2188 else
2189 emit_move_insn (mem, const0_rtx);
2191 #ifdef DONT_USE_BUILTIN_SETJMP
2193 rtx x, note;
2194 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2195 TYPE_MODE (integer_type_node), 1,
2196 plus_constant (XEXP (fc, 0),
2197 sjlj_fc_jbuf_ofs), Pmode);
2199 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2200 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2202 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2203 TYPE_MODE (integer_type_node), 0, 0,
2204 dispatch_label);
2206 #else
2207 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2208 dispatch_label);
2209 #endif
2211 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2212 1, XEXP (fc, 0), Pmode);
2214 seq = get_insns ();
2215 end_sequence ();
2217 /* ??? Instead of doing this at the beginning of the function,
2218 do this in a block that is at loop level 0 and dominates all
2219 can_throw_internal instructions. */
2221 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2222 if (GET_CODE (fn_begin) == NOTE
2223 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2224 break;
2225 emit_insns_after (seq, fn_begin);
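/* Roughly, the sequence emitted above amounts to this pseudo-code:

     fc.personality = PERSONALITY;      (eh_personality_libfunc)
     fc.lsda = &LLSDAnn;                (or 0 if no lsda is needed)
     if (setjmp (fc.jbuf) != 0)         (or the builtin setjmp form)
       goto dispatch_label;
     _Unwind_SjLj_Register (&fc);

   where the last call stands for whatever routine
   unwind_sjlj_register_libfunc refers to.  */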
2228 /* Call back from expand_function_end to know where we should put
2229 the call to unwind_sjlj_unregister_libfunc if needed. */
2231 void
2232 sjlj_emit_function_exit_after (after)
2233 rtx after;
2235 cfun->eh->sjlj_exit_after = after;
2238 static void
2239 sjlj_emit_function_exit ()
2241 rtx seq;
2243 start_sequence ();
2245 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2246 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2248 seq = get_insns ();
2249 end_sequence ();
2251 /* ??? Really this can be done in any block at loop level 0 that
2252 post-dominates all can_throw_internal instructions. This is
2253 the last possible moment. */
2255 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2258 static void
2259 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2260 rtx dispatch_label;
2261 struct sjlj_lp_info *lp_info;
2263 int i, first_reachable;
2264 rtx mem, dispatch, seq, fc;
2266 fc = cfun->eh->sjlj_fc;
2268 start_sequence ();
2270 emit_label (dispatch_label);
2272 #ifndef DONT_USE_BUILTIN_SETJMP
2273 expand_builtin_setjmp_receiver (dispatch_label);
2274 #endif
2276 /* Load up dispatch index, exc_ptr and filter values from the
2277 function context. */
2278 mem = change_address (fc, TYPE_MODE (integer_type_node),
2279 plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
2280 dispatch = copy_to_reg (mem);
2282 mem = change_address (fc, word_mode,
2283 plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
2284 if (word_mode != Pmode)
2286 #ifdef POINTERS_EXTEND_UNSIGNED
2287 mem = convert_memory_address (Pmode, mem);
2288 #else
2289 mem = convert_to_mode (Pmode, mem, 0);
2290 #endif
2292 emit_move_insn (cfun->eh->exc_ptr, mem);
2294 mem = change_address (fc, word_mode,
2295 plus_constant (XEXP (fc, 0),
2296 sjlj_fc_data_ofs + UNITS_PER_WORD));
2297 emit_move_insn (cfun->eh->filter, mem);
2299 /* Jump to one of the directly reachable regions. */
2300 /* ??? This really ought to be using a switch statement. */
2302 first_reachable = 0;
2303 for (i = cfun->eh->last_region_number; i > 0; --i)
2305 if (! lp_info[i].directly_reachable
2306 || lp_info[i].action_index < 0)
2307 continue;
2309 if (! first_reachable)
2311 first_reachable = i;
2312 continue;
2315 emit_cmp_and_jump_insns (dispatch,
2316 GEN_INT (lp_info[i].dispatch_index), EQ,
2317 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2318 cfun->eh->region_array[i]->post_landing_pad);
2321 seq = get_insns ();
2322 end_sequence ();
2324 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2325 ->post_landing_pad));
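/* Schematically, the code emitted above is:

   dispatch_label:
     dispatch = fc.call_site;
     exc_ptr = fc.data[0];
     filter = fc.data[1];
     if (dispatch == D_i) goto post_landing_pad_i;   (one per region)
     ...
     (fall through)

   The first reachable region gets no explicit comparison; the whole
   sequence is emitted just before its post landing pad, so control
   falls through to it.  */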
2328 static void
2329 sjlj_build_landing_pads ()
2331 struct sjlj_lp_info *lp_info;
2333 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2334 sizeof (struct sjlj_lp_info));
2336 if (sjlj_find_directly_reachable_regions (lp_info))
2338 rtx dispatch_label = gen_label_rtx ();
2340 cfun->eh->sjlj_fc
2341 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2342 int_size_in_bytes (sjlj_fc_type_node),
2343 TYPE_ALIGN (sjlj_fc_type_node));
2345 sjlj_assign_call_site_values (dispatch_label, lp_info);
2346 sjlj_mark_call_sites (lp_info);
2348 sjlj_emit_function_enter (dispatch_label);
2349 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2350 sjlj_emit_function_exit ();
2353 free (lp_info);
2356 void
2357 finish_eh_generation ()
2359 /* Nothing to do if no regions created. */
2360 if (cfun->eh->region_tree == NULL)
2361 return;
2363 /* The object here is to provide find_basic_blocks with detailed
2364 information (via reachable_handlers) on how exception control
2365 flows within the function. In this first pass, we can include
2366 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2367 regions, and hope that it will be useful in deleting unreachable
2368 handlers. Subsequently, we will generate landing pads which will
2369 connect many of the handlers, and then type information will not
2370 be effective. Still, this is a win over previous implementations. */
2372 jump_optimize_minimal (get_insns ());
2373 find_basic_blocks (get_insns (), max_reg_num (), 0);
2374 cleanup_cfg ();
2376 /* These registers are used by the landing pads. Make sure they
2377 have been generated. */
2378 get_exception_pointer (cfun);
2379 get_exception_filter (cfun);
2381 /* Construct the landing pads. */
2383 assign_filter_values ();
2384 build_post_landing_pads ();
2385 connect_post_landing_pads ();
2386 if (USING_SJLJ_EXCEPTIONS)
2387 sjlj_build_landing_pads ();
2388 else
2389 dw2_build_landing_pads ();
2391 cfun->eh->built_landing_pads = 1;
2393 /* We've totally changed the CFG. Start over. */
2394 find_exception_handler_labels ();
2395 jump_optimize_minimal (get_insns ());
2396 find_basic_blocks (get_insns (), max_reg_num (), 0);
2397 cleanup_cfg ();
2400 /* This section handles removing dead code for flow. */
2402 /* Remove LABEL from the exception_handler_labels list. */
2404 static void
2405 remove_exception_handler_label (label)
2406 rtx label;
2408 rtx *pl, l;
2410 for (pl = &exception_handler_labels, l = *pl;
2411 XEXP (l, 0) != label;
2412 pl = &XEXP (l, 1), l = *pl)
2413 continue;
2415 *pl = XEXP (l, 1);
2416 free_EXPR_LIST_node (l);
2419 /* Splice REGION from the region tree etc. */
2421 static void
2422 remove_eh_handler (region)
2423 struct eh_region *region;
2425 struct eh_region **pp, *p;
2426 rtx lab;
2427 int i;
2429 /* For the benefit of efficiently handling REG_EH_REGION notes,
2430 replace this region in the region array with its containing
2431 region. Note that previous region deletions may result in
2432 multiple copies of this region in the array, so we have to
2433 search the whole thing. */
2434 for (i = cfun->eh->last_region_number; i > 0; --i)
2435 if (cfun->eh->region_array[i] == region)
2436 cfun->eh->region_array[i] = region->outer;
2438 if (cfun->eh->built_landing_pads)
2439 lab = region->landing_pad;
2440 else
2441 lab = region->label;
2442 if (lab)
2443 remove_exception_handler_label (lab);
2445 if (region->outer)
2446 pp = &region->outer->inner;
2447 else
2448 pp = &cfun->eh->region_tree;
2449 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2450 continue;
2452 if (region->inner)
2454 for (p = region->inner; p->next_peer ; p = p->next_peer)
2455 p->outer = region->outer;
2456 p->next_peer = region->next_peer;
2457 p->outer = region->outer;
2458 *pp = region->inner;
2460 else
2461 *pp = region->next_peer;
2463 if (region->type == ERT_CATCH)
2465 struct eh_region *try, *next, *prev;
2467 for (try = region->next_peer;
2468 try->type == ERT_CATCH;
2469 try = try->next_peer)
2470 continue;
2471 if (try->type != ERT_TRY)
2472 abort ();
2474 next = region->u.catch.next_catch;
2475 prev = region->u.catch.prev_catch;
2477 if (next)
2478 next->u.catch.prev_catch = prev;
2479 else
2480 try->u.try.last_catch = prev;
2481 if (prev)
2482 prev->u.catch.next_catch = next;
2483 else
2485 try->u.try.catch = next;
2486 if (! next)
2487 remove_eh_handler (try);
2491 free (region);
2494 /* LABEL heads a basic block that is about to be deleted. If this
2495 label corresponds to an exception region, we may be able to
2496 delete the region. */
2498 void
2499 maybe_remove_eh_handler (label)
2500 rtx label;
2502 int i;
2504 /* ??? After generating landing pads, it's not so simple to determine
2505 if the region data is completely unused. One must examine the
2506 landing pad and the post landing pad, and whether an inner try block
2507 is referencing the catch handlers directly. */
2508 if (cfun->eh->built_landing_pads)
2509 return;
2511 for (i = cfun->eh->last_region_number; i > 0; --i)
2513 struct eh_region *region = cfun->eh->region_array[i];
2514 if (region && region->label == label)
2516 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2517 because there is no path to the fallback call to terminate.
2518 But the region continues to affect call-site data until there
2519 are no more contained calls, which we don't see here. */
2520 if (region->type == ERT_MUST_NOT_THROW)
2522 remove_exception_handler_label (region->label);
2523 region->label = NULL_RTX;
2525 else
2526 remove_eh_handler (region);
2527 break;
2533 /* This section describes CFG exception edges for flow. */
2535 /* For communicating between calls to reachable_next_level. */
2536 struct reachable_info
2538 tree types_caught;
2539 tree types_allowed;
2540 rtx handlers;
2543 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2544 base class of TYPE, is in HANDLED. */
2546 static int
2547 check_handled (handled, type)
2548 tree handled, type;
2550 tree t;
2552 /* We can check for exact matches without front-end help. */
2553 if (! lang_eh_type_covers)
2555 for (t = handled; t ; t = TREE_CHAIN (t))
2556 if (TREE_VALUE (t) == type)
2557 return 1;
2559 else
2561 for (t = handled; t ; t = TREE_CHAIN (t))
2562 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2563 return 1;
2566 return 0;
2569 /* A subroutine of reachable_next_level. If we are collecting a list
2570 of handlers, add one. After landing pad generation, reference
2571 it instead of the handlers themselves. Further, the handlers are
2572 all wired together, so by referencing one, we've got them all.
2573 Before landing pad generation we reference each handler individually.
2575 LP_REGION contains the landing pad; REGION is the handler. */
2577 static void
2578 add_reachable_handler (info, lp_region, region)
2579 struct reachable_info *info;
2580 struct eh_region *lp_region;
2581 struct eh_region *region;
2583 if (! info)
2584 return;
2586 if (cfun->eh->built_landing_pads)
2588 if (! info->handlers)
2589 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2591 else
2592 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2595 /* Process one level of exception regions for reachability.
2596 If TYPE_THROWN is non-null, then it is the *exact* type being
2597 propagated. If INFO is non-null, then collect handler labels
2598 and caught/allowed type information between invocations. */
2600 static enum reachable_code
2601 reachable_next_level (region, type_thrown, info)
2602 struct eh_region *region;
2603 tree type_thrown;
2604 struct reachable_info *info;
2606 switch (region->type)
2608 case ERT_CLEANUP:
2609 /* Before landing-pad generation, we model control flow
2610 directly to the individual handlers. In this way we can
2611 see that catch handler types may shadow one another. */
2612 add_reachable_handler (info, region, region);
2613 return RNL_MAYBE_CAUGHT;
2615 case ERT_TRY:
2617 struct eh_region *c;
2618 enum reachable_code ret = RNL_NOT_CAUGHT;
2620 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2622 /* A catch-all handler ends the search. */
2623 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2624 to be run as well. */
2625 if (c->u.catch.type == NULL)
2627 add_reachable_handler (info, region, c);
2628 return RNL_CAUGHT;
2631 if (type_thrown)
2633 /* If we have a type match, end the search. */
2634 if (c->u.catch.type == type_thrown
2635 || (lang_eh_type_covers
2636 && (*lang_eh_type_covers) (c->u.catch.type,
2637 type_thrown)))
2639 add_reachable_handler (info, region, c);
2640 return RNL_CAUGHT;
2643 /* If we have definitive information about a match failure,
2644 the catch won't trigger. */
2645 if (lang_eh_type_covers)
2646 return RNL_NOT_CAUGHT;
2649 if (! info)
2650 ret = RNL_MAYBE_CAUGHT;
2652 /* A type must not have been previously caught. */
2653 else if (! check_handled (info->types_caught, c->u.catch.type))
2655 add_reachable_handler (info, region, c);
2656 info->types_caught = tree_cons (NULL, c->u.catch.type,
2657 info->types_caught);
2659 /* ??? If the catch type is a base class of every allowed
2660 type, then we know we can stop the search. */
2661 ret = RNL_MAYBE_CAUGHT;
2665 return ret;
2668 case ERT_ALLOWED_EXCEPTIONS:
2669 /* An empty list of types definitely ends the search. */
2670 if (region->u.allowed.type_list == NULL_TREE)
2672 add_reachable_handler (info, region, region);
2673 return RNL_CAUGHT;
2676 /* Collect a list of lists of allowed types for use in detecting
2677 when a catch may be transformed into a catch-all. */
2678 if (info)
2679 info->types_allowed = tree_cons (NULL_TREE,
2680 region->u.allowed.type_list,
2681 info->types_allowed);
2683 /* If we have definitive information about the type hierarchy,
2684 then we can tell if the thrown type will pass through the
2685 filter. */
2686 if (type_thrown && lang_eh_type_covers)
2688 if (check_handled (region->u.allowed.type_list, type_thrown))
2689 return RNL_NOT_CAUGHT;
2690 else
2692 add_reachable_handler (info, region, region);
2693 return RNL_CAUGHT;
2697 add_reachable_handler (info, region, region);
2698 return RNL_MAYBE_CAUGHT;
2700 case ERT_CATCH:
2701 /* Catch regions are handled by their controlling try region. */
2702 return RNL_NOT_CAUGHT;
2704 case ERT_MUST_NOT_THROW:
2705 /* Here we end our search, since no exceptions may propagate.
2706 If we've touched down at some landing pad previously, then the
2707 explicit function call we generated may be used. Otherwise
2708 the call is made by the runtime. */
2709 if (info && info->handlers)
2711 add_reachable_handler (info, region, region);
2712 return RNL_CAUGHT;
2714 else
2715 return RNL_BLOCKED;
2717 case ERT_THROW:
2718 case ERT_FIXUP:
2719 /* Shouldn't see these here. */
2720 break;
2723 abort ();
2726 /* Retrieve a list of labels of exception handlers which can be
2727 reached by a given insn. */
2729 rtx
2730 reachable_handlers (insn)
2731 rtx insn;
2733 struct reachable_info info;
2734 struct eh_region *region;
2735 tree type_thrown;
2736 int region_number;
2738 if (GET_CODE (insn) == JUMP_INSN
2739 && GET_CODE (PATTERN (insn)) == RESX)
2740 region_number = XINT (PATTERN (insn), 0);
2741 else
2743 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2744 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2745 return NULL;
2746 region_number = INTVAL (XEXP (note, 0));
2749 memset (&info, 0, sizeof (info));
2751 region = cfun->eh->region_array[region_number];
2753 type_thrown = NULL_TREE;
2754 if (region->type == ERT_THROW)
2756 type_thrown = region->u.throw.type;
2757 region = region->outer;
2759 else if (GET_CODE (insn) == JUMP_INSN
2760 && GET_CODE (PATTERN (insn)) == RESX)
2761 region = region->outer;
2763 for (; region; region = region->outer)
2764 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2765 break;
2767 return info.handlers;
2770 /* Determine if the given INSN can throw an exception that is caught
2771 within the function. */
2773 bool
2774 can_throw_internal (insn)
2775 rtx insn;
2777 struct eh_region *region;
2778 tree type_thrown;
2779 rtx note;
2781 if (! INSN_P (insn))
2782 return false;
2784 if (GET_CODE (insn) == INSN
2785 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2786 insn = XVECEXP (PATTERN (insn), 0, 0);
2788 if (GET_CODE (insn) == CALL_INSN
2789 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2791 int i;
2792 for (i = 0; i < 3; ++i)
2794 rtx sub = XEXP (PATTERN (insn), i);
2795 for (; sub ; sub = NEXT_INSN (sub))
2796 if (can_throw_internal (sub))
2797 return true;
2799 return false;
2802 /* Every insn that might throw has an EH_REGION note. */
2803 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2804 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2805 return false;
2807 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2809 type_thrown = NULL_TREE;
2810 if (region->type == ERT_THROW)
2812 type_thrown = region->u.throw.type;
2813 region = region->outer;
2816 /* If this exception is ignored by each and every containing region,
2817 then control passes straight out. The runtime may handle some
2818 regions, which also do not require processing internally. */
2819 for (; region; region = region->outer)
2821 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2822 if (how == RNL_BLOCKED)
2823 return false;
2824 if (how != RNL_NOT_CAUGHT)
2825 return true;
2828 return false;
2831 /* Determine if the given INSN can throw an exception that is
2832 visible outside the function. */
2834 bool
2835 can_throw_external (insn)
2836 rtx insn;
2838 struct eh_region *region;
2839 tree type_thrown;
2840 rtx note;
2842 if (! INSN_P (insn))
2843 return false;
2845 if (GET_CODE (insn) == INSN
2846 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2847 insn = XVECEXP (PATTERN (insn), 0, 0);
2849 if (GET_CODE (insn) == CALL_INSN
2850 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2852 int i;
2853 for (i = 0; i < 3; ++i)
2855 rtx sub = XEXP (PATTERN (insn), i);
2856 for (; sub ; sub = NEXT_INSN (sub))
2857 if (can_throw_external (sub))
2858 return true;
2860 return false;
2863 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2864 if (!note)
2866 /* Calls (and trapping insns) without notes are outside any
2867 exception handling region in this function. We have to
2868 assume it might throw. Given that the front end and middle
2869 ends mark known NOTHROW functions, this isn't so wildly
2870 inaccurate. */
2871 return (GET_CODE (insn) == CALL_INSN
2872 || (flag_non_call_exceptions
2873 && may_trap_p (PATTERN (insn))));
2875 if (INTVAL (XEXP (note, 0)) <= 0)
2876 return false;
2878 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2880 type_thrown = NULL_TREE;
2881 if (region->type == ERT_THROW)
2883 type_thrown = region->u.throw.type;
2884 region = region->outer;
2887 /* If the exception is caught or blocked by any containing region,
2888 then it is not seen by any calling function. */
2889 for (; region ; region = region->outer)
2890 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2891 return false;
2893 return true;
2896 /* True if nothing in this function can throw outside this function. */
2898 bool
2899 nothrow_function_p ()
2901 rtx insn;
2903 if (! flag_exceptions)
2904 return true;
2906 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2907 if (can_throw_external (insn))
2908 return false;
2909 for (insn = current_function_epilogue_delay_list; insn;
2910 insn = XEXP (insn, 1))
2911 if (can_throw_external (insn))
2912 return false;
2914 return true;
2918 /* Various hooks for unwind library. */
2920 /* Do any necessary initialization to access arbitrary stack frames.
2921 On the SPARC, this means flushing the register windows. */
2923 void
2924 expand_builtin_unwind_init ()
2926 /* Set this so all the registers get saved in our frame; we need to be
2927 able to copy the saved values for any registers from frames we unwind. */
2928 current_function_has_nonlocal_label = 1;
2930 #ifdef SETUP_FRAME_ADDRESSES
2931 SETUP_FRAME_ADDRESSES ();
2932 #endif
2935 rtx
2936 expand_builtin_eh_return_data_regno (arglist)
2937 tree arglist;
2939 tree which = TREE_VALUE (arglist);
2940 unsigned HOST_WIDE_INT iwhich;
2942 if (TREE_CODE (which) != INTEGER_CST)
2944 error ("argument of `__builtin_eh_return_regno' must be constant");
2945 return constm1_rtx;
2948 iwhich = tree_low_cst (which, 1);
2949 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2950 if (iwhich == INVALID_REGNUM)
2951 return constm1_rtx;
2953 #ifdef DWARF_FRAME_REGNUM
2954 iwhich = DWARF_FRAME_REGNUM (iwhich);
2955 #else
2956 iwhich = DBX_REGISTER_NUMBER (iwhich);
2957 #endif
2959 return GEN_INT (iwhich);
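/* For reference, a personality routine makes use of these register
   numbers roughly like this (a sketch, not code from this file):

     _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
                    (_Unwind_Ptr) exception_header);
     _Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
                    handler_switch_value);  */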
2962 /* Given a value extracted from the return address register or stack slot,
2963 return the actual address encoded in that value. */
2965 rtx
2966 expand_builtin_extract_return_addr (addr_tree)
2967 tree addr_tree;
2969 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2971 /* First mask out any unwanted bits. */
2972 #ifdef MASK_RETURN_ADDR
2973 expand_and (addr, MASK_RETURN_ADDR, addr);
2974 #endif
2976 /* Then adjust to find the real return address. */
2977 #if defined (RETURN_ADDR_OFFSET)
2978 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2979 #endif
2981 return addr;
2984 /* Given an actual address in addr_tree, do any necessary encoding
2985 and return the value to be stored in the return address register or
2986 stack slot so the epilogue will return to that address. */
2988 rtx
2989 expand_builtin_frob_return_addr (addr_tree)
2990 tree addr_tree;
2992 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2994 #ifdef RETURN_ADDR_OFFSET
2995 addr = force_reg (Pmode, addr);
2996 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2997 #endif
2999 return addr;
3002 /* Set up the epilogue with the magic bits we'll need to return to the
3003 exception handler. */
3005 void
3006 expand_builtin_eh_return (stackadj_tree, handler_tree)
3007 tree stackadj_tree, handler_tree;
3009 rtx stackadj, handler;
3011 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3012 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3014 if (! cfun->eh->ehr_label)
3016 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3017 cfun->eh->ehr_handler = copy_to_reg (handler);
3018 cfun->eh->ehr_label = gen_label_rtx ();
3020 else
3022 if (stackadj != cfun->eh->ehr_stackadj)
3023 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3024 if (handler != cfun->eh->ehr_handler)
3025 emit_move_insn (cfun->eh->ehr_handler, handler);
3028 emit_jump (cfun->eh->ehr_label);
3031 void
3032 expand_eh_return ()
3034 rtx sa, ra, around_label;
3036 if (! cfun->eh->ehr_label)
3037 return;
3039 sa = EH_RETURN_STACKADJ_RTX;
3040 if (! sa)
3042 error ("__builtin_eh_return not supported on this target");
3043 return;
3046 current_function_calls_eh_return = 1;
3048 around_label = gen_label_rtx ();
3049 emit_move_insn (sa, const0_rtx);
3050 emit_jump (around_label);
3052 emit_label (cfun->eh->ehr_label);
3053 clobber_return_register ();
3055 #ifdef HAVE_eh_return
3056 if (HAVE_eh_return)
3057 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3058 else
3059 #endif
3061 rtx handler;
3063 ra = EH_RETURN_HANDLER_RTX;
3064 if (! ra)
3066 error ("__builtin_eh_return not supported on this target");
3067 ra = gen_reg_rtx (Pmode);
3070 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3072 handler = cfun->eh->ehr_handler;
3073 if (GET_MODE (ra) != Pmode)
3075 #ifdef POINTERS_EXTEND_UNSIGNED
3076 handler = convert_memory_address (GET_MODE (ra), handler);
3077 #else
3078 handler = convert_to_mode (GET_MODE (ra), handler, 0);
3079 #endif
3081 emit_move_insn (ra, handler);
3084 emit_label (around_label);
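/* The net effect on the epilogue path is (pseudo-code):

     EH_RETURN_STACKADJ_RTX = 0;         (normal return)
     goto around_label;
   ehr_label:                            (from __builtin_eh_return)
     clobber the return value registers;
     set the stack adjustment and handler, either through the
     eh_return pattern or through EH_RETURN_STACKADJ_RTX and
     EH_RETURN_HANDLER_RTX;
   around_label:
     ... normal epilogue ...  */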
3087 /* In the following functions, we represent entries in the action table
3088 as 1-based indices. Special cases are:
3090 0: null action record, non-null landing pad; implies cleanups
3091 -1: null action record, null landing pad; implies no action
3092 -2: no call-site entry; implies must_not_throw
3093 -3: we have yet to process outer regions
3095 Further, no special cases apply to the "next" field of the record.
3096 For next, 0 means end of list. */
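/* So, for example: a call covered only by cleanups ends up with
   action record index 0; a call outside any region with -1; and a
   call inside a must-not-throw region with -2.  (Illustrative; see
   collect_one_action_chain below for the precise rules.)  */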
3098 struct action_record
3100 int offset;
3101 int filter;
3102 int next;
3105 static int
3106 action_record_eq (pentry, pdata)
3107 const PTR pentry;
3108 const PTR pdata;
3110 const struct action_record *entry = (const struct action_record *) pentry;
3111 const struct action_record *data = (const struct action_record *) pdata;
3112 return entry->filter == data->filter && entry->next == data->next;
3115 static hashval_t
3116 action_record_hash (pentry)
3117 const PTR pentry;
3119 const struct action_record *entry = (const struct action_record *) pentry;
3120 return entry->next * 1009 + entry->filter;
3123 static int
3124 add_action_record (ar_hash, filter, next)
3125 htab_t ar_hash;
3126 int filter, next;
3128 struct action_record **slot, *new, tmp;
3130 tmp.filter = filter;
3131 tmp.next = next;
3132 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3134 if ((new = *slot) == NULL)
3136 new = (struct action_record *) xmalloc (sizeof (*new));
3137 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3138 new->filter = filter;
3139 new->next = next;
3140 *slot = new;
3142 /* The filter value goes in untouched. The link to the next
3143 record is a "self-relative" byte offset, or zero to indicate
3144 that there is no next record. So convert the absolute 1-based
3145 indices we've been carrying around into a displacement. */
3147 push_sleb128 (&cfun->eh->action_record_data, filter);
3148 if (next)
3149 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3150 push_sleb128 (&cfun->eh->action_record_data, next);
3153 return new->offset;
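/* Concretely (byte counts illustrative): if the first record was
   pushed at 1-based offset 1, and a later record's "next" field is
   about to be pushed at offset 4, then linking the later record to
   the first stores 1 - 4 = -3 as the sleb128 displacement.  */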
3156 static int
3157 collect_one_action_chain (ar_hash, region)
3158 htab_t ar_hash;
3159 struct eh_region *region;
3161 struct eh_region *c;
3162 int next;
3164 /* If we've reached the top of the region chain, then we have
3165 no actions, and require no landing pad. */
3166 if (region == NULL)
3167 return -1;
3169 switch (region->type)
3171 case ERT_CLEANUP:
3172 /* A cleanup adds a zero filter to the beginning of the chain, but
3173 there are special cases to look out for. If there are *only*
3174 cleanups along a path, then it compresses to a zero action.
3175 Further, if there are multiple cleanups along a path, we only
3176 need to represent one of them, as that is enough to trigger
3177 entry to the landing pad at runtime. */
3178 next = collect_one_action_chain (ar_hash, region->outer);
3179 if (next <= 0)
3180 return 0;
3181 for (c = region->outer; c ; c = c->outer)
3182 if (c->type == ERT_CLEANUP)
3183 return next;
3184 return add_action_record (ar_hash, 0, next);
3186 case ERT_TRY:
3187 /* Process the associated catch regions in reverse order.
3188 If there's a catch-all handler, then we don't need to
3189 search outer regions. Use a magic -3 value to record
3190 that we haven't done the outer search. */
3191 next = -3;
3192 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3194 if (c->u.catch.type == NULL)
3195 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3196 else
3198 if (next == -3)
3200 next = collect_one_action_chain (ar_hash, region->outer);
3202 /* If there is no next action, terminate the chain. */
3203 if (next == -1)
3204 next = 0;
3205 /* If all outer actions are cleanups or must_not_throw,
3206 we'll have no action record for it, since we want to
3207 encode those states in the call-site record directly.
3208 Add a cleanup action to the chain to catch these. */
3209 else if (next <= 0)
3210 next = add_action_record (ar_hash, 0, 0);
3212 next = add_action_record (ar_hash, c->u.catch.filter, next);
3215 return next;
3217 case ERT_ALLOWED_EXCEPTIONS:
3218 /* An exception specification adds its filter to the
3219 beginning of the chain. */
3220 next = collect_one_action_chain (ar_hash, region->outer);
3221 return add_action_record (ar_hash, region->u.allowed.filter,
3222 next < 0 ? 0 : next);
3224 case ERT_MUST_NOT_THROW:
3225 /* A must-not-throw region with no inner handlers or cleanups
3226 requires no call-site entry. Note that this differs from
3227 the no handler or cleanup case in that we do require an lsda
3228 to be generated. Return a magic -2 value to record this. */
3229 return -2;
3231 case ERT_CATCH:
3232 case ERT_THROW:
3233 /* CATCH regions are handled in TRY above. THROW regions are
3234 for optimization information only and produce no output. */
3235 return collect_one_action_chain (ar_hash, region->outer);
3237 default:
3238 abort ();
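/* A worked example (purely illustrative): for a call inside
   "try { ... } catch (A) { ... } catch (B) { ... }" with no enclosing
   regions, the search of the outer chain yields -1, which terminates
   the chain (next = 0).  Walking the catches in reverse order then
   produces a record (filter_B, 0) followed by (filter_A, ->B), and
   the offset of A's record is what gets returned.  */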
3242 static int
3243 add_call_site (landing_pad, action)
3244 rtx landing_pad;
3245 int action;
3247 struct call_site_record *data = cfun->eh->call_site_data;
3248 int used = cfun->eh->call_site_data_used;
3249 int size = cfun->eh->call_site_data_size;
3251 if (used >= size)
3253 size = (size ? size * 2 : 64);
3254 data = (struct call_site_record *)
3255 xrealloc (data, sizeof (*data) * size);
3256 cfun->eh->call_site_data = data;
3257 cfun->eh->call_site_data_size = size;
3260 data[used].landing_pad = landing_pad;
3261 data[used].action = action;
3263 cfun->eh->call_site_data_used = used + 1;
3265 return used + call_site_base;
3268 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3269 The new note numbers will not refer to region numbers, but
3270 instead to call site entries. */
3272 void
3273 convert_to_eh_region_ranges ()
3275 rtx insn, iter, note;
3276 htab_t ar_hash;
3277 int last_action = -3;
3278 rtx last_action_insn = NULL_RTX;
3279 rtx last_landing_pad = NULL_RTX;
3280 rtx first_no_action_insn = NULL_RTX;
3281 int call_site;
3283 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3284 return;
3286 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3288 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3290 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3291 if (INSN_P (iter))
3293 struct eh_region *region;
3294 int this_action;
3295 rtx this_landing_pad;
3297 insn = iter;
3298 if (GET_CODE (insn) == INSN
3299 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3300 insn = XVECEXP (PATTERN (insn), 0, 0);
3302 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3303 if (!note)
3305 if (! (GET_CODE (insn) == CALL_INSN
3306 || (flag_non_call_exceptions
3307 && may_trap_p (PATTERN (insn)))))
3308 continue;
3309 this_action = -1;
3310 region = NULL;
3312 else
3314 if (INTVAL (XEXP (note, 0)) <= 0)
3315 continue;
3316 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3317 this_action = collect_one_action_chain (ar_hash, region);
3320 /* Existence of catch handlers, or must-not-throw regions
3321 implies that an lsda is needed (even if empty). */
3322 if (this_action != -1)
3323 cfun->uses_eh_lsda = 1;
3325 /* Delay creation of region notes for no-action regions
3326 until we're sure that an lsda will be required. */
3327 else if (last_action == -3)
3329 first_no_action_insn = iter;
3330 last_action = -1;
3333 /* Cleanups and handlers may share action chains but not
3334 landing pads. Collect the landing pad for this region. */
3335 if (this_action >= 0)
3337 struct eh_region *o;
3338 for (o = region; ! o->landing_pad ; o = o->outer)
3339 continue;
3340 this_landing_pad = o->landing_pad;
3342 else
3343 this_landing_pad = NULL_RTX;
3345 /* Differing actions or landing pads implies a change in call-site
3346 info, which implies some EH_REGION note should be emitted. */
3347 if (last_action != this_action
3348 || last_landing_pad != this_landing_pad)
3350 /* If we'd not seen a previous action (-3) or the previous
3351 action was must-not-throw (-2), then we do not need an
3352 end note. */
3353 if (last_action >= -1)
3355 /* If we delayed the creation of the begin, do it now. */
3356 if (first_no_action_insn)
3358 call_site = add_call_site (NULL_RTX, 0);
3359 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3360 first_no_action_insn);
3361 NOTE_EH_HANDLER (note) = call_site;
3362 first_no_action_insn = NULL_RTX;
3365 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3366 last_action_insn);
3367 NOTE_EH_HANDLER (note) = call_site;
3370 /* If the new action is must-not-throw, then no region notes
3371 are created. */
3372 if (this_action >= -1)
3374 call_site = add_call_site (this_landing_pad,
3375 this_action < 0 ? 0 : this_action);
3376 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3377 NOTE_EH_HANDLER (note) = call_site;
3380 last_action = this_action;
3381 last_landing_pad = this_landing_pad;
3383 last_action_insn = iter;
3386 if (last_action >= -1 && ! first_no_action_insn)
3388 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3389 NOTE_EH_HANDLER (note) = call_site;
3392 htab_delete (ar_hash);
3396 static void
3397 push_uleb128 (data_area, value)
3398 varray_type *data_area;
3399 unsigned int value;
3403 unsigned char byte = value & 0x7f;
3404 value >>= 7;
3405 if (value)
3406 byte |= 0x80;
3407 VARRAY_PUSH_UCHAR (*data_area, byte);
3409 while (value);
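/* For example, push_uleb128 encodes the value 624485 (0x98765) as
   the three bytes 0xe5 0x8e 0x26: low seven bits first, with the
   high bit of each byte marking a continuation.  */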
3412 static void
3413 push_sleb128 (data_area, value)
3414 varray_type *data_area;
3415 int value;
3417 unsigned char byte;
3418 int more;
3422 byte = value & 0x7f;
3423 value >>= 7;
3424 more = ! ((value == 0 && (byte & 0x40) == 0)
3425 || (value == -1 && (byte & 0x40) != 0));
3426 if (more)
3427 byte |= 0x80;
3428 VARRAY_PUSH_UCHAR (*data_area, byte);
3430 while (more);
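/* For example, push_sleb128 encodes -123456 as 0xc0 0xbb 0x78; the
   loop stops once the remaining value is all sign bits and the sign
   bit of the last byte agrees (the "more" test above).  */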
3434 #ifndef HAVE_AS_LEB128
3435 static int
3436 dw2_size_of_call_site_table ()
3438 int n = cfun->eh->call_site_data_used;
3439 int size = n * (4 + 4 + 4);
3440 int i;
3442 for (i = 0; i < n; ++i)
3444 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3445 size += size_of_uleb128 (cs->action);
3448 return size;
3451 static int
3452 sjlj_size_of_call_site_table ()
3454 int n = cfun->eh->call_site_data_used;
3455 int size = 0;
3456 int i;
3458 for (i = 0; i < n; ++i)
3460 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3461 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3462 size += size_of_uleb128 (cs->action);
3465 return size;
3467 #endif
3469 static void
3470 dw2_output_call_site_table ()
3472 const char *function_start_lab
3473 = IDENTIFIER_POINTER (current_function_func_begin_label);
3474 int n = cfun->eh->call_site_data_used;
3475 int i;
3477 for (i = 0; i < n; ++i)
3479 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3480 char reg_start_lab[32];
3481 char reg_end_lab[32];
3482 char landing_pad_lab[32];
3484 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3485 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3487 if (cs->landing_pad)
3488 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3489 CODE_LABEL_NUMBER (cs->landing_pad));
3491 /* ??? Perhaps use insn length scaling if the assembler supports
3492 generic arithmetic. */
3493 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3494 data4 if the function is small enough. */
3495 #ifdef HAVE_AS_LEB128
3496 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3497 "region %d start", i);
3498 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3499 "length");
3500 if (cs->landing_pad)
3501 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3502 "landing pad");
3503 else
3504 dw2_asm_output_data_uleb128 (0, "landing pad");
3505 #else
3506 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3507 "region %d start", i);
3508 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3509 if (cs->landing_pad)
3510 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3511 "landing pad");
3512 else
3513 dw2_asm_output_data (4, 0, "landing pad");
3514 #endif
3515 dw2_asm_output_data_uleb128 (cs->action, "action");
3518 call_site_base += n;
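/* As a sketch (label names illustrative), one entry of the table
   emitted with a leb128-capable assembler might read:

     .uleb128 .LEHB0-.LFB1     region 0 start
     .uleb128 .LEHE0-.LEHB0    length
     .uleb128 .L7-.LFB1        landing pad
     .uleb128 0x3              action  */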
3521 static void
3522 sjlj_output_call_site_table ()
3524 int n = cfun->eh->call_site_data_used;
3525 int i;
3527 for (i = 0; i < n; ++i)
3529 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3531 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3532 "region %d landing pad", i);
3533 dw2_asm_output_data_uleb128 (cs->action, "action");
3536 call_site_base += n;
3539 void
3540 output_function_exception_table ()
3542 int tt_format, cs_format, lp_format, i, n;
3543 #ifdef HAVE_AS_LEB128
3544 char ttype_label[32];
3545 char cs_after_size_label[32];
3546 char cs_end_label[32];
3547 #else
3548 int call_site_len;
3549 #endif
3550 int have_tt_data;
3551 int funcdef_number;
3552 int tt_format_size;
3554 /* Not all functions need anything. */
3555 if (! cfun->uses_eh_lsda)
3556 return;
3558 funcdef_number = (USING_SJLJ_EXCEPTIONS
3559 ? sjlj_funcdef_number
3560 : current_funcdef_number);
3562 #ifdef IA64_UNWIND_INFO
3563 fputs ("\t.personality\t", asm_out_file);
3564 output_addr_const (asm_out_file, eh_personality_libfunc);
3565 fputs ("\n\t.handlerdata\n", asm_out_file);
3566 /* Note that varasm still thinks we're in the function's code section.
3567 The ".endp" directive that will immediately follow will take us back. */
3568 #else
3569 exception_section ();
3570 #endif
3572 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3573 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3575 /* Indicate the format of the @TType entries. */
3576 if (! have_tt_data)
3577 tt_format = DW_EH_PE_omit;
3578 else
3580 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3581 #ifdef HAVE_AS_LEB128
3582 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3583 #endif
3584 tt_format_size = size_of_encoded_value (tt_format);
3586 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3589 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3591 /* The LSDA header. */
3593 /* Indicate the format of the landing pad start pointer. An omitted
3594 field implies @LPStart == @Start. */
3595 /* Currently we always put @LPStart == @Start. This field would
3596 be most useful in moving the landing pads completely out of
3597 line to another section, but it could also be used to minimize
3598 the size of uleb128 landing pad offsets. */
3599 lp_format = DW_EH_PE_omit;
3600 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3601 eh_data_format_name (lp_format));
3603 /* @LPStart pointer would go here. */
3605 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3606 eh_data_format_name (tt_format));
3608 #ifndef HAVE_AS_LEB128
3609 if (USING_SJLJ_EXCEPTIONS)
3610 call_site_len = sjlj_size_of_call_site_table ();
3611 else
3612 call_site_len = dw2_size_of_call_site_table ();
3613 #endif
3615 /* A pc-relative 4-byte displacement to the @TType data. */
3616 if (have_tt_data)
3618 #ifdef HAVE_AS_LEB128
3619 char ttype_after_disp_label[32];
3620 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3621 funcdef_number);
3622 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3623 "@TType base offset");
3624 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3625 #else
3626 /* Ug. Alignment complicates things. */
3627 unsigned int before_disp, after_disp, last_disp, disp;
3629 before_disp = 1 + 1;
3630 after_disp = (1 + size_of_uleb128 (call_site_len)
3631 + call_site_len
3632 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3633 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3634 * tt_format_size));
3636 disp = after_disp;
3639 unsigned int disp_size, pad;
3641 last_disp = disp;
3642 disp_size = size_of_uleb128 (disp);
3643 pad = before_disp + disp_size + after_disp;
3644 if (pad % tt_format_size)
3645 pad = tt_format_size - (pad % tt_format_size);
3646 else
3647 pad = 0;
3648 disp = after_disp + pad;
3650 while (disp != last_disp);
3652 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3653 #endif
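/* The loop above exists because the byte size of the uleb128
   encoding of DISP feeds back into the padding computation: if
   padding pushes DISP across a 7-bit boundary (127 to 128, say),
   the displacement needs an extra byte, which changes the padding
   again.  Iterating until DISP stops changing finds a consistent
   fixed point.  */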
3656 /* Indicate the format of the call-site offsets. */
3657 #ifdef HAVE_AS_LEB128
3658 cs_format = DW_EH_PE_uleb128;
3659 #else
3660 cs_format = DW_EH_PE_udata4;
3661 #endif
3662 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3663 eh_data_format_name (cs_format));
3665 #ifdef HAVE_AS_LEB128
3666 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3667 funcdef_number);
3668 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3669 funcdef_number);
3670 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3671 "Call-site table length");
3672 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3673 if (USING_SJLJ_EXCEPTIONS)
3674 sjlj_output_call_site_table ();
3675 else
3676 dw2_output_call_site_table ();
3677 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3678 #else
3679 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3680 if (USING_SJLJ_EXCEPTIONS)
3681 sjlj_output_call_site_table ();
3682 else
3683 dw2_output_call_site_table ();
3684 #endif
3686 /* ??? Decode and interpret the data for flag_debug_asm. */
3687 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3688 for (i = 0; i < n; ++i)
3689 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3690 (i ? NULL : "Action record table"));
3692 if (have_tt_data)
3693 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3695 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3696 while (i-- > 0)
3698 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3700 if (type == NULL_TREE)
3701 type = integer_zero_node;
3702 else
3703 type = lookup_type_for_runtime (type);
3705 dw2_asm_output_encoded_addr_rtx (tt_format,
3706 expand_expr (type, NULL_RTX, VOIDmode,
3707 EXPAND_INITIALIZER),
3708 NULL);
3711 #ifdef HAVE_AS_LEB128
3712 if (have_tt_data)
3713 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3714 #endif
3716 /* ??? Decode and interpret the data for flag_debug_asm. */
3717 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3718 for (i = 0; i < n; ++i)
3719 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3720 (i ? NULL : "Exception specification table"));
3722 function_section (current_function_decl);
3724 if (USING_SJLJ_EXCEPTIONS)
3725 sjlj_funcdef_number += 1;