gcc/except.c
/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
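/* A minimal usage sketch (illustrative only, not part of the original
   documentation): a front end lowering a C++-style construct

       try { BODY } catch (T) { HANDLER }

   would drive the expansion entry points below roughly as

       expand_eh_region_start ();
       ... expand BODY ...
       expand_start_all_catch ();
       expand_start_catch (T);
       ... expand HANDLER ...
       expand_end_catch ();
       expand_end_all_catch ();

   while cleanup-only regions pair expand_eh_region_start with
   expand_eh_region_end_cleanup instead of the catch sequence.  */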
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static unsigned int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};
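/* The OUTER/INNER/NEXT_PEER fields above form a conventional
   first-child/next-sibling tree.  Illustrative shape:

       region (outer)
         |
         inner --> next_peer --> next_peer
                     |
                     inner --> ...

   Once collect_eh_region_array has run, every region is also reachable
   through cfun->eh->region_array, indexed by region_number.  */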
/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static void mark_eh_region PARAMS ((struct eh_region *));

static int t2r_eq PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash PARAMS ((const PTR));
static int t2r_mark_1 PARAMS ((PTR *, PTR));
static void t2r_mark PARAMS ((PTR));
static void add_type_for_runtime PARAMS ((tree));
static tree lookup_type_for_runtime PARAMS ((tree));

static struct eh_region *expand_eh_region_end PARAMS ((void));

static rtx get_exception_filter PARAMS ((struct function *));

static void collect_eh_region_array PARAMS ((void));
static void resolve_fixup_regions PARAMS ((void));
static void remove_fixup_regions PARAMS ((void));
static void remove_unreachable_regions PARAMS ((rtx));
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
							struct inline_remap *));
static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
					   struct eh_region **));
static int ttypes_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash PARAMS ((const PTR));
static int ehspec_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash PARAMS ((const PTR));
static int add_ttypes_entry PARAMS ((htab_t, tree));
static int add_ehspec_entry PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values PARAMS ((void));
static void build_post_landing_pads PARAMS ((void));
static void connect_post_landing_pads PARAMS ((void));
static void dw2_build_landing_pads PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter PARAMS ((rtx));
static void sjlj_emit_function_exit PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads PARAMS ((void));

static void remove_exception_handler_label PARAMS ((rtx));
static void remove_eh_handler PARAMS ((struct eh_region *));

struct reachable_info;
/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));
static int action_record_eq PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash PARAMS ((const PTR));
static int add_action_record PARAMS ((htab_t, int, int));
static int collect_one_action_chain PARAMS ((htab_t, struct eh_region *));
static int add_call_site PARAMS ((rtx, int));

static void push_uleb128 PARAMS ((varray_type *, unsigned int));
static void push_sleb128 PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table PARAMS ((void));
static int sjlj_size_of_call_site_table PARAMS ((void));
#endif
static void dw2_output_call_site_table PARAMS ((void));
static void sjlj_output_call_site_table PARAMS ((void));
/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

static int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh ()
{
  ggc_add_rtx_root (&exception_handler_labels, 1);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
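/* For reference, a sketch (assuming the default 4-word data array;
   the jbuf size is target-dependent, as computed above) of the layout
   init_eh builds, which is meant to mirror unwind-sjlj.c:

       struct SjLj_Function_Context
       {
	 struct SjLj_Function_Context *prev;
	 int call_site;
	 _Unwind_Word data[4];
	 _Unwind_Personality_Fn personality;
	 void *lsda;
	 void *jbuf[];        -- variably sized, see above
       };
*/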
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
472 /* Mark EH for GC. */
474 static void
475 mark_eh_region (region)
476 struct eh_region *region;
478 if (! region)
479 return;
481 switch (region->type)
483 case ERT_UNKNOWN:
484 /* This can happen if a nested function is inside the body of a region
485 and we do a GC as part of processing it. */
486 break;
487 case ERT_CLEANUP:
488 ggc_mark_tree (region->u.cleanup.exp);
489 break;
490 case ERT_TRY:
491 ggc_mark_rtx (region->u.try.continue_label);
492 break;
493 case ERT_CATCH:
494 ggc_mark_tree (region->u.catch.type_list);
495 ggc_mark_tree (region->u.catch.filter_list);
496 break;
497 case ERT_ALLOWED_EXCEPTIONS:
498 ggc_mark_tree (region->u.allowed.type_list);
499 break;
500 case ERT_MUST_NOT_THROW:
501 break;
502 case ERT_THROW:
503 ggc_mark_tree (region->u.throw.type);
504 break;
505 case ERT_FIXUP:
506 ggc_mark_tree (region->u.fixup.cleanup_exp);
507 break;
508 default:
509 abort ();
512 ggc_mark_rtx (region->label);
513 ggc_mark_rtx (region->resume);
514 ggc_mark_rtx (region->landing_pad);
515 ggc_mark_rtx (region->post_landing_pad);
void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}
void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	  else
	    {
	      do {
		next = r->outer;
		free (r);
		r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
  exception_handler_labels = NULL;
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with several exception types,
   which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the correct
     region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This is because expand_cleanups is currently generating a sequence
   that it will insert somewhere else.  We collect the proper notion of
   "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (Pmode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}
/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}

/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.

   ??? The only difference between this purpose and that of
   expand_decl_cleanup is that in this case, we only want the cleanup to
   run if an exception is thrown.  This should also be handled using
   binding levels.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}

/* End all the pending exception regions on protect_list.  */

void
end_protect_partials ()
{
  tree t;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}
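/* Illustrative pairing (not from the original source): a front end
   protecting N partial object constructions would emit

       begin_protect_partials ();
       add_partial_entry (cleanup_1);
       ...
       add_partial_entry (cleanup_N);
       end_protect_partials ();

   so that each cleanup_i runs only if an exception is thrown out of
   the code expanded between its add_partial_entry and the final
   end_protect_partials.  */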
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (insns)
     rtx insns;
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof (int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof (bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}
void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}
void
find_exception_handler_labels ()
{
  rtx list = NULL_RTX;
  int i;

  free_EXPR_LIST_list (&exception_handler_labels);

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	list = alloc_EXPR_LIST (0, lab, list);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    list = alloc_EXPR_LIST (0, return_label, list);

  exception_handler_labels = list;
}
static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static int
t2r_mark_1 (slot, data)
     PTR *slot;
     PTR data ATTRIBUTE_UNUSED;
{
  tree contents = (tree) *slot;
  ggc_mark_tree (contents);
  return 1;
}

static void
t2r_mark (addr)
     PTR addr;
{
  htab_traverse (*(htab_t *) addr, t2r_mark_1, NULL);
}
static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}
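/* Note on the hash above: combining each TYPE_HASH via
   (h << 5) + (h >> 27) approximates a 5-bit rotate on a 32-bit
   hashval_t, so list order affects the hash.  That is consistent with
   ehspec_filter_eq, which also distinguishes differently ordered
   lists (see the ??? comment above about canonical ordering).  */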
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
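/* Worked example (hypothetical filter values): if types A and B were
   assigned ttypes filters 1 and 2, an exception specification list
   (A, B) appended to an empty ehspec_data buffer is encoded as the
   uleb128 bytes { 1, 2, 0 } and receives the filter value -1, i.e.
   the negated 1-based byte index of its first byte.  */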
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	     all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* ??? _Unwind_ForcedUnwind wants no match here.  */
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      emit_insns_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
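/* E.g. (illustrative): a RESX inside a cleanup nested in a try whose
   post-landing pad exists becomes a direct jump to that pad, while a
   RESX with no enclosing post-landing pad in this function becomes a
   call to _Unwind_Resume via unwind_resume_libfunc.  */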
static void
dw2_build_landing_pads ()
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insns_before (seq, region->post_landing_pad);
    }
}
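/* To summarize the distinction maintained above: LANDING_PAD is the
   address the unwinder transfers to (receiver pattern plus moving the
   exception pointer and filter value out of the EH return data
   registers), while POST_LANDING_PAD is where the in-function dispatch
   code built by build_post_landing_pads begins.  */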
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (lp_info)
     struct sjlj_lp_info *lp_info;
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
2111 static void
2112 sjlj_assign_call_site_values (dispatch_label, lp_info)
2113 rtx dispatch_label;
2114 struct sjlj_lp_info *lp_info;
2116 htab_t ar_hash;
2117 int i, index;
2119 /* First task: build the action table. */
2121 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2122 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2124 for (i = cfun->eh->last_region_number; i > 0; --i)
2125 if (lp_info[i].directly_reachable)
2127 struct eh_region *r = cfun->eh->region_array[i];
2128 r->landing_pad = dispatch_label;
2129 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2130 if (lp_info[i].action_index != -1)
2131 cfun->uses_eh_lsda = 1;
2134 htab_delete (ar_hash);
2136 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2137 landing pad label for the region. For sjlj though, there is one
2138 common landing pad from which we dispatch to the post-landing pads.
2140 A region receives a dispatch index if it is directly reachable
2141 and requires in-function processing. Regions that share post-landing
2142 pads may share dispatch indices. */
2143 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2144 (see build_post_landing_pads) so we don't bother checking for it. */
2146 index = 0;
2147 for (i = cfun->eh->last_region_number; i > 0; --i)
2148 if (lp_info[i].directly_reachable)
2149 lp_info[i].dispatch_index = index++;
2151 /* Finally: assign call-site values. In dwarf2 terms, this would be
2152 the region number assigned by convert_to_eh_region_ranges, but
2153 handles no-action and must-not-throw differently. */
2155 call_site_base = 1;
2156 for (i = cfun->eh->last_region_number; i > 0; --i)
2157 if (lp_info[i].directly_reachable)
2159 int action = lp_info[i].action_index;
2161 /* Map must-not-throw to otherwise unused call-site index 0. */
2162 if (action == -2)
2163 index = 0;
2164 /* Map no-action to otherwise unused call-site index -1. */
2165 else if (action == -1)
2166 index = -1;
2167 /* Otherwise, look it up in the table. */
2168 else
2169 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2171 lp_info[i].call_site_index = index;
2175 static void
2176 sjlj_mark_call_sites (lp_info)
2177 struct sjlj_lp_info *lp_info;
2179 int last_call_site = -2;
2180 rtx insn, mem;
2182 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2184 struct eh_region *region;
2185 int this_call_site;
2186 rtx note, before, p;
2188 /* Reset value tracking at extended basic block boundaries. */
2189 if (GET_CODE (insn) == CODE_LABEL)
2190 last_call_site = -2;
2192 if (! INSN_P (insn))
2193 continue;
2195 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2196 if (!note)
2198 /* Calls (and trapping insns) without notes are outside any
2199 exception handling region in this function. Mark them as
2200 no action. */
2201 if (GET_CODE (insn) == CALL_INSN
2202 || (flag_non_call_exceptions
2203 && may_trap_p (PATTERN (insn))))
2204 this_call_site = -1;
2205 else
2206 continue;
2208 else
2210 /* Calls that are known not to throw need not be marked. */
2211 if (INTVAL (XEXP (note, 0)) <= 0)
2212 continue;
2214 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2215 this_call_site = lp_info[region->region_number].call_site_index;
2218 if (this_call_site == last_call_site)
2219 continue;
2221 /* Don't separate a call from its argument loads. */
2222 before = insn;
2223 if (GET_CODE (insn) == CALL_INSN)
2224 before = find_first_parameter_load (insn, NULL_RTX);
2226 start_sequence ();
2227 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2228 sjlj_fc_call_site_ofs);
2229 emit_move_insn (mem, GEN_INT (this_call_site));
2230 p = get_insns ();
2231 end_sequence ();
2233 emit_insns_before (p, before);
2234 last_call_site = this_call_site;
2238 /* Construct the SjLj_Function_Context. */
2240 static void
2241 sjlj_emit_function_enter (dispatch_label)
2242 rtx dispatch_label;
2244 rtx fn_begin, fc, mem, seq;
2246 fc = cfun->eh->sjlj_fc;
2248 start_sequence ();
2250 /* We're storing this libcall's address into memory instead of
2251 calling it directly. Thus, we must call assemble_external_libcall
2252 here, as we cannot depend on emit_library_call to do it for us. */
2253 assemble_external_libcall (eh_personality_libfunc);
2254 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2255 emit_move_insn (mem, eh_personality_libfunc);
2257 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2258 if (cfun->uses_eh_lsda)
2260 char buf[20];
2261 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2262 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2264 else
2265 emit_move_insn (mem, const0_rtx);
2267 #ifdef DONT_USE_BUILTIN_SETJMP
2269 rtx x, note;
2270 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2271 TYPE_MODE (integer_type_node), 1,
2272 plus_constant (XEXP (fc, 0),
2273 sjlj_fc_jbuf_ofs), Pmode);
2275 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2276 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2278 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2279 TYPE_MODE (integer_type_node), 0, dispatch_label);
2281 #else
2282 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2283 dispatch_label);
2284 #endif
2286 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2287 1, XEXP (fc, 0), Pmode);
2289 seq = get_insns ();
2290 end_sequence ();
2292 /* ??? Instead of doing this at the beginning of the function,
2293 do this in a block that is at loop level 0 and dominates all
2294 can_throw_internal instructions. */
2296 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2297 if (GET_CODE (fn_begin) == NOTE
2298 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2299 break;
2300 emit_insns_after (seq, fn_begin);
2303 /* Call back from expand_function_end to know where we should put
2304 the call to unwind_sjlj_unregister_libfunc if needed. */
2306 void
2307 sjlj_emit_function_exit_after (after)
2308 rtx after;
2310 cfun->eh->sjlj_exit_after = after;
2313 static void
2314 sjlj_emit_function_exit ()
2316 rtx seq;
2318 start_sequence ();
2320 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2321 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2323 seq = get_insns ();
2324 end_sequence ();
2326 /* ??? Really this can be done in any block at loop level 0 that
2327 post-dominates all can_throw_internal instructions. This is
2328 the last possible moment. */
2330 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2333 static void
2334 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2335 rtx dispatch_label;
2336 struct sjlj_lp_info *lp_info;
2338 int i, first_reachable;
2339 rtx mem, dispatch, seq, fc;
2341 fc = cfun->eh->sjlj_fc;
2343 start_sequence ();
2345 emit_label (dispatch_label);
2347 #ifndef DONT_USE_BUILTIN_SETJMP
2348 expand_builtin_setjmp_receiver (dispatch_label);
2349 #endif
2351 /* Load up dispatch index, exc_ptr and filter values from the
2352 function context. */
2353 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2354 sjlj_fc_call_site_ofs);
2355 dispatch = copy_to_reg (mem);
2357 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2358 if (word_mode != Pmode)
2360 #ifdef POINTERS_EXTEND_UNSIGNED
2361 mem = convert_memory_address (Pmode, mem);
2362 #else
2363 mem = convert_to_mode (Pmode, mem, 0);
2364 #endif
2366 emit_move_insn (cfun->eh->exc_ptr, mem);
2368 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2369 emit_move_insn (cfun->eh->filter, mem);
2371 /* Jump to one of the directly reachable regions. */
2372 /* ??? This really ought to be using a switch statement. */
2374 first_reachable = 0;
2375 for (i = cfun->eh->last_region_number; i > 0; --i)
2377 if (! lp_info[i].directly_reachable)
2378 continue;
2380 if (! first_reachable)
2382 first_reachable = i;
2383 continue;
2386 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2387 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2388 cfun->eh->region_array[i]->post_landing_pad);
2391 seq = get_insns ();
2392 end_sequence ();
2394 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2395 ->post_landing_pad));
2398 static void
2399 sjlj_build_landing_pads ()
2401 struct sjlj_lp_info *lp_info;
2403 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2404 sizeof (struct sjlj_lp_info));
2406 if (sjlj_find_directly_reachable_regions (lp_info))
2408 rtx dispatch_label = gen_label_rtx ();
2410 cfun->eh->sjlj_fc
2411 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2412 int_size_in_bytes (sjlj_fc_type_node),
2413 TYPE_ALIGN (sjlj_fc_type_node));
2415 sjlj_assign_call_site_values (dispatch_label, lp_info);
2416 sjlj_mark_call_sites (lp_info);
2418 sjlj_emit_function_enter (dispatch_label);
2419 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2420 sjlj_emit_function_exit ();
2423 free (lp_info);
2426 void
2427 finish_eh_generation ()
2429 /* Nothing to do if no regions created. */
2430 if (cfun->eh->region_tree == NULL)
2431 return;
2433 /* The object here is to provide find_basic_blocks with detailed
2434 information (via reachable_handlers) on how exception control
2435 flows within the function. In this first pass, we can include
2436 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2437 regions, and hope that it will be useful in deleting unreachable
2438 handlers. Subsequently, we will generate landing pads which will
2439 connect many of the handlers, and then type information will not
2440 be effective. Still, this is a win over previous implementations. */
2442 rebuild_jump_labels (get_insns ());
2443 find_basic_blocks (get_insns (), max_reg_num (), 0);
2444 cleanup_cfg (CLEANUP_PRE_LOOP);
2446 /* These registers are used by the landing pads. Make sure they
2447 have been generated. */
2448 get_exception_pointer (cfun);
2449 get_exception_filter (cfun);
2451 /* Construct the landing pads. */
2453 assign_filter_values ();
2454 build_post_landing_pads ();
2455 connect_post_landing_pads ();
2456 if (USING_SJLJ_EXCEPTIONS)
2457 sjlj_build_landing_pads ();
2458 else
2459 dw2_build_landing_pads ();
2461 cfun->eh->built_landing_pads = 1;
2463 /* We've totally changed the CFG. Start over. */
2464 find_exception_handler_labels ();
2465 rebuild_jump_labels (get_insns ());
2466 find_basic_blocks (get_insns (), max_reg_num (), 0);
2467 cleanup_cfg (CLEANUP_PRE_LOOP);
2470 /* This section handles removing dead code for flow. */
2472 /* Remove LABEL from the exception_handler_labels list. */
2474 static void
2475 remove_exception_handler_label (label)
2476 rtx label;
2478 rtx *pl, l;
2480 /* If exception_handler_labels was not built yet,
2481 there is nothing to do. */
2482 if (exception_handler_labels == NULL)
2483 return;
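/* Note that the loop below assumes LABEL really is on the list; if it
   is not, the walk runs off the end of the list.  */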
2485 for (pl = &exception_handler_labels, l = *pl;
2486 XEXP (l, 0) != label;
2487 pl = &XEXP (l, 1), l = *pl)
2488 continue;
2490 *pl = XEXP (l, 1);
2491 free_EXPR_LIST_node (l);
2494 /* Splice REGION from the region tree etc. */
2496 static void
2497 remove_eh_handler (region)
2498 struct eh_region *region;
2500 struct eh_region **pp, *p;
2501 rtx lab;
2502 int i;
2504 /* For the benefit of efficiently handling REG_EH_REGION notes,
2505 replace this region in the region array with its containing
2506 region. Note that previous region deletions may result in
2507 multiple copies of this region in the array, so we have to
2508 search the whole thing. */
2509 for (i = cfun->eh->last_region_number; i > 0; --i)
2510 if (cfun->eh->region_array[i] == region)
2511 cfun->eh->region_array[i] = region->outer;
2513 if (cfun->eh->built_landing_pads)
2514 lab = region->landing_pad;
2515 else
2516 lab = region->label;
2517 if (lab)
2518 remove_exception_handler_label (lab);
2520 if (region->outer)
2521 pp = &region->outer->inner;
2522 else
2523 pp = &cfun->eh->region_tree;
2524 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2525 continue;
2527 if (region->inner)
2529 for (p = region->inner; p->next_peer ; p = p->next_peer)
2530 p->outer = region->outer;
2531 p->next_peer = region->next_peer;
2532 p->outer = region->outer;
2533 *pp = region->inner;
2535 else
2536 *pp = region->next_peer;
2538 if (region->type == ERT_CATCH)
2540 struct eh_region *try, *next, *prev;
2542 for (try = region->next_peer;
2543 try->type == ERT_CATCH;
2544 try = try->next_peer)
2545 continue;
2546 if (try->type != ERT_TRY)
2547 abort ();
2549 next = region->u.catch.next_catch;
2550 prev = region->u.catch.prev_catch;
2552 if (next)
2553 next->u.catch.prev_catch = prev;
2554 else
2555 try->u.try.last_catch = prev;
2556 if (prev)
2557 prev->u.catch.next_catch = next;
2558 else
2560 try->u.try.catch = next;
2561 if (! next)
2562 remove_eh_handler (try);
2566 free (region);
2569 /* LABEL heads a basic block that is about to be deleted. If this
2570 label corresponds to an exception region, we may be able to
2571 delete the region. */
2573 void
2574 maybe_remove_eh_handler (label)
2575 rtx label;
2577 int i;
2579 /* ??? After generating landing pads, it's not so simple to determine
2580 if the region data is completely unused. One must examine the
2581 landing pad and the post landing pad, and whether an inner try block
2582 is referencing the catch handlers directly. */
2583 if (cfun->eh->built_landing_pads)
2584 return;
2586 for (i = cfun->eh->last_region_number; i > 0; --i)
2588 struct eh_region *region = cfun->eh->region_array[i];
2589 if (region && region->label == label)
2591 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2592 because there is no path to the fallback call to terminate.
2593 But the region continues to affect call-site data until there
2594 are no more contained calls, which we don't see here. */
2595 if (region->type == ERT_MUST_NOT_THROW)
2597 remove_exception_handler_label (region->label);
2598 region->label = NULL_RTX;
2600 else
2601 remove_eh_handler (region);
2602 break;
2608 /* This section describes CFG exception edges for flow. */
2610 /* For communicating between calls to reachable_next_level. */
2611 struct reachable_info
2613 tree types_caught;
2614 tree types_allowed;
2615 rtx handlers;
2618 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2619 base class of TYPE, is in HANDLED. */
2621 static int
2622 check_handled (handled, type)
2623 tree handled, type;
2625 tree t;
2627 /* We can check for exact matches without front-end help. */
2628 if (! lang_eh_type_covers)
2630 for (t = handled; t ; t = TREE_CHAIN (t))
2631 if (TREE_VALUE (t) == type)
2632 return 1;
2634 else
2636 for (t = handled; t ; t = TREE_CHAIN (t))
2637 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2638 return 1;
2641 return 0;
2644 /* A subroutine of reachable_next_level. If we are collecting a list
2645 of handlers, add one. After landing pad generation, reference
2646 it instead of the handlers themselves. Further, the handlers are
2647 all wired together, so by referencing one, we've got them all.
2648 Before landing pad generation we reference each handler individually.
2650 LP_REGION contains the landing pad; REGION is the handler. */
2652 static void
2653 add_reachable_handler (info, lp_region, region)
2654 struct reachable_info *info;
2655 struct eh_region *lp_region;
2656 struct eh_region *region;
2658 if (! info)
2659 return;
2661 if (cfun->eh->built_landing_pads)
2663 if (! info->handlers)
2664 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2666 else
2667 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2670 /* Process one level of exception regions for reachability.
2671 If TYPE_THROWN is non-null, then it is the *exact* type being
2672 propagated. If INFO is non-null, then collect handler labels
2673 and caught/allowed type information between invocations. */
2675 static enum reachable_code
2676 reachable_next_level (region, type_thrown, info)
2677 struct eh_region *region;
2678 tree type_thrown;
2679 struct reachable_info *info;
2681 switch (region->type)
2683 case ERT_CLEANUP:
2684 /* Before landing-pad generation, we model control flow
2685 directly to the individual handlers. In this way we can
2686 see that catch handler types may shadow one another. */
2687 add_reachable_handler (info, region, region);
2688 return RNL_MAYBE_CAUGHT;
2690 case ERT_TRY:
2692 struct eh_region *c;
2693 enum reachable_code ret = RNL_NOT_CAUGHT;
2695 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2697 /* A catch-all handler ends the search. */
2698 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2699 to be run as well. */
2700 if (c->u.catch.type_list == NULL)
2702 add_reachable_handler (info, region, c);
2703 return RNL_CAUGHT;
2706 if (type_thrown)
2708 /* If we have at least one type match, end the search. */
2709 tree tp_node = c->u.catch.type_list;
2711 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2713 tree type = TREE_VALUE (tp_node);
2715 if (type == type_thrown
2716 || (lang_eh_type_covers
2717 && (*lang_eh_type_covers) (type, type_thrown)))
2719 add_reachable_handler (info, region, c);
2720 return RNL_CAUGHT;
2724 /* If we have definitive information of a match failure,
2725 the catch won't trigger. */
2726 if (lang_eh_type_covers)
2727 return RNL_NOT_CAUGHT;
2730 /* At this point, we either don't know what type is thrown or
2731 don't have front-end assistance to help deciding if it is
2732 covered by one of the types in the list for this region.
2734 We'd then like to add this region to the list of reachable
2735 handlers since it is indeed potentially reachable based on the
2736 information we have.
2738 Actually, this handler is for sure not reachable if all the
2739 types it matches have already been caught. That is, it is only
2740 potentially reachable if at least one of the types it catches
2741 has not been previously caught. */
2743 if (! info)
2744 ret = RNL_MAYBE_CAUGHT;
2745 else
2747 tree tp_node = c->u.catch.type_list;
2748 bool maybe_reachable = false;
2750 /* Compute the potential reachability of this handler and
2751 update the list of types caught at the same time. */
2752 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2754 tree type = TREE_VALUE (tp_node);
2756 if (! check_handled (info->types_caught, type))
2758 info->types_caught
2759 = tree_cons (NULL, type, info->types_caught);
2761 maybe_reachable = true;
2765 if (maybe_reachable)
2767 add_reachable_handler (info, region, c);
2769 /* ??? If the catch type is a base class of every allowed
2770 type, then we know we can stop the search. */
2771 ret = RNL_MAYBE_CAUGHT;
2776 return ret;
2779 case ERT_ALLOWED_EXCEPTIONS:
2780 /* An empty list of types definitely ends the search. */
2781 if (region->u.allowed.type_list == NULL_TREE)
2783 add_reachable_handler (info, region, region);
2784 return RNL_CAUGHT;
2787 /* Collect a list of lists of allowed types for use in detecting
2788 when a catch may be transformed into a catch-all. */
2789 if (info)
2790 info->types_allowed = tree_cons (NULL_TREE,
2791 region->u.allowed.type_list,
2792 info->types_allowed);
2794 /* If we have definitive information about the type hierarchy,
2795 then we can tell if the thrown type will pass through the
2796 filter. */
2797 if (type_thrown && lang_eh_type_covers)
2799 if (check_handled (region->u.allowed.type_list, type_thrown))
2800 return RNL_NOT_CAUGHT;
2801 else
2803 add_reachable_handler (info, region, region);
2804 return RNL_CAUGHT;
2808 add_reachable_handler (info, region, region);
2809 return RNL_MAYBE_CAUGHT;
2811 case ERT_CATCH:
2812 /* Catch regions are handled by their controlling try region. */
2813 return RNL_NOT_CAUGHT;
2815 case ERT_MUST_NOT_THROW:
2816 /* Here we end our search, since no exceptions may propagate.
2817 If we've previously touched down at some landing pad, then the
2818 explicit function call we generated may be used. Otherwise
2819 the call is made by the runtime. */
2820 if (info && info->handlers)
2822 add_reachable_handler (info, region, region);
2823 return RNL_CAUGHT;
2825 else
2826 return RNL_BLOCKED;
2828 case ERT_THROW:
2829 case ERT_FIXUP:
2830 case ERT_UNKNOWN:
2831 /* Shouldn't see these here. */
2832 break;
2835 abort ();
2838 /* Retrieve a list of labels of exception handlers which can be
2839 reached by a given insn. */
2841 rtx
2842 reachable_handlers (insn)
2843 rtx insn;
2845 struct reachable_info info;
2846 struct eh_region *region;
2847 tree type_thrown;
2848 int region_number;
2850 if (GET_CODE (insn) == JUMP_INSN
2851 && GET_CODE (PATTERN (insn)) == RESX)
2852 region_number = XINT (PATTERN (insn), 0);
2853 else
2855 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2856 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2857 return NULL;
2858 region_number = INTVAL (XEXP (note, 0));
2861 memset (&info, 0, sizeof (info));
2863 region = cfun->eh->region_array[region_number];
2865 type_thrown = NULL_TREE;
2866 if (GET_CODE (insn) == JUMP_INSN
2867 && GET_CODE (PATTERN (insn)) == RESX)
2869 /* A RESX leaves a region instead of entering it. Thus the
2870 region itself may have been deleted out from under us. */
2871 if (region == NULL)
2872 return NULL;
2873 region = region->outer;
2875 else if (region->type == ERT_THROW)
2877 type_thrown = region->u.throw.type;
2878 region = region->outer;
2881 for (; region; region = region->outer)
2882 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2883 break;
2885 return info.handlers;
2888 /* Determine if the given INSN can throw an exception that is caught
2889 within the function. */
2891 bool
2892 can_throw_internal (insn)
2893 rtx insn;
2895 struct eh_region *region;
2896 tree type_thrown;
2897 rtx note;
2899 if (! INSN_P (insn))
2900 return false;
2902 if (GET_CODE (insn) == INSN
2903 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2904 insn = XVECEXP (PATTERN (insn), 0, 0);
2906 if (GET_CODE (insn) == CALL_INSN
2907 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2909 int i;
2910 for (i = 0; i < 3; ++i)
2912 rtx sub = XEXP (PATTERN (insn), i);
2913 for (; sub ; sub = NEXT_INSN (sub))
2914 if (can_throw_internal (sub))
2915 return true;
2917 return false;
2920 /* Every insn that might throw has an EH_REGION note. */
2921 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2922 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2923 return false;
2925 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2927 type_thrown = NULL_TREE;
2928 if (region->type == ERT_THROW)
2930 type_thrown = region->u.throw.type;
2931 region = region->outer;
2934 /* If this exception is ignored by each and every containing region,
2935 then control passes straight out. The runtime may handle some
2936 regions, which also do not require processing internally. */
2937 for (; region; region = region->outer)
2939 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2940 if (how == RNL_BLOCKED)
2941 return false;
2942 if (how != RNL_NOT_CAUGHT)
2943 return true;
2946 return false;
2949 /* Determine if the given INSN can throw an exception that is
2950 visible outside the function. */
2952 bool
2953 can_throw_external (insn)
2954 rtx insn;
2956 struct eh_region *region;
2957 tree type_thrown;
2958 rtx note;
2960 if (! INSN_P (insn))
2961 return false;
2963 if (GET_CODE (insn) == INSN
2964 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2965 insn = XVECEXP (PATTERN (insn), 0, 0);
2967 if (GET_CODE (insn) == CALL_INSN
2968 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2970 int i;
2971 for (i = 0; i < 3; ++i)
2973 rtx sub = XEXP (PATTERN (insn), i);
2974 for (; sub ; sub = NEXT_INSN (sub))
2975 if (can_throw_external (sub))
2976 return true;
2978 return false;
2981 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2982 if (!note)
2984 /* Calls (and trapping insns) without notes are outside any
2985 exception handling region in this function. We have to
2986 assume it might throw. Given that the front end and middle
2987 ends mark known NOTHROW functions, this isn't so wildly
2988 inaccurate. */
2989 return (GET_CODE (insn) == CALL_INSN
2990 || (flag_non_call_exceptions
2991 && may_trap_p (PATTERN (insn))));
2993 if (INTVAL (XEXP (note, 0)) <= 0)
2994 return false;
2996 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2998 type_thrown = NULL_TREE;
2999 if (region->type == ERT_THROW)
3001 type_thrown = region->u.throw.type;
3002 region = region->outer;
3005 /* If the exception is caught or blocked by any containing region,
3006 then it is not seen by any calling function. */
3007 for (; region ; region = region->outer)
3008 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
3009 return false;
3011 return true;
3014 /* True if nothing in this function can throw outside this function. */
3016 bool
3017 nothrow_function_p ()
3019 rtx insn;
3021 if (! flag_exceptions)
3022 return true;
3024 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3025 if (can_throw_external (insn))
3026 return false;
3027 for (insn = current_function_epilogue_delay_list; insn;
3028 insn = XEXP (insn, 1))
3029 if (can_throw_external (insn))
3030 return false;
3032 return true;
3036 /* Various hooks for unwind library. */
3038 /* Do any necessary initialization to access arbitrary stack frames.
3039 On the SPARC, this means flushing the register windows. */
3041 void
3042 expand_builtin_unwind_init ()
3044 /* Set this so all the registers get saved in our frame; we need to be
3045 able to copy the saved values for any registers from frames we unwind. */
3046 current_function_has_nonlocal_label = 1;
3048 #ifdef SETUP_FRAME_ADDRESSES
3049 SETUP_FRAME_ADDRESSES ();
3050 #endif
3053 rtx
3054 expand_builtin_eh_return_data_regno (arglist)
3055 tree arglist;
3057 tree which = TREE_VALUE (arglist);
3058 unsigned HOST_WIDE_INT iwhich;
3060 if (TREE_CODE (which) != INTEGER_CST)
3062 error ("argument of `__builtin_eh_return_regno' must be constant");
3063 return constm1_rtx;
3066 iwhich = tree_low_cst (which, 1);
3067 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3068 if (iwhich == INVALID_REGNUM)
3069 return constm1_rtx;
3071 #ifdef DWARF_FRAME_REGNUM
3072 iwhich = DWARF_FRAME_REGNUM (iwhich);
3073 #else
3074 iwhich = DBX_REGISTER_NUMBER (iwhich);
3075 #endif
3077 return GEN_INT (iwhich);
3080 /* Given a value extracted from the return address register or stack slot,
3081 return the actual address encoded in that value. */
3083 rtx
3084 expand_builtin_extract_return_addr (addr_tree)
3085 tree addr_tree;
3087 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3089 /* First mask out any unwanted bits. */
3090 #ifdef MASK_RETURN_ADDR
3091 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3092 #endif
3094 /* Then adjust to find the real return address. */
3095 #if defined (RETURN_ADDR_OFFSET)
3096 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3097 #endif
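/* Note that expand_builtin_frob_return_addr below applies the inverse
   adjustment, subtracting RETURN_ADDR_OFFSET from a real address to
   produce the value to be stored.  */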
3099 return addr;
3102 /* Given an actual address in addr_tree, do any necessary encoding
3103 and return the value to be stored in the return address register or
3104 stack slot so the epilogue will return to that address. */
3106 rtx
3107 expand_builtin_frob_return_addr (addr_tree)
3108 tree addr_tree;
3110 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3112 #ifdef POINTERS_EXTEND_UNSIGNED
3113 if (GET_MODE (addr) != Pmode)
3114 addr = convert_memory_address (Pmode, addr);
3115 #endif
3117 #ifdef RETURN_ADDR_OFFSET
3118 addr = force_reg (Pmode, addr);
3119 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3120 #endif
3122 return addr;
3125 /* Set up the epilogue with the magic bits we'll need to return to the
3126 exception handler. */
3128 void
3129 expand_builtin_eh_return (stackadj_tree, handler_tree)
3130 tree stackadj_tree, handler_tree;
3132 rtx stackadj, handler;
3134 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3135 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3137 #ifdef POINTERS_EXTEND_UNSIGNED
3138 if (GET_MODE (stackadj) != Pmode)
3139 stackadj = convert_memory_address (Pmode, stackadj);
3141 if (GET_MODE (handler) != Pmode)
3142 handler = convert_memory_address (Pmode, handler);
3143 #endif
3145 if (! cfun->eh->ehr_label)
3147 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3148 cfun->eh->ehr_handler = copy_to_reg (handler);
3149 cfun->eh->ehr_label = gen_label_rtx ();
3151 else
3153 if (stackadj != cfun->eh->ehr_stackadj)
3154 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3155 if (handler != cfun->eh->ehr_handler)
3156 emit_move_insn (cfun->eh->ehr_handler, handler);
3159 emit_jump (cfun->eh->ehr_label);
3162 void
3163 expand_eh_return ()
3165 rtx sa, ra, around_label;
3167 if (! cfun->eh->ehr_label)
3168 return;
3170 sa = EH_RETURN_STACKADJ_RTX;
3171 if (! sa)
3173 error ("__builtin_eh_return not supported on this target");
3174 return;
3177 current_function_calls_eh_return = 1;
3179 around_label = gen_label_rtx ();
3180 emit_move_insn (sa, const0_rtx);
3181 emit_jump (around_label);
3183 emit_label (cfun->eh->ehr_label);
3184 clobber_return_register ();
3186 #ifdef HAVE_eh_return
3187 if (HAVE_eh_return)
3188 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3189 else
3190 #endif
3192 ra = EH_RETURN_HANDLER_RTX;
3193 if (! ra)
3195 error ("__builtin_eh_return not supported on this target");
3196 ra = gen_reg_rtx (Pmode);
3199 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3200 emit_move_insn (ra, cfun->eh->ehr_handler);
3203 emit_label (around_label);
3206 /* In the following functions, we represent entries in the action table
3207 as 1-based indices. Special cases are:
3209 0: null action record, non-null landing pad; implies cleanups
3210 -1: null action record, null landing pad; implies no action
3211 -2: no call-site entry; implies must_not_throw
3212 -3: we have yet to process outer regions
3214 Further, no special cases apply to the "next" field of the record.
3215 For next, 0 means end of list. */
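/* Thus, for illustration: a call inside a must_not_throw region gets
   no call-site entry at all (-2); a call outside every region gets -1;
   a call protected only by cleanups gets an entry whose action field
   is 0; and a call in a try block gets a positive offset pointing at
   the action record chain for its catch filters.  */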
3217 struct action_record
3219 int offset;
3220 int filter;
3221 int next;
3224 static int
3225 action_record_eq (pentry, pdata)
3226 const PTR pentry;
3227 const PTR pdata;
3229 const struct action_record *entry = (const struct action_record *) pentry;
3230 const struct action_record *data = (const struct action_record *) pdata;
3231 return entry->filter == data->filter && entry->next == data->next;
3234 static hashval_t
3235 action_record_hash (pentry)
3236 const PTR pentry;
3238 const struct action_record *entry = (const struct action_record *) pentry;
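  /* Combine the two fields with an arbitrary prime multiplier; equal
     (filter, next) pairs must hash equally so that the table lookup in
     add_action_record can share records.  */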
3239 return entry->next * 1009 + entry->filter;
3242 static int
3243 add_action_record (ar_hash, filter, next)
3244 htab_t ar_hash;
3245 int filter, next;
3247 struct action_record **slot, *new, tmp;
3249 tmp.filter = filter;
3250 tmp.next = next;
3251 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3253 if ((new = *slot) == NULL)
3255 new = (struct action_record *) xmalloc (sizeof (*new));
3256 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3257 new->filter = filter;
3258 new->next = next;
3259 *slot = new;
3261 /* The filter value goes in untouched. The link to the next
3262 record is a "self-relative" byte offset, or zero to indicate
3263 that there is no next record. So convert the absolute 1-based
3264 indices we've been carrying around into a displacement. */
3266 push_sleb128 (&cfun->eh->action_record_data, filter);
3267 if (next)
3268 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3269 push_sleb128 (&cfun->eh->action_record_data, next);
3272 return new->offset;
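/* For illustration, assume an empty table.  Adding a cleanup record
   (filter 0, next 0) pushes two sleb128 bytes and returns offset 1.
   Adding a catch record chained to it (filter 2, next 1) pushes its
   filter at byte index 2, computes the displacement 1 - (3 + 1) = -3,
   and returns offset 3.  At runtime the personality routine walks from
   the record at offset 3 back to the cleanup via that self-relative
   -3.  */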
3275 static int
3276 collect_one_action_chain (ar_hash, region)
3277 htab_t ar_hash;
3278 struct eh_region *region;
3280 struct eh_region *c;
3281 int next;
3283 /* If we've reached the top of the region chain, then we have
3284 no actions, and require no landing pad. */
3285 if (region == NULL)
3286 return -1;
3288 switch (region->type)
3290 case ERT_CLEANUP:
3291 /* A cleanup adds a zero filter to the beginning of the chain, but
3292 there are special cases to look out for. If there are *only*
3293 cleanups along a path, then it compresses to a zero action.
3294 Further, if there are multiple cleanups along a path, we only
3295 need to represent one of them, as that is enough to trigger
3296 entry to the landing pad at runtime. */
3297 next = collect_one_action_chain (ar_hash, region->outer);
3298 if (next <= 0)
3299 return 0;
3300 for (c = region->outer; c ; c = c->outer)
3301 if (c->type == ERT_CLEANUP)
3302 return next;
3303 return add_action_record (ar_hash, 0, next);
3305 case ERT_TRY:
3306 /* Process the associated catch regions in reverse order.
3307 If there's a catch-all handler, then we don't need to
3308 search outer regions. Use a magic -3 value to record
3309 that we haven't done the outer search. */
3310 next = -3;
3311 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3313 if (c->u.catch.type_list == NULL)
3315 /* Retrieve the filter from the head of the filter list
3316 where we have stored it (see assign_filter_values). */
3317 int filter
3318 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3320 next = add_action_record (ar_hash, filter, 0);
3322 else
3324 /* Once the outer search is done, trigger an action record for
3325 each filter we have. */
3326 tree flt_node;
3328 if (next == -3)
3330 next = collect_one_action_chain (ar_hash, region->outer);
3332 /* If there is no next action, terminate the chain. */
3333 if (next == -1)
3334 next = 0;
3335 /* If all outer actions are cleanups or must_not_throw,
3336 we'll have no action record for it, since we had wanted
3337 to encode these states in the call-site record directly.
3338 Add a cleanup action to the chain to catch these. */
3339 else if (next <= 0)
3340 next = add_action_record (ar_hash, 0, 0);
3343 flt_node = c->u.catch.filter_list;
3344 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3346 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3347 next = add_action_record (ar_hash, filter, next);
3351 return next;
3353 case ERT_ALLOWED_EXCEPTIONS:
3354 /* An exception specification adds its filter to the
3355 beginning of the chain. */
3356 next = collect_one_action_chain (ar_hash, region->outer);
3357 return add_action_record (ar_hash, region->u.allowed.filter,
3358 next < 0 ? 0 : next);
3360 case ERT_MUST_NOT_THROW:
3361 /* A must-not-throw region with no inner handlers or cleanups
3362 requires no call-site entry. Note that this differs from
3363 the no handler or cleanup case in that we do require an lsda
3364 to be generated. Return a magic -2 value to record this. */
3365 return -2;
3367 case ERT_CATCH:
3368 case ERT_THROW:
3369 /* CATCH regions are handled in TRY above. THROW regions are
3370 for optimization information only and produce no output. */
3371 return collect_one_action_chain (ar_hash, region->outer);
3373 default:
3374 abort ();
3378 static int
3379 add_call_site (landing_pad, action)
3380 rtx landing_pad;
3381 int action;
3383 struct call_site_record *data = cfun->eh->call_site_data;
3384 int used = cfun->eh->call_site_data_used;
3385 int size = cfun->eh->call_site_data_size;
3387 if (used >= size)
3389 size = (size ? size * 2 : 64);
3390 data = (struct call_site_record *)
3391 xrealloc (data, sizeof (*data) * size);
3392 cfun->eh->call_site_data = data;
3393 cfun->eh->call_site_data_size = size;
3396 data[used].landing_pad = landing_pad;
3397 data[used].action = action;
3399 cfun->eh->call_site_data_used = used + 1;
3401 return used + call_site_base;
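/* Note that CALL_SITE_BASE biases the index returned above: for sjlj
   it is set to 1 in sjlj_assign_call_site_values, keeping index 0 free
   for the must-not-throw marker; for dwarf2 it accumulates across
   functions as each call-site table is output.  */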
3404 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3405 The new note numbers will not refer to region numbers, but
3406 instead to call site entries. */
3408 void
3409 convert_to_eh_region_ranges ()
3411 rtx insn, iter, note;
3412 htab_t ar_hash;
3413 int last_action = -3;
3414 rtx last_action_insn = NULL_RTX;
3415 rtx last_landing_pad = NULL_RTX;
3416 rtx first_no_action_insn = NULL_RTX;
3417 int call_site = 0;
3419 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3420 return;
3422 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3424 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3426 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3427 if (INSN_P (iter))
3429 struct eh_region *region;
3430 int this_action;
3431 rtx this_landing_pad;
3433 insn = iter;
3434 if (GET_CODE (insn) == INSN
3435 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3436 insn = XVECEXP (PATTERN (insn), 0, 0);
3438 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3439 if (!note)
3441 if (! (GET_CODE (insn) == CALL_INSN
3442 || (flag_non_call_exceptions
3443 && may_trap_p (PATTERN (insn)))))
3444 continue;
3445 this_action = -1;
3446 region = NULL;
3448 else
3450 if (INTVAL (XEXP (note, 0)) <= 0)
3451 continue;
3452 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3453 this_action = collect_one_action_chain (ar_hash, region);
3456 /* The existence of catch handlers or must-not-throw regions
3457 implies that an lsda is needed (even if empty). */
3458 if (this_action != -1)
3459 cfun->uses_eh_lsda = 1;
3461 /* Delay creation of region notes for no-action regions
3462 until we're sure that an lsda will be required. */
3463 else if (last_action == -3)
3465 first_no_action_insn = iter;
3466 last_action = -1;
3469 /* Cleanups and handlers may share action chains but not
3470 landing pads. Collect the landing pad for this region. */
3471 if (this_action >= 0)
3473 struct eh_region *o;
3474 for (o = region; ! o->landing_pad ; o = o->outer)
3475 continue;
3476 this_landing_pad = o->landing_pad;
3478 else
3479 this_landing_pad = NULL_RTX;
3481 /* Differing actions or landing pads implies a change in call-site
3482 info, which implies some EH_REGION note should be emitted. */
3483 if (last_action != this_action
3484 || last_landing_pad != this_landing_pad)
3486 /* If we'd not seen a previous action (-3) or the previous
3487 action was must-not-throw (-2), then we do not need an
3488 end note. */
3489 if (last_action >= -1)
3491 /* If we delayed the creation of the begin, do it now. */
3492 if (first_no_action_insn)
3494 call_site = add_call_site (NULL_RTX, 0);
3495 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3496 first_no_action_insn);
3497 NOTE_EH_HANDLER (note) = call_site;
3498 first_no_action_insn = NULL_RTX;
3501 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3502 last_action_insn);
3503 NOTE_EH_HANDLER (note) = call_site;
3506 /* If the new action is must-not-throw, then no region notes
3507 are created. */
3508 if (this_action >= -1)
3510 call_site = add_call_site (this_landing_pad,
3511 this_action < 0 ? 0 : this_action);
3512 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3513 NOTE_EH_HANDLER (note) = call_site;
3516 last_action = this_action;
3517 last_landing_pad = this_landing_pad;
3519 last_action_insn = iter;
3522 if (last_action >= -1 && ! first_no_action_insn)
3524 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3525 NOTE_EH_HANDLER (note) = call_site;
3528 htab_delete (ar_hash);
3532 static void
3533 push_uleb128 (data_area, value)
3534 varray_type *data_area;
3535 unsigned int value;
3537 do
3539 unsigned char byte = value & 0x7f;
3540 value >>= 7;
3541 if (value)
3542 byte |= 0x80;
3543 VARRAY_PUSH_UCHAR (*data_area, byte);
3545 while (value);
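/* For example, the uleb128 encoding of 624485 (0x98765) is the byte
   sequence 0xe5 0x8e 0x26: seven bits at a time, least significant
   group first, with the high bit of each byte marking continuation.  */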
3548 static void
3549 push_sleb128 (data_area, value)
3550 varray_type *data_area;
3551 int value;
3553 unsigned char byte;
3554 int more;
3556 do
3558 byte = value & 0x7f;
3559 value >>= 7;
3560 more = ! ((value == 0 && (byte & 0x40) == 0)
3561 || (value == -1 && (byte & 0x40) != 0));
3562 if (more)
3563 byte |= 0x80;
3564 VARRAY_PUSH_UCHAR (*data_area, byte);
3566 while (more);
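/* For example, the sleb128 encoding of -3 is the single byte 0x7d,
   and that of -624485 is 0x9b 0xf1 0x59; the sign bit of the last
   payload (0x40) determines when the encoding can stop.  */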
3570 #ifndef HAVE_AS_LEB128
3571 static int
3572 dw2_size_of_call_site_table ()
3574 int n = cfun->eh->call_site_data_used;
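  /* Each entry is three udata4 fields (region start, region length,
     landing pad) plus a uleb128 action, matching the layout emitted
     by dw2_output_call_site_table below.  */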
3575 int size = n * (4 + 4 + 4);
3576 int i;
3578 for (i = 0; i < n; ++i)
3580 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3581 size += size_of_uleb128 (cs->action);
3584 return size;
3587 static int
3588 sjlj_size_of_call_site_table ()
3590 int n = cfun->eh->call_site_data_used;
3591 int size = 0;
3592 int i;
3594 for (i = 0; i < n; ++i)
3596 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3597 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3598 size += size_of_uleb128 (cs->action);
3601 return size;
3603 #endif
3605 static void
3606 dw2_output_call_site_table ()
3608 const char *const function_start_lab
3609 = IDENTIFIER_POINTER (current_function_func_begin_label);
3610 int n = cfun->eh->call_site_data_used;
3611 int i;
3613 for (i = 0; i < n; ++i)
3615 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3616 char reg_start_lab[32];
3617 char reg_end_lab[32];
3618 char landing_pad_lab[32];
3620 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3621 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3623 if (cs->landing_pad)
3624 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3625 CODE_LABEL_NUMBER (cs->landing_pad));
3627 /* ??? Perhaps use insn length scaling if the assembler supports
3628 generic arithmetic. */
3629 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3630 data4 if the function is small enough. */
3631 #ifdef HAVE_AS_LEB128
3632 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3633 "region %d start", i);
3634 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3635 "length");
3636 if (cs->landing_pad)
3637 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3638 "landing pad");
3639 else
3640 dw2_asm_output_data_uleb128 (0, "landing pad");
3641 #else
3642 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3643 "region %d start", i);
3644 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3645 if (cs->landing_pad)
3646 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3647 "landing pad");
3648 else
3649 dw2_asm_output_data (4, 0, "landing pad");
3650 #endif
3651 dw2_asm_output_data_uleb128 (cs->action, "action");
3654 call_site_base += n;
3657 static void
3658 sjlj_output_call_site_table ()
3660 int n = cfun->eh->call_site_data_used;
3661 int i;
3663 for (i = 0; i < n; ++i)
3665 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3667 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3668 "region %d landing pad", i);
3669 dw2_asm_output_data_uleb128 (cs->action, "action");
3672 call_site_base += n;
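/* The LSDA emitted below is laid out as:

     header: @LPStart format (and pointer, currently always omitted),
	     @TType format and displacement to the @TType data,
	     call-site format and call-site table length
     call-site table (dwarf2 or sjlj flavor, as above)
     action record table (sleb128 pairs built by add_action_record)
     @TType table (emitted in reverse, so filters index backward
	     from its end)
     exception specification table  */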
3675 void
3676 output_function_exception_table ()
3678 int tt_format, cs_format, lp_format, i, n;
3679 #ifdef HAVE_AS_LEB128
3680 char ttype_label[32];
3681 char cs_after_size_label[32];
3682 char cs_end_label[32];
3683 #else
3684 int call_site_len;
3685 #endif
3686 int have_tt_data;
3687 int funcdef_number;
3688 int tt_format_size = 0;
3690 /* Not all functions need anything. */
3691 if (! cfun->uses_eh_lsda)
3692 return;
3694 funcdef_number = (USING_SJLJ_EXCEPTIONS
3695 ? sjlj_funcdef_number
3696 : current_funcdef_number);
3698 #ifdef IA64_UNWIND_INFO
3699 fputs ("\t.personality\t", asm_out_file);
3700 output_addr_const (asm_out_file, eh_personality_libfunc);
3701 fputs ("\n\t.handlerdata\n", asm_out_file);
3702 /* Note that varasm still thinks we're in the function's code section.
3703 The ".endp" directive that will immediately follow will take us back. */
3704 #else
3705 (*targetm.asm_out.exception_section) ();
3706 #endif
3708 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3709 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3711 /* Indicate the format of the @TType entries. */
3712 if (! have_tt_data)
3713 tt_format = DW_EH_PE_omit;
3714 else
3716 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3717 #ifdef HAVE_AS_LEB128
3718 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3719 #endif
3720 tt_format_size = size_of_encoded_value (tt_format);
3722 assemble_align (tt_format_size * BITS_PER_UNIT);
3725 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3727 /* The LSDA header. */
3729 /* Indicate the format of the landing pad start pointer. An omitted
3730 field implies @LPStart == @Start. */
3731 /* Currently we always put @LPStart == @Start. This field would
3732 be most useful in moving the landing pads completely out of
3733 line to another section, but it could also be used to minimize
3734 the size of uleb128 landing pad offsets. */
3735 lp_format = DW_EH_PE_omit;
3736 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3737 eh_data_format_name (lp_format));
3739 /* @LPStart pointer would go here. */
3741 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3742 eh_data_format_name (tt_format));
3744 #ifndef HAVE_AS_LEB128
3745 if (USING_SJLJ_EXCEPTIONS)
3746 call_site_len = sjlj_size_of_call_site_table ();
3747 else
3748 call_site_len = dw2_size_of_call_site_table ();
3749 #endif
3751 /* A pc-relative 4-byte displacement to the @TType data. */
3752 if (have_tt_data)
3754 #ifdef HAVE_AS_LEB128
3755 char ttype_after_disp_label[32];
3756 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3757 funcdef_number);
3758 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3759 "@TType base offset");
3760 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3761 #else
3762 /* Ugh. Alignment complicates things. */
3763 unsigned int before_disp, after_disp, last_disp, disp;
3765 before_disp = 1 + 1;
3766 after_disp = (1 + size_of_uleb128 (call_site_len)
3767 + call_site_len
3768 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3769 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3770 * tt_format_size));
3772 disp = after_disp;
3773 do
3775 unsigned int disp_size, pad;
3777 last_disp = disp;
3778 disp_size = size_of_uleb128 (disp);
3779 pad = before_disp + disp_size + after_disp;
3780 if (pad % tt_format_size)
3781 pad = tt_format_size - (pad % tt_format_size);
3782 else
3783 pad = 0;
3784 disp = after_disp + pad;
3786 while (disp != last_disp);
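      /* The displacement must also cover the alignment padding that
	 precedes the @TType data, but the number of bytes the uleb128
	 encoding of the displacement itself occupies changes the
	 padding needed; iterate until the value stabilizes.  */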
3788 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3789 #endif
3792 /* Indicate the format of the call-site offsets. */
3793 #ifdef HAVE_AS_LEB128
3794 cs_format = DW_EH_PE_uleb128;
3795 #else
3796 cs_format = DW_EH_PE_udata4;
3797 #endif
3798 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3799 eh_data_format_name (cs_format));
3801 #ifdef HAVE_AS_LEB128
3802 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3803 funcdef_number);
3804 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3805 funcdef_number);
3806 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3807 "Call-site table length");
3808 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3809 if (USING_SJLJ_EXCEPTIONS)
3810 sjlj_output_call_site_table ();
3811 else
3812 dw2_output_call_site_table ();
3813 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3814 #else
3815 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3816 if (USING_SJLJ_EXCEPTIONS)
3817 sjlj_output_call_site_table ();
3818 else
3819 dw2_output_call_site_table ();
3820 #endif
3822 /* ??? Decode and interpret the data for flag_debug_asm. */
3823 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3824 for (i = 0; i < n; ++i)
3825 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3826 (i ? NULL : "Action record table"));
3828 if (have_tt_data)
3829 assemble_align (tt_format_size * BITS_PER_UNIT);
3831 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3832 while (i-- > 0)
3834 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3835 rtx value;
3837 if (type == NULL_TREE)
3838 type = integer_zero_node;
3839 else
3840 type = lookup_type_for_runtime (type);
3842 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3843 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3844 assemble_integer (value, tt_format_size,
3845 tt_format_size * BITS_PER_UNIT, 1);
3846 else
3847 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3850 #ifdef HAVE_AS_LEB128
3851 if (have_tt_data)
3852 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3853 #endif
3855 /* ??? Decode and interpret the data for flag_debug_asm. */
3856 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3857 for (i = 0; i < n; ++i)
3858 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3859 (i ? NULL : "Exception specification table"));
3861 function_section (current_function_decl);
3863 if (USING_SJLJ_EXCEPTIONS)
3864 sjlj_funcdef_number += 1;