PR tree-optimization/40585
[official-gcc/constexpr.git] / gcc / except.c
blob06e5529dddc09a9b108e0419c38e0612256c0378
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
79 #include "timevar.h"
80 #include "tree-flow.h"
/* NOTE(review): this blob was extracted with the original file's line
   numbers fused into each line, and short lines (blank lines, brace-only
   lines) dropped -- e.g. the closing "};" of struct ehl_map_entry at
   original line 103 is missing.  Code kept byte-identical below.  */
82 /* Provide defaults for stuff that may not be defined when using
83 sjlj exceptions. */
84 #ifndef EH_RETURN_DATA_REGNO
85 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
86 #endif
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 gimple (*lang_protect_cleanup_actions) (void);
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) (tree a, tree b);
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) (tree);
98 /* A hash table of label to region number. */
100 struct GTY(()) ehl_map_entry {
101 rtx label;
102 struct eh_region_d *region;
105 static GTY(()) int call_site_base;
106 static GTY ((param_is (union tree_node)))
107 htab_t type_to_runtime_map;
109 /* Describe the SjLj_Function_Context structure. */
110 static GTY(()) tree sjlj_fc_type_node;
111 static int sjlj_fc_call_site_ofs;
112 static int sjlj_fc_data_ofs;
113 static int sjlj_fc_personality_ofs;
114 static int sjlj_fc_lsda_ofs;
115 static int sjlj_fc_jbuf_ofs;
/* NOTE(review): extraction mangling -- the braces of struct
   call_site_record_d and of enum reachable_code were on short lines
   dropped by the extractor.  Declarations kept byte-identical below;
   these are the forward declarations for the static helpers defined
   later in this file.  */
118 struct GTY(()) call_site_record_d
120 rtx landing_pad;
121 int action;
124 static int t2r_eq (const void *, const void *);
125 static hashval_t t2r_hash (const void *);
127 static int ttypes_filter_eq (const void *, const void *);
128 static hashval_t ttypes_filter_hash (const void *);
129 static int ehspec_filter_eq (const void *, const void *);
130 static hashval_t ehspec_filter_hash (const void *);
131 static int add_ttypes_entry (htab_t, tree);
132 static int add_ehspec_entry (htab_t, htab_t, tree);
133 static void assign_filter_values (void);
134 static void build_post_landing_pads (void);
135 static void connect_post_landing_pads (void);
136 static void dw2_build_landing_pads (void);
138 struct sjlj_lp_info;
139 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
140 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
141 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
142 static void sjlj_emit_function_enter (rtx);
143 static void sjlj_emit_function_exit (void);
144 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
145 static void sjlj_build_landing_pads (void);
147 static void remove_eh_handler (struct eh_region_d *);
148 static void remove_eh_handler_and_replace (struct eh_region_d *,
149 struct eh_region_d *, bool);
151 /* The return value of reachable_next_level. */
152 enum reachable_code
154 /* The given exception is not processed by the given region. */
155 RNL_NOT_CAUGHT,
156 /* The given exception may need processing by the given region. */
157 RNL_MAYBE_CAUGHT,
158 /* The given exception is completely processed by the given region. */
159 RNL_CAUGHT,
160 /* The given exception is completely processed by the runtime. */
161 RNL_BLOCKED
164 struct reachable_info;
165 static enum reachable_code reachable_next_level (struct eh_region_d *, tree,
166 struct reachable_info *, bool);
168 static int action_record_eq (const void *, const void *);
169 static hashval_t action_record_hash (const void *);
170 static int add_action_record (htab_t, int, int);
171 static int collect_one_action_chain (htab_t, struct eh_region_d *);
172 static int add_call_site (rtx, int);
174 static void push_uleb128 (varray_type *, unsigned int);
175 static void push_sleb128 (varray_type *, int);
176 #ifndef HAVE_AS_LEB128
177 static int dw2_size_of_call_site_table (void);
178 static int sjlj_size_of_call_site_table (void);
179 #endif
180 static void dw2_output_call_site_table (void);
181 static void sjlj_output_call_site_table (void);
184 /* Routine to see if exception handling is turned on.
185 DO_WARN is nonzero if we want to inform the user that exception
186 handling is turned off.
188 This is used to ensure that -fexceptions has been specified if the
189 compiler tries to use any exception-specific functions. */
192 doing_eh (int do_warn)
194 if (! flag_exceptions)
196 static int warned = 0;
197 if (! warned && do_warn)
199 error ("exception handling disabled, use -fexceptions to enable");
200 warned = 1;
202 return 0;
204 return 1;
208 void
209 init_eh (void)
211 if (! flag_exceptions)
212 return;
214 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
216 /* Create the SjLj_Function_Context structure. This should match
217 the definition in unwind-sjlj.c. */
218 if (USING_SJLJ_EXCEPTIONS)
220 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
222 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
224 f_prev = build_decl (BUILTINS_LOCATION,
225 FIELD_DECL, get_identifier ("__prev"),
226 build_pointer_type (sjlj_fc_type_node));
227 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
229 f_cs = build_decl (BUILTINS_LOCATION,
230 FIELD_DECL, get_identifier ("__call_site"),
231 integer_type_node);
232 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
234 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
235 tmp = build_array_type (lang_hooks.types.type_for_mode
236 (targetm.unwind_word_mode (), 1),
237 tmp);
238 f_data = build_decl (BUILTINS_LOCATION,
239 FIELD_DECL, get_identifier ("__data"), tmp);
240 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
242 f_per = build_decl (BUILTINS_LOCATION,
243 FIELD_DECL, get_identifier ("__personality"),
244 ptr_type_node);
245 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
247 f_lsda = build_decl (BUILTINS_LOCATION,
248 FIELD_DECL, get_identifier ("__lsda"),
249 ptr_type_node);
250 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
252 #ifdef DONT_USE_BUILTIN_SETJMP
253 #ifdef JMP_BUF_SIZE
254 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
255 #else
256 /* Should be large enough for most systems, if it is not,
257 JMP_BUF_SIZE should be defined with the proper value. It will
258 also tend to be larger than necessary for most systems, a more
259 optimal port will define JMP_BUF_SIZE. */
260 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
261 #endif
262 #else
263 /* builtin_setjmp takes a pointer to 5 words. */
264 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
265 #endif
266 tmp = build_index_type (tmp);
267 tmp = build_array_type (ptr_type_node, tmp);
268 f_jbuf = build_decl (BUILTINS_LOCATION,
269 FIELD_DECL, get_identifier ("__jbuf"), tmp);
270 #ifdef DONT_USE_BUILTIN_SETJMP
271 /* We don't know what the alignment requirements of the
272 runtime's jmp_buf has. Overestimate. */
273 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
274 DECL_USER_ALIGN (f_jbuf) = 1;
275 #endif
276 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
278 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
279 TREE_CHAIN (f_prev) = f_cs;
280 TREE_CHAIN (f_cs) = f_data;
281 TREE_CHAIN (f_data) = f_per;
282 TREE_CHAIN (f_per) = f_lsda;
283 TREE_CHAIN (f_lsda) = f_jbuf;
285 layout_type (sjlj_fc_type_node);
287 /* Cache the interesting field offsets so that we have
288 easy access from rtl. */
289 sjlj_fc_call_site_ofs
290 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
291 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
292 sjlj_fc_data_ofs
293 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
294 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
295 sjlj_fc_personality_ofs
296 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
297 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
298 sjlj_fc_lsda_ofs
299 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
300 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
301 sjlj_fc_jbuf_ofs
302 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
303 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
307 void
308 init_eh_for_function (void)
310 cfun->eh = GGC_CNEW (struct eh_status);
313 /* Routines to generate the exception tree somewhat directly.
314 These are used from tree-eh.c when processing exception related
315 nodes during tree optimization. */
317 static struct eh_region_d *
318 gen_eh_region (enum eh_region_type type, struct eh_region_d *outer)
320 struct eh_region_d *new_eh;
322 #ifdef ENABLE_CHECKING
323 gcc_assert (doing_eh (0));
324 #endif
326 /* Insert a new blank region as a leaf in the tree. */
327 new_eh = GGC_CNEW (struct eh_region_d);
328 new_eh->type = type;
329 new_eh->outer = outer;
330 if (outer)
332 new_eh->next_peer = outer->inner;
333 outer->inner = new_eh;
335 else
337 new_eh->next_peer = cfun->eh->region_tree;
338 cfun->eh->region_tree = new_eh;
341 new_eh->region_number = ++cfun->eh->last_region_number;
343 return new_eh;
346 struct eh_region_d *
347 gen_eh_region_cleanup (struct eh_region_d *outer)
349 struct eh_region_d *cleanup = gen_eh_region (ERT_CLEANUP, outer);
350 return cleanup;
353 struct eh_region_d *
354 gen_eh_region_try (struct eh_region_d *outer)
356 return gen_eh_region (ERT_TRY, outer);
359 struct eh_region_d *
360 gen_eh_region_catch (struct eh_region_d *t, tree type_or_list)
362 struct eh_region_d *c, *l;
363 tree type_list, type_node;
365 /* Ensure to always end up with a type list to normalize further
366 processing, then register each type against the runtime types map. */
367 type_list = type_or_list;
368 if (type_or_list)
370 if (TREE_CODE (type_or_list) != TREE_LIST)
371 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
373 type_node = type_list;
374 for (; type_node; type_node = TREE_CHAIN (type_node))
375 add_type_for_runtime (TREE_VALUE (type_node));
378 c = gen_eh_region (ERT_CATCH, t->outer);
379 c->u.eh_catch.type_list = type_list;
380 l = t->u.eh_try.last_catch;
381 c->u.eh_catch.prev_catch = l;
382 if (l)
383 l->u.eh_catch.next_catch = c;
384 else
385 t->u.eh_try.eh_catch = c;
386 t->u.eh_try.last_catch = c;
388 return c;
391 struct eh_region_d *
392 gen_eh_region_allowed (struct eh_region_d *outer, tree allowed)
394 struct eh_region_d *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
395 region->u.allowed.type_list = allowed;
397 for (; allowed ; allowed = TREE_CHAIN (allowed))
398 add_type_for_runtime (TREE_VALUE (allowed));
400 return region;
403 struct eh_region_d *
404 gen_eh_region_must_not_throw (struct eh_region_d *outer)
406 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
410 get_eh_region_number (struct eh_region_d *region)
412 return region->region_number;
415 bool
416 get_eh_region_may_contain_throw (struct eh_region_d *region)
418 return region->may_contain_throw;
421 tree
422 get_eh_region_tree_label (struct eh_region_d *region)
424 return region->tree_label;
427 tree
428 get_eh_region_no_tree_label (int region)
430 return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
433 void
434 set_eh_region_tree_label (struct eh_region_d *region, tree lab)
436 region->tree_label = lab;
439 void
440 expand_resx_expr (tree exp)
442 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
443 rtx insn;
444 struct eh_region_d *reg = VEC_index (eh_region,
445 cfun->eh->region_array, region_nr);
447 do_pending_stack_adjust ();
448 insn = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
449 if (reg->resume)
450 reg->resume = gen_rtx_INSN_LIST (VOIDmode, insn, reg->resume);
451 else
452 reg->resume = insn;
453 emit_barrier ();
456 /* Note that the current EH region (if any) may contain a throw, or a
457 call to a function which itself may contain a throw. */
459 void
460 note_eh_region_may_contain_throw (struct eh_region_d *region)
462 while (region && !region->may_contain_throw)
464 region->may_contain_throw = 1;
465 region = region->outer;
470 /* Return an rtl expression for a pointer to the exception object
471 within a handler. */
474 get_exception_pointer (void)
476 if (! crtl->eh.exc_ptr)
477 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
478 return crtl->eh.exc_ptr;
481 /* Return an rtl expression for the exception dispatch filter
482 within a handler. */
485 get_exception_filter (void)
487 if (! crtl->eh.filter)
488 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
489 return crtl->eh.filter;
492 /* This section is for the exception handling specific optimization pass. */
494 /* Random access the exception region tree. */
496 void
497 collect_eh_region_array (void)
499 struct eh_region_d *i;
501 i = cfun->eh->region_tree;
502 if (! i)
503 return;
505 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
506 cfun->eh->last_region_number + 1);
507 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
509 while (1)
511 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
513 /* If there are sub-regions, process them. */
514 if (i->inner)
515 i = i->inner;
516 /* If there are peers, process them. */
517 else if (i->next_peer)
518 i = i->next_peer;
519 /* Otherwise, step back up the tree to the next peer. */
520 else
522 do {
523 i = i->outer;
524 if (i == NULL)
525 return;
526 } while (i->next_peer == NULL);
527 i = i->next_peer;
532 /* R is MUST_NOT_THROW region that is not reachable via local
533 RESX instructions. It still must be kept in the tree in case runtime
534 can unwind through it, or we will eliminate out terminate call
535 runtime would do otherwise. Return TRUE if R contains throwing statements
536 or some of the exceptions in inner regions can be unwound up to R.
538 CONTAINS_STMT is bitmap of all regions that contains some throwing
539 statements.
541 Function looks O(^3) at first sight. In fact the function is called at most
542 once for every MUST_NOT_THROW in EH tree from remove_unreachable_regions
543 Because the outer loop walking subregions does not dive in MUST_NOT_THROW,
544 the outer loop examines every region at most once. The inner loop
545 is doing unwinding from the throwing statement same way as we do during
546 CFG construction, so it is O(^2) in size of EH tree, but O(n) in size
547 of CFG. In practice Eh trees are wide, not deep, so this is not
548 a problem. */
550 static bool
551 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region_d *r)
553 struct eh_region_d *i = r->inner;
554 unsigned n;
555 bitmap_iterator bi;
557 if (TEST_BIT (contains_stmt, r->region_number))
558 return true;
559 if (r->aka)
560 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
561 if (TEST_BIT (contains_stmt, n))
562 return true;
563 if (!i)
564 return false;
565 while (1)
567 /* It is pointless to look into MUST_NOT_THROW
568 or dive into subregions. They never unwind up. */
569 if (i->type != ERT_MUST_NOT_THROW)
571 bool found = TEST_BIT (contains_stmt, i->region_number);
572 if (!found && i->aka)
573 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
574 if (TEST_BIT (contains_stmt, n))
576 found = true;
577 break;
579 /* We have nested region that contains throwing statement.
580 See if resuming might lead up to the resx or we get locally
581 caught sooner. If we get locally caught sooner, we either
582 know region R is not reachable or it would have direct edge
583 from the EH resx and thus consider region reachable at
584 firest place. */
585 if (found)
587 struct eh_region_d *i1 = i;
588 tree type_thrown = NULL_TREE;
590 if (i1->type == ERT_THROW)
592 type_thrown = i1->u.eh_throw.type;
593 i1 = i1->outer;
595 for (; i1 != r; i1 = i1->outer)
596 if (reachable_next_level (i1, type_thrown, NULL,
597 false) >= RNL_CAUGHT)
598 break;
599 if (i1 == r)
600 return true;
603 /* If there are sub-regions, process them. */
604 if (i->type != ERT_MUST_NOT_THROW && i->inner)
605 i = i->inner;
606 /* If there are peers, process them. */
607 else if (i->next_peer)
608 i = i->next_peer;
609 /* Otherwise, step back up the tree to the next peer. */
610 else
614 i = i->outer;
615 if (i == r)
616 return false;
618 while (i->next_peer == NULL);
619 i = i->next_peer;
624 /* Bring region R to the root of tree. */
626 static void
627 bring_to_root (struct eh_region_d *r)
629 struct eh_region_d **pp;
630 struct eh_region_d *outer = r->outer;
631 if (!r->outer)
632 return;
633 for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
634 continue;
635 *pp = r->next_peer;
636 r->outer = NULL;
637 r->next_peer = cfun->eh->region_tree;
638 cfun->eh->region_tree = r;
641 /* Return true if region R2 can be replaced by R1. */
643 static bool
644 eh_region_replaceable_by_p (const struct eh_region_d *r1,
645 const struct eh_region_d *r2)
647 /* Regions are semantically same if they are of same type,
648 have same label and type. */
649 if (r1->type != r2->type)
650 return false;
651 if (r1->tree_label != r2->tree_label)
652 return false;
654 /* Verify that also region type dependent data are the same. */
655 switch (r1->type)
657 case ERT_MUST_NOT_THROW:
658 case ERT_CLEANUP:
659 break;
660 case ERT_TRY:
662 struct eh_region_d *c1, *c2;
663 for (c1 = r1->u.eh_try.eh_catch,
664 c2 = r2->u.eh_try.eh_catch;
665 c1 && c2;
666 c1 = c1->u.eh_catch.next_catch,
667 c2 = c2->u.eh_catch.next_catch)
668 if (!eh_region_replaceable_by_p (c1, c2))
669 return false;
670 if (c1 || c2)
671 return false;
673 break;
674 case ERT_CATCH:
675 if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
676 return false;
677 if (!list_equal_p (r1->u.eh_catch.filter_list,
678 r2->u.eh_catch.filter_list))
679 return false;
680 break;
681 case ERT_ALLOWED_EXCEPTIONS:
682 if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
683 return false;
684 if (r1->u.allowed.filter != r2->u.allowed.filter)
685 return false;
686 break;
687 case ERT_THROW:
688 if (r1->u.eh_throw.type != r2->u.eh_throw.type)
689 return false;
690 break;
691 default:
692 gcc_unreachable ();
694 if (dump_file && (dump_flags & TDF_DETAILS))
695 fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
696 r2->region_number);
697 return true;
700 /* Replace region R2 by R1. */
702 static void
703 replace_region (struct eh_region_d *r1, struct eh_region_d *r2)
705 struct eh_region_d *next1 = r1->u.eh_try.eh_catch;
706 struct eh_region_d *next2 = r2->u.eh_try.eh_catch;
707 bool is_try = r1->type == ERT_TRY;
709 gcc_assert (r1->type != ERT_CATCH);
710 remove_eh_handler_and_replace (r2, r1, false);
711 if (is_try)
713 while (next1)
715 r1 = next1;
716 r2 = next2;
717 gcc_assert (next1->type == ERT_CATCH);
718 gcc_assert (next2->type == ERT_CATCH);
719 next1 = next1->u.eh_catch.next_catch;
720 next2 = next2->u.eh_catch.next_catch;
721 remove_eh_handler_and_replace (r2, r1, false);
726 /* Return hash value of type list T. */
728 static hashval_t
729 hash_type_list (tree t)
731 hashval_t val = 0;
732 for (; t; t = TREE_CHAIN (t))
733 val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
734 return val;
737 /* Hash EH regions so semantically same regions get same hash value. */
739 static hashval_t
740 hash_eh_region (const void *r)
742 const struct eh_region_d *region = (const struct eh_region_d *) r;
743 hashval_t val = region->type;
745 if (region->tree_label)
746 val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
747 switch (region->type)
749 case ERT_MUST_NOT_THROW:
750 case ERT_CLEANUP:
751 break;
752 case ERT_TRY:
754 struct eh_region_d *c;
755 for (c = region->u.eh_try.eh_catch;
756 c; c = c->u.eh_catch.next_catch)
757 val = iterative_hash_hashval_t (hash_eh_region (c), val);
759 break;
760 case ERT_CATCH:
761 val = iterative_hash_hashval_t (hash_type_list
762 (region->u.eh_catch.type_list), val);
763 break;
764 case ERT_ALLOWED_EXCEPTIONS:
765 val = iterative_hash_hashval_t
766 (hash_type_list (region->u.allowed.type_list), val);
767 val = iterative_hash_hashval_t (region->u.allowed.filter, val);
768 break;
769 case ERT_THROW:
770 val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
771 break;
772 default:
773 gcc_unreachable ();
775 return val;
/* Return true if regions R1 and R2 are equal.  Hashtable callback
   wrapping eh_region_replaceable_by_p.  */

static int
eh_regions_equal_p (const void *r1, const void *r2)
{
  return eh_region_replaceable_by_p ((const struct eh_region_d *) r1,
                                     (const struct eh_region_d *) r2);
}
787 /* Walk all peers of REGION and try to merge those regions
788 that are semantically equivalent. Look into subregions
789 recursively too. */
791 static bool
792 merge_peers (struct eh_region_d *region)
794 struct eh_region_d *r1, *r2, *outer = NULL, *next;
795 bool merged = false;
796 int num_regions = 0;
797 if (region)
798 outer = region->outer;
799 else
800 return false;
802 /* First see if there is inner region equivalent to region
803 in question. EH control flow is acyclic so we know we
804 can merge them. */
805 if (outer)
806 for (r1 = region; r1; r1 = next)
808 next = r1->next_peer;
809 if (r1->type == ERT_CATCH)
810 continue;
811 if (eh_region_replaceable_by_p (r1->outer, r1))
813 replace_region (r1->outer, r1);
814 merged = true;
816 else
817 num_regions ++;
820 /* Get new first region and try to match the peers
821 for equivalence. */
822 if (outer)
823 region = outer->inner;
824 else
825 region = cfun->eh->region_tree;
827 /* There are few regions to inspect:
828 N^2 loop matching each region with each region
829 will do the job well. */
830 if (num_regions < 10)
832 for (r1 = region; r1; r1 = r1->next_peer)
834 if (r1->type == ERT_CATCH)
835 continue;
836 for (r2 = r1->next_peer; r2; r2 = next)
838 next = r2->next_peer;
839 if (eh_region_replaceable_by_p (r1, r2))
841 replace_region (r1, r2);
842 merged = true;
847 /* Or use hashtable to avoid N^2 behaviour. */
848 else
850 htab_t hash;
851 hash = htab_create (num_regions, hash_eh_region,
852 eh_regions_equal_p, NULL);
853 for (r1 = region; r1; r1 = next)
855 void **slot;
857 next = r1->next_peer;
858 if (r1->type == ERT_CATCH)
859 continue;
860 slot = htab_find_slot (hash, r1, INSERT);
861 if (!*slot)
862 *slot = r1;
863 else
864 replace_region ((struct eh_region_d *) *slot, r1);
866 htab_delete (hash);
868 for (r1 = region; r1; r1 = r1->next_peer)
869 merged |= merge_peers (r1->inner);
870 return merged;
873 /* Remove all regions whose labels are not reachable.
874 REACHABLE is bitmap of all regions that are used by the function
875 CONTAINS_STMT is bitmap of all regions that contains stmt (or NULL). */
877 void
878 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
880 int i;
881 struct eh_region_d *r;
882 VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
883 struct eh_region_d *local_must_not_throw = NULL;
884 struct eh_region_d *first_must_not_throw = NULL;
886 for (i = cfun->eh->last_region_number; i > 0; --i)
888 r = VEC_index (eh_region, cfun->eh->region_array, i);
889 if (!r || r->region_number != i)
890 continue;
891 if (!TEST_BIT (reachable, i) && !r->resume)
893 bool kill_it = true;
895 r->tree_label = NULL;
896 switch (r->type)
898 case ERT_THROW:
899 /* Don't remove ERT_THROW regions if their outer region
900 is reachable. */
901 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
902 kill_it = false;
903 break;
904 case ERT_MUST_NOT_THROW:
905 /* MUST_NOT_THROW regions are implementable solely in the
906 runtime, but we need them when inlining function.
908 Keep them if outer region is not MUST_NOT_THROW a well
909 and if they contain some statement that might unwind through
910 them. */
911 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
912 && (!contains_stmt
913 || can_be_reached_by_runtime (contains_stmt, r)))
914 kill_it = false;
915 break;
916 case ERT_TRY:
918 /* TRY regions are reachable if any of its CATCH regions
919 are reachable. */
920 struct eh_region_d *c;
921 for (c = r->u.eh_try.eh_catch; c;
922 c = c->u.eh_catch.next_catch)
923 if (TEST_BIT (reachable, c->region_number))
925 kill_it = false;
926 break;
928 break;
931 default:
932 break;
935 if (kill_it)
937 if (dump_file)
938 fprintf (dump_file, "Removing unreachable eh region %i\n",
939 r->region_number);
940 remove_eh_handler (r);
942 else if (r->type == ERT_MUST_NOT_THROW)
944 if (!first_must_not_throw)
945 first_must_not_throw = r;
946 VEC_safe_push (eh_region, heap, must_not_throws, r);
949 else
950 if (r->type == ERT_MUST_NOT_THROW)
952 if (!local_must_not_throw)
953 local_must_not_throw = r;
954 if (r->outer)
955 VEC_safe_push (eh_region, heap, must_not_throws, r);
959 /* MUST_NOT_THROW regions without local handler are all the same; they
960 trigger terminate call in runtime.
961 MUST_NOT_THROW handled locally can differ in debug info associated
962 to std::terminate () call or if one is coming from Java and other
963 from C++ whether they call terminate or abort.
965 We merge all MUST_NOT_THROW regions handled by the run-time into one.
966 We alsobring all local MUST_NOT_THROW regions to the roots of EH tree
967 (since unwinding never continues to the outer region anyway).
968 If MUST_NOT_THROW with local handler is present in the tree, we use
969 that region to merge into, since it will remain in tree anyway;
970 otherwise we use first MUST_NOT_THROW.
972 Merging of locally handled regions needs changes to the CFG. Crossjumping
973 should take care of this, by looking at the actual code and
974 ensuring that the cleanup actions are really the same. */
976 if (local_must_not_throw)
977 first_must_not_throw = local_must_not_throw;
979 for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
981 if (!r->label && !r->tree_label && r != first_must_not_throw)
983 if (dump_file)
984 fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
985 r->region_number,
986 first_must_not_throw->region_number);
987 remove_eh_handler_and_replace (r, first_must_not_throw, false);
988 first_must_not_throw->may_contain_throw |= r->may_contain_throw;
990 else
991 bring_to_root (r);
993 merge_peers (cfun->eh->region_tree);
994 #ifdef ENABLE_CHECKING
995 verify_eh_tree (cfun);
996 #endif
997 VEC_free (eh_region, heap, must_not_throws);
/* NOTE(review): extraction mangling -- the line carrying the final
   argument and closing paren of the VEC_replace call (original line
   ~1025) was dropped along with the brace-only lines, so this body is
   textually incomplete; kept byte-identical rather than guessed.  */
1000 /* Return array mapping LABEL_DECL_UID to region such that region's tree_label
1001 is identical to label. */
1003 VEC (int, heap) *
1004 label_to_region_map (void)
1006 VEC (int, heap) * label_to_region = NULL;
1007 int i;
1008 int idx;
1010 VEC_safe_grow_cleared (int, heap, label_to_region,
1011 cfun->cfg->last_label_uid + 1);
1012 for (i = cfun->eh->last_region_number; i > 0; --i)
1014 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
1015 if (r && r->region_number == i
1016 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
1018 if ((idx = VEC_index (int, label_to_region,
1019 LABEL_DECL_UID (r->tree_label))) != 0)
1020 r->next_region_sharing_label =
1021 VEC_index (eh_region, cfun->eh->region_array, idx);
1022 else
1023 r->next_region_sharing_label = NULL;
1024 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
1028 return label_to_region;
1031 /* Return number of EH regions. */
1033 num_eh_regions (void)
1035 return cfun->eh->last_region_number + 1;
1038 /* Return next region sharing same label as REGION. */
1041 get_next_region_sharing_label (int region)
1043 struct eh_region_d *r;
1044 if (!region)
1045 return 0;
1046 r = VEC_index (eh_region, cfun->eh->region_array, region);
1047 if (!r || !r->next_region_sharing_label)
1048 return 0;
1049 return r->next_region_sharing_label->region_number;
1052 /* Return bitmap of all labels that are handlers of must not throw regions. */
1054 bitmap
1055 must_not_throw_labels (void)
1057 struct eh_region_d *i;
1058 bitmap labels = BITMAP_ALLOC (NULL);
1060 i = cfun->eh->region_tree;
1061 if (! i)
1062 return labels;
1064 while (1)
1066 if (i->type == ERT_MUST_NOT_THROW && i->tree_label
1067 && LABEL_DECL_UID (i->tree_label) >= 0)
1068 bitmap_set_bit (labels, LABEL_DECL_UID (i->tree_label));
1070 /* If there are sub-regions, process them. */
1071 if (i->inner)
1072 i = i->inner;
1073 /* If there are peers, process them. */
1074 else if (i->next_peer)
1075 i = i->next_peer;
1076 /* Otherwise, step back up the tree to the next peer. */
1077 else
1079 do {
1080 i = i->outer;
1081 if (i == NULL)
1082 return labels;
1083 } while (i->next_peer == NULL);
1084 i = i->next_peer;
1089 /* Set up EH labels for RTL. */
1091 void
1092 convert_from_eh_region_ranges (void)
1094 int i, n = cfun->eh->last_region_number;
1096 /* Most of the work is already done at the tree level. All we need to
1097 do is collect the rtl labels that correspond to the tree labels that
1098 collect the rtl labels that correspond to the tree labels
1099 we allocated earlier. */
1100 for (i = 1; i <= n; ++i)
1102 struct eh_region_d *region;
1104 region = VEC_index (eh_region, cfun->eh->region_array, i);
1105 if (region && region->tree_label)
1106 region->label = DECL_RTL_IF_SET (region->tree_label);
1110 void
1111 find_exception_handler_labels (void)
1113 int i;
1115 if (cfun->eh->region_tree == NULL)
1116 return;
1118 for (i = cfun->eh->last_region_number; i > 0; --i)
1120 struct eh_region_d *region;
1121 rtx lab;
1123 region = VEC_index (eh_region, cfun->eh->region_array, i);
1124 if (! region || region->region_number != i)
1125 continue;
1126 if (crtl->eh.built_landing_pads)
1127 lab = region->landing_pad;
1128 else
1129 lab = region->label;
1133 /* Returns true if the current function has exception handling regions. */
1135 bool
1136 current_function_has_exception_handlers (void)
1138 int i;
1140 for (i = cfun->eh->last_region_number; i > 0; --i)
1142 struct eh_region_d *region;
1144 region = VEC_index (eh_region, cfun->eh->region_array, i);
1145 if (region
1146 && region->region_number == i
1147 && region->type != ERT_THROW)
1148 return true;
1151 return false;
1154 /* A subroutine of duplicate_eh_regions.  Search the region tree under O
1155    for the minimum and maximum region numbers.  Update *MIN and *MAX.  */
1157 static void
1158 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
1160   int i;
1162   if (o->aka)
       /* The aka bitmap lists alias region numbers that also refer to O;
	  its first/last set bits widen the covered numeric range.  */
1164       i = bitmap_first_set_bit (o->aka);
1165       if (i < *min)
1166 	*min = i;
1167       i = bitmap_last_set_bit (o->aka);
1168       if (i > *max)
1169 	*max = i;
1171   if (o->region_number < *min)
1172     *min = o->region_number;
1173   if (o->region_number > *max)
1174     *max = o->region_number;
1176   if (o->inner)
       /* Recurse into the first child, then iterate over its peers.  */
1178       o = o->inner;
1179       duplicate_eh_regions_0 (o, min, max);
1180       while (o->next_peer)
1182 	  o = o->next_peer;
1183 	  duplicate_eh_regions_0 (o, min, max);
1188 /* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
1189    Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
1190    about the other internal pointers just yet, just the tree-like pointers.  */
1192 static eh_region
1193 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
1195   eh_region ret, n;
1197   ret = n = GGC_NEW (struct eh_region_d);
     /* Shallow-copy OLD, then fix up the tree links; next_peer is rebuilt
        below when peers are copied.  */
1199   *n = *old;
1200   n->outer = outer;
1201   n->next_peer = NULL;
1202   if (old->aka)
1204       unsigned i;
1205       bitmap_iterator bi;
1206       n->aka = BITMAP_GGC_ALLOC ();
	  /* Renumber every alias and register the copy under each
	     offset alias number as well.  */
1208       EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
1210 	  bitmap_set_bit (n->aka, i + eh_offset);
1211 	  VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
1215   n->region_number += eh_offset;
1216   VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1218   if (old->inner)
       /* Copy the first child, then each of its peers, chaining the
	  copies together with next_peer.  */
1220       old = old->inner;
1221       n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
1222       while (old->next_peer)
1224 	  old = old->next_peer;
1225 	  n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
1229   return ret;
1232 /* Look for first outer region of R (or R itself) that is
1233 TRY region. Return NULL if none. */
1235 static struct eh_region_d *
1236 find_prev_try (struct eh_region_d * r)
1238 for (; r && r->type != ERT_TRY; r = r->outer)
1239 if (r->type == ERT_MUST_NOT_THROW
1240 || (r->type == ERT_ALLOWED_EXCEPTIONS
1241 && !r->u.allowed.type_list))
1243 r = NULL;
1244 break;
1246 return r;
1249 /* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
1250    function and root the tree below OUTER_REGION.  Remap labels using MAP
1251    callback.  The special case of COPY_REGION of 0 means all regions.  */
     /* Returns EH_OFFSET, the constant added to every copied region number
        (0 when IFUN has no EH data at all).  */
1254 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
1255 		      void *data, int copy_region, int outer_region)
1257   eh_region cur, outer, *splice;
1258   int i, min_region, max_region, eh_offset, cfun_last_region_number;
1259   int num_regions;
1261   if (!ifun->eh)
1262     return 0;
1263 #ifdef ENABLE_CHECKING
1264   verify_eh_tree (ifun);
1265 #endif
1267   /* Find the range of region numbers to be copied.  The interface we
1268      provide here mandates a single offset to find new number from old,
1269      which means we must look at the numbers present, instead of the
1270      count or something else.  */
1271   if (copy_region > 0)
1273       min_region = INT_MAX;
1274       max_region = 0;
1276       cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1277       duplicate_eh_regions_0 (cur, &min_region, &max_region);
1279   else
1281       min_region = 1;
1282       max_region = ifun->eh->last_region_number;
1284   num_regions = max_region - min_region + 1;
1285   cfun_last_region_number = cfun->eh->last_region_number;
1286   eh_offset = cfun_last_region_number + 1 - min_region;
1288   /* If we've not yet created a region array, do so now.  */
1289   cfun->eh->last_region_number = cfun_last_region_number + num_regions;
1290   VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
1291 			 cfun->eh->last_region_number + 1);
1293   /* Locate the spot at which to insert the new tree.  */
1294   if (outer_region > 0)
1296       outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
1297       if (outer)
1298 	splice = &outer->inner;
1299       else
1300 	splice = &cfun->eh->region_tree;
1302   else
1304       outer = NULL;
1305       splice = &cfun->eh->region_tree;
     /* Append after any existing peers at the insertion point.  */
1307   while (*splice)
1308     splice = &(*splice)->next_peer;
     /* IFUN has EH data but an empty region tree: nothing to copy, but the
	freshly grown array slots must still point at OUTER (recorded in
	its aka set) so lookups of the new numbers resolve.  */
1310   if (!ifun->eh->region_tree)
1312       if (outer)
1313 	for (i = cfun_last_region_number + 1;
1314 	     i <= cfun->eh->last_region_number; i++)
1316 	    VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1317 	    if (outer->aka == NULL)
1318 	      outer->aka = BITMAP_GGC_ALLOC ();
1319 	    bitmap_set_bit (outer->aka, i);
1321       return eh_offset;
1324   /* Copy all the regions in the subtree.  */
1325   if (copy_region > 0)
1327       cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1328       *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
1330   else
1332       eh_region n;
1334       cur = ifun->eh->region_tree;
1335       *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1336       while (cur->next_peer)
1338 	  cur = cur->next_peer;
1339 	  n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1343   /* Remap all the labels in the new regions.  */
1344   for (i = cfun_last_region_number + 1;
1345        VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1346     if (cur && cur->tree_label)
1347       cur->tree_label = map (cur->tree_label, data);
1349   /* Remap all of the internal catch and cleanup linkages.  Since we
1350      duplicate entire subtrees, all of the referenced regions will have
1351      been copied too.  And since we renumbered them as a block, a simple
1352      bit of arithmetic finds us the index for the replacement region.  */
1353   for (i = cfun_last_region_number + 1;
1354        VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1356       /* All removed EH that is toplevel in input function is now
1357          in outer EH of output function.  */
1358       if (cur == NULL)
1360 	  gcc_assert (VEC_index
1361 		      (eh_region, ifun->eh->region_array,
1362 		       i - eh_offset) == NULL);
1363 	  if (outer)
1365 	      VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1366 	      if (outer->aka == NULL)
1367 		outer->aka = BITMAP_GGC_ALLOC ();
1368 	      bitmap_set_bit (outer->aka, i);
1370 	  continue;
	  /* Skip aliases; only remap through each region once.  */
1372       if (i != cur->region_number)
1373 	continue;
1375 #define REMAP(REG) \
1376 	(REG) = VEC_index (eh_region, cfun->eh->region_array, \
1377 			   (REG)->region_number + eh_offset)
1379       switch (cur->type)
1381 	case ERT_TRY:
1382 	  if (cur->u.eh_try.eh_catch)
1383 	    REMAP (cur->u.eh_try.eh_catch);
1384 	  if (cur->u.eh_try.last_catch)
1385 	    REMAP (cur->u.eh_try.last_catch);
1386 	  break;
1388 	case ERT_CATCH:
1389 	  if (cur->u.eh_catch.next_catch)
1390 	    REMAP (cur->u.eh_catch.next_catch);
1391 	  if (cur->u.eh_catch.prev_catch)
1392 	    REMAP (cur->u.eh_catch.prev_catch);
1393 	  break;
1395 	default:
1396 	  break;
1399 #undef REMAP
1401 #ifdef ENABLE_CHECKING
1402   verify_eh_tree (cfun);
1403 #endif
1405   return eh_offset;
1408 /* Return new copy of eh region OLD inside region NEW_OUTER.
1409    Do not care about updating the tree otherwise.  */
     /* gen_eh_region assigns the copy a fresh region number; the region
        array is grown and updated to make that number resolvable.  */
1411 static struct eh_region_d *
1412 copy_eh_region_1 (struct eh_region_d *old, struct eh_region_d *new_outer)
1414   struct eh_region_d *new_eh = gen_eh_region (old->type, new_outer);
1415   new_eh->u = old->u;
1416   new_eh->tree_label = old->tree_label;
1417   new_eh->may_contain_throw = old->may_contain_throw;
1418   VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1419 		 cfun->eh->last_region_number + 1);
1420   VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number, new_eh);
1421   if (dump_file && (dump_flags & TDF_DETAILS))
1422     fprintf (dump_file, "Copying region %i to %i\n", old->region_number, new_eh->region_number);
1423   return new_eh;
1426 /* Return new copy of eh region OLD inside region NEW_OUTER.
1428    Copy whole catch-try chain if necessary.  */
     /* Non-CATCH regions are copied directly.  For a CATCH region, the
        owning TRY and all sibling CATCHes are copied too, so the
        try/catch linkage of the copy is complete; the copy of OLD itself
        is returned.  */
1430 static struct eh_region_d *
1431 copy_eh_region (struct eh_region_d *old, struct eh_region_d *new_outer)
1433   struct eh_region_d *r, *n, *old_try, *new_try, *ret = NULL;
1434   VEC(eh_region,heap) *catch_list = NULL;
1436   if (old->type != ERT_CATCH)
1438       gcc_assert (old->type != ERT_TRY);
1439       r = copy_eh_region_1 (old, new_outer);
1440       return r;
1443   /* Locate and copy corresponding TRY.  */
1444   for (old_try = old->next_peer; old_try->type == ERT_CATCH; old_try = old_try->next_peer)
1445     continue;
1446   gcc_assert (old_try->type == ERT_TRY);
1447   new_try = gen_eh_region_try (new_outer);
1448   new_try->tree_label = old_try->tree_label;
1449   new_try->may_contain_throw = old_try->may_contain_throw;
1450   if (dump_file && (dump_flags & TDF_DETAILS))
1451     fprintf (dump_file, "Copying try-catch regions. Try: %i to %i\n",
1452 	     old_try->region_number, new_try->region_number);
1453   VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1454 		 cfun->eh->last_region_number + 1);
1455   VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number, new_try);
1457   /* In order to keep CATCH list in order, we need to copy in reverse order.  */
1458   for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH; r = r->next_peer)
1459     VEC_safe_push (eh_region, heap, catch_list, r);
1461   while (VEC_length (eh_region, catch_list))
1463       r = VEC_pop (eh_region, catch_list);
1465       /* Duplicate CATCH.  */
1466       n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
1467       n->tree_label = r->tree_label;
1468       n->may_contain_throw = r->may_contain_throw;
1469       VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1470 		     cfun->eh->last_region_number + 1);
1471       VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
	  /* NOTE(review): this assignment duplicates the one three lines
	     above; it is harmless but redundant.  */
1472       n->tree_label = r->tree_label;
1474       if (dump_file && (dump_flags & TDF_DETAILS))
1475         fprintf (dump_file, "Copying try-catch regions. Catch: %i to %i\n",
1476 		 r->region_number, n->region_number);
	  /* Remember which copy corresponds to the CATCH we were asked for.  */
1477       if (r == old)
1478 	ret = n;
1480   VEC_free (eh_region, heap, catch_list);
1481   gcc_assert (ret);
1482   return ret;
1485 /* Callback for foreach_reachable_handler that pushes REGION onto the single
        vector pointed to by DATA.  */
1487 static void
1488 push_reachable_handler (struct eh_region_d *region, void *data)
1490   VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
1491   VEC_safe_push (eh_region, heap, *trace, region);
1494 /* Redirect EH edge E to NEW_DEST_LABEL.
1495    IS_RESX, INLINABLE_CALL and REGION_NUMBER match the parameters of
1496    foreach_reachable_handler.  */
     /* If E is the only EH edge into its destination the region labels are
        updated in place; otherwise the affected part of the region tree is
        duplicated first so other edges keep their old destination.
        Returns the region the redirected edge now refers to.  */
1498 struct eh_region_d *
1499 redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
1500 			   bool inlinable_call, int region_number)
1502   struct eh_region_d *outer;
1503   struct eh_region_d *region;
1504   VEC (eh_region, heap) * trace = NULL;
1505   int i;
1506   int start_here = -1;
1507   basic_block old_bb = e->dest;
1508   struct eh_region_d *old, *r = NULL;
1509   bool update_inplace = true;
1510   edge_iterator ei;
1511   edge e2;
1513   /* If there is only one EH edge, we don't need to duplicate;
1514      just update labels in the tree.  */
1515   FOR_EACH_EDGE (e2, ei, old_bb->preds)
1516     if ((e2->flags & EDGE_EH) && e2 != e)
1518 	update_inplace = false;
1519 	break;
1522   region = VEC_index (eh_region, cfun->eh->region_array, region_number);
1523   gcc_assert (region);
     /* Collect every handler reachable from REGION_NUMBER into TRACE.  */
1525   foreach_reachable_handler (region_number, is_resx, inlinable_call,
1526 			     push_reachable_handler, &trace);
1527   if (dump_file && (dump_flags & TDF_DETAILS))
1529       dump_eh_tree (dump_file, cfun);
1530       fprintf (dump_file, "Trace: ");
1531       for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1532 	fprintf (dump_file, " %i", VEC_index (eh_region, trace, i)->region_number);
1533       fprintf (dump_file, " inplace: %i\n", update_inplace);
1536   if (update_inplace)
1538       /* In easy route just walk trace and update all occurrences of the label.  */
1539       for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1541 	  r = VEC_index (eh_region, trace, i);
1542 	  if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1544 	      r->tree_label = new_dest_label;
1545 	      if (dump_file && (dump_flags & TDF_DETAILS))
1546 		fprintf (dump_file, "Updating label for region %i\n",
1547 			 r->region_number);
1550       r = region;
1552   else
1554       /* Now look for outermost handler that refers to the basic block in question.
1555          We start our duplication there.  */
1556       for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1558 	  r = VEC_index (eh_region, trace, i);
1559 	  if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1560 	    start_here = i;
	  /* NOTE(review): START_HERE is used to index TRACE on the next
	     line but only asserted non-negative on the line after; the
	     gcc_assert should come first, otherwise a trace with no
	     matching label indexes the vector at -1 before the check
	     fires.  */
1562       outer = VEC_index (eh_region, trace, start_here)->outer;
1563       gcc_assert (start_here >= 0);
1565       /* And now do the dirty job!  */
1566       for (i = start_here; i >= 0; i--)
1568 	  old = VEC_index (eh_region, trace, i);
1569 	  gcc_assert (!outer || old->outer != outer->outer);
1571 	  /* Copy region and update label.  */
1572 	  r = copy_eh_region (old, outer);
1573 	  VEC_replace (eh_region, trace, i, r);
1574 	  if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1576 	      r->tree_label = new_dest_label;
1577 	      if (dump_file && (dump_flags & TDF_DETAILS))
1578 		fprintf (dump_file, "Updating label for region %i\n",
1579 			 r->region_number);
1582 	  /* We got into copying CATCH.  copy_eh_region already did job
1583 	     of copying all catch blocks corresponding to the try.  Now
1584 	     we need to update labels in all of them and see trace.
1586 	     We continue nesting into TRY region corresponding to CATCH:
1587 	     When duplicating EH tree containing subregions of CATCH,
1588 	     the CATCH region itself is never inserted to trace so we
1589 	     never get here anyway.  */
1590 	  if (r->type == ERT_CATCH)
1592 	      /* Walk other catch regions we copied and update labels as needed.  */
1593 	      for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
1594 		if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1596 		    r->tree_label = new_dest_label;
1597 		    if (dump_file && (dump_flags & TDF_DETAILS))
1598 		      fprintf (dump_file, "Updating label for region %i\n",
1599 			       r->region_number);
1601 	      gcc_assert (r->type == ERT_TRY);
1603 	      /* Skip sibling catch regions from the trace.
1604 	         They are already updated.  */
1605 	      while (i > 0 && VEC_index (eh_region, trace, i - 1)->outer == old->outer)
1607 		  gcc_assert (VEC_index (eh_region, trace, i - 1)->type == ERT_CATCH);
1608 		  i--;
	      /* The copy just made becomes the outer region for the next,
		 more deeply nested, iteration.  */
1612 	  outer = r;
1615 	if (is_resx || region->type == ERT_THROW)
1616 	  r = copy_eh_region (region, outer);
1619   VEC_free (eh_region, heap, trace);
1620   if (dump_file && (dump_flags & TDF_DETAILS))
1622       dump_eh_tree (dump_file, cfun);
1623       fprintf (dump_file, "New region: %i\n", r->region_number);
1625   return r;
1628 /* Return region number of region that is outer to both REGION_A and
1629    REGION_B in IFUN.  Returns -1 if there is no common outer region.  */
1632 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1634   struct eh_region_d *rp_a, *rp_b;
1635   sbitmap b_outer;
1637   gcc_assert (ifun->eh->last_region_number > 0);
1638   gcc_assert (ifun->eh->region_tree);
1640   rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1641   rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1642   gcc_assert (rp_a != NULL);
1643   gcc_assert (rp_b != NULL);
     /* Mark every region on B's outer chain (including B itself)...  */
1645   b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1646   sbitmap_zero (b_outer);
1650       SET_BIT (b_outer, rp_b->region_number);
1651       rp_b = rp_b->outer;
1653   while (rp_b);
     /* ...then walk A's outer chain and return the first region that is
	also on B's chain: the innermost common enclosing region.  */
1657       if (TEST_BIT (b_outer, rp_a->region_number))
1659 	  sbitmap_free (b_outer);
1660 	  return rp_a->region_number;
1662       rp_a = rp_a->outer;
1664   while (rp_a);
1666   sbitmap_free (b_outer);
1667   return -1;
1670 static int
1671 t2r_eq (const void *pentry, const void *pdata)
1673 const_tree const entry = (const_tree) pentry;
1674 const_tree const data = (const_tree) pdata;
1676 return TREE_PURPOSE (entry) == data;
1679 static hashval_t
1680 t2r_hash (const void *pentry)
1682 const_tree const entry = (const_tree) pentry;
1683 return TREE_HASH (TREE_PURPOSE (entry));
1686 void
1687 add_type_for_runtime (tree type)
1689 tree *slot;
1691 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1692 TREE_HASH (type), INSERT);
1693 if (*slot == NULL)
1695 tree runtime = (*lang_eh_runtime_type) (type);
1696 *slot = tree_cons (type, runtime, NULL_TREE);
1700 tree
1701 lookup_type_for_runtime (tree type)
1703 tree *slot;
1705 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1706 TREE_HASH (type), NO_INSERT);
1708 /* We should have always inserted the data earlier. */
1709 return TREE_VALUE (*slot);
1713 /* Represent an entry in @TTypes for either catch actions
1714    or exception filter actions.  */
1715 struct GTY(()) ttypes_filter {
1716   tree t;	/* Type node, or type list for ehspec entries
		   (see add_ttypes_entry / add_ehspec_entry).  */
1717   int filter;	/* Assigned filter value: a 1-based ttype table index,
		   or a negative byte index for ehspec entries.  */
1720 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1721 (a tree) for a @TTypes type node we are thinking about adding. */
1723 static int
1724 ttypes_filter_eq (const void *pentry, const void *pdata)
1726 const struct ttypes_filter *const entry
1727 = (const struct ttypes_filter *) pentry;
1728 const_tree const data = (const_tree) pdata;
1730 return entry->t == data;
1733 static hashval_t
1734 ttypes_filter_hash (const void *pentry)
1736 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1737 return TREE_HASH (entry->t);
1740 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1741 exception specification list we are thinking about adding. */
1742 /* ??? Currently we use the type lists in the order given. Someone
1743 should put these in some canonical order. */
1745 static int
1746 ehspec_filter_eq (const void *pentry, const void *pdata)
1748 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1749 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1751 return type_list_equal (entry->t, data->t);
1754 /* Hash function for exception specification lists. */
1756 static hashval_t
1757 ehspec_filter_hash (const void *pentry)
1759 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1760 hashval_t h = 0;
1761 tree list;
1763 for (list = entry->t; list ; list = TREE_CHAIN (list))
1764 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1765 return h;
1768 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1769    to speed up the search.  Return the filter value to be used.  */
1771 static int
1772 add_ttypes_entry (htab_t ttypes_hash, tree type)
1774   struct ttypes_filter **slot, *n;
1776   slot = (struct ttypes_filter **)
1777     htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
     /* First time we see TYPE: assign the next filter value and append
	the type to the ttype data table.  */
1779   if ((n = *slot) == NULL)
1781       /* Filter value is a 1 based table index.  */
1783       n = XNEW (struct ttypes_filter);
1784       n->t = type;
1785       n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
1786       *slot = n;
1788       VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
1791   return n->filter;
1794 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1795    to speed up the search.  Return the filter value to be used.  */
1797 static int
1798 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1800   struct ttypes_filter **slot, *n;
1801   struct ttypes_filter dummy;
1803   dummy.t = list;
1804   slot = (struct ttypes_filter **)
1805     htab_find_slot (ehspec_hash, &dummy, INSERT);
     /* First time we see this specification list: encode it into the
	ehspec data buffer and remember its (negative) filter value.  */
1807   if ((n = *slot) == NULL)
1809       /* Filter value is a -1 based byte index into a uleb128 buffer.  */
1811       n = XNEW (struct ttypes_filter);
1812       n->t = list;
1813       n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
1814       *slot = n;
1816       /* Generate a 0 terminated list of filter values.  */
1817       for (; list ; list = TREE_CHAIN (list))
1819 	  if (targetm.arm_eabi_unwinder)
	    /* The ARM EABI unwinder stores type nodes directly rather
	       than uleb128-encoded ttype indices.  */
1820 	    VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
1821 	  else
1823 	      /* Look up each type in the list and encode its filter
1824 		 value as a uleb128.  */
1825 	      push_uleb128 (&crtl->eh.ehspec_data,
1826 			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
     /* Terminate the list (NULL_TREE for ARM EABI, a 0 byte otherwise).  */
1829       if (targetm.arm_eabi_unwinder)
1830 	VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
1831       else
1832 	VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
1835   return n->filter;
1838 /* Generate the action filter values to be used for CATCH and
1839    ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
1840    we use lots of landing pads, and so every type or list can share
1841    the same filter value, which saves table space.  */
1843 static void
1844 assign_filter_values (void)
1846   int i;
1847   htab_t ttypes, ehspec;
1849   crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
1850   if (targetm.arm_eabi_unwinder)
1851     VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1852   else
1853     VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
     /* Temporary hash tables to deduplicate types and spec lists; freed
	at the end of this function.  */
1855   ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1856   ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1858   for (i = cfun->eh->last_region_number; i > 0; --i)
1860       struct eh_region_d *r;
1862       r = VEC_index (eh_region, cfun->eh->region_array, i);
1864       /* Mind we don't process a region more than once.  */
1865       if (!r || r->region_number != i)
1866 	continue;
1868       switch (r->type)
1870 	case ERT_CATCH:
1871 	  /* Whatever type_list is (NULL or true list), we build a list
1872 	     of filters for the region.  */
1873 	  r->u.eh_catch.filter_list = NULL_TREE;
1875 	  if (r->u.eh_catch.type_list != NULL)
1877 	      /* Get a filter value for each of the types caught and store
1878 		 them in the region's dedicated list.  */
1879 	      tree tp_node = r->u.eh_catch.type_list;
1881 	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1883 		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1884 		  tree flt_node = build_int_cst (NULL_TREE, flt);
1886 		  r->u.eh_catch.filter_list
1887 		    = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1890 	  else
1892 	      /* Get a filter value for the NULL list also since it will need
1893 		 an action record anyway.  */
1894 	      int flt = add_ttypes_entry (ttypes, NULL);
1895 	      tree flt_node = build_int_cst (NULL_TREE, flt);
1897 	      r->u.eh_catch.filter_list
1898 		= tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1901 	  break;
1903 	case ERT_ALLOWED_EXCEPTIONS:
1904 	  r->u.allowed.filter
1905 	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1906 	  break;
1908 	default:
1909 	  break;
1913   htab_delete (ttypes);
1914   htab_delete (ehspec);
1917 /* Emit SEQ into a new basic block just before INSN (that is assumed to be
1918    the first instruction of some existing BB) and return the newly
1919    produced block.  */
1920 static basic_block
1921 emit_to_new_bb_before (rtx seq, rtx insn)
1923   rtx last;
1924   basic_block bb;
1925   edge e;
1926   edge_iterator ei;
1928   /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1929      call), we don't want it to go into newly created landing pad or other EH
1930      construct.  */
1931   for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1932     if (e->flags & EDGE_FALLTHRU)
1933       force_nonfallthru (e);
1934     else
1935       ei_next (&ei);
1936   last = emit_insn_before (seq, insn);
     /* Don't let a trailing barrier become the block's last insn.  */
1937   if (BARRIER_P (last))
1938     last = PREV_INSN (last);
1939   bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1940   update_bb_for_insn (bb);
1941   bb->flags |= BB_SUPERBLOCK;
1942   return bb;
1945 /* Generate the code to actually handle exceptions, which will follow the
1946    landing pads.  */
     /* For each TRY region this emits a dispatch sequence comparing the
	incoming filter value against each catch's filter list; for
	ALLOWED_EXCEPTIONS a single compare.  A RESX marker is emitted for
	the fall-through (unhandled) case; it is resolved later by
	connect_post_landing_pads.  */
1948 static void
1949 build_post_landing_pads (void)
1951   int i;
1953   for (i = cfun->eh->last_region_number; i > 0; --i)
1955       struct eh_region_d *region;
1956       rtx seq;
1958       region = VEC_index (eh_region, cfun->eh->region_array, i);
1959       /* Mind we don't process a region more than once.  */
1960       if (!region || region->region_number != i)
1961 	continue;
1963       switch (region->type)
1965 	case ERT_TRY:
1966 	  /* It is possible that TRY region is kept alive only because some of
1967 	     contained catch region still have RESX instruction but they are
1968 	     reached via their copies.  In this case we need to do nothing.  */
1969 	  if (!region->u.eh_try.eh_catch->label)
1970 	    break;
1972 	  /* ??? Collect the set of all non-overlapping catch handlers
1973 	       all the way up the chain until blocked by a cleanup.  */
1974 	  /* ??? Outer try regions can share landing pads with inner
1975 	     try regions if the types are completely non-overlapping,
1976 	     and there are no intervening cleanups.  */
1978 	  region->post_landing_pad = gen_label_rtx ();
1980 	  start_sequence ();
1982 	  emit_label (region->post_landing_pad);
1984 	  /* ??? It is mighty inconvenient to call back into the
1985 	     switch statement generation code in expand_end_case.
1986 	     Rapid prototyping sez a sequence of ifs.  */
1988 	    struct eh_region_d *c;
1989 	    for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1991 		if (c->u.eh_catch.type_list == NULL)
		  /* A catch-all: jump unconditionally.  */
1992 		  emit_jump (c->label);
1993 		else
1995 		    /* Need for one cmp/jump per type caught. Each type
1996 		       list entry has a matching entry in the filter list
1997 		       (see assign_filter_values).  */
1998 		    tree tp_node = c->u.eh_catch.type_list;
1999 		    tree flt_node = c->u.eh_catch.filter_list;
2001 		    for (; tp_node; )
2003 			emit_cmp_and_jump_insns
2004 			  (crtl->eh.filter,
2005 			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
2006 			   EQ, NULL_RTX,
2007 			   targetm.eh_return_filter_mode (), 0, c->label);
2009 			tp_node = TREE_CHAIN (tp_node);
2010 			flt_node = TREE_CHAIN (flt_node);
2016 	  /* We delay the generation of the _Unwind_Resume until we generate
2017 	     landing pads.  We emit a marker here so as to get good control
2018 	     flow data in the meantime.  */
2019 	  gcc_assert (!region->resume);
2020 	  region->resume
2021 	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2022 	  emit_barrier ();
2024 	  seq = get_insns ();
2025 	  end_sequence ();
2027 	  emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
2029 	  break;
2031 	case ERT_ALLOWED_EXCEPTIONS:
2032 	  if (!region->label)
2033 	    break;
2034 	  region->post_landing_pad = gen_label_rtx ();
2036 	  start_sequence ();
2038 	  emit_label (region->post_landing_pad);
2040 	  emit_cmp_and_jump_insns (crtl->eh.filter,
2041 				   GEN_INT (region->u.allowed.filter),
2042 				   EQ, NULL_RTX,
2043 				   targetm.eh_return_filter_mode (), 0, region->label);
2045 	  /* We delay the generation of the _Unwind_Resume until we generate
2046 	     landing pads.  We emit a marker here so as to get good control
2047 	     flow data in the meantime.  */
2048 	  gcc_assert (!region->resume);
2049 	  region->resume
2050 	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2051 	  emit_barrier ();
2053 	  seq = get_insns ();
2054 	  end_sequence ();
2056 	  emit_to_new_bb_before (seq, region->label);
2057 	  break;
2059 	case ERT_CLEANUP:
2060 	case ERT_MUST_NOT_THROW:
	  /* These transfer control straight to the handler; the region
	     label doubles as the post-landing pad.  */
2061 	  region->post_landing_pad = region->label;
2062 	  break;
2064 	case ERT_CATCH:
2065 	case ERT_THROW:
2066 	  /* Nothing to do.  */
2067 	  break;
2069 	default:
2070 	  gcc_unreachable ();
2075 /* Replace RESX patterns with jumps to the next handler if any, or calls to
2076    _Unwind_Resume otherwise.  */
2078 static void
2079 connect_post_landing_pads (void)
2081   int i;
2083   for (i = cfun->eh->last_region_number; i > 0; --i)
2085       struct eh_region_d *region;
2086       struct eh_region_d *outer;
2087       rtx seq;
2088       rtx barrier;
2089       rtx resume_list;
2091       region = VEC_index (eh_region, cfun->eh->region_array, i);
2092       /* Mind we don't process a region more than once.  */
2093       if (!region || region->region_number != i)
2094 	continue;
2096       /* If there is no RESX, or it has been deleted by flow, there's
2097 	 nothing to fix up.  */
2098       if (! region->resume)
2099 	continue;
2101       /* Search for another landing pad in this function.  */
2102       for (outer = region->outer; outer ; outer = outer->outer)
2103 	if (outer->post_landing_pad)
2104 	  break;
     /* region->resume may be a single insn or an INSN_LIST of them;
	handle both shapes.  */
2106       for (resume_list = region->resume; resume_list;
2107 	   resume_list = (GET_CODE (resume_list) == INSN_LIST
2108 			  ? XEXP (resume_list, 1) : NULL_RTX))
2110 	  rtx resume = (GET_CODE (resume_list) == INSN_LIST
2111 			? XEXP (resume_list, 0) : resume_list);
2112 	  if (INSN_DELETED_P (resume))
2113 	    continue;
2114 	  start_sequence ();
2116 	  if (outer)
	    /* There is an enclosing handler: turn the RESX into a direct
	       jump to its post-landing pad and rebuild the CFG edge.  */
2118 	      edge e;
2119 	      basic_block src, dest;
2121 	      emit_jump (outer->post_landing_pad);
2122 	      src = BLOCK_FOR_INSN (resume);
2123 	      dest = BLOCK_FOR_INSN (outer->post_landing_pad);
2124 	      while (EDGE_COUNT (src->succs) > 0)
2125 		remove_edge (EDGE_SUCC (src, 0));
2126 	      e = make_edge (src, dest, 0);
2127 	      e->probability = REG_BR_PROB_BASE;
2128 	      e->count = src->count;
2130 	  else
2132 	      emit_library_call (unwind_resume_libfunc, LCT_THROW,
2133 				 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
2135 	      /* What we just emitted was a throwing libcall, so it got a
2136 		 barrier automatically added after it.  If the last insn in
2137 		 the libcall sequence isn't the barrier, it's because the
2138 		 target emits multiple insns for a call, and there are insns
2139 		 after the actual call insn (which are redundant and would be
2140 		 optimized away).  The barrier is inserted exactly after the
2141 		 call insn, so let's go get that and delete the insns after
2142 		 it, because below we need the barrier to be the last insn in
2143 		 the sequence.  */
2144 	      delete_insns_since (NEXT_INSN (last_call_insn ()));
2147 	  seq = get_insns ();
2148 	  end_sequence ();
2149 	  barrier = emit_insn_before (seq, resume);
2150 	  /* Avoid duplicate barrier.  */
2151 	  gcc_assert (BARRIER_P (barrier));
2152 	  delete_insn (barrier);
2153 	  delete_insn (resume);
2156       /* ??? From tree-ssa we can wind up with catch regions whose
2157 	 label is not instantiated, but whose resx is present.  Now
2158 	 that we've dealt with the resx, kill the region.  */
2159       if (region->label == NULL && region->type == ERT_CLEANUP)
2160 	remove_eh_handler (region);
     /* Build one dwarf2 landing pad per region that needs one: emit the
	landing-pad label, the target's exception/nonlocal-goto receiver
	if any, and moves of the exception pointer and filter value out of
	the EH return data registers, all in a new block falling through
	to the post-landing pad.  */
2165 static void
2166 dw2_build_landing_pads (void)
2168   int i;
2170   for (i = cfun->eh->last_region_number; i > 0; --i)
2172       struct eh_region_d *region;
2173       rtx seq;
2174       basic_block bb;
2175       edge e;
2177       region = VEC_index (eh_region, cfun->eh->region_array, i);
2178       /* Mind we don't process a region more than once.  */
2179       if (!region || region->region_number != i)
2180 	continue;
     /* Only these region kinds receive landing pads.  */
2182       if (region->type != ERT_CLEANUP
2183 	  && region->type != ERT_TRY
2184 	  && region->type != ERT_ALLOWED_EXCEPTIONS)
2185 	continue;
2187       if (!region->post_landing_pad)
2188 	continue;
2190       start_sequence ();
2192       region->landing_pad = gen_label_rtx ();
2193       emit_label (region->landing_pad);
2195 #ifdef HAVE_exception_receiver
2196       if (HAVE_exception_receiver)
2197 	emit_insn (gen_exception_receiver ());
2198       else
2199 #endif
2200 #ifdef HAVE_nonlocal_goto_receiver
2201       if (HAVE_nonlocal_goto_receiver)
2202 	emit_insn (gen_nonlocal_goto_receiver ());
2203       else
2204 #endif
2205 	{ /* Nothing */ }
2207       emit_move_insn (crtl->eh.exc_ptr,
2208 		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
2209       emit_move_insn (crtl->eh.filter,
2210 		      gen_rtx_REG (targetm.eh_return_filter_mode (),
2211 				   EH_RETURN_DATA_REGNO (1)));
2213       seq = get_insns ();
2214       end_sequence ();
2216       bb = emit_to_new_bb_before (seq, region->post_landing_pad);
2217       e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2218       e->count = bb->count;
2219       e->probability = REG_BR_PROB_BASE;
     /* Per-region bookkeeping for setjmp/longjmp exception handling,
	indexed by region number.  */
2224 struct sjlj_lp_info
2226   int directly_reachable;	/* Nonzero if some insn can throw to here
				   (set by sjlj_find_directly_reachable_regions).  */
2227   int action_index;		/* Action record index; see
				   sjlj_assign_call_site_values.  */
2228   int dispatch_index;		/* Index used by the common sjlj dispatcher.  */
2229   int call_site_index;		/* Assigned call-site value.  */
     /* Mark in LP_INFO every region that some instruction can throw to
	directly.  Returns true if at least one such region was found.  */
2232 static bool
2233 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
2235   rtx insn;
2236   bool found_one = false;
2238   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2240       struct eh_region_d *region;
2241       enum reachable_code rc;
2242       tree type_thrown;
2243       rtx note;
2245       if (! INSN_P (insn))
2246 	continue;
     /* Only insns carrying a positive REG_EH_REGION note can throw.  */
2248       note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2249       if (!note || INTVAL (XEXP (note, 0)) <= 0)
2250 	continue;
2252       region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2253       if (!region)
2254 	continue;
2256       type_thrown = NULL_TREE;
2257       if (region->type == ERT_THROW)
	/* A THROW region records what is thrown; the handler search
	   starts at its enclosing region.  */
2259 	  type_thrown = region->u.eh_throw.type;
2260 	  region = region->outer;
2263       /* Find the first containing region that might handle the exception.
2264 	 That's the landing pad to which we will transfer control.  */
2265       rc = RNL_NOT_CAUGHT;
2266       for (; region; region = region->outer)
2268 	  rc = reachable_next_level (region, type_thrown, NULL, false);
2269 	  if (rc != RNL_NOT_CAUGHT)
2270 	    break;
2272       if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2274 	  lp_info[region->region_number].directly_reachable = 1;
2275 	  found_one = true;
2279   return found_one;
/* Fill in the action, dispatch and call-site indices in LP_INFO for
   every directly reachable region, building the action record table
   as a side effect.  DISPATCH_LABEL is the single shared landing pad;
   each reachable region's landing_pad field is pointed at it.  */

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region_d *r =
	  VEC_index (eh_region, cfun->eh->region_array, i);

	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	/* Any real action chain means this function needs an LSDA.  */
	if (lp_info[i].action_index != -1)
	  crtl->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
/* Before each insn that can throw, emit a store of the active region's
   call-site value into the function context, so the runtime can tell
   which region was live.  Stores that repeat the previous value within
   an extended basic block are elided.  */

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  /* -2 is never a valid call-site value, so it forces the first store.  */
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region_d *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

      /* Calls that are known to not throw need not be marked.  */
      if (note && INTVAL (XEXP (note, 0)) <= 0)
	continue;

      if (note)
	region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
      else
	region = NULL;

      if (!region)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	this_call_site = lp_info[region->region_number].call_site_index;

      /* Skip the store if the slot already holds this value.  */
      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

/* Emit, near the start of the function, the code that initializes the
   sjlj function context (personality routine, LSDA pointer, setjmp
   buffer) and registers it with the runtime.  A second (nonzero)
   return from the setjmp transfers control to DISPATCH_LABEL.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  /* Store the LSDA label, or null when this function has no LSDA.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
    add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  /* If FUNCTION_BEG precedes any basic block, insert on the entry
     edge; otherwise emit after the note.  */
  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  /* Just remember the insn; sjlj_emit_function_exit emits the
     unregister call after it.  */
  crtl->eh.sjlj_exit_after = after;
}
/* Emit the call that unregisters this function's SjLj context with
   the runtime, at the point recorded by
   sjlj_emit_function_exit_after.  */

static void
sjlj_emit_function_exit (void)
{
  rtx seq, insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  /* If the recorded point is a label, emit after the insn that
     follows it instead.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}
/* Emit the shared landing pad: code at DISPATCH_LABEL that reloads
   the dispatch index, exception pointer and filter from the function
   context, then compares the dispatch index against each directly
   reachable region and jumps to the matching post-landing pad.  The
   first reachable region becomes the fall-through target.  */

static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (crtl->eh.exc_ptr, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  emit_move_insn (crtl->eh.filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      /* The first reachable region needs no compare; control falls
	 through to it (see BEFORE below).  */
      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       (((struct eh_region_d *)
				 VEC_index (eh_region,
					    cfun->eh->region_array, i))
				->post_landing_pad));
    }

  seq = get_insns ();
  end_sequence ();

  before = (((struct eh_region_d *)
	     VEC_index (eh_region, cfun->eh->region_array, first_reachable))
	    ->post_landing_pad);

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
/* Top-level driver for setjmp/longjmp exception handling: allocate
   the function context, assign call-site values, and emit the enter,
   dispatch and exit code.  Does nothing when no region is directly
   reachable.  */

static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  /* Region numbers run from 1 to last_region_number; slot 0 is unused.  */
  lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
/* After initial rtl generation, call back to finish generating
   exception support code.  */

static void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide detailed information (via
     reachable_handlers) on how exception control flows within the
     function for the CFG construction.  In this first pass, we can
     include type information garnered from ERT_THROW and
     ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
     in deleting unreachable handlers.  Subsequently, we will generate
     landing pads which will connect many of the handlers, and then
     type information will not be effective.  Still, this is a win
     over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer ();
  get_exception_filter ();

  /* Construct the landing pads.  */
  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  crtl->eh.built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS
      /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
    commit_edge_insertions ();

  /* Rebuild the EH edges: drop every stale EDGE_EH edge and let
     rtl_make_eh_edge recreate the correct ones per block.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (eh)
	rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}
2695 /* This section handles removing dead code for flow. */
/* Splice REGION from the region tree and replace it by REPLACE etc.
   When UPDATE_CATCH_TRY is true mind updating links from catch to try
   region.  */

static void
remove_eh_handler_and_replace (struct eh_region_d *region,
			       struct eh_region_d *replace,
			       bool update_catch_try)
{
  struct eh_region_d **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  outer = region->outer;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
	       replace);
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
	{
	  VEC_replace (eh_region, cfun->eh->region_array, i, replace);
	}
    }

  /* REPLACE inherits all of REGION's alternate numbers, plus REGION's
     own number.  */
  if (replace)
    {
      if (!replace->aka)
	replace->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_ior_into (replace->aka, region->aka);
      bitmap_set_bit (replace->aka, region->region_number);
    }

  /* NOTE(review): LAB is computed here but not used again in this
     function — looks like a leftover; confirm and remove.  */
  if (crtl->eh.built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;

  /* Unlink REGION from its parent's (or the root's) peer list.  */
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  /* Re-parent REGION's children under REPLACE (or the root) and
     prepend them to its child list.  */
  if (replace)
    pp_start = &replace->inner;
  else
    pp_start = &cfun->eh->region_tree;
  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
	p->outer = replace;
      p->outer = replace;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH
      && update_catch_try)
    {
      struct eh_region_d *eh_try, *next, *prev;

      /* The controlling TRY region follows the run of CATCH peers.  */
      for (eh_try = region->next_peer;
	   eh_try->type == ERT_CATCH;
	   eh_try = eh_try->next_peer)
	continue;
      gcc_assert (eh_try->type == ERT_TRY);

      /* Unlink REGION from the try's doubly-linked catch list; when it
	 was the only remaining catch, remove the TRY region too.  */
      next = region->u.eh_catch.next_catch;
      prev = region->u.eh_catch.prev_catch;

      if (next)
	next->u.eh_catch.prev_catch = prev;
      else
	eh_try->u.eh_try.last_catch = prev;
      if (prev)
	prev->u.eh_catch.next_catch = next;
      else
	{
	  eh_try->u.eh_try.eh_catch = next;
	  if (! next)
	    remove_eh_handler (eh_try);
	}
    }
}
/* Splice REGION from the region tree and replace it by the outer region
   etc.  */

static void
remove_eh_handler (struct eh_region_d *region)
{
  remove_eh_handler_and_replace (region, region->outer, true);
}
2804 /* Remove Eh region R that has turned out to have no code in its handler. */
2806 void
2807 remove_eh_region (int r)
2809 struct eh_region_d *region;
2811 region = VEC_index (eh_region, cfun->eh->region_array, r);
2812 remove_eh_handler (region);
2815 /* Remove Eh region R that has turned out to have no code in its handler
2816 and replace in by R2. */
2818 void
2819 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2821 struct eh_region_d *region, *region2;
2823 region = VEC_index (eh_region, cfun->eh->region_array, r);
2824 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2825 remove_eh_handler_and_replace (region, region2->outer, true);
2828 /* Invokes CALLBACK for every exception handler label. Only used by old
2829 loop hackery; should not be used by new code. */
2831 void
2832 for_each_eh_label (void (*callback) (rtx))
2834 int i;
2835 for (i = 0; i < cfun->eh->last_region_number; i++)
2837 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
2838 if (r && r->region_number == i && r->label
2839 && LABEL_P (r->label))
2840 (*callback) (r->label);
2844 /* Invoke CALLBACK for every exception region in the current function. */
2846 void
2847 for_each_eh_region (void (*callback) (struct eh_region_d *))
2849 int i, n = cfun->eh->last_region_number;
2850 for (i = 1; i <= n; ++i)
2852 struct eh_region_d *region;
2854 region = VEC_index (eh_region, cfun->eh->region_array, i);
2855 if (region)
2856 (*callback) (region);
2860 /* This section describes CFG exception edges for flow. */
/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  /* Types already known to be caught by some inner handler.  */
  tree types_caught;
  /* List of the type lists of ERT_ALLOWED_EXCEPTIONS regions seen.  */
  tree types_allowed;
  /* Invoked, with CALLBACK_DATA, on each reachable handler region.  */
  void (*callback) (struct eh_region_d *, void *);
  void *callback_data;
};
2871 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2872 base class of TYPE, is in HANDLED. */
2874 static int
2875 check_handled (tree handled, tree type)
2877 tree t;
2879 /* We can check for exact matches without front-end help. */
2880 if (! lang_eh_type_covers)
2882 for (t = handled; t ; t = TREE_CHAIN (t))
2883 if (TREE_VALUE (t) == type)
2884 return 1;
2886 else
2888 for (t = handled; t ; t = TREE_CHAIN (t))
2889 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2890 return 1;
2893 return 0;
2896 /* A subroutine of reachable_next_level. If we are collecting a list
2897 of handlers, add one. After landing pad generation, reference
2898 it instead of the handlers themselves. Further, the handlers are
2899 all wired together, so by referencing one, we've got them all.
2900 Before landing pad generation we reference each handler individually.
2902 LP_REGION contains the landing pad; REGION is the handler. */
2904 static void
2905 add_reachable_handler (struct reachable_info *info,
2906 struct eh_region_d *lp_region,
2907 struct eh_region_d *region)
2909 if (! info)
2910 return;
2912 if (crtl->eh.built_landing_pads)
2913 info->callback (lp_region, info->callback_data);
2914 else
2915 info->callback (region, info->callback_data);
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.
   MAYBE_RESX indicates whether a locally handled RESX (or an
   inlinable call that could introduce one) might target this
   region.  */

static enum reachable_code
reachable_next_level (struct eh_region_d *region, tree type_thrown,
		      struct reachable_info *info,
		      bool maybe_resx)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region_d *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    if (c->u.eh_catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.eh_catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.eh_catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.

	 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
	 only via locally handled RESX instructions.

	 When we inline a function call, we can bring in new handlers.  To
	 avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
	 assume such handlers exist for any inlinable call until inlining
	 decisions are fixed.  */

      if (maybe_resx)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      gcc_unreachable ();
      break;
    default:
      gcc_unreachable ();
    }
}
/* Invoke CALLBACK on each region reachable from REGION_NUMBER.
   IS_RESX means the insn is a RESX leaving the region rather than a
   throw inside it; INLINABLE_CALL makes must-not-throw regions count
   as reachable (see reachable_next_level).  */

void
foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
			   void (*callback) (struct eh_region_d *, void *),
			   void *callback_data)
{
  struct reachable_info info;
  struct eh_region_d *region;
  tree type_thrown;

  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  if (!region)
    return;

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      /* NOTE(review): this NULL check is redundant — REGION was
	 already tested above.  */
      if (region == NULL)
	return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.eh_throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info,
				inlinable_call || is_resx) >= RNL_CAUGHT)
	break;
      /* If we have processed one cleanup, there is no point in
	 processing any more of them.  Each cleanup will have an edge
	 to the next outer cleanup region, so the flow graph will be
	 accurate.  */
      if (region->type == ERT_CLEANUP)
	{
	  enum reachable_code code = RNL_NOT_CAUGHT;
	  region = find_prev_try (region->outer);
	  /* Continue looking for outer TRY region until we find one
	     that might catch something.  */
	  while (region
		 && (code = reachable_next_level (region, type_thrown, &info,
						  inlinable_call || is_resx))
		    == RNL_NOT_CAUGHT)
	    region = find_prev_try (region->outer);
	  if (code >= RNL_CAUGHT)
	    break;
	}
      if (region)
	region = region->outer;
    }
}
3151 /* Retrieve a list of labels of exception handlers which can be
3152 reached by a given insn. */
3154 static void
3155 arh_to_landing_pad (struct eh_region_d *region, void *data)
3157 rtx *p_handlers = (rtx *) data;
3158 if (! *p_handlers)
3159 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3162 static void
3163 arh_to_label (struct eh_region_d *region, void *data)
3165 rtx *p_handlers = (rtx *) data;
3166 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
/* Return an INSN_LIST of the handler labels (or, after landing pads
   are built, the shared landing pad) reachable from INSN, or NULL
   when INSN cannot throw.  */

rtx
reachable_handlers (rtx insn)
{
  bool is_resx = false;
  rtx handlers = NULL;
  int region_number;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      region_number = XINT (PATTERN (insn), 0);
      is_resx = true;
    }
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      /* No note, or a non-positive region number, means the insn
	 cannot throw to a handler in this function.  */
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  foreach_reachable_handler (region_number, is_resx, false,
			     (crtl->eh.built_landing_pads
			      ? arh_to_landing_pad
			      : arh_to_label),
			     &handlers);

  return handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
{
  struct eh_region_d *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  if (!region)
    return false;

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.eh_throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0,
						      inlinable_call || is_resx);
      /* Blocked (must-not-throw) means no internal handler runs.  */
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
/* Return true if INSN can throw an exception that is caught within
   this function.  Wrapper around can_throw_internal_1 that decodes
   the region number from the insn.  */

bool
can_throw_internal (const_rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);

  /* For a SEQUENCE, look at its first insn.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
{
  struct eh_region_d *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  /* No region means we cannot prove containment: assume it escapes.  */
  if (!region)
    return true;

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.eh_throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL,
			      inlinable_call || is_resx) >= RNL_CAUGHT)
      return false;

  return true;
}
/* Return true if INSN can throw an exception visible outside this
   function.  Wrapper around can_throw_external_1 that decodes the
   region number from the insn; recurses into SEQUENCEs.  */

bool
can_throw_external (const_rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx seq = PATTERN (insn);
      int i, n = XVECLEN (seq, 0);

      /* A SEQUENCE escapes if any of its members does.  */
      for (i = 0; i < n; i++)
	if (can_throw_external (XVECEXP (seq, 0, i)))
	  return true;

      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
}
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */

unsigned int
set_nothrow_function_flags (void)
{
  rtx insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  /* Scan the function body for externally visible throwers.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  /* Also scan any insns queued for epilogue delay slots.  */
  for (insn = crtl->epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  /* Only publish nothrow-ness when this body is what callers will
     actually use; also clear can_throw_external on call edges.  */
  if (crtl->nothrow
      && (cgraph_function_body_availability (cgraph_node
					     (current_function_decl))
	  >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_node (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
	e->can_throw_external = false;
      TREE_NOTHROW (current_function_decl) = 1;

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}
/* RTL pass descriptor that runs set_nothrow_function_flags.  */

struct rtl_opt_pass pass_set_nothrow_function_flags =
{
 {
  RTL_PASS,
  "nothrow",                            /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
3426 /* Various hooks for unwind library. */
/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
/* Expand __builtin_eh_return_data_regno: map the constant argument of
   EXP to the debug-info register number of the corresponding EH
   return data register, or -1 if there is none.  */

rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering used by
     the unwind/debug info.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
3469 /* Given a value extracted from the return address register or stack slot,
3470 return the actual address encoded in that value. */
rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Coerce the value to Pmode if it was expanded in some other mode
     (VOIDmode constants are left alone).  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
3500 /* Given an actual address in addr_tree, do any necessary encoding
3501 and return the value to be stored in the return address register or
3502 stack slot so the epilogue will return to that address. */
rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  /* Undo the adjustment applied by expand_builtin_extract_return_addr;
     force into a register first since plus_constant needs a
     non-constant-foldable operand here.  */
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
3519 /* Set up the epilogue with the magic bits we'll need to return to the
3520 exception handler. */
void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  /* Evaluate the stack adjustment and stash it in a pseudo that the
     epilogue (expand_eh_return) will read; reuse the existing pseudo on
     subsequent calls within the same function.  */
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  /* Likewise for the handler address.  */
  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  /* All __builtin_eh_return calls in a function jump to one shared
     label emitted by expand_eh_return in the epilogue.  */
  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
/* Emit the epilogue-side half of __builtin_eh_return: the shared
   ehr_label, restoration of the stack adjustment, and the target's
   eh_return sequence.  No-op if the builtin was never used.  */
void
expand_eh_return (void)
{
  rtx around_label;

  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  /* Normal (non-EH) returns see a zero stack adjustment.  */
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* Normal control flow skips over the EH return sequence.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (crtl->eh.ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
3591 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3592 POINTERS_EXTEND_UNSIGNED and return it. */
rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  /* Widen to the mode the unwinder passes words in, not Pmode.  */
  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}
3611 /* In the following functions, we represent entries in the action table
3612 as 1-based indices. Special cases are:
3614 0: null action record, non-null landing pad; implies cleanups
3615 -1: null action record, null landing pad; implies no action
3616 -2: no call-site entry; implies must_not_throw
3617 -3: we have yet to process outer regions
3619 Further, no special cases apply to the "next" field of the record.
3620 For next, 0 means end of list. */
/* One entry of the LSDA action table, hashed on (filter, next) by
   add_action_record so equivalent chains are emitted only once.  */
struct action_record
{
  /* 1-based byte offset of this record in crtl->eh.action_record_data.  */
  int offset;
  /* Type filter value; 0 denotes a cleanup (see collect_one_action_chain).  */
  int filter;
  /* 1-based offset of the next record in the chain; 0 means end of list
     (see the comment block above).  */
  int next;
};
3629 static int
3630 action_record_eq (const void *pentry, const void *pdata)
3632 const struct action_record *entry = (const struct action_record *) pentry;
3633 const struct action_record *data = (const struct action_record *) pdata;
3634 return entry->filter == data->filter && entry->next == data->next;
3637 static hashval_t
3638 action_record_hash (const void *pentry)
3640 const struct action_record *entry = (const struct action_record *) pentry;
3641 return entry->next * 1009 + entry->filter;
/* Return the 1-based offset of an action record with the given FILTER
   and NEXT link, creating the record and emitting its encoding into
   crtl->eh.action_record_data if no equivalent record exists in
   AR_HASH yet.  */
static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  /* Not seen before: record it and append the encoded bytes.  */
  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}
/* Build the chain of action records describing what the runtime must
   do when an exception is raised inside REGION, recursing through the
   outer regions.  Returns a 1-based action-record index, or one of
   the special values documented in the comment block above (0, -1,
   -2, -3).  */
static int
collect_one_action_chain (htab_t ar_hash, struct eh_region_d *region)
{
  struct eh_region_d *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
	{
	  if (c->u.eh_catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));

	      /* A catch-all terminates the chain: next link is 0.  */
	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.eh_catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}
/* Append a call-site record with the given LANDING_PAD and ACTION to
   crtl->eh.call_site_record and return its index biased by
   call_site_base.  */
static int
add_call_site (rtx landing_pad, int action)
{
  call_site_record record;

  record = GGC_NEW (struct call_site_record_d);
  record->landing_pad = landing_pad;
  record->action = action;

  VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);

  return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
}
3800 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3801 The new note numbers will not refer to region numbers, but
3802 instead to call site entries. */
unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  /* LAST_ACTION uses the same sentinels as collect_one_action_chain:
     -3 nothing seen yet, -2 must-not-throw, -1 no action, >= 0 an
     action-record index.  */
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  /* Nothing to do for SJLJ exceptions or when there are no regions.  */
  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return 0;

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  /* Scan the insn stream, tracking the current action/landing pad and
     emitting EH_REGION notes at each change.  */
  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region_d *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	/* For a delay-slot SEQUENCE, the note lives on the first insn.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    /* Only calls, and trapping insns under -fnon-call-exceptions,
	       can throw without an explicit region note.  */
	    if (! (CALL_P (insn)
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    /* Non-positive region numbers mark insns that cannot throw.  */
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region_d *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  /* Close the final open region, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
  return 0;
}
/* RTL pass descriptor for convert_to_eh_region_ranges.  */
struct rtl_opt_pass pass_convert_to_eh_region_ranges =
{
 {
  RTL_PASS,
  "eh_ranges",                          /* name */
  NULL,                                 /* gate */
  convert_to_eh_region_ranges,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
3948 static void
3949 push_uleb128 (varray_type *data_area, unsigned int value)
3953 unsigned char byte = value & 0x7f;
3954 value >>= 7;
3955 if (value)
3956 byte |= 0x80;
3957 VARRAY_PUSH_UCHAR (*data_area, byte);
3959 while (value);
3962 static void
3963 push_sleb128 (varray_type *data_area, int value)
3965 unsigned char byte;
3966 int more;
3970 byte = value & 0x7f;
3971 value >>= 7;
3972 more = ! ((value == 0 && (byte & 0x40) == 0)
3973 || (value == -1 && (byte & 0x40) != 0));
3974 if (more)
3975 byte |= 0x80;
3976 VARRAY_PUSH_UCHAR (*data_area, byte);
3978 while (more);
3982 #ifndef HAVE_AS_LEB128
/* Return the byte size of the call-site table dw2_output_call_site_table
   will emit: three 4-byte fields plus a uleb128 action per record.  */
static int
dw2_size_of_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	VEC_index (call_site_record, crtl->eh.call_site_record, i);
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
/* Return the byte size of the call-site table sjlj_output_call_site_table
   will emit: a uleb128 landing pad value plus a uleb128 action per
   record.  */
static int
sjlj_size_of_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	VEC_index (call_site_record, crtl->eh.call_site_record, i);
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
4017 #endif
/* Emit the DWARF2-unwind call-site table into the current section,
   one entry per record in crtl->eh.call_site_record, and advance
   call_site_base past the emitted records.  */
static void
dw2_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	VEC_index (call_site_record, crtl->eh.call_site_record, i);
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      /* LEHB/LEHE labels bracket the region's code; they are emitted
	 elsewhere using the same call_site_base + i numbering.  */
      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
				    current_function_func_begin_label,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab,
				      current_function_func_begin_label,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
			    current_function_func_begin_label,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab,
			      current_function_func_begin_label,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
/* Emit the SJLJ-exceptions call-site table: for each record, the
   landing pad value and the action, both uleb128-encoded; then advance
   call_site_base.  */
static void
sjlj_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	VEC_index (call_site_record, crtl->eh.call_site_record, i);

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
4093 #ifndef TARGET_UNWIND_INFO
4094 /* Switch to the section that should be used for exception tables. */
static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm.have_named_sections)
	{
	  int flags;

	  if (EH_TABLES_CAN_BE_READ_ONLY)
	    {
	      /* The table can be read-only unless PIC forces run-time
		 relocations of absolute/aligned pointer encodings.  */
	      int tt_format =
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	      flags = ((! flag_pic
			|| ((tt_format & 0x70) != DW_EH_PE_absptr
			    && (tt_format & 0x70) != DW_EH_PE_aligned))
		       ? 0 : SECTION_WRITE);
	    }
	  else
	    flags = SECTION_WRITE;

#ifdef HAVE_LD_EH_GC_SECTIONS
	  /* With -ffunction-sections, use a per-function table section
	     so the linker can garbage-collect it with the function.
	     Note this per-name section is deliberately NOT cached.  */
	  if (flag_function_sections)
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, NULL);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
4143 #endif
4146 /* Output a reference from an exception table to the type_info object TYPE.
4147 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
4148 the value. */
static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct varpool_node *node;

      type = lookup_type_for_runtime (type);
      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    {
	      node = varpool_node (type);
	      if (node)
		varpool_mark_needed_node (node);
	      is_public = TREE_PUBLIC (type);
	    }
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
/* Emit the language-specific data area (LSDA) for the current
   function: header, @TType base offset, call-site table, action
   records, type table and exception-specification data.  FNNAME is
   only used to pick a per-function table section.  */
void
output_function_exception_table (const char * ARG_UNUSED (fnname))
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! crtl->uses_eh_lsda)
    return;

  if (eh_personality_libfunc)
    assemble_external_libcall (eh_personality_libfunc);

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  switch_to_exception_section (fnname);
#endif

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.except_table_label (asm_out_file);

  have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      /* Iterate to a fixed point: the uleb128 size of the displacement
	 changes the amount of alignment padding, which in turn changes
	 the displacement.  */
      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
		    + (VEC_length (tree, crtl->eh.ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* The type table is emitted in reverse order, as required by the
     negative filter indexing scheme.  */
  i = VEC_length (tree, crtl->eh.ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, crtl->eh.ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  /* The ARM EABI unwinder stores trees, not raw bytes, in the
	     exception-specification data.  */
	  tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
	  output_ttype (type, tt_format, tt_format_size);
	}
      else
	dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
			     (i ? NULL : "Exception specification table"));
    }

  switch_to_section (current_function_section ());
}
/* Set FUN's statement-to-EH-region hash table to TABLE.  */
void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}
/* Return FUN's statement-to-EH-region hash table.  */
htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
4396 /* Dump EH information to OUT. */
void
dump_eh_tree (FILE * out, struct function *fun)
{
  struct eh_region_d *i;
  int depth = 0;
  /* Printable names indexed by enum eh_region_type.  */
  static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
					   "allowed_exceptions", "must_not_throw",
					   "throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  /* Pre-order walk of the region tree; DEPTH tracks nesting for
     indentation.  */
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
	       i->region_number, type_name[(int) i->type]);
      if (i->tree_label)
	{
	  fprintf (out, " tree_label:");
	  print_generic_expr (out, i->tree_label, 0);
	}
      if (i->label)
	fprintf (out, " label:%i", INSN_UID (i->label));
      if (i->landing_pad)
	{
	  fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
	  if (NOTE_P (i->landing_pad))
	    fprintf (out, " (deleted)");
	}
      if (i->post_landing_pad)
	{
	  fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
	  if (NOTE_P (i->post_landing_pad))
	    fprintf (out, " (deleted)");
	}
      if (i->resume)
	{
	  rtx resume_list = i->resume;
	  fprintf (out, " resume:");
	  while (GET_CODE (resume_list) == INSN_LIST)
	    {
	      fprintf (out, "%i,", INSN_UID (XEXP (resume_list, 0)));
	      if (NOTE_P (XEXP (resume_list, 0)))
		fprintf (out, " (deleted)");
	      resume_list = XEXP (resume_list, 1);
	    }
	  /* NOTE(review): this re-prints i->resume itself, which looks
	     questionable when i->resume is an INSN_LIST (the loop above
	     has already printed the list members) — confirm intent.  */
	  fprintf (out, " resume:%i", INSN_UID (i->resume));
	  if (NOTE_P (i->resume))
	    fprintf (out, " (deleted)");
	}
      if (i->may_contain_throw)
	fprintf (out, " may_contain_throw");
      switch (i->type)
	{
	case ERT_CLEANUP:
	  break;

	case ERT_TRY:
	  {
	    struct eh_region_d *c;
	    fprintf (out, " catch regions:");
	    for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
	      fprintf (out, " %i", c->region_number);
	  }
	  break;

	case ERT_CATCH:
	  if (i->u.eh_catch.prev_catch)
	    fprintf (out, " prev: %i",
		     i->u.eh_catch.prev_catch->region_number);
	  if (i->u.eh_catch.next_catch)
	    fprintf (out, " next %i",
		     i->u.eh_catch.next_catch->region_number);
	  fprintf (out, " type:");
	  print_generic_expr (out, i->u.eh_catch.type_list, 0);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list, 0);
	  break;

	case ERT_THROW:
	  fprintf (out, " type:");
	  print_generic_expr (out, i->u.eh_throw.type, 0);
	  break;

	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_UNKNOWN:
	  break;
	}
      if (i->aka)
	{
	  fprintf (out, " also known as:");
	  dump_bitmap (out, i->aka);
	}
      else
	fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
4523 /* Dump the EH tree for FN on stderr. */
/* Convenience wrapper for use from the debugger: dump FN's EH tree
   to stderr via dump_eh_tree.  */
void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}
4532 /* Verify EH region invariants. */
4534 static bool
4535 verify_eh_region (struct eh_region_d *region)
4537 bool found = false;
4538 if (!region)
4539 return false;
4540 switch (region->type)
4542 case ERT_TRY:
4544 struct eh_region_d *c, *prev = NULL;
4545 if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
4547 error ("Try region %i has wrong rh_catch pointer to %i",
4548 region->region_number,
4549 region->u.eh_try.eh_catch->region_number);
4550 found = true;
4552 for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4554 if (c->outer != region->outer)
4556 error
4557 ("Catch region %i has different outer region than try region %i",
4558 c->region_number, region->region_number);
4559 found = true;
4561 if (c->u.eh_catch.prev_catch != prev)
4563 error ("Catch region %i has corrupted catchlist",
4564 c->region_number);
4565 found = true;
4567 prev = c;
4569 if (prev != region->u.eh_try.last_catch)
4571 error
4572 ("Try region %i has wrong last_catch pointer to %i instead of %i",
4573 region->region_number,
4574 region->u.eh_try.last_catch->region_number,
4575 prev->region_number);
4576 found = true;
4579 break;
4580 case ERT_CATCH:
4581 if (!region->u.eh_catch.prev_catch
4582 && (!region->next_peer || region->next_peer->type != ERT_TRY))
4584 error ("Catch region %i should be followed by try", region->region_number);
4585 found = true;
4587 break;
4588 case ERT_CLEANUP:
4589 case ERT_ALLOWED_EXCEPTIONS:
4590 case ERT_MUST_NOT_THROW:
4591 case ERT_THROW:
4592 break;
4593 case ERT_UNKNOWN:
4594 gcc_unreachable ();
4596 for (region = region->inner; region; region = region->next_peer)
4597 found |= verify_eh_region (region);
4598 return found;
4601 /* Verify invariants on EH datastructures. */
/* Verify invariants of FUN's EH data: the region_array <-> region_tree
   correspondence, outer links, may_contain_throw monotonicity and
   nesting depth; then run verify_eh_region over every top-level
   region.  Dumps the tree and aborts via internal_error on failure.  */
void
verify_eh_tree (struct function *fun)
{
  struct eh_region_d *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  if (!fun->eh->region_tree)
    return;
  /* Check that every array slot holds a region whose number matches
     its index, or an alias recorded in the region's "aka" bitmap.  */
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
      {
	if (i->region_number == j)
	  count++;
	if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
	  {
	    error ("region_array is corrupted for region %i",
		   i->region_number);
	    err = true;
	  }
      }
  i = fun->eh->region_tree;

  /* Pre-order walk of the tree, mirroring dump_eh_tree's traversal.  */
  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
	{
	  error ("region_array is corrupted for region %i", i->region_number);
	  err = true;
	}
      if (i->outer != outer)
	{
	  error ("outer block of region %i is wrong", i->region_number);
	  err = true;
	}
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
	{
	  error
	    ("region %i may contain throw and is contained in region that may not",
	     i->region_number);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", i->region_number);
	  err = true;
	}
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
	outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		{
		  /* Walk complete: run the final whole-tree checks.  */
		  if (depth != -1)
		    {
		      error ("tree list ends on depth %i", depth + 1);
		      err = true;
		    }
		  if (count != nvisited)
		    {
		      error ("array does not match the region tree");
		      err = true;
		    }
		  if (!err)
		    for (i = fun->eh->region_tree; i; i = i->next_peer)
		      err |= verify_eh_region (i);

		  if (err)
		    {
		      dump_eh_tree (stderr, fun);
		      internal_error ("verify_eh_tree failed");
		    }
		  return;
		}
	      outer = i->outer;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
4698 /* Initialize unwind_resume_libfunc. */
4700 void
4701 default_init_unwind_resume_libfunc (void)
4703 /* The default c++ routines aren't actually c++ specific, so use those. */
4704 unwind_resume_libfunc =
4705 init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4706 : "_Unwind_Resume");
4710 static bool
4711 gate_handle_eh (void)
4713 return doing_eh (0);
/* Complete generation of exception handling code.  Pass execute hook:
   finishes EH generation for the current function, then cleans up the
   CFG; CLEANUP_NO_INSN_DEL keeps the instructions themselves intact.  */
static unsigned int
rest_of_handle_eh (void)
{
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;   /* No extra TODO flags.  */
}
/* Pass descriptor for RTL exception-handling generation.  The inner
   braces initialize the embedded opt_pass; field order follows that
   struct's layout.  */
struct rtl_opt_pass pass_rtl_eh =
{
 {
  RTL_PASS,
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
4744 #include "gt-except.h"