* xcoffout.h (xcoffout_source_line): Update prototype.
[official-gcc.git] / gcc / except.c
blobff45a7e6d727a310b1c891abd2e7751f1604fee2
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
79 #include "timevar.h"
80 #include "tree-flow.h"
82 /* Provide defaults for stuff that may not be defined when using
83 sjlj exceptions. */
84 #ifndef EH_RETURN_DATA_REGNO
85 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
86 #endif
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 gimple (*lang_protect_cleanup_actions) (void);
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) (tree a, tree b);
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) (tree);
98 /* A hash table of label to region number. */
100 struct GTY(()) ehl_map_entry {
101 rtx label;
102 struct eh_region_d *region;
105 static GTY(()) int call_site_base;
106 static GTY ((param_is (union tree_node)))
107 htab_t type_to_runtime_map;
109 /* Describe the SjLj_Function_Context structure. */
110 static GTY(()) tree sjlj_fc_type_node;
111 static int sjlj_fc_call_site_ofs;
112 static int sjlj_fc_data_ofs;
113 static int sjlj_fc_personality_ofs;
114 static int sjlj_fc_lsda_ofs;
115 static int sjlj_fc_jbuf_ofs;
118 struct GTY(()) call_site_record_d
120 rtx landing_pad;
121 int action;
124 static int t2r_eq (const void *, const void *);
125 static hashval_t t2r_hash (const void *);
127 static int ttypes_filter_eq (const void *, const void *);
128 static hashval_t ttypes_filter_hash (const void *);
129 static int ehspec_filter_eq (const void *, const void *);
130 static hashval_t ehspec_filter_hash (const void *);
131 static int add_ttypes_entry (htab_t, tree);
132 static int add_ehspec_entry (htab_t, htab_t, tree);
133 static void assign_filter_values (void);
134 static void build_post_landing_pads (void);
135 static void connect_post_landing_pads (void);
136 static void dw2_build_landing_pads (void);
138 struct sjlj_lp_info;
139 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
140 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
141 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
142 static void sjlj_emit_function_enter (rtx);
143 static void sjlj_emit_function_exit (void);
144 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
145 static void sjlj_build_landing_pads (void);
147 static void remove_eh_handler (struct eh_region_d *);
148 static void remove_eh_handler_and_replace (struct eh_region_d *,
149 struct eh_region_d *, bool);
151 /* The return value of reachable_next_level. */
152 enum reachable_code
154 /* The given exception is not processed by the given region. */
155 RNL_NOT_CAUGHT,
156 /* The given exception may need processing by the given region. */
157 RNL_MAYBE_CAUGHT,
158 /* The given exception is completely processed by the given region. */
159 RNL_CAUGHT,
160 /* The given exception is completely processed by the runtime. */
161 RNL_BLOCKED
164 struct reachable_info;
165 static enum reachable_code reachable_next_level (struct eh_region_d *, tree,
166 struct reachable_info *, bool);
168 static int action_record_eq (const void *, const void *);
169 static hashval_t action_record_hash (const void *);
170 static int add_action_record (htab_t, int, int);
171 static int collect_one_action_chain (htab_t, struct eh_region_d *);
172 static int add_call_site (rtx, int);
174 static void push_uleb128 (varray_type *, unsigned int);
175 static void push_sleb128 (varray_type *, int);
176 #ifndef HAVE_AS_LEB128
177 static int dw2_size_of_call_site_table (void);
178 static int sjlj_size_of_call_site_table (void);
179 #endif
180 static void dw2_output_call_site_table (void);
181 static void sjlj_output_call_site_table (void);
184 /* Routine to see if exception handling is turned on.
185 DO_WARN is nonzero if we want to inform the user that exception
186 handling is turned off.
188 This is used to ensure that -fexceptions has been specified if the
189 compiler tries to use any exception-specific functions. */
192 doing_eh (int do_warn)
194 if (! flag_exceptions)
196 static int warned = 0;
197 if (! warned && do_warn)
199 error ("exception handling disabled, use -fexceptions to enable");
200 warned = 1;
202 return 0;
204 return 1;
208 void
209 init_eh (void)
211 if (! flag_exceptions)
212 return;
214 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
216 /* Create the SjLj_Function_Context structure. This should match
217 the definition in unwind-sjlj.c. */
218 if (USING_SJLJ_EXCEPTIONS)
220 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
222 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
224 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
225 build_pointer_type (sjlj_fc_type_node));
226 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
228 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
229 integer_type_node);
230 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
232 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
233 tmp = build_array_type (lang_hooks.types.type_for_mode
234 (targetm.unwind_word_mode (), 1),
235 tmp);
236 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
237 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
239 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
240 ptr_type_node);
241 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
243 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
244 ptr_type_node);
245 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
247 #ifdef DONT_USE_BUILTIN_SETJMP
248 #ifdef JMP_BUF_SIZE
249 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
250 #else
251 /* Should be large enough for most systems, if it is not,
252 JMP_BUF_SIZE should be defined with the proper value. It will
253 also tend to be larger than necessary for most systems, a more
254 optimal port will define JMP_BUF_SIZE. */
255 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
256 #endif
257 #else
258 /* builtin_setjmp takes a pointer to 5 words. */
259 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
260 #endif
261 tmp = build_index_type (tmp);
262 tmp = build_array_type (ptr_type_node, tmp);
263 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
264 #ifdef DONT_USE_BUILTIN_SETJMP
265 /* We don't know what the alignment requirements of the
266 runtime's jmp_buf has. Overestimate. */
267 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
268 DECL_USER_ALIGN (f_jbuf) = 1;
269 #endif
270 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
272 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
273 TREE_CHAIN (f_prev) = f_cs;
274 TREE_CHAIN (f_cs) = f_data;
275 TREE_CHAIN (f_data) = f_per;
276 TREE_CHAIN (f_per) = f_lsda;
277 TREE_CHAIN (f_lsda) = f_jbuf;
279 layout_type (sjlj_fc_type_node);
281 /* Cache the interesting field offsets so that we have
282 easy access from rtl. */
283 sjlj_fc_call_site_ofs
284 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
285 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
286 sjlj_fc_data_ofs
287 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
288 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
289 sjlj_fc_personality_ofs
290 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
291 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
292 sjlj_fc_lsda_ofs
293 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
294 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
295 sjlj_fc_jbuf_ofs
296 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
297 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
301 void
302 init_eh_for_function (void)
304 cfun->eh = GGC_CNEW (struct eh_status);
307 /* Routines to generate the exception tree somewhat directly.
308 These are used from tree-eh.c when processing exception related
309 nodes during tree optimization. */
311 static struct eh_region_d *
312 gen_eh_region (enum eh_region_type type, struct eh_region_d *outer)
314 struct eh_region_d *new_eh;
316 #ifdef ENABLE_CHECKING
317 gcc_assert (doing_eh (0));
318 #endif
320 /* Insert a new blank region as a leaf in the tree. */
321 new_eh = GGC_CNEW (struct eh_region_d);
322 new_eh->type = type;
323 new_eh->outer = outer;
324 if (outer)
326 new_eh->next_peer = outer->inner;
327 outer->inner = new_eh;
329 else
331 new_eh->next_peer = cfun->eh->region_tree;
332 cfun->eh->region_tree = new_eh;
335 new_eh->region_number = ++cfun->eh->last_region_number;
337 return new_eh;
340 struct eh_region_d *
341 gen_eh_region_cleanup (struct eh_region_d *outer)
343 struct eh_region_d *cleanup = gen_eh_region (ERT_CLEANUP, outer);
344 return cleanup;
347 struct eh_region_d *
348 gen_eh_region_try (struct eh_region_d *outer)
350 return gen_eh_region (ERT_TRY, outer);
353 struct eh_region_d *
354 gen_eh_region_catch (struct eh_region_d *t, tree type_or_list)
356 struct eh_region_d *c, *l;
357 tree type_list, type_node;
359 /* Ensure to always end up with a type list to normalize further
360 processing, then register each type against the runtime types map. */
361 type_list = type_or_list;
362 if (type_or_list)
364 if (TREE_CODE (type_or_list) != TREE_LIST)
365 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
367 type_node = type_list;
368 for (; type_node; type_node = TREE_CHAIN (type_node))
369 add_type_for_runtime (TREE_VALUE (type_node));
372 c = gen_eh_region (ERT_CATCH, t->outer);
373 c->u.eh_catch.type_list = type_list;
374 l = t->u.eh_try.last_catch;
375 c->u.eh_catch.prev_catch = l;
376 if (l)
377 l->u.eh_catch.next_catch = c;
378 else
379 t->u.eh_try.eh_catch = c;
380 t->u.eh_try.last_catch = c;
382 return c;
385 struct eh_region_d *
386 gen_eh_region_allowed (struct eh_region_d *outer, tree allowed)
388 struct eh_region_d *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
389 region->u.allowed.type_list = allowed;
391 for (; allowed ; allowed = TREE_CHAIN (allowed))
392 add_type_for_runtime (TREE_VALUE (allowed));
394 return region;
397 struct eh_region_d *
398 gen_eh_region_must_not_throw (struct eh_region_d *outer)
400 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
404 get_eh_region_number (struct eh_region_d *region)
406 return region->region_number;
409 bool
410 get_eh_region_may_contain_throw (struct eh_region_d *region)
412 return region->may_contain_throw;
415 tree
416 get_eh_region_tree_label (struct eh_region_d *region)
418 return region->tree_label;
421 tree
422 get_eh_region_no_tree_label (int region)
424 return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
427 void
428 set_eh_region_tree_label (struct eh_region_d *region, tree lab)
430 region->tree_label = lab;
433 void
434 expand_resx_expr (tree exp)
436 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
437 struct eh_region_d *reg = VEC_index (eh_region,
438 cfun->eh->region_array, region_nr);
440 gcc_assert (!reg->resume);
441 do_pending_stack_adjust ();
442 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
443 emit_barrier ();
446 /* Note that the current EH region (if any) may contain a throw, or a
447 call to a function which itself may contain a throw. */
449 void
450 note_eh_region_may_contain_throw (struct eh_region_d *region)
452 while (region && !region->may_contain_throw)
454 region->may_contain_throw = 1;
455 region = region->outer;
460 /* Return an rtl expression for a pointer to the exception object
461 within a handler. */
464 get_exception_pointer (void)
466 if (! crtl->eh.exc_ptr)
467 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
468 return crtl->eh.exc_ptr;
471 /* Return an rtl expression for the exception dispatch filter
472 within a handler. */
475 get_exception_filter (void)
477 if (! crtl->eh.filter)
478 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
479 return crtl->eh.filter;
482 /* This section is for the exception handling specific optimization pass. */
484 /* Random access the exception region tree. */
486 void
487 collect_eh_region_array (void)
489 struct eh_region_d *i;
491 i = cfun->eh->region_tree;
492 if (! i)
493 return;
495 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
496 cfun->eh->last_region_number + 1);
497 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
499 while (1)
501 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
503 /* If there are sub-regions, process them. */
504 if (i->inner)
505 i = i->inner;
506 /* If there are peers, process them. */
507 else if (i->next_peer)
508 i = i->next_peer;
509 /* Otherwise, step back up the tree to the next peer. */
510 else
512 do {
513 i = i->outer;
514 if (i == NULL)
515 return;
516 } while (i->next_peer == NULL);
517 i = i->next_peer;
522 /* R is MUST_NOT_THROW region that is not reachable via local
523 RESX instructions. It still must be kept in the tree in case runtime
524 can unwind through it, or we will eliminate out terminate call
525 runtime would do otherwise. Return TRUE if R contains throwing statements
526 or some of the exceptions in inner regions can be unwound up to R.
528 CONTAINS_STMT is bitmap of all regions that contains some throwing
529 statements.
531 Function looks O(^3) at first sight. In fact the function is called at most
532 once for every MUST_NOT_THROW in EH tree from remove_unreachable_regions
533 Because the outer loop walking subregions does not dive in MUST_NOT_THROW,
534 the outer loop examines every region at most once. The inner loop
535 is doing unwinding from the throwing statement same way as we do during
536 CFG construction, so it is O(^2) in size of EH tree, but O(n) in size
537 of CFG. In practice Eh trees are wide, not deep, so this is not
538 a problem. */
540 static bool
541 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region_d *r)
543 struct eh_region_d *i = r->inner;
544 unsigned n;
545 bitmap_iterator bi;
547 if (TEST_BIT (contains_stmt, r->region_number))
548 return true;
549 if (r->aka)
550 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
551 if (TEST_BIT (contains_stmt, n))
552 return true;
553 if (!i)
554 return false;
555 while (1)
557 /* It is pointless to look into MUST_NOT_THROW
558 or dive into subregions. They never unwind up. */
559 if (i->type != ERT_MUST_NOT_THROW)
561 bool found = TEST_BIT (contains_stmt, i->region_number);
562 if (!found)
563 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
564 if (TEST_BIT (contains_stmt, n))
566 found = true;
567 break;
569 /* We have nested region that contains throwing statement.
570 See if resuming might lead up to the resx or we get locally
571 caught sooner. If we get locally caught sooner, we either
572 know region R is not reachable or it would have direct edge
573 from the EH resx and thus consider region reachable at
574 firest place. */
575 if (found)
577 struct eh_region_d *i1 = i;
578 tree type_thrown = NULL_TREE;
580 if (i1->type == ERT_THROW)
582 type_thrown = i1->u.eh_throw.type;
583 i1 = i1->outer;
585 for (; i1 != r; i1 = i1->outer)
586 if (reachable_next_level (i1, type_thrown, NULL,
587 false) >= RNL_CAUGHT)
588 break;
589 if (i1 == r)
590 return true;
593 /* If there are sub-regions, process them. */
594 if (i->type != ERT_MUST_NOT_THROW && i->inner)
595 i = i->inner;
596 /* If there are peers, process them. */
597 else if (i->next_peer)
598 i = i->next_peer;
599 /* Otherwise, step back up the tree to the next peer. */
600 else
604 i = i->outer;
605 if (i == r)
606 return false;
608 while (i->next_peer == NULL);
609 i = i->next_peer;
614 /* Bring region R to the root of tree. */
616 static void
617 bring_to_root (struct eh_region_d *r)
619 struct eh_region_d **pp;
620 struct eh_region_d *outer = r->outer;
621 if (!r->outer)
622 return;
623 for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
624 continue;
625 *pp = r->next_peer;
626 r->outer = NULL;
627 r->next_peer = cfun->eh->region_tree;
628 cfun->eh->region_tree = r;
631 /* Return true if region R2 can be replaced by R1. */
633 static bool
634 eh_region_replaceable_by_p (const struct eh_region_d *r1,
635 const struct eh_region_d *r2)
637 /* Regions are semantically same if they are of same type,
638 have same label and type. */
639 if (r1->type != r2->type)
640 return false;
641 if (r1->tree_label != r2->tree_label)
642 return false;
644 /* Verify that also region type dependent data are the same. */
645 switch (r1->type)
647 case ERT_MUST_NOT_THROW:
648 case ERT_CLEANUP:
649 break;
650 case ERT_TRY:
652 struct eh_region_d *c1, *c2;
653 for (c1 = r1->u.eh_try.eh_catch,
654 c2 = r2->u.eh_try.eh_catch;
655 c1 && c2;
656 c1 = c1->u.eh_catch.next_catch,
657 c2 = c2->u.eh_catch.next_catch)
658 if (!eh_region_replaceable_by_p (c1, c2))
659 return false;
660 if (c1 || c2)
661 return false;
663 break;
664 case ERT_CATCH:
665 if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
666 return false;
667 if (!list_equal_p (r1->u.eh_catch.filter_list,
668 r2->u.eh_catch.filter_list))
669 return false;
670 break;
671 case ERT_ALLOWED_EXCEPTIONS:
672 if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
673 return false;
674 if (r1->u.allowed.filter != r2->u.allowed.filter)
675 return false;
676 break;
677 case ERT_THROW:
678 if (r1->u.eh_throw.type != r2->u.eh_throw.type)
679 return false;
680 break;
681 default:
682 gcc_unreachable ();
684 if (dump_file && (dump_flags & TDF_DETAILS))
685 fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
686 r2->region_number);
687 return true;
690 /* Replace region R2 by R1. */
692 static void
693 replace_region (struct eh_region_d *r1, struct eh_region_d *r2)
695 struct eh_region_d *next1 = r1->u.eh_try.eh_catch;
696 struct eh_region_d *next2 = r2->u.eh_try.eh_catch;
697 bool is_try = r1->type == ERT_TRY;
699 gcc_assert (r1->type != ERT_CATCH);
700 remove_eh_handler_and_replace (r2, r1, false);
701 if (is_try)
703 while (next1)
705 r1 = next1;
706 r2 = next2;
707 gcc_assert (next1->type == ERT_CATCH);
708 gcc_assert (next2->type == ERT_CATCH);
709 next1 = next1->u.eh_catch.next_catch;
710 next2 = next2->u.eh_catch.next_catch;
711 remove_eh_handler_and_replace (r2, r1, false);
716 /* Return hash value of type list T. */
718 static hashval_t
719 hash_type_list (tree t)
721 hashval_t val = 0;
722 for (; t; t = TREE_CHAIN (t))
723 val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
724 return val;
727 /* Hash EH regions so semantically same regions get same hash value. */
729 static hashval_t
730 hash_eh_region (const void *r)
732 const struct eh_region_d *region = (const struct eh_region_d *) r;
733 hashval_t val = region->type;
735 if (region->tree_label)
736 val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
737 switch (region->type)
739 case ERT_MUST_NOT_THROW:
740 case ERT_CLEANUP:
741 break;
742 case ERT_TRY:
744 struct eh_region_d *c;
745 for (c = region->u.eh_try.eh_catch;
746 c; c = c->u.eh_catch.next_catch)
747 val = iterative_hash_hashval_t (hash_eh_region (c), val);
749 break;
750 case ERT_CATCH:
751 val = iterative_hash_hashval_t (hash_type_list
752 (region->u.eh_catch.type_list), val);
753 break;
754 case ERT_ALLOWED_EXCEPTIONS:
755 val = iterative_hash_hashval_t
756 (hash_type_list (region->u.allowed.type_list), val);
757 val = iterative_hash_hashval_t (region->u.allowed.filter, val);
758 break;
759 case ERT_THROW:
760 val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
761 break;
762 default:
763 gcc_unreachable ();
765 return val;
768 /* Return true if regions R1 and R2 are equal. */
770 static int
771 eh_regions_equal_p (const void *r1, const void *r2)
773 return eh_region_replaceable_by_p ((const struct eh_region_d *) r1,
774 (const struct eh_region_d *) r2);
777 /* Walk all peers of REGION and try to merge those regions
778 that are semantically equivalent. Look into subregions
779 recursively too. */
781 static bool
782 merge_peers (struct eh_region_d *region)
784 struct eh_region_d *r1, *r2, *outer = NULL, *next;
785 bool merged = false;
786 int num_regions = 0;
787 if (region)
788 outer = region->outer;
789 else
790 return false;
792 /* First see if there is inner region equivalent to region
793 in question. EH control flow is acyclic so we know we
794 can merge them. */
795 if (outer)
796 for (r1 = region; r1; r1 = next)
798 next = r1->next_peer;
799 if (r1->type == ERT_CATCH)
800 continue;
801 if (eh_region_replaceable_by_p (r1->outer, r1))
803 replace_region (r1->outer, r1);
804 merged = true;
806 else
807 num_regions ++;
810 /* Get new first region and try to match the peers
811 for equivalence. */
812 if (outer)
813 region = outer->inner;
814 else
815 region = cfun->eh->region_tree;
817 /* There are few regions to inspect:
818 N^2 loop matching each region with each region
819 will do the job well. */
820 if (num_regions < 10)
822 for (r1 = region; r1; r1 = r1->next_peer)
824 if (r1->type == ERT_CATCH)
825 continue;
826 for (r2 = r1->next_peer; r2; r2 = next)
828 next = r2->next_peer;
829 if (eh_region_replaceable_by_p (r1, r2))
831 replace_region (r1, r2);
832 merged = true;
837 /* Or use hashtable to avoid N^2 behaviour. */
838 else
840 htab_t hash;
841 hash = htab_create (num_regions, hash_eh_region,
842 eh_regions_equal_p, NULL);
843 for (r1 = region; r1; r1 = next)
845 void **slot;
847 next = r1->next_peer;
848 if (r1->type == ERT_CATCH)
849 continue;
850 slot = htab_find_slot (hash, r1, INSERT);
851 if (!*slot)
852 *slot = r1;
853 else
854 replace_region ((struct eh_region_d *) *slot, r1);
856 htab_delete (hash);
858 for (r1 = region; r1; r1 = r1->next_peer)
859 merged |= merge_peers (r1->inner);
860 return merged;
863 /* Remove all regions whose labels are not reachable.
864 REACHABLE is bitmap of all regions that are used by the function
865 CONTAINS_STMT is bitmap of all regions that contains stmt (or NULL). */
867 void
868 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
870 int i;
871 struct eh_region_d *r;
872 VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
873 struct eh_region_d *local_must_not_throw = NULL;
874 struct eh_region_d *first_must_not_throw = NULL;
876 for (i = cfun->eh->last_region_number; i > 0; --i)
878 r = VEC_index (eh_region, cfun->eh->region_array, i);
879 if (!r || r->region_number != i)
880 continue;
881 if (!TEST_BIT (reachable, i) && !r->resume)
883 bool kill_it = true;
885 r->tree_label = NULL;
886 switch (r->type)
888 case ERT_THROW:
889 /* Don't remove ERT_THROW regions if their outer region
890 is reachable. */
891 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
892 kill_it = false;
893 break;
894 case ERT_MUST_NOT_THROW:
895 /* MUST_NOT_THROW regions are implementable solely in the
896 runtime, but we need them when inlining function.
898 Keep them if outer region is not MUST_NOT_THROW a well
899 and if they contain some statement that might unwind through
900 them. */
901 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
902 && (!contains_stmt
903 || can_be_reached_by_runtime (contains_stmt, r)))
904 kill_it = false;
905 break;
906 case ERT_TRY:
908 /* TRY regions are reachable if any of its CATCH regions
909 are reachable. */
910 struct eh_region_d *c;
911 for (c = r->u.eh_try.eh_catch; c;
912 c = c->u.eh_catch.next_catch)
913 if (TEST_BIT (reachable, c->region_number))
915 kill_it = false;
916 break;
918 break;
921 default:
922 break;
925 if (kill_it)
927 if (dump_file)
928 fprintf (dump_file, "Removing unreachable eh region %i\n",
929 r->region_number);
930 remove_eh_handler (r);
932 else if (r->type == ERT_MUST_NOT_THROW)
934 if (!first_must_not_throw)
935 first_must_not_throw = r;
936 VEC_safe_push (eh_region, heap, must_not_throws, r);
939 else
940 if (r->type == ERT_MUST_NOT_THROW)
942 if (!local_must_not_throw)
943 local_must_not_throw = r;
944 if (r->outer)
945 VEC_safe_push (eh_region, heap, must_not_throws, r);
949 /* MUST_NOT_THROW regions without local handler are all the same; they
950 trigger terminate call in runtime.
951 MUST_NOT_THROW handled locally can differ in debug info associated
952 to std::terminate () call or if one is coming from Java and other
953 from C++ whether they call terminate or abort.
955 We merge all MUST_NOT_THROW regions handled by the run-time into one.
956 We alsobring all local MUST_NOT_THROW regions to the roots of EH tree
957 (since unwinding never continues to the outer region anyway).
958 If MUST_NOT_THROW with local handler is present in the tree, we use
959 that region to merge into, since it will remain in tree anyway;
960 otherwise we use first MUST_NOT_THROW.
962 Merging of locally handled regions needs changes to the CFG. Crossjumping
963 should take care of this, by looking at the actual code and
964 ensuring that the cleanup actions are really the same. */
966 if (local_must_not_throw)
967 first_must_not_throw = local_must_not_throw;
969 for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
971 if (!r->label && !r->tree_label && r != first_must_not_throw)
973 if (dump_file)
974 fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
975 r->region_number,
976 first_must_not_throw->region_number);
977 remove_eh_handler_and_replace (r, first_must_not_throw, false);
978 first_must_not_throw->may_contain_throw |= r->may_contain_throw;
980 else
981 bring_to_root (r);
983 merge_peers (cfun->eh->region_tree);
984 #ifdef ENABLE_CHECKING
985 verify_eh_tree (cfun);
986 #endif
987 VEC_free (eh_region, heap, must_not_throws);
990 /* Return array mapping LABEL_DECL_UID to region such that region's tree_label
991 is identical to label. */
993 VEC (int, heap) *
994 label_to_region_map (void)
996 VEC (int, heap) * label_to_region = NULL;
997 int i;
998 int idx;
1000 VEC_safe_grow_cleared (int, heap, label_to_region,
1001 cfun->cfg->last_label_uid + 1);
1002 for (i = cfun->eh->last_region_number; i > 0; --i)
1004 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
1005 if (r && r->region_number == i
1006 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
1008 if ((idx = VEC_index (int, label_to_region,
1009 LABEL_DECL_UID (r->tree_label))) != 0)
1010 r->next_region_sharing_label =
1011 VEC_index (eh_region, cfun->eh->region_array, idx);
1012 else
1013 r->next_region_sharing_label = NULL;
1014 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
1018 return label_to_region;
1021 /* Return number of EH regions. */
1023 num_eh_regions (void)
1025 return cfun->eh->last_region_number + 1;
1028 /* Return next region sharing same label as REGION. */
1031 get_next_region_sharing_label (int region)
1033 struct eh_region_d *r;
1034 if (!region)
1035 return 0;
1036 r = VEC_index (eh_region, cfun->eh->region_array, region);
1037 if (!r || !r->next_region_sharing_label)
1038 return 0;
1039 return r->next_region_sharing_label->region_number;
1042 /* Return bitmap of all labels that are handlers of must not throw regions. */
1044 bitmap
1045 must_not_throw_labels (void)
1047 struct eh_region_d *i;
1048 bitmap labels = BITMAP_ALLOC (NULL);
1050 i = cfun->eh->region_tree;
1051 if (! i)
1052 return labels;
1054 while (1)
1056 if (i->type == ERT_MUST_NOT_THROW && i->tree_label
1057 && LABEL_DECL_UID (i->tree_label) >= 0)
1058 bitmap_set_bit (labels, LABEL_DECL_UID (i->tree_label));
1060 /* If there are sub-regions, process them. */
1061 if (i->inner)
1062 i = i->inner;
1063 /* If there are peers, process them. */
1064 else if (i->next_peer)
1065 i = i->next_peer;
1066 /* Otherwise, step back up the tree to the next peer. */
1067 else
1069 do {
1070 i = i->outer;
1071 if (i == NULL)
1072 return labels;
1073 } while (i->next_peer == NULL);
1074 i = i->next_peer;
1079 /* Set up EH labels for RTL. */
1081 void
1082 convert_from_eh_region_ranges (void)
1084 int i, n = cfun->eh->last_region_number;
1086 /* Most of the work is already done at the tree level. All we need to
1087 do is collect the rtl labels that correspond to the tree labels that
1088 collect the rtl labels that correspond to the tree labels
1089 we allocated earlier. */
1090 for (i = 1; i <= n; ++i)
1092 struct eh_region_d *region;
1094 region = VEC_index (eh_region, cfun->eh->region_array, i);
1095 if (region && region->tree_label)
1096 region->label = DECL_RTL_IF_SET (region->tree_label);
1100 void
1101 find_exception_handler_labels (void)
1103 int i;
1105 if (cfun->eh->region_tree == NULL)
1106 return;
1108 for (i = cfun->eh->last_region_number; i > 0; --i)
1110 struct eh_region_d *region;
1111 rtx lab;
1113 region = VEC_index (eh_region, cfun->eh->region_array, i);
1114 if (! region || region->region_number != i)
1115 continue;
1116 if (crtl->eh.built_landing_pads)
1117 lab = region->landing_pad;
1118 else
1119 lab = region->label;
1123 /* Returns true if the current function has exception handling regions. */
1125 bool
1126 current_function_has_exception_handlers (void)
1128 int i;
1130 for (i = cfun->eh->last_region_number; i > 0; --i)
1132 struct eh_region_d *region;
1134 region = VEC_index (eh_region, cfun->eh->region_array, i);
1135 if (region
1136 && region->region_number == i
1137 && region->type != ERT_THROW)
1138 return true;
1141 return false;
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  /* The aka bitmap carries additional region numbers mapped to this
     region; they must be inside the copied range too.  */
  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
	*min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
	*max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  /* Recurse into the first child, then iterate over its peers.  */
  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
	{
	  o = o->next_peer;
	  duplicate_eh_regions_0 (o, min, max);
	}
    }
}
/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region_d);

  /* Structure copy, then fix up the tree linkage.  */
  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      /* Shift every alias number by EH_OFFSET and point its array slot
	 at the copy.  */
      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
	{
	  bitmap_set_bit (n->aka, i + eh_offset);
	  VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
	}
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  /* Recursively copy the first child, then its peers.  */
  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
	{
	  old = old->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
	}
    }

  return ret;
}
1222 /* Look for first outer region of R (or R itself) that is
1223 TRY region. Return NULL if none. */
1225 static struct eh_region_d *
1226 find_prev_try (struct eh_region_d * r)
1228 for (; r && r->type != ERT_TRY; r = r->outer)
1229 if (r->type == ERT_MUST_NOT_THROW
1230 || (r->type == ERT_ALLOWED_EXCEPTIONS
1231 && !r->u.allowed.type_list))
1233 r = NULL;
1234 break;
1236 return r;
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
   function and root the tree below OUTER_REGION.  Remap labels using MAP
   callback.  The special case of COPY_REGION of 0 means all regions.
   Returns the offset added to every copied region number.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
		      void *data, int copy_region, int outer_region)
{
  eh_region cur, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh)
    return 0;
#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    {
      min_region = 1;
      max_region = ifun->eh->last_region_number;
    }
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
			 cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
	splice = &outer->inner;
      else
	splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  /* Nothing to copy: just alias the reserved numbers to OUTER so the
     offset stays valid for callers.  */
  if (!ifun->eh->region_tree)
    {
      if (outer)
	for (i = cfun_last_region_number + 1;
	     i <= cfun->eh->last_region_number; i++)
	  {
	    VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	    if (outer->aka == NULL)
	      outer->aka = BITMAP_GGC_ALLOC ();
	    bitmap_set_bit (outer->aka, i);
	  }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
	{
	  cur = cur->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
	}
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH that is toplevel in input function is now
	 in outer EH of output function.  */
      if (cur == NULL)
	{
	  gcc_assert (VEC_index
		      (eh_region, ifun->eh->region_array,
		       i - eh_offset) == NULL);
	  if (outer)
	    {
	      VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	      if (outer->aka == NULL)
		outer->aka = BITMAP_GGC_ALLOC ();
	      bitmap_set_bit (outer->aka, i);
	    }
	  continue;
	}
      /* Skip alias slots; only the canonical entry carries linkage.  */
      if (i != cur->region_number)
	continue;

#define REMAP(REG) \
	(REG) = VEC_index (eh_region, cfun->eh->region_array, \
			   (REG)->region_number + eh_offset)

      switch (cur->type)
	{
	case ERT_TRY:
	  if (cur->u.eh_try.eh_catch)
	    REMAP (cur->u.eh_try.eh_catch);
	  if (cur->u.eh_try.last_catch)
	    REMAP (cur->u.eh_try.last_catch);
	  break;

	case ERT_CATCH:
	  if (cur->u.eh_catch.next_catch)
	    REMAP (cur->u.eh_catch.next_catch);
	  if (cur->u.eh_catch.prev_catch)
	    REMAP (cur->u.eh_catch.prev_catch);
	  break;

	default:
	  break;
	}

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}
1398 /* Return new copy of eh region OLD inside region NEW_OUTER.
1399 Do not care about updating the tree otherwise. */
1401 static struct eh_region_d *
1402 copy_eh_region_1 (struct eh_region_d *old, struct eh_region_d *new_outer)
1404 struct eh_region_d *new_eh = gen_eh_region (old->type, new_outer);
1405 new_eh->u = old->u;
1406 new_eh->tree_label = old->tree_label;
1407 new_eh->may_contain_throw = old->may_contain_throw;
1408 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1409 cfun->eh->last_region_number + 1);
1410 VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number, new_eh);
1411 if (dump_file && (dump_flags & TDF_DETAILS))
1412 fprintf (dump_file, "Copying region %i to %i\n", old->region_number, new_eh->region_number);
1413 return new_eh;
1416 /* Return new copy of eh region OLD inside region NEW_OUTER.
1418 Copy whole catch-try chain if neccesary. */
1420 static struct eh_region_d *
1421 copy_eh_region (struct eh_region_d *old, struct eh_region_d *new_outer)
1423 struct eh_region_d *r, *n, *old_try, *new_try, *ret = NULL;
1424 VEC(eh_region,heap) *catch_list = NULL;
1426 if (old->type != ERT_CATCH)
1428 gcc_assert (old->type != ERT_TRY);
1429 r = copy_eh_region_1 (old, new_outer);
1430 return r;
1433 /* Locate and copy corresponding TRY. */
1434 for (old_try = old->next_peer; old_try->type == ERT_CATCH; old_try = old_try->next_peer)
1435 continue;
1436 gcc_assert (old_try->type == ERT_TRY);
1437 new_try = gen_eh_region_try (new_outer);
1438 new_try->tree_label = old_try->tree_label;
1439 new_try->may_contain_throw = old_try->may_contain_throw;
1440 if (dump_file && (dump_flags & TDF_DETAILS))
1441 fprintf (dump_file, "Copying try-catch regions. Try: %i to %i\n",
1442 old_try->region_number, new_try->region_number);
1443 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1444 cfun->eh->last_region_number + 1);
1445 VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number, new_try);
1447 /* In order to keep CATCH list in order, we need to copy in reverse order. */
1448 for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH; r = r->next_peer)
1449 VEC_safe_push (eh_region, heap, catch_list, r);
1451 while (VEC_length (eh_region, catch_list))
1453 r = VEC_pop (eh_region, catch_list);
1455 /* Duplicate CATCH. */
1456 n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
1457 n->tree_label = r->tree_label;
1458 n->may_contain_throw = r->may_contain_throw;
1459 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1460 cfun->eh->last_region_number + 1);
1461 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1462 n->tree_label = r->tree_label;
1464 if (dump_file && (dump_flags & TDF_DETAILS))
1465 fprintf (dump_file, "Copying try-catch regions. Catch: %i to %i\n",
1466 r->region_number, n->region_number);
1467 if (r == old)
1468 ret = n;
1470 VEC_free (eh_region, heap, catch_list);
1471 gcc_assert (ret);
1472 return ret;
1475 /* Callback for forach_reachable_handler that push REGION into single VECtor DATA. */
1477 static void
1478 push_reachable_handler (struct eh_region_d *region, void *data)
1480 VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
1481 VEC_safe_push (eh_region, heap, *trace, region);
1484 /* Redirect EH edge E that to NEW_DEST_LABEL.
1485 IS_RESX, INLINABLE_CALL and REGION_NMUBER match the parameter of
1486 foreach_reachable_handler. */
1488 struct eh_region_d *
1489 redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
1490 bool inlinable_call, int region_number)
1492 struct eh_region_d *outer;
1493 struct eh_region_d *region;
1494 VEC (eh_region, heap) * trace = NULL;
1495 int i;
1496 int start_here = -1;
1497 basic_block old_bb = e->dest;
1498 struct eh_region_d *old, *r = NULL;
1499 bool update_inplace = true;
1500 edge_iterator ei;
1501 edge e2;
1503 /* If there is only one EH edge, we don't need to duplicate;
1504 just update labels in the tree. */
1505 FOR_EACH_EDGE (e2, ei, old_bb->preds)
1506 if ((e2->flags & EDGE_EH) && e2 != e)
1508 update_inplace = false;
1509 break;
1512 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
1513 gcc_assert (region);
1515 foreach_reachable_handler (region_number, is_resx, inlinable_call,
1516 push_reachable_handler, &trace);
1517 if (dump_file && (dump_flags & TDF_DETAILS))
1519 dump_eh_tree (dump_file, cfun);
1520 fprintf (dump_file, "Trace: ");
1521 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1522 fprintf (dump_file, " %i", VEC_index (eh_region, trace, i)->region_number);
1523 fprintf (dump_file, " inplace: %i\n", update_inplace);
1526 if (update_inplace)
1528 /* In easy route just walk trace and update all occurences of the label. */
1529 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1531 r = VEC_index (eh_region, trace, i);
1532 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1534 r->tree_label = new_dest_label;
1535 if (dump_file && (dump_flags & TDF_DETAILS))
1536 fprintf (dump_file, "Updating label for region %i\n",
1537 r->region_number);
1540 r = region;
1542 else
1544 /* Now look for outermost handler that reffers to the basic block in question.
1545 We start our duplication there. */
1546 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1548 r = VEC_index (eh_region, trace, i);
1549 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1550 start_here = i;
1552 outer = VEC_index (eh_region, trace, start_here)->outer;
1553 gcc_assert (start_here >= 0);
1555 /* And now do the dirty job! */
1556 for (i = start_here; i >= 0; i--)
1558 old = VEC_index (eh_region, trace, i);
1559 gcc_assert (!outer || old->outer != outer->outer);
1561 /* Copy region and update label. */
1562 r = copy_eh_region (old, outer);
1563 VEC_replace (eh_region, trace, i, r);
1564 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1566 r->tree_label = new_dest_label;
1567 if (dump_file && (dump_flags & TDF_DETAILS))
1568 fprintf (dump_file, "Updating label for region %i\n",
1569 r->region_number);
1572 /* We got into copying CATCH. copy_eh_region already did job
1573 of copying all catch blocks corresponding to the try. Now
1574 we need to update labels in all of them and see trace.
1576 We continue nesting into TRY region corresponding to CATCH:
1577 When duplicating EH tree contaiing subregions of CATCH,
1578 the CATCH region itself is never inserted to trace so we
1579 never get here anyway. */
1580 if (r->type == ERT_CATCH)
1582 /* Walk other catch regions we copied and update labels as needed. */
1583 for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
1584 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1586 r->tree_label = new_dest_label;
1587 if (dump_file && (dump_flags & TDF_DETAILS))
1588 fprintf (dump_file, "Updating label for region %i\n",
1589 r->region_number);
1591 gcc_assert (r->type == ERT_TRY);
1593 /* Skip sibling catch regions from the trace.
1594 They are already updated. */
1595 while (i > 0 && VEC_index (eh_region, trace, i - 1)->outer == old->outer)
1597 gcc_assert (VEC_index (eh_region, trace, i - 1)->type == ERT_CATCH);
1598 i--;
1602 outer = r;
1605 if (is_resx || region->type == ERT_THROW)
1606 r = copy_eh_region (region, outer);
1609 VEC_free (eh_region, heap, trace);
1610 if (dump_file && (dump_flags & TDF_DETAILS))
1612 dump_eh_tree (dump_file, cfun);
1613 fprintf (dump_file, "New region: %i\n", r->region_number);
1615 return r;
/* Return region number of region that is outer to both REGION_A and
   REGION_B in IFUN, or -1 if they share no common ancestor region.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region_d *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  /* Mark B itself and every region on B's outer chain.  */
  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  /* Walk A's outer chain; the first marked region is the innermost
     common enclosing region.  */
  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
	{
	  sbitmap_free (b_outer);
	  return rp_a->region_number;
	}
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
1660 static int
1661 t2r_eq (const void *pentry, const void *pdata)
1663 const_tree const entry = (const_tree) pentry;
1664 const_tree const data = (const_tree) pdata;
1666 return TREE_PURPOSE (entry) == data;
1669 static hashval_t
1670 t2r_hash (const void *pentry)
1672 const_tree const entry = (const_tree) pentry;
1673 return TREE_HASH (TREE_PURPOSE (entry));
/* Record TYPE in type_to_runtime_map, pairing it with the runtime type
   object obtained from the language hook; a no-op if already present.  */

void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      /* Ask the front end for the runtime representation of TYPE and
	 store the (type, runtime) pair as a TREE_LIST node.  */
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}
/* Return the runtime type object previously registered for TYPE by
   add_type_for_runtime.  */

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct GTY(()) ttypes_filter {
  /* The caught type, or the exception-specification type list.  */
  tree t;
  /* Assigned filter value; positive for ttype entries, negative for
     ehspec entries (see add_ttypes_entry / add_ehspec_entry).  */
  int filter;
};
1710 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1711 (a tree) for a @TTypes type node we are thinking about adding. */
1713 static int
1714 ttypes_filter_eq (const void *pentry, const void *pdata)
1716 const struct ttypes_filter *const entry
1717 = (const struct ttypes_filter *) pentry;
1718 const_tree const data = (const_tree) pdata;
1720 return entry->t == data;
1723 static hashval_t
1724 ttypes_filter_hash (const void *pentry)
1726 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1727 return TREE_HASH (entry->t);
1730 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1731 exception specification list we are thinking about adding. */
1732 /* ??? Currently we use the type lists in the order given. Someone
1733 should put these in some canonical order. */
1735 static int
1736 ehspec_filter_eq (const void *pentry, const void *pdata)
1738 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1739 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1741 return type_list_equal (entry->t, data->t);
/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  /* Fold each type's hash into H with a rotate-style mix; the << 5 /
     >> 27 pair presumably assumes a 32-bit hashval_t -- TODO confirm.  */
  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      /* Keep the table and the hash in sync.  */
      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  /* The ARM EABI unwinder stores the types themselves; other
	     targets store ttype filter indices as uleb128 bytes.  */
	  if (targetm.arm_eabi_unwinder)
	    VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&crtl->eh.ehspec_data,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      /* Terminate the list.  */
      if (targetm.arm_eabi_unwinder)
	VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
	VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  /* The ARM EABI unwinder keeps trees in ehspec_data; other targets
     keep uleb128 bytes (see add_ehspec_entry).  */
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.eh_catch.filter_list = NULL_TREE;

	  if (r->u.eh_catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.eh_catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.eh_catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.eh_catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
/* Emit SEQ into basic block just before INSN (that is assumed to be
   first instruction of some existing BB and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  /* A trailing barrier cannot end a basic block; step back over it.  */
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  For each TRY region this is a chain of filter-value
   compares dispatching to the catch handlers; for ALLOWED_EXCEPTIONS a
   single compare against the region's filter.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* It is possible that TRY region is kept alive only because some of
	     contained catch region still have RESX instruction but they are
	     reached via their copies.  In this case we need to do nothing.  */
	  if (!region->u.eh_try.eh_catch->label)
	    break;

	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region_d *c;
	    for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
	      {
		/* A catch-all needs no compare; jump unconditionally.  */
		if (c->u.eh_catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught. Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.eh_catch.type_list;
		    tree flt_node = c->u.eh_catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (crtl->eh.filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);
			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  if (!region->label)
	    break;
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (crtl->eh.filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  /* These are entered directly; the landing pad is the handler.  */
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;
      struct eh_region_d *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  /* Forward the resume to the enclosing region's post landing
	     pad, rewiring the CFG edges to match.  */
	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
/* Build dwarf2-style landing pads: one label per region, which loads the
   exception pointer and filter value from the EH return data registers
   and then falls through to the region's post landing pad.  */

static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* Only these region kinds receive control from the unwinder.  */
      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      if (!region->post_landing_pad)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

      /* NOTE: the #ifdef pairs below chain two optional if/else arms onto
	 one empty statement; both conditions may be compiled out.  */
#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
      if (HAVE_nonlocal_goto_receiver)
	emit_insn (gen_nonlocal_goto_receiver ());
      else
#endif
	{ /* Nothing */ }

      /* Fetch the unwinder's two data values into our pseudos.  */
      emit_move_insn (crtl->eh.exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (crtl->eh.filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
/* Per-region bookkeeping used while lowering EH to the sjlj scheme.  */

struct sjlj_lp_info
{
  /* Nonzero if some insn can transfer control to this region directly
     (set by sjlj_find_directly_reachable_regions).  */
  int directly_reachable;
  /* Action record index; -1 means no action, -2 must-not-throw
     (see sjlj_assign_call_site_values).  */
  int action_index;
  /* Sequential index used to dispatch from the common landing pad.  */
  int dispatch_index;
  /* Call-site value communicated to the runtime.  */
  int call_site_index;
};
/* Scan all insns for REG_EH_REGION notes and mark in LP_INFO each region
   that a throwing insn reaches directly.  Return true if at least one
   region was marked.  */

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region_d *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      /* Only insns with a positive EH region number can throw here.  */
      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
      if (!region)
	continue;

      /* A THROW region tells us the exact type thrown; the handler
	 search then starts at its enclosing region.  */
      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.eh_throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL, false);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
/* Fill in the action, dispatch and call-site indices in LP_INFO for every
   directly reachable region, pointing each region's landing pad at the
   common DISPATCH_LABEL.  */

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region_d *r =
	  VEC_index (eh_region, cfun->eh->region_array, i);

	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  crtl->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
/* Before every insn that can throw, emit a store of its region's
   call-site index into the SjLj function context so the runtime can
   tell which handler applies.  Redundant stores within an extended
   basic block are suppressed via LAST_CALL_SITE tracking.  */
2324 static void
2325 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2327 int last_call_site = -2;
2328 rtx insn, mem;
2330 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2332 struct eh_region_d *region;
2333 int this_call_site;
2334 rtx note, before, p;
2336 /* Reset value tracking at extended basic block boundaries. */
2337 if (LABEL_P (insn))
2338 last_call_site = -2;
2340 if (! INSN_P (insn))
2341 continue;
2343 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2345 /* Calls that are known to not throw need not be marked. */
2346 if (note && INTVAL (XEXP (note, 0)) <= 0)
2347 continue;
2349 if (note)
2350 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)))
2351 else
2352 region = NULL;
2354 if (!region)
2356 /* Calls (and trapping insns) without notes are outside any
2357 exception handling region in this function. Mark them as
2358 no action. */
2359 if (CALL_P (insn)
2360 || (flag_non_call_exceptions
2361 && may_trap_p (PATTERN (insn))))
2362 this_call_site = -1;
2363 else
2364 continue;
2366 else
2367 this_call_site = lp_info[region->region_number].call_site_index;
2369 if (this_call_site == last_call_site)
2370 continue;
2372 /* Don't separate a call from its argument loads. */
2373 before = insn;
2374 if (CALL_P (insn))
2375 before = find_first_parameter_load (insn, NULL_RTX);
2377 start_sequence ();
2378 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2379 sjlj_fc_call_site_ofs);
2380 emit_move_insn (mem, GEN_INT (this_call_site));
2381 p = get_insns ();
2382 end_sequence ();
2384 emit_insn_before (p, before);
2385 last_call_site = this_call_site;
2389 /* Construct the SjLj_Function_Context. */
/* Emit prologue code that registers this function's unwind context:
   store the personality routine and LSDA address into the context,
   set up the jump buffer so a nonlocal return lands on DISPATCH_LABEL,
   and call _Unwind_SjLj_Register.  The sequence is placed on the entry
   edge when possible, otherwise after NOTE_INSN_FUNCTION_BEG.  */
2391 static void
2392 sjlj_emit_function_enter (rtx dispatch_label)
2394 rtx fn_begin, fc, mem, seq;
2395 bool fn_begin_outside_block;
2397 fc = crtl->eh.sjlj_fc;
2399 start_sequence ();
2401 /* We're storing this libcall's address into memory instead of
2402 calling it directly. Thus, we must call assemble_external_libcall
2403 here, as we can not depend on emit_library_call to do it for us. */
2404 assemble_external_libcall (eh_personality_libfunc);
2405 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2406 emit_move_insn (mem, eh_personality_libfunc);
/* Point the context at this function's LSDA label if one will be
   emitted; a zero LSDA slot means no in-function actions.  */
2408 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2409 if (crtl->uses_eh_lsda)
2411 char buf[20];
2412 rtx sym;
2414 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2415 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2416 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2417 emit_move_insn (mem, sym);
2419 else
2420 emit_move_insn (mem, const0_rtx);
2422 #ifdef DONT_USE_BUILTIN_SETJMP
2424 rtx x;
/* setjmp returns nonzero on the exceptional (second) return; branch
   to the dispatch label in that case.  */
2425 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2426 TYPE_MODE (integer_type_node), 1,
2427 plus_constant (XEXP (fc, 0),
2428 sjlj_fc_jbuf_ofs), Pmode);
2430 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2431 TYPE_MODE (integer_type_node), 0, dispatch_label);
2432 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2434 #else
2435 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2436 dispatch_label);
2437 #endif
2439 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2440 1, XEXP (fc, 0), Pmode);
2442 seq = get_insns ();
2443 end_sequence ();
2445 /* ??? Instead of doing this at the beginning of the function,
2446 do this in a block that is at loop level 0 and dominates all
2447 can_throw_internal instructions. */
2449 fn_begin_outside_block = true;
2450 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2451 if (NOTE_P (fn_begin))
2453 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2454 break;
2455 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2456 fn_begin_outside_block = false;
2459 if (fn_begin_outside_block)
2460 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2461 else
2462 emit_insn_after (seq, fn_begin);
2465 /* Call back from expand_function_end to know where we should put
2466 the call to unwind_sjlj_unregister_libfunc if needed. */
2468 void
2469 sjlj_emit_function_exit_after (rtx after)
/* Only record the location; sjlj_emit_function_exit emits the call.  */
2471 crtl->eh.sjlj_exit_after = after;
/* Emit the call to _Unwind_SjLj_Unregister at the point recorded by
   sjlj_emit_function_exit_after, stepping past a label if necessary so
   the sequence lands inside the exit block.  */
2474 static void
2475 sjlj_emit_function_exit (void)
2477 rtx seq, insn;
2479 start_sequence ();
2481 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2482 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2484 seq = get_insns ();
2485 end_sequence ();
2487 /* ??? Really this can be done in any block at loop level 0 that
2488 post-dominates all can_throw_internal instructions. This is
2489 the last possible moment. */
2491 insn = crtl->eh.sjlj_exit_after;
2492 if (LABEL_P (insn))
2493 insn = NEXT_INSN (insn);
2495 emit_insn_after (seq, insn);
/* Emit the single common landing pad at DISPATCH_LABEL: receive
   control from the exceptional setjmp return, reload the dispatch
   index, exception pointer and filter value from the function context,
   then compare-and-branch to the post-landing pad of each directly
   reachable region in LP_INFO.  The first reachable region found is
   the fall-through default.  */
2498 static void
2499 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2501 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2502 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2503 int i, first_reachable;
2504 rtx mem, dispatch, seq, fc;
2505 rtx before;
2506 basic_block bb;
2507 edge e;
2509 fc = crtl->eh.sjlj_fc;
2511 start_sequence ();
2513 emit_label (dispatch_label);
2515 #ifndef DONT_USE_BUILTIN_SETJMP
2516 expand_builtin_setjmp_receiver (dispatch_label);
2517 #endif
2519 /* Load up dispatch index, exc_ptr and filter values from the
2520 function context. */
2521 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2522 sjlj_fc_call_site_ofs);
2523 dispatch = copy_to_reg (mem);
/* The data words in the context are in the unwinder's word mode;
   convert to the pointer / filter modes the rest of the compiler
   expects.  */
2525 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2526 if (unwind_word_mode != ptr_mode)
2528 #ifdef POINTERS_EXTEND_UNSIGNED
2529 mem = convert_memory_address (ptr_mode, mem);
2530 #else
2531 mem = convert_to_mode (ptr_mode, mem, 0);
2532 #endif
2534 emit_move_insn (crtl->eh.exc_ptr, mem);
2536 mem = adjust_address (fc, unwind_word_mode,
2537 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2538 if (unwind_word_mode != filter_mode)
2539 mem = convert_to_mode (filter_mode, mem, 0);
2540 emit_move_insn (crtl->eh.filter, mem);
2542 /* Jump to one of the directly reachable regions. */
2543 /* ??? This really ought to be using a switch statement. */
2545 first_reachable = 0;
2546 for (i = cfun->eh->last_region_number; i > 0; --i)
2548 if (! lp_info[i].directly_reachable)
2549 continue;
/* No comparison is emitted for the first reachable region; the
   dispatch code falls through into its post-landing pad below.  */
2551 if (! first_reachable)
2553 first_reachable = i;
2554 continue;
2557 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2558 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2559 (((struct eh_region_d *)
2560 VEC_index (eh_region,
2561 cfun->eh->region_array, i))
2562 ->post_landing_pad));
2565 seq = get_insns ();
2566 end_sequence ();
2568 before = (((struct eh_region_d *)
2569 VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2570 ->post_landing_pad);
2572 bb = emit_to_new_bb_before (seq, before);
2573 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2574 e->count = bb->count;
2575 e->probability = REG_BR_PROB_BASE;
/* Top-level driver for SjLj landing-pad generation: find directly
   reachable regions; if any exist, allocate the function-context
   stack slot and emit call-site marks, registration prologue, common
   dispatch table and unregistration epilogue.  */
2578 static void
2579 sjlj_build_landing_pads (void)
2581 struct sjlj_lp_info *lp_info;
/* One zero-initialized entry per region, plus the unused slot 0.  */
2583 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2585 if (sjlj_find_directly_reachable_regions (lp_info))
2587 rtx dispatch_label = gen_label_rtx ();
2588 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2589 TYPE_MODE (sjlj_fc_type_node),
2590 TYPE_ALIGN (sjlj_fc_type_node));
2591 crtl->eh.sjlj_fc
2592 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2593 int_size_in_bytes (sjlj_fc_type_node),
2594 align);
2596 sjlj_assign_call_site_values (dispatch_label, lp_info);
2597 sjlj_mark_call_sites (lp_info);
2599 sjlj_emit_function_enter (dispatch_label);
2600 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2601 sjlj_emit_function_exit ();
2604 free (lp_info);
2607 /* After initial rtl generation, call back to finish generating
2608 exception support code. */
2610 static void
2611 finish_eh_generation (void)
2613 basic_block bb;
2615 /* Nothing to do if no regions created. */
2616 if (cfun->eh->region_tree == NULL)
2617 return;
2619 /* The object here is to provide detailed information (via
2620 reachable_handlers) on how exception control flows within the
2621 function for the CFG construction. In this first pass, we can
2622 include type information garnered from ERT_THROW and
2623 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2624 in deleting unreachable handlers. Subsequently, we will generate
2625 landing pads which will connect many of the handlers, and then
2626 type information will not be effective. Still, this is a win
2627 over previous implementations. */
2629 /* These registers are used by the landing pads. Make sure they
2630 have been generated. */
2631 get_exception_pointer ();
2632 get_exception_filter ();
2634 /* Construct the landing pads. */
2636 assign_filter_values ();
2637 build_post_landing_pads ();
2638 connect_post_landing_pads ();
2639 if (USING_SJLJ_EXCEPTIONS)
2640 sjlj_build_landing_pads ();
2641 else
2642 dw2_build_landing_pads ();
2644 crtl->eh.built_landing_pads = 1;
2646 /* We've totally changed the CFG. Start over. */
2647 find_exception_handler_labels ();
2648 break_superblocks ();
2649 if (USING_SJLJ_EXCEPTIONS
2650 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2651 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2652 commit_edge_insertions ();
/* Rebuild EH edges: drop every stale EDGE_EH successor, then let
   rtl_make_eh_edge recreate the edges from the block's last insn.  */
2653 FOR_EACH_BB (bb)
2655 edge e;
2656 edge_iterator ei;
2657 bool eh = false;
2658 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2660 if (e->flags & EDGE_EH)
2662 remove_edge (e);
2663 eh = true;
2665 else
2666 ei_next (&ei);
2668 if (eh)
2669 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2673 /* This section handles removing dead code for flow. */
2675 /* Splice REGION from the region tree and replace it by REPLACE etc.
2676 When UPDATE_CATCH_TRY is true mind updating links from catch to try
2677 region. */
2679 static void
2680 remove_eh_handler_and_replace (struct eh_region_d *region,
2681 struct eh_region_d *replace,
2682 bool update_catch_try)
2684 struct eh_region_d **pp, **pp_start, *p, *outer, *inner;
2685 rtx lab;
2687 outer = region->outer;
2689 /* For the benefit of efficiently handling REG_EH_REGION notes,
2690 replace this region in the region array with its containing
2691 region. Note that previous region deletions may result in
2692 multiple copies of this region in the array, so we have a
2693 list of alternate numbers by which we are known. */
2695 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2696 replace);
2697 if (region->aka)
2699 unsigned i;
2700 bitmap_iterator bi;
2702 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2704 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
/* REPLACE inherits all of REGION's alternate numbers, plus
   REGION's own number.  */
2708 if (replace)
2710 if (!replace->aka)
2711 replace->aka = BITMAP_GGC_ALLOC ();
2712 if (region->aka)
2713 bitmap_ior_into (replace->aka, region->aka);
2714 bitmap_set_bit (replace->aka, region->region_number);
/* NOTE(review): LAB is computed here but not used below in this
   extract — presumably consumed by code outside this view or
   vestigial; confirm against the full file.  */
2717 if (crtl->eh.built_landing_pads)
2718 lab = region->landing_pad;
2719 else
2720 lab = region->label;
/* Unlink REGION from its peer list (either under OUTER or at the
   tree root).  */
2721 if (outer)
2722 pp_start = &outer->inner;
2723 else
2724 pp_start = &cfun->eh->region_tree;
2725 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2726 continue;
2727 *pp = region->next_peer;
/* Reparent REGION's children onto REPLACE (or the tree root) and
   splice them into that peer list.  */
2729 if (replace)
2730 pp_start = &replace->inner;
2731 else
2732 pp_start = &cfun->eh->region_tree;
2733 inner = region->inner;
2734 if (inner)
2736 for (p = inner; p->next_peer ; p = p->next_peer)
2737 p->outer = replace;
2738 p->outer = replace;
2740 p->next_peer = *pp_start;
2741 *pp_start = inner;
/* If we removed a catch, fix up the doubly-linked catch list of its
   controlling try; remove the try entirely if no catches remain.  */
2744 if (region->type == ERT_CATCH
2745 && update_catch_try)
2747 struct eh_region_d *eh_try, *next, *prev;
2749 for (eh_try = region->next_peer;
2750 eh_try->type == ERT_CATCH;
2751 eh_try = eh_try->next_peer)
2752 continue;
2753 gcc_assert (eh_try->type == ERT_TRY);
2755 next = region->u.eh_catch.next_catch;
2756 prev = region->u.eh_catch.prev_catch;
2758 if (next)
2759 next->u.eh_catch.prev_catch = prev;
2760 else
2761 eh_try->u.eh_try.last_catch = prev;
2762 if (prev)
2763 prev->u.eh_catch.next_catch = next;
2764 else
2766 eh_try->u.eh_try.eh_catch = next;
2767 if (! next)
2768 remove_eh_handler (eh_try);
2773 /* Splice REGION from the region tree and replace it by the outer region
2774 etc. Convenience wrapper around remove_eh_handler_and_replace that
also updates catch-to-try links. */
2776 static void
2777 remove_eh_handler (struct eh_region_d *region)
2779 remove_eh_handler_and_replace (region, region->outer, true);
2782 /* Remove Eh region R that has turned out to have no code in its handler. */
2784 void
2785 remove_eh_region (int r)
2787 struct eh_region_d *region;
2789 region = VEC_index (eh_region, cfun->eh->region_array, r);
2790 remove_eh_handler (region);
2793 /* Remove Eh region R that has turned out to have no code in its handler
2794 and replace in by R2. */
2796 void
2797 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2799 struct eh_region_d *region, *region2;
2801 region = VEC_index (eh_region, cfun->eh->region_array, r);
2802 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2803 remove_eh_handler_and_replace (region, region2->outer, true);
2806 /* Invokes CALLBACK for every exception handler label. Only used by old
2807 loop hackery; should not be used by new code. */
2809 void
2810 for_each_eh_label (void (*callback) (rtx))
2812 int i;
2813 for (i = 0; i < cfun->eh->last_region_number; i++)
2815 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
2816 if (r && r->region_number == i && r->label
2817 && GET_CODE (r->label) == CODE_LABEL)
2818 (*callback) (r->label);
2822 /* Invoke CALLBACK for every exception region in the current function. */
2824 void
2825 for_each_eh_region (void (*callback) (struct eh_region_d *))
2827 int i, n = cfun->eh->last_region_number;
2828 for (i = 1; i <= n; ++i)
2830 struct eh_region_d *region;
2832 region = VEC_index (eh_region, cfun->eh->region_array, i);
2833 if (region)
2834 (*callback) (region);
2838 /* This section describes CFG exception edges for flow. */
2840 /* For communicating between calls to reachable_next_level. */
2841 struct reachable_info
/* Tree list of types already caught by inner handlers.  */
2843 tree types_caught;
/* Tree list of allowed-type lists from ERT_ALLOWED_EXCEPTIONS
   regions seen so far.  */
2844 tree types_allowed;
/* Invoked (with CALLBACK_DATA) on each reachable handler region.  */
2845 void (*callback) (struct eh_region_d *, void *);
2846 void *callback_data;
2849 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2850 base class of TYPE, is in HANDLED. */
2852 static int
2853 check_handled (tree handled, tree type)
2855 tree t;
2857 /* We can check for exact matches without front-end help. */
2858 if (! lang_eh_type_covers)
2860 for (t = handled; t ; t = TREE_CHAIN (t))
2861 if (TREE_VALUE (t) == type)
2862 return 1;
2864 else
2866 for (t = handled; t ; t = TREE_CHAIN (t))
2867 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2868 return 1;
2871 return 0;
2874 /* A subroutine of reachable_next_level. If we are collecting a list
2875 of handlers, add one. After landing pad generation, reference
2876 it instead of the handlers themselves. Further, the handlers are
2877 all wired together, so by referencing one, we've got them all.
2878 Before landing pad generation we reference each handler individually.
2880 LP_REGION contains the landing pad; REGION is the handler. */
2882 static void
2883 add_reachable_handler (struct reachable_info *info,
2884 struct eh_region_d *lp_region,
2885 struct eh_region_d *region)
2887 if (! info)
2888 return;
2890 if (crtl->eh.built_landing_pads)
2891 info->callback (lp_region, info->callback_data);
2892 else
2893 info->callback (region, info->callback_data);
2896 /* Process one level of exception regions for reachability.
2897 If TYPE_THROWN is non-null, then it is the *exact* type being
2898 propagated. If INFO is non-null, then collect handler labels
2899 and caught/allowed type information between invocations.
Returns an RNL_* code; callers rely on the enum ordering and
compare against RNL_CAUGHT with >= . */
2901 static enum reachable_code
2902 reachable_next_level (struct eh_region_d *region, tree type_thrown,
2903 struct reachable_info *info,
2904 bool maybe_resx)
2906 switch (region->type)
2908 case ERT_CLEANUP:
2909 /* Before landing-pad generation, we model control flow
2910 directly to the individual handlers. In this way we can
2911 see that catch handler types may shadow one another. */
2912 add_reachable_handler (info, region, region);
2913 return RNL_MAYBE_CAUGHT;
2915 case ERT_TRY:
2917 struct eh_region_d *c;
2918 enum reachable_code ret = RNL_NOT_CAUGHT;
2920 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2922 /* A catch-all handler ends the search. */
2923 if (c->u.eh_catch.type_list == NULL)
2925 add_reachable_handler (info, region, c);
2926 return RNL_CAUGHT;
2929 if (type_thrown)
2931 /* If we have at least one type match, end the search. */
2932 tree tp_node = c->u.eh_catch.type_list;
2934 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2936 tree type = TREE_VALUE (tp_node);
2938 if (type == type_thrown
2939 || (lang_eh_type_covers
2940 && (*lang_eh_type_covers) (type, type_thrown)))
2942 add_reachable_handler (info, region, c);
2943 return RNL_CAUGHT;
2947 /* If we have definitive information of a match failure,
2948 the catch won't trigger. */
2949 if (lang_eh_type_covers)
2950 return RNL_NOT_CAUGHT;
2953 /* At this point, we either don't know what type is thrown or
2954 don't have front-end assistance to help deciding if it is
2955 covered by one of the types in the list for this region.
2957 We'd then like to add this region to the list of reachable
2958 handlers since it is indeed potentially reachable based on the
2959 information we have.
2961 Actually, this handler is for sure not reachable if all the
2962 types it matches have already been caught. That is, it is only
2963 potentially reachable if at least one of the types it catches
2964 has not been previously caught. */
2966 if (! info)
2967 ret = RNL_MAYBE_CAUGHT;
2968 else
2970 tree tp_node = c->u.eh_catch.type_list;
2971 bool maybe_reachable = false;
2973 /* Compute the potential reachability of this handler and
2974 update the list of types caught at the same time. */
2975 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2977 tree type = TREE_VALUE (tp_node);
2979 if (! check_handled (info->types_caught, type))
2981 info->types_caught
2982 = tree_cons (NULL, type, info->types_caught);
2984 maybe_reachable = true;
2988 if (maybe_reachable)
2990 add_reachable_handler (info, region, c);
2992 /* ??? If the catch type is a base class of every allowed
2993 type, then we know we can stop the search. */
2994 ret = RNL_MAYBE_CAUGHT;
2999 return ret;
3002 case ERT_ALLOWED_EXCEPTIONS:
3003 /* An empty list of types definitely ends the search. */
3004 if (region->u.allowed.type_list == NULL_TREE)
3006 add_reachable_handler (info, region, region);
3007 return RNL_CAUGHT;
3010 /* Collect a list of lists of allowed types for use in detecting
3011 when a catch may be transformed into a catch-all. */
3012 if (info)
3013 info->types_allowed = tree_cons (NULL_TREE,
3014 region->u.allowed.type_list,
3015 info->types_allowed);
3017 /* If we have definitive information about the type hierarchy,
3018 then we can tell if the thrown type will pass through the
3019 filter. */
3020 if (type_thrown && lang_eh_type_covers)
3022 if (check_handled (region->u.allowed.type_list, type_thrown))
3023 return RNL_NOT_CAUGHT;
3024 else
3026 add_reachable_handler (info, region, region);
3027 return RNL_CAUGHT;
3031 add_reachable_handler (info, region, region);
3032 return RNL_MAYBE_CAUGHT;
3034 case ERT_CATCH:
3035 /* Catch regions are handled by their controlling try region. */
3036 return RNL_NOT_CAUGHT;
3038 case ERT_MUST_NOT_THROW:
3039 /* Here we end our search, since no exceptions may propagate.
3041 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
3042 only via locally handled RESX instructions.
3044 When we inline a function call, we can bring in new handlers. In order
3045 to avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
3046 assume that such handlers exist for any inlinable call until
3047 inlining decisions are fixed. */
3049 if (maybe_resx)
3051 add_reachable_handler (info, region, region);
3052 return RNL_CAUGHT;
3054 else
3055 return RNL_BLOCKED;
3057 case ERT_THROW:
3058 case ERT_UNKNOWN:
3059 /* Shouldn't see these here. */
3060 gcc_unreachable ();
3061 break;
3062 default:
3063 gcc_unreachable ();
3067 /* Invoke CALLBACK on each region reachable from REGION_NUMBER.
IS_RESX means the search starts from a RESX (leaving the region);
INLINABLE_CALL makes must-not-throw regions count as reachable
(see the ERT_MUST_NOT_THROW comment in reachable_next_level). */
3069 void
3070 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3071 void (*callback) (struct eh_region_d *, void *),
3072 void *callback_data)
3074 struct reachable_info info;
3075 struct eh_region_d *region;
3076 tree type_thrown;
3078 memset (&info, 0, sizeof (info));
3079 info.callback = callback;
3080 info.callback_data = callback_data;
3082 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3083 if (!region)
3084 return;
3086 type_thrown = NULL_TREE;
3087 if (is_resx)
3089 /* A RESX leaves a region instead of entering it. Thus the
3090 region itself may have been deleted out from under us. */
3091 if (region == NULL)
3092 return;
3093 region = region->outer;
3095 else if (region->type == ERT_THROW)
3097 type_thrown = region->u.eh_throw.type;
3098 region = region->outer;
3101 while (region)
3103 if (reachable_next_level (region, type_thrown, &info,
3104 inlinable_call || is_resx) >= RNL_CAUGHT)
3105 break;
3106 /* If we have processed one cleanup, there is no point in
3107 processing any more of them. Each cleanup will have an edge
3108 to the next outer cleanup region, so the flow graph will be
3109 accurate. */
3110 if (region->type == ERT_CLEANUP)
3112 enum reachable_code code = RNL_NOT_CAUGHT;
3113 region = find_prev_try (region->outer);
3114 /* Continue looking for outer TRY region until we find one
3115 that might catch something. */
3116 while (region
3117 && (code = reachable_next_level (region, type_thrown, &info,
3118 inlinable_call || is_resx))
3119 == RNL_NOT_CAUGHT)
3120 region = find_prev_try (region->outer);
3121 if (code >= RNL_CAUGHT)
3122 break;
3124 if (region)
3125 region = region->outer;
3129 /* Retrieve a list of labels of exception handlers which can be
3130 reached by a given insn. */
3132 static void
3133 arh_to_landing_pad (struct eh_region_d *region, void *data)
3135 rtx *p_handlers = (rtx *) data;
3136 if (! *p_handlers)
3137 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3140 static void
3141 arh_to_label (struct eh_region_d *region, void *data)
3143 rtx *p_handlers = (rtx *) data;
3144 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
/* Return an INSN_LIST of the handler labels (or, after landing-pad
   generation, the landing-pad label) reachable from INSN, or NULL if
   INSN cannot throw into this function.  */
3148 reachable_handlers (rtx insn)
3150 bool is_resx = false;
3151 rtx handlers = NULL;
3152 int region_number;
/* A RESX names its region directly; other insns use the
   REG_EH_REGION note.  */
3154 if (JUMP_P (insn)
3155 && GET_CODE (PATTERN (insn)) == RESX)
3157 region_number = XINT (PATTERN (insn), 0);
3158 is_resx = true;
3160 else
3162 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3163 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3164 return NULL;
3165 region_number = INTVAL (XEXP (note, 0));
3168 foreach_reachable_handler (region_number, is_resx, false,
3169 (crtl->eh.built_landing_pads
3170 ? arh_to_landing_pad
3171 : arh_to_label),
3172 &handlers);
3174 return handlers;
3177 /* Determine if the given INSN can throw an exception that is caught
3178 within the function. Worker: REGION_NUMBER identifies the EH region;
IS_RESX and INLINABLE_CALL have the same meaning as in
foreach_reachable_handler. */
3180 bool
3181 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3183 struct eh_region_d *region;
3184 tree type_thrown;
3186 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3187 if (!region)
3188 return false;
3190 type_thrown = NULL_TREE;
3191 if (is_resx)
3192 region = region->outer;
3193 else if (region->type == ERT_THROW)
3195 type_thrown = region->u.eh_throw.type;
3196 region = region->outer;
3199 /* If this exception is ignored by each and every containing region,
3200 then control passes straight out. The runtime may handle some
3201 regions, which also do not require processing internally. */
3202 for (; region; region = region->outer)
3204 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3205 inlinable_call || is_resx);
3206 if (how == RNL_BLOCKED)
3207 return false;
3208 if (how != RNL_NOT_CAUGHT)
3209 return true;
3212 return false;
/* Return true if INSN can throw an exception that is caught within
   the current function.  */
3215 bool
3216 can_throw_internal (const_rtx insn)
3218 rtx note;
3220 if (! INSN_P (insn))
3221 return false;
/* A RESX with a positive region number names its region directly.  */
3223 if (JUMP_P (insn)
3224 && GET_CODE (PATTERN (insn)) == RESX
3225 && XINT (PATTERN (insn), 0) > 0)
3226 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
/* For a delay-slot SEQUENCE, look at the insn in the first slot.  */
3228 if (NONJUMP_INSN_P (insn)
3229 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3230 insn = XVECEXP (PATTERN (insn), 0, 0);
3232 /* Every insn that might throw has an EH_REGION note. */
3233 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3234 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3235 return false;
3237 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3240 /* Determine if the given INSN can throw an exception that is
3241 visible outside the function. Worker: REGION_NUMBER identifies the
EH region; IS_RESX and INLINABLE_CALL as in foreach_reachable_handler. */
3243 bool
3244 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3246 struct eh_region_d *region;
3247 tree type_thrown;
3249 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
/* No region information: conservatively assume it escapes.  */
3250 if (!region)
3251 return true;
3253 type_thrown = NULL_TREE;
3254 if (is_resx)
3255 region = region->outer;
3256 else if (region->type == ERT_THROW)
3258 type_thrown = region->u.eh_throw.type;
3259 region = region->outer;
3262 /* If the exception is caught or blocked by any containing region,
3263 then it is not seen by any calling function. */
3264 for (; region ; region = region->outer)
3265 if (reachable_next_level (region, type_thrown, NULL,
3266 inlinable_call || is_resx) >= RNL_CAUGHT)
3267 return false;
3269 return true;
/* Return true if INSN can throw an exception that is visible outside
   the current function.  */
3272 bool
3273 can_throw_external (const_rtx insn)
3275 rtx note;
3277 if (! INSN_P (insn))
3278 return false;
3280 if (JUMP_P (insn)
3281 && GET_CODE (PATTERN (insn)) == RESX
3282 && XINT (PATTERN (insn), 0) > 0)
3283 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
/* A delay-slot SEQUENCE can throw if any of its members can.  */
3285 if (NONJUMP_INSN_P (insn)
3286 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3288 rtx seq = PATTERN (insn);
3289 int i, n = XVECLEN (seq, 0);
3291 for (i = 0; i < n; i++)
3292 if (can_throw_external (XVECEXP (seq, 0, i)))
3293 return true;
3295 return false;
3298 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3299 if (!note)
3301 /* Calls (and trapping insns) without notes are outside any
3302 exception handling region in this function. We have to
3303 assume it might throw. Given that the front end and middle
3304 ends mark known NOTHROW functions, this isn't so wildly
3305 inaccurate. */
3306 return (CALL_P (insn)
3307 || (flag_non_call_exceptions
3308 && may_trap_p (PATTERN (insn))));
/* A non-positive region number marks the insn as non-throwing.  */
3310 if (INTVAL (XEXP (note, 0)) <= 0)
3311 return false;
3313 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3316 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.
Pass entry point; always returns 0 (no TODO flags). */
3318 unsigned int
3319 set_nothrow_function_flags (void)
3321 rtx insn;
3323 crtl->nothrow = 1;
3325 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3326 something that can throw an exception. We specifically exempt
3327 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3328 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3329 is optimistic. */
3331 crtl->all_throwers_are_sibcalls = 1;
3333 /* If we don't know that this implementation of the function will
3334 actually be used, then we must not set TREE_NOTHROW, since
3335 callers must not assume that this function does not throw. */
3336 if (TREE_NOTHROW (current_function_decl))
3337 return 0;
3339 if (! flag_exceptions)
3340 return 0;
3342 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3343 if (can_throw_external (insn))
3345 crtl->nothrow = 0;
3347 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3349 crtl->all_throwers_are_sibcalls = 0;
3350 return 0;
/* Also scan insns parked in the epilogue delay list.  */
3354 for (insn = crtl->epilogue_delay_list; insn;
3355 insn = XEXP (insn, 1))
3356 if (can_throw_external (insn))
3358 crtl->nothrow = 0;
3360 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3362 crtl->all_throwers_are_sibcalls = 0;
3363 return 0;
/* Only publish TREE_NOTHROW (and clear callers' can_throw_external)
   when this body is the one that will actually be used.  */
3366 if (crtl->nothrow
3367 && (cgraph_function_body_availability (cgraph_node
3368 (current_function_decl))
3369 >= AVAIL_AVAILABLE))
3371 struct cgraph_node *node = cgraph_node (current_function_decl);
3372 struct cgraph_edge *e;
3373 for (e = node->callers; e; e = e->next_caller)
3374 e->can_throw_external = false;
3375 TREE_NOTHROW (current_function_decl) = 1;
3377 if (dump_file)
3378 fprintf (dump_file, "Marking function nothrow: %s\n\n",
3379 current_function_name ());
3381 return 0;
/* RTL pass descriptor for set_nothrow_function_flags ("nothrow").  */
3384 struct rtl_opt_pass pass_set_nothrow_function_flags =
3387 RTL_PASS,
3388 "nothrow", /* name */
3389 NULL, /* gate */
3390 set_nothrow_function_flags, /* execute */
3391 NULL, /* sub */
3392 NULL, /* next */
3393 0, /* static_pass_number */
3394 TV_NONE, /* tv_id */
3395 0, /* properties_required */
3396 0, /* properties_provided */
3397 0, /* properties_destroyed */
3398 0, /* todo_flags_start */
3399 TODO_dump_func, /* todo_flags_finish */
3404 /* Various hooks for unwind library. */
3406 /* Do any necessary initialization to access arbitrary stack frames.
3407 On the SPARC, this means flushing the register windows. */
3409 void
3410 expand_builtin_unwind_init (void)
3412 /* Set this so all the registers get saved in our frame; we need to be
3413 able to copy the saved values for any registers from frames we unwind. */
3414 crtl->saves_all_registers = 1;
3416 #ifdef SETUP_FRAME_ADDRESSES
3417 SETUP_FRAME_ADDRESSES ();
3418 #endif
/* Expand __builtin_eh_return_data_regno (EXP): return, as a CONST_INT
   rtx, the DWARF register number of the WHICH'th EH return data
   register, or constm1_rtx if WHICH is not constant or out of range.  */
3422 expand_builtin_eh_return_data_regno (tree exp)
3424 tree which = CALL_EXPR_ARG (exp, 0);
3425 unsigned HOST_WIDE_INT iwhich;
3427 if (TREE_CODE (which) != INTEGER_CST)
3429 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3430 return constm1_rtx;
3433 iwhich = tree_low_cst (which, 1);
3434 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3435 if (iwhich == INVALID_REGNUM)
3436 return constm1_rtx;
/* Translate the hard regno into the debugging/unwind numbering.  */
3438 #ifdef DWARF_FRAME_REGNUM
3439 iwhich = DWARF_FRAME_REGNUM (iwhich);
3440 #else
3441 iwhich = DBX_REGISTER_NUMBER (iwhich);
3442 #endif
3444 return GEN_INT (iwhich);
3447 /* Given a value extracted from the return address register or stack slot,
3448 return the actual address encoded in that value. Expands
__builtin_extract_return_addr; result is an rtx in Pmode. */
3451 expand_builtin_extract_return_addr (tree addr_tree)
3453 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3455 if (GET_MODE (addr) != Pmode
3456 && GET_MODE (addr) != VOIDmode)
3458 #ifdef POINTERS_EXTEND_UNSIGNED
3459 addr = convert_memory_address (Pmode, addr);
3460 #else
3461 addr = convert_to_mode (Pmode, addr, 0);
3462 #endif
3465 /* First mask out any unwanted bits. */
3466 #ifdef MASK_RETURN_ADDR
3467 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3468 #endif
3470 /* Then adjust to find the real return address. */
3471 #if defined (RETURN_ADDR_OFFSET)
3472 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3473 #endif
3475 return addr;
3478 /* Given an actual address in addr_tree, do any necessary encoding
3479 and return the value to be stored in the return address register or
3480 stack slot so the epilogue will return to that address. */
/* Inverse of expand_builtin_extract_return_addr (see comment above):
   encode ADDR_TREE into the value to store in the return-address slot.  */
3483 expand_builtin_frob_return_addr (tree addr_tree)
3485   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3487   addr = convert_memory_address (Pmode, addr);
3489 #ifdef RETURN_ADDR_OFFSET
     /* Undo the offset applied by the extract path.  */
3490   addr = force_reg (Pmode, addr);
3491   addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3492 #endif
3494   return addr;
3497 /* Set up the epilogue with the magic bits we'll need to return to the
3498 exception handler. */
3500 void
3501 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3502 			  tree handler_tree)
3504   rtx tmp;
3506 #ifdef EH_RETURN_STACKADJ_RTX
     /* Stash the stack adjustment in crtl->eh.ehr_stackadj, reusing the
	pseudo on repeated expansions within the same function.  */
3507   tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3508 		     VOIDmode, EXPAND_NORMAL);
3509   tmp = convert_memory_address (Pmode, tmp);
3510   if (!crtl->eh.ehr_stackadj)
3511     crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3512   else if (tmp != crtl->eh.ehr_stackadj)
3513     emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3514 #endif
     /* Likewise for the handler address.  */
3516   tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3517 		     VOIDmode, EXPAND_NORMAL);
3518   tmp = convert_memory_address (Pmode, tmp);
3519   if (!crtl->eh.ehr_handler)
3520     crtl->eh.ehr_handler = copy_to_reg (tmp);
3521   else if (tmp != crtl->eh.ehr_handler)
3522     emit_move_insn (crtl->eh.ehr_handler, tmp);
     /* Jump to the shared label that expand_eh_return will emit in the
	epilogue.  */
3524   if (!crtl->eh.ehr_label)
3525     crtl->eh.ehr_label = gen_label_rtx ();
3526   emit_jump (crtl->eh.ehr_label);
/* Emit the epilogue-side code for __builtin_eh_return: the landing label
   created by expand_builtin_eh_return plus the target-specific sequence
   that installs the handler address.  No-op if the builtin was not used.  */
3529 void
3530 expand_eh_return (void)
3532   rtx around_label;
3534   if (! crtl->eh.ehr_label)
3535     return;
3537   crtl->calls_eh_return = 1;
3539 #ifdef EH_RETURN_STACKADJ_RTX
     /* Normal (non-EH) fallthrough path uses a zero stack adjustment.  */
3540   emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3541 #endif
3543   around_label = gen_label_rtx ();
3544   emit_jump (around_label);
3546   emit_label (crtl->eh.ehr_label);
3547   clobber_return_register ();
3549 #ifdef EH_RETURN_STACKADJ_RTX
3550   emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3551 #endif
3553 #ifdef HAVE_eh_return
3554   if (HAVE_eh_return)
3555     emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3556   else
3557 #endif
     /* No eh_return insn pattern; fall back to a target register/slot,
	or diagnose an unsupported target.  */
3559 #ifdef EH_RETURN_HANDLER_RTX
3560       emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3561 #else
3562       error ("__builtin_eh_return not supported on this target");
3563 #endif
3566   emit_label (around_label);
3569 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3570 POINTERS_EXTEND_UNSIGNED and return it. */
/* See comment above: widen ADDR_TREE from ptr_mode to the unwinder's
   word mode, honoring POINTERS_EXTEND_UNSIGNED when defined.  */
3573 expand_builtin_extend_pointer (tree addr_tree)
3575   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3576   int extend;
3578 #ifdef POINTERS_EXTEND_UNSIGNED
3579   extend = POINTERS_EXTEND_UNSIGNED;
3580 #else
3581   /* The previous EH code did an unsigned extend by default, so we do this also
3582      for consistency. */
3583   extend = 1;
3584 #endif
3586   return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3589 /* In the following functions, we represent entries in the action table
3590 as 1-based indices. Special cases are:
3592 0: null action record, non-null landing pad; implies cleanups
3593 -1: null action record, null landing pad; implies no action
3594 -2: no call-site entry; implies must_not_throw
3595 -3: we have yet to process outer regions
3597 Further, no special cases apply to the "next" field of the record.
3598 For next, 0 means end of list. */
/* One entry of the LSDA action table; see the index-convention comment
   just above.  */
3600 struct action_record
     /* 1-based byte offset of this record in action_record_data.  */
3602   int offset;
     /* Type filter value; 0 means cleanup.  */
3603   int filter;
     /* 1-based index of the next record in the chain, 0 for end.  */
3604   int next;
3607 static int
3608 action_record_eq (const void *pentry, const void *pdata)
3610 const struct action_record *entry = (const struct action_record *) pentry;
3611 const struct action_record *data = (const struct action_record *) pdata;
3612 return entry->filter == data->filter && entry->next == data->next;
3615 static hashval_t
3616 action_record_hash (const void *pentry)
3618 const struct action_record *entry = (const struct action_record *) pentry;
3619 return entry->next * 1009 + entry->filter;
/* Intern the (FILTER, NEXT) action pair in AR_HASH, appending a new
   encoded record to crtl->eh.action_record_data on first occurrence.
   Returns the record's 1-based offset for use as a later NEXT link.  */
3622 static int
3623 add_action_record (htab_t ar_hash, int filter, int next)
3625   struct action_record **slot, *new_ar, tmp;
3627   tmp.filter = filter;
3628   tmp.next = next;
3629   slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3631   if ((new_ar = *slot) == NULL)
3633       new_ar = XNEW (struct action_record);
3634       new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3635       new_ar->filter = filter;
3636       new_ar->next = next;
3637       *slot = new_ar;
3639       /* The filter value goes in untouched.  The link to the next
3640 	 record is a "self-relative" byte offset, or zero to indicate
3641 	 that there is no next record.  So convert the absolute 1 based
3642 	 indices we've been carrying around into a displacement.  */
3644       push_sleb128 (&crtl->eh.action_record_data, filter);
3645       if (next)
3646 	next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3647       push_sleb128 (&crtl->eh.action_record_data, next);
3650   return new_ar->offset;
/* Build (via add_action_record) the action chain for REGION, walking
   outward through enclosing regions.  Returns a 1-based action-record
   index, or one of the special values documented above (-1, -2, 0).  */
3653 static int
3654 collect_one_action_chain (htab_t ar_hash, struct eh_region_d *region)
3656   struct eh_region_d *c;
3657   int next;
3659   /* If we've reached the top of the region chain, then we have
3660      no actions, and require no landing pad.  */
3661   if (region == NULL)
3662     return -1;
3664   switch (region->type)
3666     case ERT_CLEANUP:
3667       /* A cleanup adds a zero filter to the beginning of the chain, but
3668 	 there are special cases to look out for.  If there are *only*
3669 	 cleanups along a path, then it compresses to a zero action.
3670 	 Further, if there are multiple cleanups along a path, we only
3671 	 need to represent one of them, as that is enough to trigger
3672 	 entry to the landing pad at runtime.  */
3673       next = collect_one_action_chain (ar_hash, region->outer);
3674       if (next <= 0)
3675 	return 0;
3676       for (c = region->outer; c ; c = c->outer)
3677 	if (c->type == ERT_CLEANUP)
3678 	  return next;
3679       return add_action_record (ar_hash, 0, next);
3681     case ERT_TRY:
3682       /* Process the associated catch regions in reverse order.
3683 	 If there's a catch-all handler, then we don't need to
3684 	 search outer regions.  Use a magic -3 value to record
3685 	 that we haven't done the outer search.  */
3686       next = -3;
3687       for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
	  /* A NULL type_list marks a catch-all handler.  */
3689 	  if (c->u.eh_catch.type_list == NULL)
3691 	      /* Retrieve the filter from the head of the filter list
3692 		 where we have stored it (see assign_filter_values).  */
3693 	      int filter
3694 		= TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3696 	      next = add_action_record (ar_hash, filter, 0);
3698 	  else
3700 	      /* Once the outer search is done, trigger an action record for
3701 		 each filter we have.  */
3702 	      tree flt_node;
3704 	      if (next == -3)
3706 		  next = collect_one_action_chain (ar_hash, region->outer);
3708 		  /* If there is no next action, terminate the chain.  */
3709 		  if (next == -1)
3710 		    next = 0;
3711 		  /* If all outer actions are cleanups or must_not_throw,
3712 		     we'll have no action record for it, since we had wanted
3713 		     to encode these states in the call-site record directly.
3714 		     Add a cleanup action to the chain to catch these.  */
3715 		  else if (next <= 0)
3716 		    next = add_action_record (ar_hash, 0, 0);
3719 	      flt_node = c->u.eh_catch.filter_list;
3720 	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3722 		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3723 		  next = add_action_record (ar_hash, filter, next);
3727       return next;
3729     case ERT_ALLOWED_EXCEPTIONS:
3730       /* An exception specification adds its filter to the
3731 	 beginning of the chain.  */
3732       next = collect_one_action_chain (ar_hash, region->outer);
3734       /* If there is no next action, terminate the chain.  */
3735       if (next == -1)
3736 	next = 0;
3737       /* If all outer actions are cleanups or must_not_throw,
3738 	 we'll have no action record for it, since we had wanted
3739 	 to encode these states in the call-site record directly.
3740 	 Add a cleanup action to the chain to catch these.  */
3741       else if (next <= 0)
3742 	next = add_action_record (ar_hash, 0, 0);
3744       return add_action_record (ar_hash, region->u.allowed.filter, next);
3746     case ERT_MUST_NOT_THROW:
3747       /* A must-not-throw region with no inner handlers or cleanups
3748 	 requires no call-site entry.  Note that this differs from
3749 	 the no handler or cleanup case in that we do require an lsda
3750 	 to be generated.  Return a magic -2 value to record this.  */
3751       return -2;
3753     case ERT_CATCH:
3754     case ERT_THROW:
3755       /* CATCH regions are handled in TRY above.  THROW regions are
3756 	 for optimization information only and produce no output.  */
3757       return collect_one_action_chain (ar_hash, region->outer);
3759     default:
3760       gcc_unreachable ();
/* Append a call-site record (LANDING_PAD, ACTION) to the function's
   call-site vector and return its global index (biased by
   call_site_base, which accumulates across functions).  */
3764 static int
3765 add_call_site (rtx landing_pad, int action)
3767   call_site_record record;
3769   record = GGC_NEW (struct call_site_record_d);
3770   record->landing_pad = landing_pad;
3771   record->action = action;
3773   VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3775   return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3778 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3779 The new note numbers will not refer to region numbers, but
3780 instead to call site entries. */
3782 unsigned int
3783 convert_to_eh_region_ranges (void)
3785   rtx insn, iter, note;
3786   htab_t ar_hash;
     /* -3 is the "no action seen yet" sentinel, matching the action-table
	conventions documented before struct action_record.  */
3787   int last_action = -3;
3788   rtx last_action_insn = NULL_RTX;
3789   rtx last_landing_pad = NULL_RTX;
3790   rtx first_no_action_insn = NULL_RTX;
3791   int call_site = 0;
3793   if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3794     return 0;
3796   VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3798   ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3800   for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3801     if (INSN_P (iter))
3803 	struct eh_region_d *region;
3804 	int this_action;
3805 	rtx this_landing_pad;
3807 	insn = iter;
	/* For a delay-slot SEQUENCE, the EH note lives on the first insn.  */
3808 	if (NONJUMP_INSN_P (insn)
3809 	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
3810 	  insn = XVECEXP (PATTERN (insn), 0, 0);
3812 	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3813 	if (!note)
	    /* Insns without a note only matter if they can throw at all.  */
3815 	    if (! (CALL_P (insn)
3816 		   || (flag_non_call_exceptions
3817 		       && may_trap_p (PATTERN (insn)))))
3818 	      continue;
3819 	    this_action = -1;
3820 	    region = NULL;
3822 	else
	    /* A non-positive region number means "cannot throw".  */
3824 	    if (INTVAL (XEXP (note, 0)) <= 0)
3825 	      continue;
3826 	    region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3827 	    this_action = collect_one_action_chain (ar_hash, region);
3830 	/* Existence of catch handlers, or must-not-throw regions
3831 	   implies that an lsda is needed (even if empty).  */
3832 	if (this_action != -1)
3833 	  crtl->uses_eh_lsda = 1;
3835 	/* Delay creation of region notes for no-action regions
3836 	   until we're sure that an lsda will be required.  */
3837 	else if (last_action == -3)
3839 	    first_no_action_insn = iter;
3840 	    last_action = -1;
3843 	/* Cleanups and handlers may share action chains but not
3844 	   landing pads.  Collect the landing pad for this region.  */
3845 	if (this_action >= 0)
3847 	    struct eh_region_d *o;
3848 	    for (o = region; ! o->landing_pad ; o = o->outer)
3849 	      continue;
3850 	    this_landing_pad = o->landing_pad;
3852 	else
3853 	  this_landing_pad = NULL_RTX;
3855 	/* Differing actions or landing pads implies a change in call-site
3856 	   info, which implies some EH_REGION note should be emitted.  */
3857 	if (last_action != this_action
3858 	    || last_landing_pad != this_landing_pad)
3860 	    /* If we'd not seen a previous action (-3) or the previous
3861 	       action was must-not-throw (-2), then we do not need an
3862 	       end note.  */
3863 	    if (last_action >= -1)
3865 		/* If we delayed the creation of the begin, do it now.  */
3866 		if (first_no_action_insn)
3868 		    call_site = add_call_site (NULL_RTX, 0);
3869 		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3870 					     first_no_action_insn);
3871 		    NOTE_EH_HANDLER (note) = call_site;
3872 		    first_no_action_insn = NULL_RTX;
3875 		note = emit_note_after (NOTE_INSN_EH_REGION_END,
3876 					last_action_insn);
3877 		NOTE_EH_HANDLER (note) = call_site;
3880 	    /* If the new action is must-not-throw, then no region notes
3881 	       are created.  */
3882 	    if (this_action >= -1)
3884 		call_site = add_call_site (this_landing_pad,
3885 					   this_action < 0 ? 0 : this_action);
3886 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3887 		NOTE_EH_HANDLER (note) = call_site;
3890 	    last_action = this_action;
3891 	    last_landing_pad = this_landing_pad;
3893 	last_action_insn = iter;
     /* Close the final open region, if any.  */
3896   if (last_action >= -1 && ! first_no_action_insn)
3898       note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3899       NOTE_EH_HANDLER (note) = call_site;
3902   htab_delete (ar_hash);
3903   return 0;
/* RTL pass descriptor for convert_to_eh_region_ranges above.  */
3906 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3909  RTL_PASS,
3910  "eh_ranges",                          /* name */
3911  NULL,                                 /* gate */
3912  convert_to_eh_region_ranges,          /* execute */
3913  NULL,                                 /* sub */
3914  NULL,                                 /* next */
3915  0,                                    /* static_pass_number */
3916  TV_NONE,                              /* tv_id */
3917  0,                                    /* properties_required */
3918  0,                                    /* properties_provided */
3919  0,                                    /* properties_destroyed */
3920  0,                                    /* todo_flags_start */
3921  TODO_dump_func,			/* todo_flags_finish */
3926 static void
3927 push_uleb128 (varray_type *data_area, unsigned int value)
3931 unsigned char byte = value & 0x7f;
3932 value >>= 7;
3933 if (value)
3934 byte |= 0x80;
3935 VARRAY_PUSH_UCHAR (*data_area, byte);
3937 while (value);
3940 static void
3941 push_sleb128 (varray_type *data_area, int value)
3943 unsigned char byte;
3944 int more;
3948 byte = value & 0x7f;
3949 value >>= 7;
3950 more = ! ((value == 0 && (byte & 0x40) == 0)
3951 || (value == -1 && (byte & 0x40) != 0));
3952 if (more)
3953 byte |= 0x80;
3954 VARRAY_PUSH_UCHAR (*data_area, byte);
3956 while (more);
3960 #ifndef HAVE_AS_LEB128
/* Byte size of the Dwarf2 call-site table: three 4-byte fields per
   entry (start, length, landing pad) plus a uleb128 action per entry.
   Only needed when the assembler lacks .uleb128 support.  */
3961 static int
3962 dw2_size_of_call_site_table (void)
3964   int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3965   int size = n * (4 + 4 + 4);
3966   int i;
3968   for (i = 0; i < n; ++i)
3970       struct call_site_record_d *cs =
3971 	VEC_index (call_site_record, crtl->eh.call_site_record, i);
3972       size += size_of_uleb128 (cs->action);
3975   return size;
/* Byte size of the setjmp/longjmp call-site table: per entry a uleb128
   landing-pad dispatch value plus a uleb128 action.  */
3978 static int
3979 sjlj_size_of_call_site_table (void)
3981   int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3982   int size = 0;
3983   int i;
3985   for (i = 0; i < n; ++i)
3987       struct call_site_record_d *cs =
3988 	VEC_index (call_site_record, crtl->eh.call_site_record, i);
3989       size += size_of_uleb128 (INTVAL (cs->landing_pad));
3990       size += size_of_uleb128 (cs->action);
3993   return size;
3995 #endif
/* Emit the Dwarf2-unwind call-site table into the exception section:
   for each record, the region start/length (relative to the function
   begin label), the landing pad, and the uleb128 action index.  */
3997 static void
3998 dw2_output_call_site_table (void)
4000   int n = VEC_length (call_site_record, crtl->eh.call_site_record);
4001   int i;
4003   for (i = 0; i < n; ++i)
4005       struct call_site_record_d *cs =
4006 	VEC_index (call_site_record, crtl->eh.call_site_record, i);
4007       char reg_start_lab[32];
4008       char reg_end_lab[32];
4009       char landing_pad_lab[32];
4011       ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
4012       ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
4014       if (cs->landing_pad)
4015 	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
4016 				     CODE_LABEL_NUMBER (cs->landing_pad));
4018       /* ??? Perhaps use insn length scaling if the assembler supports
4019 	 generic arithmetic.  */
4020       /* ??? Perhaps use attr_length to choose data1 or data2 instead of
4021 	 data4 if the function is small enough.  */
4022 #ifdef HAVE_AS_LEB128
4023       dw2_asm_output_delta_uleb128 (reg_start_lab,
4024 				    current_function_func_begin_label,
4025 				    "region %d start", i);
4026       dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
4027 				    "length");
4028       if (cs->landing_pad)
4029 	dw2_asm_output_delta_uleb128 (landing_pad_lab,
4030 				      current_function_func_begin_label,
4031 				      "landing pad");
4032       else
4033 	dw2_asm_output_data_uleb128 (0, "landing pad");
4034 #else
      /* No .uleb128 support: fall back to fixed 4-byte fields.  */
4035       dw2_asm_output_delta (4, reg_start_lab,
4036 			    current_function_func_begin_label,
4037 			    "region %d start", i);
4038       dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
4039       if (cs->landing_pad)
4040 	dw2_asm_output_delta (4, landing_pad_lab,
4041 			      current_function_func_begin_label,
4042 			      "landing pad");
4043       else
4044 	dw2_asm_output_data (4, 0, "landing pad");
4045 #endif
4046       dw2_asm_output_data_uleb128 (cs->action, "action");
     /* Keep labels unique across functions in the same unit.  */
4049   call_site_base += n;
/* Emit the setjmp/longjmp call-site table: per record, the dispatch
   value and the uleb128 action index.  */
4052 static void
4053 sjlj_output_call_site_table (void)
4055   int n = VEC_length (call_site_record, crtl->eh.call_site_record);
4056   int i;
4058   for (i = 0; i < n; ++i)
4060       struct call_site_record_d *cs =
4061 	VEC_index (call_site_record, crtl->eh.call_site_record, i);
4063       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
4064 				   "region %d landing pad", i);
4065       dw2_asm_output_data_uleb128 (cs->action, "action");
4068   call_site_base += n;
4071 #ifndef TARGET_UNWIND_INFO
4072 /* Switch to the section that should be used for exception tables. */
4074 static void
4075 switch_to_exception_section (const char * ARG_UNUSED (fnname))
4077   section *s;
4079   if (exception_section)
4080     s = exception_section;
4081   else
4083       /* Compute the section and cache it into exception_section,
4084 	 unless it depends on the function name.  */
4085       if (targetm.have_named_sections)
4087 	  int flags;
4089 	  if (EH_TABLES_CAN_BE_READ_ONLY)
	      /* Read-only only works if the ttype encoding needs no
		 runtime relocation under PIC.  */
4091 	      int tt_format =
4092 		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4093 	      flags = ((! flag_pic
4094 			|| ((tt_format & 0x70) != DW_EH_PE_absptr
4095 			    && (tt_format & 0x70) != DW_EH_PE_aligned))
4096 		       ? 0 : SECTION_WRITE);
4098 	  else
4099 	    flags = SECTION_WRITE;
4101 #ifdef HAVE_LD_EH_GC_SECTIONS
	  /* Per-function sections allow the linker to GC unused tables;
	     these are not cached since they depend on FNNAME.  */
4102 	  if (flag_function_sections)
4104 	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
4105 	      sprintf (section_name, ".gcc_except_table.%s", fnname);
4106 	      s = get_section (section_name, flags, NULL);
4107 	      free (section_name);
4109 	  else
4110 #endif
4111 	    exception_section
4112 	      = s = get_section (".gcc_except_table", flags, NULL);
4114       else
4115 	exception_section
4116 	  = s = flag_pic ? data_section : readonly_data_section;
4119   switch_to_section (s);
4121 #endif
4124 /* Output a reference from an exception table to the type_info object TYPE.
4125 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
4126 the value. */
4128 static void
4129 output_ttype (tree type, int tt_format, int tt_format_size)
4131   rtx value;
4132   bool is_public = true;
4134   if (type == NULL_TREE)
4135     value = const0_rtx;
4136   else
4138       struct varpool_node *node;
4140       type = lookup_type_for_runtime (type);
4141       value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4143       /* Let cgraph know that the rtti decl is used.  Not all of the
4144 	 paths below go through assemble_integer, which would take
4145 	 care of this for us.  */
4146       STRIP_NOPS (type);
4147       if (TREE_CODE (type) == ADDR_EXPR)
4149 	  type = TREE_OPERAND (type, 0);
4150 	  if (TREE_CODE (type) == VAR_DECL)
4152 	      node = varpool_node (type);
4153 	      if (node)
4154 		varpool_mark_needed_node (node);
4155 	      is_public = TREE_PUBLIC (type);
4158       else
4159 	gcc_assert (TREE_CODE (type) == INTEGER_CST);
4162   /* Allow the target to override the type table entry format.  */
4163   if (targetm.asm_out.ttype (value))
4164     return;
4166   if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
4167     assemble_integer (value, tt_format_size,
4168 		      tt_format_size * BITS_PER_UNIT, 1);
4169   else
4170     dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
/* Emit the current function's LSDA (language-specific data area):
   header, call-site table, action table, @TType table and exception
   specification data.  FNNAME is used only for per-function sections.  */
4173 void
4174 output_function_exception_table (const char * ARG_UNUSED (fnname))
4176   int tt_format, cs_format, lp_format, i, n;
4177 #ifdef HAVE_AS_LEB128
4178   char ttype_label[32];
4179   char cs_after_size_label[32];
4180   char cs_end_label[32];
4181 #else
4182   int call_site_len;
4183 #endif
4184   int have_tt_data;
4185   int tt_format_size = 0;
4187   /* Not all functions need anything.  */
4188   if (! crtl->uses_eh_lsda)
4189     return;
4191   if (eh_personality_libfunc)
4192     assemble_external_libcall (eh_personality_libfunc);
4194 #ifdef TARGET_UNWIND_INFO
4195   /* TODO: Move this into target file.  */
4196   fputs ("\t.personality\t", asm_out_file);
4197   output_addr_const (asm_out_file, eh_personality_libfunc);
4198   fputs ("\n\t.handlerdata\n", asm_out_file);
4199   /* Note that varasm still thinks we're in the function's code section.
4200      The ".endp" directive that will immediately follow will take us back.  */
4201 #else
4202   switch_to_exception_section (fnname);
4203 #endif
4205   /* If the target wants a label to begin the table, emit it here.  */
4206   targetm.asm_out.except_table_label (asm_out_file);
4208   have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
4209 		  || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
4211   /* Indicate the format of the @TType entries.  */
4212   if (! have_tt_data)
4213     tt_format = DW_EH_PE_omit;
4214   else
4216       tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4217 #ifdef HAVE_AS_LEB128
4218       ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
4219 				   current_function_funcdef_no);
4220 #endif
4221       tt_format_size = size_of_encoded_value (tt_format);
4223       assemble_align (tt_format_size * BITS_PER_UNIT);
4226   targetm.asm_out.internal_label (asm_out_file, "LLSDA",
4227 				  current_function_funcdef_no);
4229   /* The LSDA header.  */
4231   /* Indicate the format of the landing pad start pointer.  An omitted
4232      field implies @LPStart == @Start.  */
4233   /* Currently we always put @LPStart == @Start.  This field would
4234      be most useful in moving the landing pads completely out of
4235      line to another section, but it could also be used to minimize
4236      the size of uleb128 landing pad offsets.  */
4237   lp_format = DW_EH_PE_omit;
4238   dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
4239 		       eh_data_format_name (lp_format));
4241   /* @LPStart pointer would go here.  */
4243   dw2_asm_output_data (1, tt_format, "@TType format (%s)",
4244 		       eh_data_format_name (tt_format));
4246 #ifndef HAVE_AS_LEB128
4247   if (USING_SJLJ_EXCEPTIONS)
4248     call_site_len = sjlj_size_of_call_site_table ();
4249   else
4250     call_site_len = dw2_size_of_call_site_table ();
4251 #endif
4253   /* A pc-relative 4-byte displacement to the @TType data.  */
4254   if (have_tt_data)
4256 #ifdef HAVE_AS_LEB128
4257       char ttype_after_disp_label[32];
4258       ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
4259 				   current_function_funcdef_no);
4260       dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
4261 				    "@TType base offset");
4262       ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
4263 #else
4264       /* Ug.  Alignment queers things.  */
4265       unsigned int before_disp, after_disp, last_disp, disp;
4267       before_disp = 1 + 1;
4268       after_disp = (1 + size_of_uleb128 (call_site_len)
4269 		    + call_site_len
4270 		    + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
4271 		    + (VEC_length (tree, crtl->eh.ttype_data)
4272 		       * tt_format_size));
4274       disp = after_disp;
      /* Iterate: the size of the uleb128 displacement changes the padding
	 needed, which changes the displacement; loop until stable.  */
4277 	  unsigned int disp_size, pad;
4279 	  last_disp = disp;
4280 	  disp_size = size_of_uleb128 (disp);
4281 	  pad = before_disp + disp_size + after_disp;
4282 	  if (pad % tt_format_size)
4283 	    pad = tt_format_size - (pad % tt_format_size);
4284 	  else
4285 	    pad = 0;
4286 	  disp = after_disp + pad;
4288       while (disp != last_disp);
4290       dw2_asm_output_data_uleb128 (disp, "@TType base offset");
4291 #endif
4294   /* Indicate the format of the call-site offsets.  */
4295 #ifdef HAVE_AS_LEB128
4296   cs_format = DW_EH_PE_uleb128;
4297 #else
4298   cs_format = DW_EH_PE_udata4;
4299 #endif
4300   dw2_asm_output_data (1, cs_format, "call-site format (%s)",
4301 		       eh_data_format_name (cs_format));
4303 #ifdef HAVE_AS_LEB128
4304   ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
4305 			       current_function_funcdef_no);
4306   ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
4307 			       current_function_funcdef_no);
4308   dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
4309 				"Call-site table length");
4310   ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
4311   if (USING_SJLJ_EXCEPTIONS)
4312     sjlj_output_call_site_table ();
4313   else
4314     dw2_output_call_site_table ();
4315   ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
4316 #else
4317   dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
4318   if (USING_SJLJ_EXCEPTIONS)
4319     sjlj_output_call_site_table ();
4320   else
4321     dw2_output_call_site_table ();
4322 #endif
4324   /* ??? Decode and interpret the data for flag_debug_asm.  */
4325   n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
4326   for (i = 0; i < n; ++i)
4327     dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
4328 			 (i ? NULL : "Action record table"));
4330   if (have_tt_data)
4331     assemble_align (tt_format_size * BITS_PER_UNIT);
     /* The @TType table is emitted in reverse, as filters index it
	backwards from ttype_label.  */
4333   i = VEC_length (tree, crtl->eh.ttype_data);
4334   while (i-- > 0)
4336       tree type = VEC_index (tree, crtl->eh.ttype_data, i);
4337       output_ttype (type, tt_format, tt_format_size);
4340 #ifdef HAVE_AS_LEB128
4341   if (have_tt_data)
4342     ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
4343 #endif
4345   /* ??? Decode and interpret the data for flag_debug_asm.  */
4346   n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
4347   for (i = 0; i < n; ++i)
4349       if (targetm.arm_eabi_unwinder)
	  /* ARM EABI stores trees here rather than raw bytes.  */
4351 	  tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
4352 	  output_ttype (type, tt_format, tt_format_size);
4354       else
4355 	dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
4356 			     (i ? NULL : "Exception specification table"));
4359   switch_to_section (current_function_section ());
/* Setter for FUN's statement -> EH region table.  */
4362 void
4363 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
4365   fun->eh->throw_stmt_table = table;
/* Getter for FUN's statement -> EH region table.  */
4368 htab_t
4369 get_eh_throw_stmt_table (struct function *fun)
4371   return fun->eh->throw_stmt_table;
4374 /* Dump EH information to OUT. */
4376 void
4377 dump_eh_tree (FILE * out, struct function *fun)
4379   struct eh_region_d *i;
4380   int depth = 0;
     /* Indexed by enum eh_region_type; order must match that enum.  */
4381   static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
4382 					   "allowed_exceptions", "must_not_throw",
4383 					   "throw"
4386   i = fun->eh->region_tree;
4387   if (!i)
4388     return;
4390   fprintf (out, "Eh tree:\n");
     /* Iterative pre-order walk over the region tree; the loop tail
	below handles descent, siblings and backtracking.  */
4391   while (1)
4393       fprintf (out, "  %*s %i %s", depth * 2, "",
4394 	       i->region_number, type_name[(int) i->type]);
4395       if (i->tree_label)
4397 	  fprintf (out, " tree_label:");
4398 	  print_generic_expr (out, i->tree_label, 0);
4400       if (i->label)
4401 	fprintf (out, " label:%i", INSN_UID (i->label));
4402       if (i->landing_pad)
4404 	  fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
	  /* A NOTE here means the insn was deleted.  */
4405 	  if (GET_CODE (i->landing_pad) == NOTE)
4406 	    fprintf (out, " (deleted)");
4408       if (i->post_landing_pad)
4410 	  fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
4411 	  if (GET_CODE (i->post_landing_pad) == NOTE)
4412 	    fprintf (out, " (deleted)");
4414       if (i->resume)
4416 	  fprintf (out, " resume:%i", INSN_UID (i->resume));
4417 	  if (GET_CODE (i->resume) == NOTE)
4418 	    fprintf (out, " (deleted)");
4420       if (i->may_contain_throw)
4421 	fprintf (out, " may_contain_throw");
4422       switch (i->type)
4424 	case ERT_CLEANUP:
4425 	  break;
4427 	case ERT_TRY:
4429 	    struct eh_region_d *c;
4430 	    fprintf (out, " catch regions:");
4431 	    for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4432 	      fprintf (out, " %i", c->region_number);
4434 	  break;
4436 	case ERT_CATCH:
4437 	  if (i->u.eh_catch.prev_catch)
4438 	    fprintf (out, " prev: %i",
4439 		     i->u.eh_catch.prev_catch->region_number);
4440 	  if (i->u.eh_catch.next_catch)
4441 	    fprintf (out, " next %i",
4442 		     i->u.eh_catch.next_catch->region_number);
4443 	  fprintf (out, " type:");
4444 	  print_generic_expr (out, i->u.eh_catch.type_list, 0);
4445 	  break;
4447 	case ERT_ALLOWED_EXCEPTIONS:
4448 	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
4449 	  print_generic_expr (out, i->u.allowed.type_list, 0);
4450 	  break;
4452 	case ERT_THROW:
4453 	  fprintf (out, " type:");
4454 	  print_generic_expr (out, i->u.eh_throw.type, 0);
4455 	  break;
4457 	case ERT_MUST_NOT_THROW:
4458 	  break;
4460 	case ERT_UNKNOWN:
4461 	  break;
4463       if (i->aka)
4465 	  fprintf (out, " also known as:");
4466 	  dump_bitmap (out, i->aka);
4468       else
4469 	fprintf (out, "\n");
4470       /* If there are sub-regions, process them.  */
4471       if (i->inner)
4472 	i = i->inner, depth++;
4473       /* If there are peers, process them.  */
4474       else if (i->next_peer)
4475 	i = i->next_peer;
4476       /* Otherwise, step back up the tree to the next peer.  */
4477       else
4481 	      i = i->outer;
4482 	      depth--;
4483 	      if (i == NULL)
4484 		return;
4486 	  while (i->next_peer == NULL);
4487 	  i = i->next_peer;
4492 /* Dump the EH tree for FN on stderr. */
/* Convenience wrapper for calling dump_eh_tree from a debugger.  */
4494 void
4495 debug_eh_tree (struct function *fn)
4497   dump_eh_tree (stderr, fn);
4501 /* Verify EH region invariants. */
/* Check structural invariants of REGION and (recursively) its children:
   try/catch list linkage and shared outer pointers.  Reports problems
   with error () and returns true if any were found.  */
4503 static bool
4504 verify_eh_region (struct eh_region_d *region)
4506   bool found = false;
4507   if (!region)
4508     return false;
4509   switch (region->type)
4511     case ERT_TRY:
4513 	struct eh_region_d *c, *prev = NULL;
	/* First catch in the list must have no predecessor.  */
4514 	if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
4516 	    error ("Try region %i has wrong rh_catch pointer to %i",
4517 		   region->region_number,
4518 		   region->u.eh_try.eh_catch->region_number);
4519 	    found = true;
4521 	for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4523 	    if (c->outer != region->outer)
4525 		error
4526 		  ("Catch region %i has different outer region than try region %i",
4527 		   c->region_number, region->region_number);
4528 		found = true;
	    /* prev_catch must mirror the next_catch chain.  */
4530 	    if (c->u.eh_catch.prev_catch != prev)
4532 		error ("Catch region %i has corrupted catchlist",
4533 		       c->region_number);
4534 		found = true;
4536 	    prev = c;
4538 	if (prev != region->u.eh_try.last_catch)
4540 	    error
4541 	      ("Try region %i has wrong last_catch pointer to %i instead of %i",
4542 	       region->region_number,
4543 	       region->u.eh_try.last_catch->region_number,
4544 	       prev->region_number);
4545 	    found = true;
4548       break;
4549     case ERT_CATCH:
     /* The head catch of a chain must be immediately followed (as a
	peer) by its TRY region.  */
4550       if (!region->u.eh_catch.prev_catch
4551 	  && (!region->next_peer || region->next_peer->type != ERT_TRY))
4553 	  error ("Catch region %i should be followed by try", region->region_number);
4554 	  found = true;
4556       break;
4557     case ERT_CLEANUP:
4558     case ERT_ALLOWED_EXCEPTIONS:
4559     case ERT_MUST_NOT_THROW:
4560     case ERT_THROW:
4561       break;
4562     case ERT_UNKNOWN:
4563       gcc_unreachable ();
4565   for (region = region->inner; region; region = region->next_peer)
4566     found |= verify_eh_region (region);
4567   return found;
4570 /* Verify invariants on EH datastructures. */
4572 void
4573 verify_eh_tree (struct function *fun)
4575   struct eh_region_d *i, *outer = NULL;
4576   bool err = false;
4577   int nvisited = 0;
4578   int count = 0;
4579   int j;
4580   int depth = 0;
4582   if (!fun->eh->region_tree)
4583     return;
     /* Pass 1: region_array entries must map back to their own number,
	or be an alias recorded in the region's AKA bitmap.  */
4584   for (j = fun->eh->last_region_number; j > 0; --j)
4585     if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4587 	if (i->region_number == j)
4588 	  count++;
4589 	if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4591 	    error ("region_array is corrupted for region %i",
4592 		   i->region_number);
4593 	    err = true;
     /* Pass 2: iterative pre-order walk of the tree, checking array
	consistency, outer links, throw propagation and nesting depth.  */
4596   i = fun->eh->region_tree;
4598   while (1)
4600       if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4602 	  error ("region_array is corrupted for region %i", i->region_number);
4603 	  err = true;
4605       if (i->outer != outer)
4607 	  error ("outer block of region %i is wrong", i->region_number);
4608 	  err = true;
4610       if (i->may_contain_throw && outer && !outer->may_contain_throw)
4612 	  error
4613 	    ("region %i may contain throw and is contained in region that may not",
4614 	     i->region_number);
4615 	  err = true;
4617       if (depth < 0)
4619 	  error ("negative nesting depth of region %i", i->region_number);
4620 	  err = true;
4622       nvisited ++;
4623       /* If there are sub-regions, process them.  */
4624       if (i->inner)
4625 	outer = i, i = i->inner, depth++;
4626       /* If there are peers, process them.  */
4627       else if (i->next_peer)
4628 	i = i->next_peer;
4629       /* Otherwise, step back up the tree to the next peer.  */
4630       else
4634 	      i = i->outer;
4635 	      depth--;
	      /* Walk finished: run the final whole-tree checks before
		 returning.  */
4636 	      if (i == NULL)
4638 		  if (depth != -1)
4640 		      error ("tree list ends on depth %i", depth + 1);
4641 		      err = true;
4643 		  if (count != nvisited)
4645 		      error ("array does not match the region tree");
4646 		      err = true;
4648 		  if (!err)
4649 		    for (i = fun->eh->region_tree; i; i = i->next_peer)
4650 		      err |= verify_eh_region (i);
4652 		  if (err)
4654 		      dump_eh_tree (stderr, fun);
4655 		      internal_error ("verify_eh_tree failed");
4657 		  return;
4659 	      outer = i->outer;
4661 	  while (i->next_peer == NULL);
4662 	  i = i->next_peer;
4667 /* Initialize unwind_resume_libfunc. */
4669 void
4670 default_init_unwind_resume_libfunc (void)
4672   /* The default c++ routines aren't actually c++ specific, so use those.  */
4673   unwind_resume_libfunc =
4674     init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4675 					       : "_Unwind_Resume");
/* Gate for pass_rtl_eh: only run when EH is being generated at all.  */
4679 static bool
4680 gate_handle_eh (void)
4682   return doing_eh (0);
4685 /* Complete generation of exception handling code. */
4686 static unsigned int
4687 rest_of_handle_eh (void)
4689   finish_eh_generation ();
     /* Landing-pad generation may have created dead blocks/edges; clean
	up without deleting insns.  */
4690   cleanup_cfg (CLEANUP_NO_INSN_DEL);
4691   return 0;
/* RTL pass descriptor tying gate_handle_eh / rest_of_handle_eh into the
   pass manager.  */
4694 struct rtl_opt_pass pass_rtl_eh =
4697  RTL_PASS,
4698  "eh",                                 /* name */
4699  gate_handle_eh,                       /* gate */
4700  rest_of_handle_eh,		        /* execute */
4701  NULL,                                 /* sub */
4702  NULL,                                 /* next */
4703  0,                                    /* static_pass_number */
4704  TV_JUMP,                              /* tv_id */
4705  0,                                    /* properties_required */
4706  0,                                    /* properties_provided */
4707  0,                                    /* properties_destroyed */
4708  0,                                    /* todo_flags_start */
4709  TODO_dump_func                        /* todo_flags_finish */
4713 #include "gt-except.h"