1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
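/* For illustration only (not part of the compiler): a minimal C++-style
   sketch of what "throwing an exception up the call stack" means; every
   name below is hypothetical.

	struct construction_error {};

	void build_object ()
	{
	  if (something_failed)
	    throw construction_error ();   (control leaves build_object)
	}

	void caller ()
	{
	  try { build_object (); }
	  catch (construction_error) { }   (and lands here)
	}

   This file implements the bookkeeping, such as region trees, landing pads
   and call-site tables, that makes such non-local transfers of control
   work.  */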
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "elf/dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
79 #include "timevar.h"
80 #include "tree-flow.h"
82 /* Provide defaults for stuff that may not be defined when using
83 sjlj exceptions. */
84 #ifndef EH_RETURN_DATA_REGNO
85 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
86 #endif
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 gimple (*lang_protect_cleanup_actions) (void);
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) (tree a, tree b);
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) (tree);
98 /* A hash table of label to region number. */
100 struct GTY(()) ehl_map_entry {
101 rtx label;
102 struct eh_region_d *region;
105 static GTY(()) int call_site_base;
106 static GTY ((param_is (union tree_node)))
107 htab_t type_to_runtime_map;
109 /* Describe the SjLj_Function_Context structure. */
110 static GTY(()) tree sjlj_fc_type_node;
111 static int sjlj_fc_call_site_ofs;
112 static int sjlj_fc_data_ofs;
113 static int sjlj_fc_personality_ofs;
114 static int sjlj_fc_lsda_ofs;
115 static int sjlj_fc_jbuf_ofs;
118 struct GTY(()) call_site_record_d
120 rtx landing_pad;
121 int action;
124 static int t2r_eq (const void *, const void *);
125 static hashval_t t2r_hash (const void *);
127 static int ttypes_filter_eq (const void *, const void *);
128 static hashval_t ttypes_filter_hash (const void *);
129 static int ehspec_filter_eq (const void *, const void *);
130 static hashval_t ehspec_filter_hash (const void *);
131 static int add_ttypes_entry (htab_t, tree);
132 static int add_ehspec_entry (htab_t, htab_t, tree);
133 static void assign_filter_values (void);
134 static void build_post_landing_pads (void);
135 static void connect_post_landing_pads (void);
136 static void dw2_build_landing_pads (void);
138 struct sjlj_lp_info;
139 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
140 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
141 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
142 static void sjlj_emit_function_enter (rtx);
143 static void sjlj_emit_function_exit (void);
144 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
145 static void sjlj_build_landing_pads (void);
147 static void remove_eh_handler (struct eh_region_d *);
148 static void remove_eh_handler_and_replace (struct eh_region_d *,
149 struct eh_region_d *, bool);
151 /* The return value of reachable_next_level. */
152 enum reachable_code
154 /* The given exception is not processed by the given region. */
155 RNL_NOT_CAUGHT,
156 /* The given exception may need processing by the given region. */
157 RNL_MAYBE_CAUGHT,
158 /* The given exception is completely processed by the given region. */
159 RNL_CAUGHT,
160 /* The given exception is completely processed by the runtime. */
161 RNL_BLOCKED
164 struct reachable_info;
165 static enum reachable_code reachable_next_level (struct eh_region_d *, tree,
166 struct reachable_info *, bool);
168 static int action_record_eq (const void *, const void *);
169 static hashval_t action_record_hash (const void *);
170 static int add_action_record (htab_t, int, int);
171 static int collect_one_action_chain (htab_t, struct eh_region_d *);
172 static int add_call_site (rtx, int);
174 static void push_uleb128 (varray_type *, unsigned int);
175 static void push_sleb128 (varray_type *, int);
176 #ifndef HAVE_AS_LEB128
177 static int dw2_size_of_call_site_table (void);
178 static int sjlj_size_of_call_site_table (void);
179 #endif
180 static void dw2_output_call_site_table (void);
181 static void sjlj_output_call_site_table (void);
184 /* Routine to see if exception handling is turned on.
185 DO_WARN is nonzero if we want to inform the user that exception
186 handling is turned off.
188 This is used to ensure that -fexceptions has been specified if the
189 compiler tries to use any exception-specific functions. */
191 int
192 doing_eh (int do_warn)
194 if (! flag_exceptions)
196 static int warned = 0;
197 if (! warned && do_warn)
199 error ("exception handling disabled, use -fexceptions to enable");
200 warned = 1;
202 return 0;
204 return 1;
208 void
209 init_eh (void)
211 if (! flag_exceptions)
212 return;
214 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
216 /* Create the SjLj_Function_Context structure. This should match
217 the definition in unwind-sjlj.c. */
218 if (USING_SJLJ_EXCEPTIONS)
220 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
222 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
224 f_prev = build_decl (BUILTINS_LOCATION,
225 FIELD_DECL, get_identifier ("__prev"),
226 build_pointer_type (sjlj_fc_type_node));
227 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
229 f_cs = build_decl (BUILTINS_LOCATION,
230 FIELD_DECL, get_identifier ("__call_site"),
231 integer_type_node);
232 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
234 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
235 tmp = build_array_type (lang_hooks.types.type_for_mode
236 (targetm.unwind_word_mode (), 1),
237 tmp);
238 f_data = build_decl (BUILTINS_LOCATION,
239 FIELD_DECL, get_identifier ("__data"), tmp);
240 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
242 f_per = build_decl (BUILTINS_LOCATION,
243 FIELD_DECL, get_identifier ("__personality"),
244 ptr_type_node);
245 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
247 f_lsda = build_decl (BUILTINS_LOCATION,
248 FIELD_DECL, get_identifier ("__lsda"),
249 ptr_type_node);
250 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
252 #ifdef DONT_USE_BUILTIN_SETJMP
253 #ifdef JMP_BUF_SIZE
254 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
255 #else
256 /* This should be large enough for most systems; if it is not,
257 JMP_BUF_SIZE should be defined with the proper value. It will
258 also tend to be larger than necessary for most systems; a more
259 optimal port will define JMP_BUF_SIZE. */
260 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
261 #endif
262 #else
263 /* builtin_setjmp takes a pointer to 5 words. */
264 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
265 #endif
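/* A worked example of the arithmetic above, assuming a 64-bit target with
   BITS_PER_WORD == 64 and POINTER_SIZE == 64: 5 * 64 / 64 - 1 == 4, so the
   index type runs 0..4 and __jbuf holds the five words that
   __builtin_setjmp expects.  */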
266 tmp = build_index_type (tmp);
267 tmp = build_array_type (ptr_type_node, tmp);
268 f_jbuf = build_decl (BUILTINS_LOCATION,
269 FIELD_DECL, get_identifier ("__jbuf"), tmp);
270 #ifdef DONT_USE_BUILTIN_SETJMP
271 /* We don't know what alignment requirements the runtime's
272 jmp_buf has. Overestimate. */
273 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
274 DECL_USER_ALIGN (f_jbuf) = 1;
275 #endif
276 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
278 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
279 TREE_CHAIN (f_prev) = f_cs;
280 TREE_CHAIN (f_cs) = f_data;
281 TREE_CHAIN (f_data) = f_per;
282 TREE_CHAIN (f_per) = f_lsda;
283 TREE_CHAIN (f_lsda) = f_jbuf;
285 layout_type (sjlj_fc_type_node);
287 /* Cache the interesting field offsets so that we have
288 easy access from rtl. */
289 sjlj_fc_call_site_ofs
290 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
291 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
292 sjlj_fc_data_ofs
293 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
294 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
295 sjlj_fc_personality_ofs
296 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
297 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
298 sjlj_fc_lsda_ofs
299 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
300 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
301 sjlj_fc_jbuf_ofs
302 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
303 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
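/* For reference, a rough C sketch of the layout being mirrored here; the
   authoritative definition lives in unwind-sjlj.c and the exact field
   types below are approximations:

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;      (__prev)
	  int call_site;                           (__call_site)
	  _Unwind_Word data[4];                    (__data)
	  void *personality;                       (__personality)
	  void *lsda;                              (__lsda)
	  void *jbuf[];                            (__jbuf; size varies by target)
	};

   The sjlj_fc_*_ofs values computed above are the byte offsets of these
   fields, used later when emitting RTL that reads and writes the
   context.  */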
307 void
308 init_eh_for_function (void)
310 cfun->eh = GGC_CNEW (struct eh_status);
313 /* Routines to generate the exception tree somewhat directly.
314 These are used from tree-eh.c when processing exception related
315 nodes during tree optimization. */
317 static struct eh_region_d *
318 gen_eh_region (enum eh_region_type type, struct eh_region_d *outer)
320 struct eh_region_d *new_eh;
322 #ifdef ENABLE_CHECKING
323 gcc_assert (doing_eh (0));
324 #endif
326 /* Insert a new blank region as a leaf in the tree. */
327 new_eh = GGC_CNEW (struct eh_region_d);
328 new_eh->type = type;
329 new_eh->outer = outer;
330 if (outer)
332 new_eh->next_peer = outer->inner;
333 outer->inner = new_eh;
335 else
337 new_eh->next_peer = cfun->eh->region_tree;
338 cfun->eh->region_tree = new_eh;
341 new_eh->region_number = ++cfun->eh->last_region_number;
343 return new_eh;
346 struct eh_region_d *
347 gen_eh_region_cleanup (struct eh_region_d *outer)
349 struct eh_region_d *cleanup = gen_eh_region (ERT_CLEANUP, outer);
350 return cleanup;
353 struct eh_region_d *
354 gen_eh_region_try (struct eh_region_d *outer)
356 return gen_eh_region (ERT_TRY, outer);
359 struct eh_region_d *
360 gen_eh_region_catch (struct eh_region_d *t, tree type_or_list)
362 struct eh_region_d *c, *l;
363 tree type_list, type_node;
365 /* Make sure we always end up with a type list, to normalize further
366 processing; then register each type against the runtime types map. */
367 type_list = type_or_list;
368 if (type_or_list)
370 if (TREE_CODE (type_or_list) != TREE_LIST)
371 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
373 type_node = type_list;
374 for (; type_node; type_node = TREE_CHAIN (type_node))
375 add_type_for_runtime (TREE_VALUE (type_node));
378 c = gen_eh_region (ERT_CATCH, t->outer);
379 c->u.eh_catch.type_list = type_list;
380 l = t->u.eh_try.last_catch;
381 c->u.eh_catch.prev_catch = l;
382 if (l)
383 l->u.eh_catch.next_catch = c;
384 else
385 t->u.eh_try.eh_catch = c;
386 t->u.eh_try.last_catch = c;
388 return c;
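/* For illustration only, a hedged sketch of how a front end might use the
   generators above to build the region tree for
	try { ... } catch (TypeA) { ... } catch (TypeB) { ... }
   where `outer', `type_a' and `type_b' are placeholders:

	struct eh_region_d *t = gen_eh_region_try (outer);
	struct eh_region_d *a = gen_eh_region_catch (t, type_a);
	struct eh_region_d *b = gen_eh_region_catch (t, type_b);

   Each catch is appended through u.eh_try.last_catch, so the
   prev_catch/next_catch links preserve source order, which matters for the
   sequential type tests emitted later in build_post_landing_pads.  */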
391 struct eh_region_d *
392 gen_eh_region_allowed (struct eh_region_d *outer, tree allowed)
394 struct eh_region_d *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
395 region->u.allowed.type_list = allowed;
397 for (; allowed ; allowed = TREE_CHAIN (allowed))
398 add_type_for_runtime (TREE_VALUE (allowed));
400 return region;
403 struct eh_region_d *
404 gen_eh_region_must_not_throw (struct eh_region_d *outer)
406 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
409 int
410 get_eh_region_number (struct eh_region_d *region)
412 return region->region_number;
415 bool
416 get_eh_region_may_contain_throw (struct eh_region_d *region)
418 return region->may_contain_throw;
421 tree
422 get_eh_region_tree_label (struct eh_region_d *region)
424 return region->tree_label;
427 tree
428 get_eh_region_no_tree_label (int region)
430 return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
433 void
434 set_eh_region_tree_label (struct eh_region_d *region, tree lab)
436 region->tree_label = lab;
439 void
440 expand_resx_expr (tree exp)
442 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
443 struct eh_region_d *reg = VEC_index (eh_region,
444 cfun->eh->region_array, region_nr);
446 gcc_assert (!reg->resume);
447 do_pending_stack_adjust ();
448 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
449 emit_barrier ();
452 /* Note that the current EH region (if any) may contain a throw, or a
453 call to a function which itself may contain a throw. */
455 void
456 note_eh_region_may_contain_throw (struct eh_region_d *region)
458 while (region && !region->may_contain_throw)
460 region->may_contain_throw = 1;
461 region = region->outer;
466 /* Return an rtl expression for a pointer to the exception object
467 within a handler. */
469 rtx
470 get_exception_pointer (void)
472 if (! crtl->eh.exc_ptr)
473 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
474 return crtl->eh.exc_ptr;
477 /* Return an rtl expression for the exception dispatch filter
478 within a handler. */
480 rtx
481 get_exception_filter (void)
483 if (! crtl->eh.filter)
484 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
485 return crtl->eh.filter;
488 /* This section is for the exception handling specific optimization pass. */
490 /* Random access the exception region tree. */
492 void
493 collect_eh_region_array (void)
495 struct eh_region_d *i;
497 i = cfun->eh->region_tree;
498 if (! i)
499 return;
501 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
502 cfun->eh->last_region_number + 1);
503 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
505 while (1)
507 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
509 /* If there are sub-regions, process them. */
510 if (i->inner)
511 i = i->inner;
512 /* If there are peers, process them. */
513 else if (i->next_peer)
514 i = i->next_peer;
515 /* Otherwise, step back up the tree to the next peer. */
516 else
518 do {
519 i = i->outer;
520 if (i == NULL)
521 return;
522 } while (i->next_peer == NULL);
523 i = i->next_peer;
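/* For intuition (not in the original source): the walk above is an
   iterative preorder traversal. Visit a region, descend into ->inner if
   present, otherwise advance to ->next_peer, and when both are exhausted
   climb ->outer until a peer is found. For a tree where region 1 has
   children 2 and 3, and 2 has child 4:

	1
	  2
	    4
	  3

   the visit order is 1, 2, 4, 3. The same idiom recurs several times in
   this file (e.g. must_not_throw_labels below).  */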
528 /* R is a MUST_NOT_THROW region that is not reachable via local
529 RESX instructions. It still must be kept in the tree in case the runtime
530 can unwind through it, or we would eliminate the terminate call the
531 runtime would otherwise make. Return TRUE if R contains throwing statements
532 or some of the exceptions in inner regions can be unwound up to R.
534 CONTAINS_STMT is a bitmap of all regions that contain some throwing
535 statements.
537 The function looks O(n^3) at first sight. In fact, it is called at most
538 once for every MUST_NOT_THROW in the EH tree from remove_unreachable_regions.
539 Because the outer loop walking subregions does not dive into MUST_NOT_THROW,
540 the outer loop examines every region at most once. The inner loop
541 does unwinding from the throwing statement the same way as we do during
542 CFG construction, so it is O(n^2) in the size of the EH tree, but O(n) in the
543 size of the CFG. In practice EH trees are wide, not deep, so this is not
544 a problem. */
546 static bool
547 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region_d *r)
549 struct eh_region_d *i = r->inner;
550 unsigned n;
551 bitmap_iterator bi;
553 if (TEST_BIT (contains_stmt, r->region_number))
554 return true;
555 if (r->aka)
556 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
557 if (TEST_BIT (contains_stmt, n))
558 return true;
559 if (!i)
560 return false;
561 while (1)
563 /* It is pointless to look into MUST_NOT_THROW
564 or dive into subregions. They never unwind up. */
565 if (i->type != ERT_MUST_NOT_THROW)
567 bool found = TEST_BIT (contains_stmt, i->region_number);
568 if (!found)
569 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
570 if (TEST_BIT (contains_stmt, n))
572 found = true;
573 break;
575 /* We have a nested region that contains a throwing statement.
576 See if resuming might lead up to the resx, or whether we get locally
577 caught sooner. If we get locally caught sooner, we either know
578 region R is not reachable, or it would have a direct edge
579 from the EH resx and thus we would have considered the region
580 reachable in the first place. */
581 if (found)
583 struct eh_region_d *i1 = i;
584 tree type_thrown = NULL_TREE;
586 if (i1->type == ERT_THROW)
588 type_thrown = i1->u.eh_throw.type;
589 i1 = i1->outer;
591 for (; i1 != r; i1 = i1->outer)
592 if (reachable_next_level (i1, type_thrown, NULL,
593 false) >= RNL_CAUGHT)
594 break;
595 if (i1 == r)
596 return true;
599 /* If there are sub-regions, process them. */
600 if (i->type != ERT_MUST_NOT_THROW && i->inner)
601 i = i->inner;
602 /* If there are peers, process them. */
603 else if (i->next_peer)
604 i = i->next_peer;
605 /* Otherwise, step back up the tree to the next peer. */
606 else
608 do
610 i = i->outer;
611 if (i == r)
612 return false;
614 while (i->next_peer == NULL);
615 i = i->next_peer;
620 /* Bring region R to the root of the tree. */
622 static void
623 bring_to_root (struct eh_region_d *r)
625 struct eh_region_d **pp;
626 struct eh_region_d *outer = r->outer;
627 if (!r->outer)
628 return;
629 for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
630 continue;
631 *pp = r->next_peer;
632 r->outer = NULL;
633 r->next_peer = cfun->eh->region_tree;
634 cfun->eh->region_tree = r;
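/* The loop above is the classic pointer-to-pointer unlink idiom. A
   self-contained sketch of the same pattern on a generic singly linked
   list (illustrative only; `struct node' is hypothetical):

	struct node { struct node *next; };

	static void
	unlink_node (struct node **head, struct node *r)
	{
	  struct node **pp;
	  for (pp = head; *pp != r; pp = &(*pp)->next)
	    continue;
	  *pp = r->next;
	}

   Because pp points at the link rather than the node, no special case is
   needed for removing the first element, exactly as with outer->inner
   above.  */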
637 /* Return true if region R2 can be replaced by R1. */
639 static bool
640 eh_region_replaceable_by_p (const struct eh_region_d *r1,
641 const struct eh_region_d *r2)
643 /* Regions are semantically the same if they are of the same type and
644 have the same label and type-dependent data. */
645 if (r1->type != r2->type)
646 return false;
647 if (r1->tree_label != r2->tree_label)
648 return false;
650 /* Verify that the region-type-dependent data are the same as well. */
651 switch (r1->type)
653 case ERT_MUST_NOT_THROW:
654 case ERT_CLEANUP:
655 break;
656 case ERT_TRY:
658 struct eh_region_d *c1, *c2;
659 for (c1 = r1->u.eh_try.eh_catch,
660 c2 = r2->u.eh_try.eh_catch;
661 c1 && c2;
662 c1 = c1->u.eh_catch.next_catch,
663 c2 = c2->u.eh_catch.next_catch)
664 if (!eh_region_replaceable_by_p (c1, c2))
665 return false;
666 if (c1 || c2)
667 return false;
669 break;
670 case ERT_CATCH:
671 if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
672 return false;
673 if (!list_equal_p (r1->u.eh_catch.filter_list,
674 r2->u.eh_catch.filter_list))
675 return false;
676 break;
677 case ERT_ALLOWED_EXCEPTIONS:
678 if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
679 return false;
680 if (r1->u.allowed.filter != r2->u.allowed.filter)
681 return false;
682 break;
683 case ERT_THROW:
684 if (r1->u.eh_throw.type != r2->u.eh_throw.type)
685 return false;
686 break;
687 default:
688 gcc_unreachable ();
690 if (dump_file && (dump_flags & TDF_DETAILS))
691 fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
692 r2->region_number);
693 return true;
696 /* Replace region R2 by R1. */
698 static void
699 replace_region (struct eh_region_d *r1, struct eh_region_d *r2)
701 struct eh_region_d *next1 = r1->u.eh_try.eh_catch;
702 struct eh_region_d *next2 = r2->u.eh_try.eh_catch;
703 bool is_try = r1->type == ERT_TRY;
705 gcc_assert (r1->type != ERT_CATCH);
706 remove_eh_handler_and_replace (r2, r1, false);
707 if (is_try)
709 while (next1)
711 r1 = next1;
712 r2 = next2;
713 gcc_assert (next1->type == ERT_CATCH);
714 gcc_assert (next2->type == ERT_CATCH);
715 next1 = next1->u.eh_catch.next_catch;
716 next2 = next2->u.eh_catch.next_catch;
717 remove_eh_handler_and_replace (r2, r1, false);
722 /* Return hash value of type list T. */
724 static hashval_t
725 hash_type_list (tree t)
727 hashval_t val = 0;
728 for (; t; t = TREE_CHAIN (t))
729 val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
730 return val;
733 /* Hash EH regions so that semantically equivalent regions get the same hash value. */
735 static hashval_t
736 hash_eh_region (const void *r)
738 const struct eh_region_d *region = (const struct eh_region_d *) r;
739 hashval_t val = region->type;
741 if (region->tree_label)
742 val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
743 switch (region->type)
745 case ERT_MUST_NOT_THROW:
746 case ERT_CLEANUP:
747 break;
748 case ERT_TRY:
750 struct eh_region_d *c;
751 for (c = region->u.eh_try.eh_catch;
752 c; c = c->u.eh_catch.next_catch)
753 val = iterative_hash_hashval_t (hash_eh_region (c), val);
755 break;
756 case ERT_CATCH:
757 val = iterative_hash_hashval_t (hash_type_list
758 (region->u.eh_catch.type_list), val);
759 break;
760 case ERT_ALLOWED_EXCEPTIONS:
761 val = iterative_hash_hashval_t
762 (hash_type_list (region->u.allowed.type_list), val);
763 val = iterative_hash_hashval_t (region->u.allowed.filter, val);
764 break;
765 case ERT_THROW:
766 val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
767 break;
768 default:
769 gcc_unreachable ();
771 return val;
774 /* Return true if regions R1 and R2 are equal. */
776 static int
777 eh_regions_equal_p (const void *r1, const void *r2)
779 return eh_region_replaceable_by_p ((const struct eh_region_d *) r1,
780 (const struct eh_region_d *) r2);
783 /* Walk all peers of REGION and try to merge those regions
784 that are semantically equivalent. Look into subregions
785 recursively too. */
787 static bool
788 merge_peers (struct eh_region_d *region)
790 struct eh_region_d *r1, *r2, *outer = NULL, *next;
791 bool merged = false;
792 int num_regions = 0;
793 if (region)
794 outer = region->outer;
795 else
796 return false;
798 /* First see if there is an inner region equivalent to the region
799 in question. EH control flow is acyclic, so we know we
800 can merge them. */
801 if (outer)
802 for (r1 = region; r1; r1 = next)
804 next = r1->next_peer;
805 if (r1->type == ERT_CATCH)
806 continue;
807 if (eh_region_replaceable_by_p (r1->outer, r1))
809 replace_region (r1->outer, r1);
810 merged = true;
812 else
813 num_regions ++;
816 /* Get new first region and try to match the peers
817 for equivalence. */
818 if (outer)
819 region = outer->inner;
820 else
821 region = cfun->eh->region_tree;
823 /* There are few regions to inspect:
824 an N^2 loop matching each region with each other region
825 will do the job well. */
826 if (num_regions < 10)
828 for (r1 = region; r1; r1 = r1->next_peer)
830 if (r1->type == ERT_CATCH)
831 continue;
832 for (r2 = r1->next_peer; r2; r2 = next)
834 next = r2->next_peer;
835 if (eh_region_replaceable_by_p (r1, r2))
837 replace_region (r1, r2);
838 merged = true;
843 /* Or use hashtable to avoid N^2 behaviour. */
844 else
846 htab_t hash;
847 hash = htab_create (num_regions, hash_eh_region,
848 eh_regions_equal_p, NULL);
849 for (r1 = region; r1; r1 = next)
851 void **slot;
853 next = r1->next_peer;
854 if (r1->type == ERT_CATCH)
855 continue;
856 slot = htab_find_slot (hash, r1, INSERT);
857 if (!*slot)
858 *slot = r1;
859 else
860 replace_region ((struct eh_region_d *) *slot, r1);
862 htab_delete (hash);
864 for (r1 = region; r1; r1 = r1->next_peer)
865 merged |= merge_peers (r1->inner);
866 return merged;
869 /* Remove all regions whose labels are not reachable.
870 REACHABLE is a bitmap of all regions that are used by the function;
871 CONTAINS_STMT is a bitmap of all regions that contain a statement (or NULL). */
873 void
874 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
876 int i;
877 struct eh_region_d *r;
878 VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
879 struct eh_region_d *local_must_not_throw = NULL;
880 struct eh_region_d *first_must_not_throw = NULL;
882 for (i = cfun->eh->last_region_number; i > 0; --i)
884 r = VEC_index (eh_region, cfun->eh->region_array, i);
885 if (!r || r->region_number != i)
886 continue;
887 if (!TEST_BIT (reachable, i) && !r->resume)
889 bool kill_it = true;
891 r->tree_label = NULL;
892 switch (r->type)
894 case ERT_THROW:
895 /* Don't remove ERT_THROW regions if their outer region
896 is reachable. */
897 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
898 kill_it = false;
899 break;
900 case ERT_MUST_NOT_THROW:
901 /* MUST_NOT_THROW regions are implementable solely in the
902 runtime, but we need them when inlining a function.
904 Keep them if the outer region is not MUST_NOT_THROW as well
905 and if they contain some statement that might unwind through
906 them. */
907 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
908 && (!contains_stmt
909 || can_be_reached_by_runtime (contains_stmt, r)))
910 kill_it = false;
911 break;
912 case ERT_TRY:
914 /* TRY regions are reachable if any of its CATCH regions
915 are reachable. */
916 struct eh_region_d *c;
917 for (c = r->u.eh_try.eh_catch; c;
918 c = c->u.eh_catch.next_catch)
919 if (TEST_BIT (reachable, c->region_number))
921 kill_it = false;
922 break;
924 break;
927 default:
928 break;
931 if (kill_it)
933 if (dump_file)
934 fprintf (dump_file, "Removing unreachable eh region %i\n",
935 r->region_number);
936 remove_eh_handler (r);
938 else if (r->type == ERT_MUST_NOT_THROW)
940 if (!first_must_not_throw)
941 first_must_not_throw = r;
942 VEC_safe_push (eh_region, heap, must_not_throws, r);
945 else
946 if (r->type == ERT_MUST_NOT_THROW)
948 if (!local_must_not_throw)
949 local_must_not_throw = r;
950 if (r->outer)
951 VEC_safe_push (eh_region, heap, must_not_throws, r);
955 /* MUST_NOT_THROW regions without a local handler are all the same; they
956 trigger a terminate call in the runtime.
957 MUST_NOT_THROW regions handled locally can differ in the debug info
958 associated with the std::terminate () call, or, if one is coming from Java
959 and the other from C++, in whether they call terminate or abort.
961 We merge all MUST_NOT_THROW regions handled by the runtime into one.
962 We also bring all local MUST_NOT_THROW regions to the roots of the EH tree
963 (since unwinding never continues to the outer region anyway).
964 If a MUST_NOT_THROW with a local handler is present in the tree, we use
965 that region to merge into, since it will remain in the tree anyway;
966 otherwise we use the first MUST_NOT_THROW.
968 Merging of locally handled regions needs changes to the CFG. Crossjumping
969 should take care of this, by looking at the actual code and
970 ensuring that the cleanup actions are really the same. */
972 if (local_must_not_throw)
973 first_must_not_throw = local_must_not_throw;
975 for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
977 if (!r->label && !r->tree_label && r != first_must_not_throw)
979 if (dump_file)
980 fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
981 r->region_number,
982 first_must_not_throw->region_number);
983 remove_eh_handler_and_replace (r, first_must_not_throw, false);
984 first_must_not_throw->may_contain_throw |= r->may_contain_throw;
986 else
987 bring_to_root (r);
989 merge_peers (cfun->eh->region_tree);
990 #ifdef ENABLE_CHECKING
991 verify_eh_tree (cfun);
992 #endif
993 VEC_free (eh_region, heap, must_not_throws);
996 /* Return an array mapping LABEL_DECL_UID to the region whose tree_label
997 is identical to that label. */
999 VEC (int, heap) *
1000 label_to_region_map (void)
1002 VEC (int, heap) * label_to_region = NULL;
1003 int i;
1004 int idx;
1006 VEC_safe_grow_cleared (int, heap, label_to_region,
1007 cfun->cfg->last_label_uid + 1);
1008 for (i = cfun->eh->last_region_number; i > 0; --i)
1010 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
1011 if (r && r->region_number == i
1012 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
1014 if ((idx = VEC_index (int, label_to_region,
1015 LABEL_DECL_UID (r->tree_label))) != 0)
1016 r->next_region_sharing_label =
1017 VEC_index (eh_region, cfun->eh->region_array, idx);
1018 else
1019 r->next_region_sharing_label = NULL;
1020 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
1021 i);
1024 return label_to_region;
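/* A worked example with hypothetical region numbers: suppose regions 7 and
   12 share tree_label L. The loop runs from high region numbers down, so
   region 12 is processed first and label_to_region[UID(L)] is set to 12;
   when region 7 is processed, it finds that entry, sets
   7->next_region_sharing_label to region 12, and the map ends up pointing
   at 7. All regions sharing a label can then be walked via
   get_next_region_sharing_label below.  */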
1027 /* Return number of EH regions. */
1028 int
1029 num_eh_regions (void)
1031 return cfun->eh->last_region_number + 1;
1034 /* Return next region sharing same label as REGION. */
1036 int
1037 get_next_region_sharing_label (int region)
1039 struct eh_region_d *r;
1040 if (!region)
1041 return 0;
1042 r = VEC_index (eh_region, cfun->eh->region_array, region);
1043 if (!r || !r->next_region_sharing_label)
1044 return 0;
1045 return r->next_region_sharing_label->region_number;
1048 /* Return a bitmap of all labels that are handlers of MUST_NOT_THROW regions. */
1050 bitmap
1051 must_not_throw_labels (void)
1053 struct eh_region_d *i;
1054 bitmap labels = BITMAP_ALLOC (NULL);
1056 i = cfun->eh->region_tree;
1057 if (! i)
1058 return labels;
1060 while (1)
1062 if (i->type == ERT_MUST_NOT_THROW && i->tree_label
1063 && LABEL_DECL_UID (i->tree_label) >= 0)
1064 bitmap_set_bit (labels, LABEL_DECL_UID (i->tree_label));
1066 /* If there are sub-regions, process them. */
1067 if (i->inner)
1068 i = i->inner;
1069 /* If there are peers, process them. */
1070 else if (i->next_peer)
1071 i = i->next_peer;
1072 /* Otherwise, step back up the tree to the next peer. */
1073 else
1075 do {
1076 i = i->outer;
1077 if (i == NULL)
1078 return labels;
1079 } while (i->next_peer == NULL);
1080 i = i->next_peer;
1085 /* Set up EH labels for RTL. */
1087 void
1088 convert_from_eh_region_ranges (void)
1090 int i, n = cfun->eh->last_region_number;
1092 /* Most of the work is already done at the tree level. All we need to
1093 do is collect the rtl labels that correspond to the tree labels
1094 we allocated earlier. */
1096 for (i = 1; i <= n; ++i)
1098 struct eh_region_d *region;
1100 region = VEC_index (eh_region, cfun->eh->region_array, i);
1101 if (region && region->tree_label)
1102 region->label = DECL_RTL_IF_SET (region->tree_label);
1106 void
1107 find_exception_handler_labels (void)
1109 int i;
1111 if (cfun->eh->region_tree == NULL)
1112 return;
1114 for (i = cfun->eh->last_region_number; i > 0; --i)
1116 struct eh_region_d *region;
1117 rtx lab;
1119 region = VEC_index (eh_region, cfun->eh->region_array, i);
1120 if (! region || region->region_number != i)
1121 continue;
1122 if (crtl->eh.built_landing_pads)
1123 lab = region->landing_pad;
1124 else
1125 lab = region->label;
1129 /* Returns true if the current function has exception handling regions. */
1131 bool
1132 current_function_has_exception_handlers (void)
1134 int i;
1136 for (i = cfun->eh->last_region_number; i > 0; --i)
1138 struct eh_region_d *region;
1140 region = VEC_index (eh_region, cfun->eh->region_array, i);
1141 if (region
1142 && region->region_number == i
1143 && region->type != ERT_THROW)
1144 return true;
1147 return false;
1150 /* A subroutine of duplicate_eh_regions. Search the region tree under O
1151 for the minimum and maximum region numbers. Update *MIN and *MAX. */
1153 static void
1154 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
1156 int i;
1158 if (o->aka)
1160 i = bitmap_first_set_bit (o->aka);
1161 if (i < *min)
1162 *min = i;
1163 i = bitmap_last_set_bit (o->aka);
1164 if (i > *max)
1165 *max = i;
1167 if (o->region_number < *min)
1168 *min = o->region_number;
1169 if (o->region_number > *max)
1170 *max = o->region_number;
1172 if (o->inner)
1174 o = o->inner;
1175 duplicate_eh_regions_0 (o, min, max);
1176 while (o->next_peer)
1178 o = o->next_peer;
1179 duplicate_eh_regions_0 (o, min, max);
1184 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
1185 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
1186 about the other internal pointers just yet, just the tree-like pointers. */
1188 static eh_region
1189 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
1191 eh_region ret, n;
1193 ret = n = GGC_NEW (struct eh_region_d);
1195 *n = *old;
1196 n->outer = outer;
1197 n->next_peer = NULL;
1198 if (old->aka)
1200 unsigned i;
1201 bitmap_iterator bi;
1202 n->aka = BITMAP_GGC_ALLOC ();
1204 EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
1206 bitmap_set_bit (n->aka, i + eh_offset);
1207 VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
1211 n->region_number += eh_offset;
1212 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1214 if (old->inner)
1216 old = old->inner;
1217 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
1218 while (old->next_peer)
1220 old = old->next_peer;
1221 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
1225 return ret;
1228 /* Look for the first outer region of R (or R itself) that is
1229 a TRY region. Return NULL if none. */
1231 static struct eh_region_d *
1232 find_prev_try (struct eh_region_d * r)
1234 for (; r && r->type != ERT_TRY; r = r->outer)
1235 if (r->type == ERT_MUST_NOT_THROW
1236 || (r->type == ERT_ALLOWED_EXCEPTIONS
1237 && !r->u.allowed.type_list))
1239 r = NULL;
1240 break;
1242 return r;
1245 /* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
1246 function and root the tree below OUTER_REGION. Remap labels using the MAP
1247 callback. The special case of a COPY_REGION of 0 means all regions. */
1249 int
1250 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
1251 void *data, int copy_region, int outer_region)
1253 eh_region cur, outer, *splice;
1254 int i, min_region, max_region, eh_offset, cfun_last_region_number;
1255 int num_regions;
1257 if (!ifun->eh)
1258 return 0;
1259 #ifdef ENABLE_CHECKING
1260 verify_eh_tree (ifun);
1261 #endif
1263 /* Find the range of region numbers to be copied. The interface we
1264 provide here mandates a single offset to find the new number from the old,
1265 which means we must look at the numbers present, instead of the
1266 count or something else. */
1267 if (copy_region > 0)
1269 min_region = INT_MAX;
1270 max_region = 0;
1272 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1273 duplicate_eh_regions_0 (cur, &min_region, &max_region);
1275 else
1277 min_region = 1;
1278 max_region = ifun->eh->last_region_number;
1280 num_regions = max_region - min_region + 1;
1281 cfun_last_region_number = cfun->eh->last_region_number;
1282 eh_offset = cfun_last_region_number + 1 - min_region;
1284 /* If we've not yet created a region array, do so now. */
1285 cfun->eh->last_region_number = cfun_last_region_number + num_regions;
1286 VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
1287 cfun->eh->last_region_number + 1);
1289 /* Locate the spot at which to insert the new tree. */
1290 if (outer_region > 0)
1292 outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
1293 if (outer)
1294 splice = &outer->inner;
1295 else
1296 splice = &cfun->eh->region_tree;
1298 else
1300 outer = NULL;
1301 splice = &cfun->eh->region_tree;
1303 while (*splice)
1304 splice = &(*splice)->next_peer;
1306 if (!ifun->eh->region_tree)
1308 if (outer)
1309 for (i = cfun_last_region_number + 1;
1310 i <= cfun->eh->last_region_number; i++)
1312 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1313 if (outer->aka == NULL)
1314 outer->aka = BITMAP_GGC_ALLOC ();
1315 bitmap_set_bit (outer->aka, i);
1317 return eh_offset;
1320 /* Copy all the regions in the subtree. */
1321 if (copy_region > 0)
1323 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1324 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
1326 else
1328 eh_region n;
1330 cur = ifun->eh->region_tree;
1331 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1332 while (cur->next_peer)
1334 cur = cur->next_peer;
1335 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1339 /* Remap all the labels in the new regions. */
1340 for (i = cfun_last_region_number + 1;
1341 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1342 if (cur && cur->tree_label)
1343 cur->tree_label = map (cur->tree_label, data);
1345 /* Remap all of the internal catch and cleanup linkages. Since we
1346 duplicate entire subtrees, all of the referenced regions will have
1347 been copied too. And since we renumbered them as a block, a simple
1348 bit of arithmetic finds us the index for the replacement region. */
1349 for (i = cfun_last_region_number + 1;
1350 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1352 /* All removed EH regions that were toplevel in the input function are now
1353 in the outer EH region of the output function. */
1354 if (cur == NULL)
1356 gcc_assert (VEC_index
1357 (eh_region, ifun->eh->region_array,
1358 i - eh_offset) == NULL);
1359 if (outer)
1361 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1362 if (outer->aka == NULL)
1363 outer->aka = BITMAP_GGC_ALLOC ();
1364 bitmap_set_bit (outer->aka, i);
1366 continue;
1368 if (i != cur->region_number)
1369 continue;
1371 #define REMAP(REG) \
1372 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1373 (REG)->region_number + eh_offset)
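/* A worked example of the REMAP arithmetic, with hypothetical numbers:
   if the source regions 5..9 were copied with eh_offset == 20, the copy of
   region 7 was stored at region_array index 27 by duplicate_eh_regions_1.
   A stale pointer in the copy still refers to the old region, whose
   region_number is 7, so REMAP rewrites it to region_array[7 + 20], i.e.
   the new copy.  */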
1375 switch (cur->type)
1377 case ERT_TRY:
1378 if (cur->u.eh_try.eh_catch)
1379 REMAP (cur->u.eh_try.eh_catch);
1380 if (cur->u.eh_try.last_catch)
1381 REMAP (cur->u.eh_try.last_catch);
1382 break;
1384 case ERT_CATCH:
1385 if (cur->u.eh_catch.next_catch)
1386 REMAP (cur->u.eh_catch.next_catch);
1387 if (cur->u.eh_catch.prev_catch)
1388 REMAP (cur->u.eh_catch.prev_catch);
1389 break;
1391 default:
1392 break;
1395 #undef REMAP
1397 #ifdef ENABLE_CHECKING
1398 verify_eh_tree (cfun);
1399 #endif
1401 return eh_offset;
1404 /* Return a new copy of EH region OLD inside region NEW_OUTER.
1405 Do not bother updating the rest of the tree. */
1407 static struct eh_region_d *
1408 copy_eh_region_1 (struct eh_region_d *old, struct eh_region_d *new_outer)
1410 struct eh_region_d *new_eh = gen_eh_region (old->type, new_outer);
1411 new_eh->u = old->u;
1412 new_eh->tree_label = old->tree_label;
1413 new_eh->may_contain_throw = old->may_contain_throw;
1414 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1415 cfun->eh->last_region_number + 1);
1416 VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number, new_eh);
1417 if (dump_file && (dump_flags & TDF_DETAILS))
1418 fprintf (dump_file, "Copying region %i to %i\n", old->region_number, new_eh->region_number);
1419 return new_eh;
1422 /* Return a new copy of EH region OLD inside region NEW_OUTER.
1424 Copy the whole catch-try chain if necessary. */
1426 static struct eh_region_d *
1427 copy_eh_region (struct eh_region_d *old, struct eh_region_d *new_outer)
1429 struct eh_region_d *r, *n, *old_try, *new_try, *ret = NULL;
1430 VEC(eh_region,heap) *catch_list = NULL;
1432 if (old->type != ERT_CATCH)
1434 gcc_assert (old->type != ERT_TRY);
1435 r = copy_eh_region_1 (old, new_outer);
1436 return r;
1439 /* Locate and copy corresponding TRY. */
1440 for (old_try = old->next_peer; old_try->type == ERT_CATCH; old_try = old_try->next_peer)
1441 continue;
1442 gcc_assert (old_try->type == ERT_TRY);
1443 new_try = gen_eh_region_try (new_outer);
1444 new_try->tree_label = old_try->tree_label;
1445 new_try->may_contain_throw = old_try->may_contain_throw;
1446 if (dump_file && (dump_flags & TDF_DETAILS))
1447 fprintf (dump_file, "Copying try-catch regions. Try: %i to %i\n",
1448 old_try->region_number, new_try->region_number);
1449 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1450 cfun->eh->last_region_number + 1);
1451 VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number, new_try);
1453 /* In order to keep the CATCH list in order, we need to copy it in reverse. */
1454 for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH; r = r->next_peer)
1455 VEC_safe_push (eh_region, heap, catch_list, r);
1457 while (VEC_length (eh_region, catch_list))
1459 r = VEC_pop (eh_region, catch_list);
1461 /* Duplicate CATCH. */
1462 n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
1463 n->tree_label = r->tree_label;
1464 n->may_contain_throw = r->may_contain_throw;
1465 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1466 cfun->eh->last_region_number + 1);
1467 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1468 n->tree_label = r->tree_label;
1470 if (dump_file && (dump_flags & TDF_DETAILS))
1471 fprintf (dump_file, "Copying try-catch regions. Catch: %i to %i\n",
1472 r->region_number, n->region_number);
1473 if (r == old)
1474 ret = n;
1476 VEC_free (eh_region, heap, catch_list);
1477 gcc_assert (ret);
1478 return ret;
1481 /* Callback for foreach_reachable_handler that pushes REGION into the VECtor DATA. */
1483 static void
1484 push_reachable_handler (struct eh_region_d *region, void *data)
1486 VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
1487 VEC_safe_push (eh_region, heap, *trace, region);
1490 /* Redirect EH edge E to NEW_DEST_LABEL.
1491 IS_RESX, INLINABLE_CALL and REGION_NUMBER match the parameters of
1492 foreach_reachable_handler. */
1494 struct eh_region_d *
1495 redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
1496 bool inlinable_call, int region_number)
1498 struct eh_region_d *outer;
1499 struct eh_region_d *region;
1500 VEC (eh_region, heap) * trace = NULL;
1501 int i;
1502 int start_here = -1;
1503 basic_block old_bb = e->dest;
1504 struct eh_region_d *old, *r = NULL;
1505 bool update_inplace = true;
1506 edge_iterator ei;
1507 edge e2;
1509 /* If there is only one EH edge, we don't need to duplicate;
1510 just update labels in the tree. */
1511 FOR_EACH_EDGE (e2, ei, old_bb->preds)
1512 if ((e2->flags & EDGE_EH) && e2 != e)
1514 update_inplace = false;
1515 break;
1518 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
1519 gcc_assert (region);
1521 foreach_reachable_handler (region_number, is_resx, inlinable_call,
1522 push_reachable_handler, &trace);
1523 if (dump_file && (dump_flags & TDF_DETAILS))
1525 dump_eh_tree (dump_file, cfun);
1526 fprintf (dump_file, "Trace: ");
1527 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1528 fprintf (dump_file, " %i", VEC_index (eh_region, trace, i)->region_number);
1529 fprintf (dump_file, " inplace: %i\n", update_inplace);
1532 if (update_inplace)
1534 /* In the easy case, just walk the trace and update all occurrences of the label. */
1535 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1537 r = VEC_index (eh_region, trace, i);
1538 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1540 r->tree_label = new_dest_label;
1541 if (dump_file && (dump_flags & TDF_DETAILS))
1542 fprintf (dump_file, "Updating label for region %i\n",
1543 r->region_number);
1546 r = region;
1548 else
1550 /* Now look for the outermost handler that refers to the basic block in question.
1551 We start our duplication there. */
1552 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1554 r = VEC_index (eh_region, trace, i);
1555 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1556 start_here = i;
1558 outer = VEC_index (eh_region, trace, start_here)->outer;
1559 gcc_assert (start_here >= 0);
1561 /* And now do the dirty job! */
1562 for (i = start_here; i >= 0; i--)
1564 old = VEC_index (eh_region, trace, i);
1565 gcc_assert (!outer || old->outer != outer->outer);
1567 /* Copy region and update label. */
1568 r = copy_eh_region (old, outer);
1569 VEC_replace (eh_region, trace, i, r);
1570 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1572 r->tree_label = new_dest_label;
1573 if (dump_file && (dump_flags & TDF_DETAILS))
1574 fprintf (dump_file, "Updating label for region %i\n",
1575 r->region_number);
1578 /* We got into copying a CATCH. copy_eh_region already did the job
1579 of copying all catch blocks corresponding to the try. Now
1580 we need to update labels in all of them and fix up the trace.
1582 We continue nesting into the TRY region corresponding to the CATCH:
1583 when duplicating an EH tree containing subregions of a CATCH,
1584 the CATCH region itself is never inserted into the trace, so we
1585 never get here anyway. */
1586 if (r->type == ERT_CATCH)
1588 /* Walk other catch regions we copied and update labels as needed. */
1589 for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
1590 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1592 r->tree_label = new_dest_label;
1593 if (dump_file && (dump_flags & TDF_DETAILS))
1594 fprintf (dump_file, "Updating label for region %i\n",
1595 r->region_number);
1597 gcc_assert (r->type == ERT_TRY);
1599 /* Skip sibling catch regions from the trace.
1600 They are already updated. */
1601 while (i > 0 && VEC_index (eh_region, trace, i - 1)->outer == old->outer)
1603 gcc_assert (VEC_index (eh_region, trace, i - 1)->type == ERT_CATCH);
1604 i--;
1608 outer = r;
1611 if (is_resx || region->type == ERT_THROW)
1612 r = copy_eh_region (region, outer);
1615 VEC_free (eh_region, heap, trace);
1616 if (dump_file && (dump_flags & TDF_DETAILS))
1618 dump_eh_tree (dump_file, cfun);
1619 fprintf (dump_file, "New region: %i\n", r->region_number);
1621 return r;
1624 /* Return the region number of the region that is outer to both
1625 REGION_A and REGION_B in IFUN. */
1627 int
1628 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1630 struct eh_region_d *rp_a, *rp_b;
1631 sbitmap b_outer;
1633 gcc_assert (ifun->eh->last_region_number > 0);
1634 gcc_assert (ifun->eh->region_tree);
1636 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1637 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1638 gcc_assert (rp_a != NULL);
1639 gcc_assert (rp_b != NULL);
1641 b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1642 sbitmap_zero (b_outer);
1644 do
1646 SET_BIT (b_outer, rp_b->region_number);
1647 rp_b = rp_b->outer;
1649 while (rp_b);
1651 do
1653 if (TEST_BIT (b_outer, rp_a->region_number))
1655 sbitmap_free (b_outer);
1656 return rp_a->region_number;
1658 rp_a = rp_a->outer;
1660 while (rp_a);
1662 sbitmap_free (b_outer);
1663 return -1;
1666 static int
1667 t2r_eq (const void *pentry, const void *pdata)
1669 const_tree const entry = (const_tree) pentry;
1670 const_tree const data = (const_tree) pdata;
1672 return TREE_PURPOSE (entry) == data;
1675 static hashval_t
1676 t2r_hash (const void *pentry)
1678 const_tree const entry = (const_tree) pentry;
1679 return TREE_HASH (TREE_PURPOSE (entry));
1682 void
1683 add_type_for_runtime (tree type)
1685 tree *slot;
1687 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1688 TREE_HASH (type), INSERT);
1689 if (*slot == NULL)
1691 tree runtime = (*lang_eh_runtime_type) (type);
1692 *slot = tree_cons (type, runtime, NULL_TREE);
1696 tree
1697 lookup_type_for_runtime (tree type)
1699 tree *slot;
1701 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1702 TREE_HASH (type), NO_INSERT);
1704 /* We should have always inserted the data earlier. */
1705 return TREE_VALUE (*slot);
1709 /* Represent an entry in @TTypes for either catch actions
1710 or exception filter actions. */
1711 struct GTY(()) ttypes_filter {
1712 tree t;
1713 int filter;
1716 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1717 (a tree) for a @TTypes type node we are thinking about adding. */
1719 static int
1720 ttypes_filter_eq (const void *pentry, const void *pdata)
1722 const struct ttypes_filter *const entry
1723 = (const struct ttypes_filter *) pentry;
1724 const_tree const data = (const_tree) pdata;
1726 return entry->t == data;
1729 static hashval_t
1730 ttypes_filter_hash (const void *pentry)
1732 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1733 return TREE_HASH (entry->t);
1736 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1737 exception specification list we are thinking about adding. */
1738 /* ??? Currently we use the type lists in the order given. Someone
1739 should put these in some canonical order. */
1741 static int
1742 ehspec_filter_eq (const void *pentry, const void *pdata)
1744 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1745 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1747 return type_list_equal (entry->t, data->t);
1750 /* Hash function for exception specification lists. */
1752 static hashval_t
1753 ehspec_filter_hash (const void *pentry)
1755 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1756 hashval_t h = 0;
1757 tree list;
1759 for (list = entry->t; list ; list = TREE_CHAIN (list))
1760 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1761 return h;
1764 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1765 to speed up the search. Return the filter value to be used. */
1767 static int
1768 add_ttypes_entry (htab_t ttypes_hash, tree type)
1770 struct ttypes_filter **slot, *n;
1772 slot = (struct ttypes_filter **)
1773 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1775 if ((n = *slot) == NULL)
1777 /* Filter value is a 1 based table index. */
1779 n = XNEW (struct ttypes_filter);
1780 n->t = type;
1781 n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
1782 *slot = n;
1784 VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
1787 return n->filter;
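/* A worked example: the first type interned here receives filter 1 and is
   stored at index 0 of crtl->eh.ttype_data, the second receives filter 2 at
   index 1, and so on; a NULL type (a catch-all handler) is interned the
   same way and also consumes a filter value.  */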
1790 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1791 to speed up the search. Return the filter value to be used. */
1793 static int
1794 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1796 struct ttypes_filter **slot, *n;
1797 struct ttypes_filter dummy;
1799 dummy.t = list;
1800 slot = (struct ttypes_filter **)
1801 htab_find_slot (ehspec_hash, &dummy, INSERT);
1803 if ((n = *slot) == NULL)
1805 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1807 n = XNEW (struct ttypes_filter);
1808 n->t = list;
1809 n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
1810 *slot = n;
1812 /* Generate a 0 terminated list of filter values. */
1813 for (; list ; list = TREE_CHAIN (list))
1815 if (targetm.arm_eabi_unwinder)
1816 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
1817 else
1819 /* Look up each type in the list and encode its filter
1820 value as a uleb128. */
1821 push_uleb128 (&crtl->eh.ehspec_data,
1822 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1825 if (targetm.arm_eabi_unwinder)
1826 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
1827 else
1828 VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
1831 return n->filter;
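/* A worked example for the non-ARM-EABI path, with hypothetical filters:
   for an exception specification throw (A, B) where A and B were assigned
   ttype filters 1 and 2, the bytes {1, 2, 0} are appended to ehspec_data,
   each filter uleb128-encoded (values below 128 fit in one byte). The
   value returned is minus the 1-based offset of the first byte, so -1 if
   the buffer was previously empty.  */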
1834 /* Generate the action filter values to be used for CATCH and
1835 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1836 we use lots of landing pads, and so every type or list can share
1837 the same filter value, which saves table space. */
1839 static void
1840 assign_filter_values (void)
1842 int i;
1843 htab_t ttypes, ehspec;
1845 crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
1846 if (targetm.arm_eabi_unwinder)
1847 VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1848 else
1849 VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1851 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1852 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1854 for (i = cfun->eh->last_region_number; i > 0; --i)
1856 struct eh_region_d *r;
1858 r = VEC_index (eh_region, cfun->eh->region_array, i);
1860 /* Mind we don't process a region more than once. */
1861 if (!r || r->region_number != i)
1862 continue;
1864 switch (r->type)
1866 case ERT_CATCH:
1867 /* Whatever type_list is (NULL or true list), we build a list
1868 of filters for the region. */
1869 r->u.eh_catch.filter_list = NULL_TREE;
1871 if (r->u.eh_catch.type_list != NULL)
1873 /* Get a filter value for each of the types caught and store
1874 them in the region's dedicated list. */
1875 tree tp_node = r->u.eh_catch.type_list;
1877 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1879 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1880 tree flt_node = build_int_cst (NULL_TREE, flt);
1882 r->u.eh_catch.filter_list
1883 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1886 else
1888 /* Get a filter value for the NULL list also since it will need
1889 an action record anyway. */
1890 int flt = add_ttypes_entry (ttypes, NULL);
1891 tree flt_node = build_int_cst (NULL_TREE, flt);
1893 r->u.eh_catch.filter_list
1894 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1897 break;
1899 case ERT_ALLOWED_EXCEPTIONS:
1900 r->u.allowed.filter
1901 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1902 break;
1904 default:
1905 break;
1909 htab_delete (ttypes);
1910 htab_delete (ehspec);
1913 /* Emit SEQ into the basic block just before INSN (which is assumed to be
1914 the first instruction of some existing BB) and return the newly
1915 produced block. */
1916 static basic_block
1917 emit_to_new_bb_before (rtx seq, rtx insn)
1919 rtx last;
1920 basic_block bb;
1921 edge e;
1922 edge_iterator ei;
1924 /* If there happens to be a fallthru edge (possibly created by a cleanup_cfg
1925 call), we don't want it to go into a newly created landing pad or other EH
1926 construct. */
1927 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1928 if (e->flags & EDGE_FALLTHRU)
1929 force_nonfallthru (e);
1930 else
1931 ei_next (&ei);
1932 last = emit_insn_before (seq, insn);
1933 if (BARRIER_P (last))
1934 last = PREV_INSN (last);
1935 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1936 update_bb_for_insn (bb);
1937 bb->flags |= BB_SUPERBLOCK;
1938 return bb;
1941 /* Generate the code to actually handle exceptions, which will follow the
1942 landing pads. */
1944 static void
1945 build_post_landing_pads (void)
1947 int i;
1949 for (i = cfun->eh->last_region_number; i > 0; --i)
1951 struct eh_region_d *region;
1952 rtx seq;
1954 region = VEC_index (eh_region, cfun->eh->region_array, i);
1955 /* Mind we don't process a region more than once. */
1956 if (!region || region->region_number != i)
1957 continue;
1959 switch (region->type)
1961 case ERT_TRY:
1962 /* It is possible that a TRY region is kept alive only because some of
1963 the contained catch regions still have a RESX instruction, but they
1964 are reached via their copies. In this case we need to do nothing. */
1965 if (!region->u.eh_try.eh_catch->label)
1966 break;
1968 /* ??? Collect the set of all non-overlapping catch handlers
1969 all the way up the chain until blocked by a cleanup. */
1970 /* ??? Outer try regions can share landing pads with inner
1971 try regions if the types are completely non-overlapping,
1972 and there are no intervening cleanups. */
1974 region->post_landing_pad = gen_label_rtx ();
1976 start_sequence ();
1978 emit_label (region->post_landing_pad);
1980 /* ??? It is mighty inconvenient to call back into the
1981 switch statement generation code in expand_end_case.
1982 Rapid prototyping sez a sequence of ifs. */
1984 struct eh_region_d *c;
1985 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1987 if (c->u.eh_catch.type_list == NULL)
1988 emit_jump (c->label);
1989 else
1991 /* We need one cmp/jump per type caught. Each type
1992 list entry has a matching entry in the filter list
1993 (see assign_filter_values). */
1994 tree tp_node = c->u.eh_catch.type_list;
1995 tree flt_node = c->u.eh_catch.filter_list;
1997 for (; tp_node; )
1999 emit_cmp_and_jump_insns
2000 (crtl->eh.filter,
2001 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
2002 EQ, NULL_RTX,
2003 targetm.eh_return_filter_mode (), 0, c->label);
2005 tp_node = TREE_CHAIN (tp_node);
2006 flt_node = TREE_CHAIN (flt_node);
2012 /* We delay the generation of the _Unwind_Resume until we generate
2013 landing pads. We emit a marker here so as to get good control
2014 flow data in the meantime. */
2015 region->resume
2016 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2017 emit_barrier ();
2019 seq = get_insns ();
2020 end_sequence ();
2022 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
2024 break;
2026 case ERT_ALLOWED_EXCEPTIONS:
2027 if (!region->label)
2028 break;
2029 region->post_landing_pad = gen_label_rtx ();
2031 start_sequence ();
2033 emit_label (region->post_landing_pad);
2035 emit_cmp_and_jump_insns (crtl->eh.filter,
2036 GEN_INT (region->u.allowed.filter),
2037 EQ, NULL_RTX,
2038 targetm.eh_return_filter_mode (), 0, region->label);
2040 /* We delay the generation of the _Unwind_Resume until we generate
2041 landing pads. We emit a marker here so as to get good control
2042 flow data in the meantime. */
2043 region->resume
2044 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2045 emit_barrier ();
2047 seq = get_insns ();
2048 end_sequence ();
2050 emit_to_new_bb_before (seq, region->label);
2051 break;
2053 case ERT_CLEANUP:
2054 case ERT_MUST_NOT_THROW:
2055 region->post_landing_pad = region->label;
2056 break;
2058 case ERT_CATCH:
2059 case ERT_THROW:
2060 /* Nothing to do. */
2061 break;
2063 default:
2064 gcc_unreachable ();
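/* To illustrate the above (a sketch, not emitted verbatim): for a try
   region with two catch handlers whose filter values are F1 and F2
   (see assign_filter_values), the post-landing-pad code amounts to

     post_landing_pad:
       if (crtl->eh.filter == F1) goto catch1_label;
       if (crtl->eh.filter == F2) goto catch2_label;
       RESX;  -- marker, resolved later by connect_post_landing_pads

   A catch-all handler contributes an unconditional jump instead.  */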
2069 /* Replace RESX patterns with jumps to the next handler if any, or calls to
2070 _Unwind_Resume otherwise. */
2072 static void
2073 connect_post_landing_pads (void)
2075 int i;
2077 for (i = cfun->eh->last_region_number; i > 0; --i)
2079 struct eh_region_d *region;
2080 struct eh_region_d *outer;
2081 rtx seq;
2082 rtx barrier;
2084 region = VEC_index (eh_region, cfun->eh->region_array, i);
2085 /* Mind we don't process a region more than once. */
2086 if (!region || region->region_number != i)
2087 continue;
2089 /* If there is no RESX, or it has been deleted by flow, there's
2090 nothing to fix up. */
2091 if (! region->resume || INSN_DELETED_P (region->resume))
2092 continue;
2094 /* Search for another landing pad in this function. */
2095 for (outer = region->outer; outer ; outer = outer->outer)
2096 if (outer->post_landing_pad)
2097 break;
2099 start_sequence ();
2101 if (outer)
2103 edge e;
2104 basic_block src, dest;
2106 emit_jump (outer->post_landing_pad);
2107 src = BLOCK_FOR_INSN (region->resume);
2108 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
2109 while (EDGE_COUNT (src->succs) > 0)
2110 remove_edge (EDGE_SUCC (src, 0));
2111 e = make_edge (src, dest, 0);
2112 e->probability = REG_BR_PROB_BASE;
2113 e->count = src->count;
2115 else
2117 emit_library_call (unwind_resume_libfunc, LCT_THROW,
2118 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
2120 /* What we just emitted was a throwing libcall, so it got a
2121 barrier automatically added after it. If the last insn in
2122 the libcall sequence isn't the barrier, it's because the
2123 target emits multiple insns for a call, and there are insns
2124 after the actual call insn (which are redundant and would be
2125 optimized away). The barrier is inserted exactly after the
2126 call insn, so let's go get that and delete the insns after
2127 it, because below we need the barrier to be the last insn in
2128 the sequence. */
2129 delete_insns_since (NEXT_INSN (last_call_insn ()));
2132 seq = get_insns ();
2133 end_sequence ();
2134 barrier = emit_insn_before (seq, region->resume);
2135 /* Avoid duplicate barrier. */
2136 gcc_assert (BARRIER_P (barrier));
2137 delete_insn (barrier);
2138 delete_insn (region->resume);
2140 /* ??? From tree-ssa we can wind up with catch regions whose
2141 label is not instantiated, but whose resx is present. Now
2142 that we've dealt with the resx, kill the region. */
2143 if (region->label == NULL && region->type == ERT_CLEANUP)
2144 remove_eh_handler (region);
2149 static void
2150 dw2_build_landing_pads (void)
2152 int i;
2154 for (i = cfun->eh->last_region_number; i > 0; --i)
2156 struct eh_region_d *region;
2157 rtx seq;
2158 basic_block bb;
2159 edge e;
2161 region = VEC_index (eh_region, cfun->eh->region_array, i);
2162 /* Mind we don't process a region more than once. */
2163 if (!region || region->region_number != i)
2164 continue;
2166 if (region->type != ERT_CLEANUP
2167 && region->type != ERT_TRY
2168 && region->type != ERT_ALLOWED_EXCEPTIONS)
2169 continue;
2171 if (!region->post_landing_pad)
2172 continue;
2174 start_sequence ();
2176 region->landing_pad = gen_label_rtx ();
2177 emit_label (region->landing_pad);
2179 #ifdef HAVE_exception_receiver
2180 if (HAVE_exception_receiver)
2181 emit_insn (gen_exception_receiver ());
2182 else
2183 #endif
2184 #ifdef HAVE_nonlocal_goto_receiver
2185 if (HAVE_nonlocal_goto_receiver)
2186 emit_insn (gen_nonlocal_goto_receiver ());
2187 else
2188 #endif
2189 { /* Nothing */ }
2191 emit_move_insn (crtl->eh.exc_ptr,
2192 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
2193 emit_move_insn (crtl->eh.filter,
2194 gen_rtx_REG (targetm.eh_return_filter_mode (),
2195 EH_RETURN_DATA_REGNO (1)));
2197 seq = get_insns ();
2198 end_sequence ();
2200 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
2201 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2202 e->count = bb->count;
2203 e->probability = REG_BR_PROB_BASE;
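/* A sketch of what each dwarf2 landing pad looks like after this pass,
   assuming the target provides neither exception_receiver nor
   nonlocal_goto_receiver patterns:

     landing_pad:
       crtl->eh.exc_ptr = reg:EH_RETURN_DATA_REGNO (0);  -- exception object
       crtl->eh.filter  = reg:EH_RETURN_DATA_REGNO (1);  -- type filter
       -- fallthru into the post-landing-pad dispatch code

   The unwinder deposits both values in the EH return data registers
   before transferring control here.  */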
2208 struct sjlj_lp_info
2210 int directly_reachable;
2211 int action_index;
2212 int dispatch_index;
2213 int call_site_index;
2216 static bool
2217 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
2219 rtx insn;
2220 bool found_one = false;
2222 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2224 struct eh_region_d *region;
2225 enum reachable_code rc;
2226 tree type_thrown;
2227 rtx note;
2229 if (! INSN_P (insn))
2230 continue;
2232 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2233 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2234 continue;
2236 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2237 if (!region)
2238 continue;
2240 type_thrown = NULL_TREE;
2241 if (region->type == ERT_THROW)
2243 type_thrown = region->u.eh_throw.type;
2244 region = region->outer;
2247 /* Find the first containing region that might handle the exception.
2248 That's the landing pad to which we will transfer control. */
2249 rc = RNL_NOT_CAUGHT;
2250 for (; region; region = region->outer)
2252 rc = reachable_next_level (region, type_thrown, NULL, false);
2253 if (rc != RNL_NOT_CAUGHT)
2254 break;
2256 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2258 lp_info[region->region_number].directly_reachable = 1;
2259 found_one = true;
2263 return found_one;
2266 static void
2267 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2269 htab_t ar_hash;
2270 int i, index;
2272 /* First task: build the action table. */
2274 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
2275 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2277 for (i = cfun->eh->last_region_number; i > 0; --i)
2278 if (lp_info[i].directly_reachable)
2280 struct eh_region_d *r =
2281 VEC_index (eh_region, cfun->eh->region_array, i);
2283 r->landing_pad = dispatch_label;
2284 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2285 if (lp_info[i].action_index != -1)
2286 crtl->uses_eh_lsda = 1;
2289 htab_delete (ar_hash);
2291 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2292 landing pad label for the region. For sjlj though, there is one
2293 common landing pad from which we dispatch to the post-landing pads.
2295 A region receives a dispatch index if it is directly reachable
2296 and requires in-function processing. Regions that share post-landing
2297 pads may share dispatch indices. */
2298 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2299 (see build_post_landing_pads) so we don't bother checking for it. */
2301 index = 0;
2302 for (i = cfun->eh->last_region_number; i > 0; --i)
2303 if (lp_info[i].directly_reachable)
2304 lp_info[i].dispatch_index = index++;
2306 /* Finally: assign call-site values. In dwarf2 terms, this would be
2307 the region number assigned by convert_to_eh_region_ranges, but
2308 handles no-action and must-not-throw differently. */
2310 call_site_base = 1;
2311 for (i = cfun->eh->last_region_number; i > 0; --i)
2312 if (lp_info[i].directly_reachable)
2314 int action = lp_info[i].action_index;
2316 /* Map must-not-throw to otherwise unused call-site index 0. */
2317 if (action == -2)
2318 index = 0;
2319 /* Map no-action to otherwise unused call-site index -1. */
2320 else if (action == -1)
2321 index = -1;
2322 /* Otherwise, look it up in the table. */
2323 else
2324 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2326 lp_info[i].call_site_index = index;
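/* A worked example of the mapping above: three directly reachable
   regions whose action chains came back as -2, -1 and some ordinary
   action offset would receive call-site indices 0, -1 and a fresh
   value from add_call_site respectively (1 for the first table entry,
   since call_site_base starts at 1 here).  */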
2330 static void
2331 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2333 int last_call_site = -2;
2334 rtx insn, mem;
2336 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2338 struct eh_region_d *region;
2339 int this_call_site;
2340 rtx note, before, p;
2342 /* Reset value tracking at extended basic block boundaries. */
2343 if (LABEL_P (insn))
2344 last_call_site = -2;
2346 if (! INSN_P (insn))
2347 continue;
2349 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2351 /* Calls that are known to not throw need not be marked. */
2352 if (note && INTVAL (XEXP (note, 0)) <= 0)
2353 continue;
2355 if (note)
2356 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2357 else
2358 region = NULL;
2360 if (!region)
2362 /* Calls (and trapping insns) without notes are outside any
2363 exception handling region in this function. Mark them as
2364 no action. */
2365 if (CALL_P (insn)
2366 || (flag_non_call_exceptions
2367 && may_trap_p (PATTERN (insn))))
2368 this_call_site = -1;
2369 else
2370 continue;
2372 else
2373 this_call_site = lp_info[region->region_number].call_site_index;
2375 if (this_call_site == last_call_site)
2376 continue;
2378 /* Don't separate a call from its argument loads. */
2379 before = insn;
2380 if (CALL_P (insn))
2381 before = find_first_parameter_load (insn, NULL_RTX);
2383 start_sequence ();
2384 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2385 sjlj_fc_call_site_ofs);
2386 emit_move_insn (mem, GEN_INT (this_call_site));
2387 p = get_insns ();
2388 end_sequence ();
2390 emit_insn_before (p, before);
2391 last_call_site = this_call_site;
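/* Conceptually, the loop above rewrites an insn stream such as

     call foo   -- region with call-site index 2
     call bar   -- same region, call-site index 2
     call baz   -- no region, call-site index -1

   into

     fc.call_site = 2;  call foo;  call bar;
     fc.call_site = -1; call baz;

   where fc is the SjLj function context; the LAST_CALL_SITE tracking
   suppresses redundant stores within an extended basic block.  */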
2395 /* Construct the SjLj_Function_Context. */
2397 static void
2398 sjlj_emit_function_enter (rtx dispatch_label)
2400 rtx fn_begin, fc, mem, seq;
2401 bool fn_begin_outside_block;
2403 fc = crtl->eh.sjlj_fc;
2405 start_sequence ();
2407 /* We're storing this libcall's address into memory instead of
2408 calling it directly. Thus, we must call assemble_external_libcall
2409 here, as we cannot depend on emit_library_call to do it for us. */
2410 assemble_external_libcall (eh_personality_libfunc);
2411 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2412 emit_move_insn (mem, eh_personality_libfunc);
2414 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2415 if (crtl->uses_eh_lsda)
2417 char buf[20];
2418 rtx sym;
2420 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2421 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2422 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2423 emit_move_insn (mem, sym);
2425 else
2426 emit_move_insn (mem, const0_rtx);
2428 #ifdef DONT_USE_BUILTIN_SETJMP
2430 rtx x;
2431 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2432 TYPE_MODE (integer_type_node), 1,
2433 plus_constant (XEXP (fc, 0),
2434 sjlj_fc_jbuf_ofs), Pmode);
2436 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2437 TYPE_MODE (integer_type_node), 0, dispatch_label);
2438 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2440 #else
2441 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2442 dispatch_label);
2443 #endif
2445 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2446 1, XEXP (fc, 0), Pmode);
2448 seq = get_insns ();
2449 end_sequence ();
2451 /* ??? Instead of doing this at the beginning of the function,
2452 do this in a block that is at loop level 0 and dominates all
2453 can_throw_internal instructions. */
2455 fn_begin_outside_block = true;
2456 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2457 if (NOTE_P (fn_begin))
2459 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2460 break;
2461 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2462 fn_begin_outside_block = false;
2465 if (fn_begin_outside_block)
2466 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2467 else
2468 emit_insn_after (seq, fn_begin);
2471 /* Call back from expand_function_end to know where we should put
2472 the call to unwind_sjlj_unregister_libfunc if needed. */
2474 void
2475 sjlj_emit_function_exit_after (rtx after)
2477 crtl->eh.sjlj_exit_after = after;
2480 static void
2481 sjlj_emit_function_exit (void)
2483 rtx seq, insn;
2485 start_sequence ();
2487 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2488 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2490 seq = get_insns ();
2491 end_sequence ();
2493 /* ??? Really this can be done in any block at loop level 0 that
2494 post-dominates all can_throw_internal instructions. This is
2495 the last possible moment. */
2497 insn = crtl->eh.sjlj_exit_after;
2498 if (LABEL_P (insn))
2499 insn = NEXT_INSN (insn);
2501 emit_insn_after (seq, insn);
2504 static void
2505 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2507 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2508 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2509 int i, first_reachable;
2510 rtx mem, dispatch, seq, fc;
2511 rtx before;
2512 basic_block bb;
2513 edge e;
2515 fc = crtl->eh.sjlj_fc;
2517 start_sequence ();
2519 emit_label (dispatch_label);
2521 #ifndef DONT_USE_BUILTIN_SETJMP
2522 expand_builtin_setjmp_receiver (dispatch_label);
2523 #endif
2525 /* Load up dispatch index, exc_ptr and filter values from the
2526 function context. */
2527 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2528 sjlj_fc_call_site_ofs);
2529 dispatch = copy_to_reg (mem);
2531 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2532 if (unwind_word_mode != ptr_mode)
2534 #ifdef POINTERS_EXTEND_UNSIGNED
2535 mem = convert_memory_address (ptr_mode, mem);
2536 #else
2537 mem = convert_to_mode (ptr_mode, mem, 0);
2538 #endif
2540 emit_move_insn (crtl->eh.exc_ptr, mem);
2542 mem = adjust_address (fc, unwind_word_mode,
2543 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2544 if (unwind_word_mode != filter_mode)
2545 mem = convert_to_mode (filter_mode, mem, 0);
2546 emit_move_insn (crtl->eh.filter, mem);
2548 /* Jump to one of the directly reachable regions. */
2549 /* ??? This really ought to be using a switch statement. */
2551 first_reachable = 0;
2552 for (i = cfun->eh->last_region_number; i > 0; --i)
2554 if (! lp_info[i].directly_reachable)
2555 continue;
2557 if (! first_reachable)
2559 first_reachable = i;
2560 continue;
2563 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2564 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2565 (((struct eh_region_d *)
2566 VEC_index (eh_region,
2567 cfun->eh->region_array, i))
2568 ->post_landing_pad));
2571 seq = get_insns ();
2572 end_sequence ();
2574 before = (((struct eh_region_d *)
2575 VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2576 ->post_landing_pad);
2578 bb = emit_to_new_bb_before (seq, before);
2579 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2580 e->count = bb->count;
2581 e->probability = REG_BR_PROB_BASE;
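/* The dispatch sequence emitted above boils down to (sketch only):

     dispatch_label:
       -- setjmp receiver, unless DONT_USE_BUILTIN_SETJMP
       dispatch = fc.call_site;
       exc_ptr  = fc.data[0];
       filter   = fc.data[1];
       if (dispatch == D_i) goto post_landing_pad_i;   -- one per region
       ...
       -- fallthru to the first reachable region's post-landing pad

   The first reachable region needs no explicit compare because the
   sequence is placed immediately before its post-landing pad.  */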
2584 static void
2585 sjlj_build_landing_pads (void)
2587 struct sjlj_lp_info *lp_info;
2589 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2591 if (sjlj_find_directly_reachable_regions (lp_info))
2593 rtx dispatch_label = gen_label_rtx ();
2594 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2595 TYPE_MODE (sjlj_fc_type_node),
2596 TYPE_ALIGN (sjlj_fc_type_node));
2597 crtl->eh.sjlj_fc
2598 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2599 int_size_in_bytes (sjlj_fc_type_node),
2600 align);
2602 sjlj_assign_call_site_values (dispatch_label, lp_info);
2603 sjlj_mark_call_sites (lp_info);
2605 sjlj_emit_function_enter (dispatch_label);
2606 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2607 sjlj_emit_function_exit ();
2610 free (lp_info);
2613 /* After initial rtl generation, call back to finish generating
2614 exception support code. */
2616 static void
2617 finish_eh_generation (void)
2619 basic_block bb;
2621 /* Nothing to do if no regions created. */
2622 if (cfun->eh->region_tree == NULL)
2623 return;
2625 /* The object here is to provide detailed information (via
2626 reachable_handlers) on how exception control flows within the
2627 function for the CFG construction. In this first pass, we can
2628 include type information garnered from ERT_THROW and
2629 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2630 in deleting unreachable handlers. Subsequently, we will generate
2631 landing pads which will connect many of the handlers, and then
2632 type information will not be effective. Still, this is a win
2633 over previous implementations. */
2635 /* These registers are used by the landing pads. Make sure they
2636 have been generated. */
2637 get_exception_pointer ();
2638 get_exception_filter ();
2640 /* Construct the landing pads. */
2642 assign_filter_values ();
2643 build_post_landing_pads ();
2644 connect_post_landing_pads ();
2645 if (USING_SJLJ_EXCEPTIONS)
2646 sjlj_build_landing_pads ();
2647 else
2648 dw2_build_landing_pads ();
2650 crtl->eh.built_landing_pads = 1;
2652 /* We've totally changed the CFG. Start over. */
2653 find_exception_handler_labels ();
2654 break_superblocks ();
2655 if (USING_SJLJ_EXCEPTIONS
2656 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2657 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2658 commit_edge_insertions ();
2659 FOR_EACH_BB (bb)
2661 edge e;
2662 edge_iterator ei;
2663 bool eh = false;
2664 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2666 if (e->flags & EDGE_EH)
2668 remove_edge (e);
2669 eh = true;
2671 else
2672 ei_next (&ei);
2674 if (eh)
2675 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2679 /* This section handles removing dead code for flow. */
2681 /* Splice REGION from the region tree and replace it by REPLACE etc.
2682 When UPDATE_CATCH_TRY is true, also update the links from catch
2683 regions to their containing try region. */
2685 static void
2686 remove_eh_handler_and_replace (struct eh_region_d *region,
2687 struct eh_region_d *replace,
2688 bool update_catch_try)
2690 struct eh_region_d **pp, **pp_start, *p, *outer, *inner;
2691 rtx lab;
2693 outer = region->outer;
2695 /* For the benefit of efficiently handling REG_EH_REGION notes,
2696 replace this region in the region array with its containing
2697 region. Note that previous region deletions may result in
2698 multiple copies of this region in the array, so we have a
2699 list of alternate numbers by which we are known. */
2701 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2702 replace);
2703 if (region->aka)
2705 unsigned i;
2706 bitmap_iterator bi;
2708 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2710 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2714 if (replace)
2716 if (!replace->aka)
2717 replace->aka = BITMAP_GGC_ALLOC ();
2718 if (region->aka)
2719 bitmap_ior_into (replace->aka, region->aka);
2720 bitmap_set_bit (replace->aka, region->region_number);
2723 if (crtl->eh.built_landing_pads)
2724 lab = region->landing_pad;
2725 else
2726 lab = region->label;
2727 if (outer)
2728 pp_start = &outer->inner;
2729 else
2730 pp_start = &cfun->eh->region_tree;
2731 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2732 continue;
2733 *pp = region->next_peer;
2735 if (replace)
2736 pp_start = &replace->inner;
2737 else
2738 pp_start = &cfun->eh->region_tree;
2739 inner = region->inner;
2740 if (inner)
2742 for (p = inner; p->next_peer ; p = p->next_peer)
2743 p->outer = replace;
2744 p->outer = replace;
2746 p->next_peer = *pp_start;
2747 *pp_start = inner;
2750 if (region->type == ERT_CATCH
2751 && update_catch_try)
2753 struct eh_region_d *eh_try, *next, *prev;
2755 for (eh_try = region->next_peer;
2756 eh_try->type == ERT_CATCH;
2757 eh_try = eh_try->next_peer)
2758 continue;
2759 gcc_assert (eh_try->type == ERT_TRY);
2761 next = region->u.eh_catch.next_catch;
2762 prev = region->u.eh_catch.prev_catch;
2764 if (next)
2765 next->u.eh_catch.prev_catch = prev;
2766 else
2767 eh_try->u.eh_try.last_catch = prev;
2768 if (prev)
2769 prev->u.eh_catch.next_catch = next;
2770 else
2772 eh_try->u.eh_try.eh_catch = next;
2773 if (! next)
2774 remove_eh_handler (eh_try);
2779 /* Splice REGION from the region tree and replace it by the outer region
2780 etc. */
2782 static void
2783 remove_eh_handler (struct eh_region_d *region)
2785 remove_eh_handler_and_replace (region, region->outer, true);
2788 /* Remove EH region R that has turned out to have no code in its handler. */
2790 void
2791 remove_eh_region (int r)
2793 struct eh_region_d *region;
2795 region = VEC_index (eh_region, cfun->eh->region_array, r);
2796 remove_eh_handler (region);
2799 /* Remove EH region R that has turned out to have no code in its handler,
2800 and replace it by the outer region of R2. */
2802 void
2803 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2805 struct eh_region_d *region, *region2;
2807 region = VEC_index (eh_region, cfun->eh->region_array, r);
2808 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2809 remove_eh_handler_and_replace (region, region2->outer, true);
2812 /* Invokes CALLBACK for every exception handler label. Only used by old
2813 loop hackery; should not be used by new code. */
2815 void
2816 for_each_eh_label (void (*callback) (rtx))
2818 int i;
2819 for (i = 0; i < cfun->eh->last_region_number; i++)
2821 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
2822 if (r && r->region_number == i && r->label
2823 && LABEL_P (r->label))
2824 (*callback) (r->label);
2828 /* Invoke CALLBACK for every exception region in the current function. */
2830 void
2831 for_each_eh_region (void (*callback) (struct eh_region_d *))
2833 int i, n = cfun->eh->last_region_number;
2834 for (i = 1; i <= n; ++i)
2836 struct eh_region_d *region;
2838 region = VEC_index (eh_region, cfun->eh->region_array, i);
2839 if (region)
2840 (*callback) (region);
2844 /* This section describes CFG exception edges for flow. */
2846 /* For communicating between calls to reachable_next_level. */
2847 struct reachable_info
2849 tree types_caught;
2850 tree types_allowed;
2851 void (*callback) (struct eh_region_d *, void *);
2852 void *callback_data;
2855 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2856 base class of TYPE, is in HANDLED. */
2858 static int
2859 check_handled (tree handled, tree type)
2861 tree t;
2863 /* We can check for exact matches without front-end help. */
2864 if (! lang_eh_type_covers)
2866 for (t = handled; t ; t = TREE_CHAIN (t))
2867 if (TREE_VALUE (t) == type)
2868 return 1;
2870 else
2872 for (t = handled; t ; t = TREE_CHAIN (t))
2873 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2874 return 1;
2877 return 0;
2880 /* A subroutine of reachable_next_level. If we are collecting a list
2881 of handlers, add one. After landing pad generation, reference
2882 it instead of the handlers themselves. Further, the handlers are
2883 all wired together, so by referencing one, we've got them all.
2884 Before landing pad generation we reference each handler individually.
2886 LP_REGION contains the landing pad; REGION is the handler. */
2888 static void
2889 add_reachable_handler (struct reachable_info *info,
2890 struct eh_region_d *lp_region,
2891 struct eh_region_d *region)
2893 if (! info)
2894 return;
2896 if (crtl->eh.built_landing_pads)
2897 info->callback (lp_region, info->callback_data);
2898 else
2899 info->callback (region, info->callback_data);
2902 /* Process one level of exception regions for reachability.
2903 If TYPE_THROWN is non-null, then it is the *exact* type being
2904 propagated. If INFO is non-null, then collect handler labels
2905 and caught/allowed type information between invocations. */
2907 static enum reachable_code
2908 reachable_next_level (struct eh_region_d *region, tree type_thrown,
2909 struct reachable_info *info,
2910 bool maybe_resx)
2912 switch (region->type)
2914 case ERT_CLEANUP:
2915 /* Before landing-pad generation, we model control flow
2916 directly to the individual handlers. In this way we can
2917 see that catch handler types may shadow one another. */
2918 add_reachable_handler (info, region, region);
2919 return RNL_MAYBE_CAUGHT;
2921 case ERT_TRY:
2923 struct eh_region_d *c;
2924 enum reachable_code ret = RNL_NOT_CAUGHT;
2926 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2928 /* A catch-all handler ends the search. */
2929 if (c->u.eh_catch.type_list == NULL)
2931 add_reachable_handler (info, region, c);
2932 return RNL_CAUGHT;
2935 if (type_thrown)
2937 /* If we have at least one type match, end the search. */
2938 tree tp_node = c->u.eh_catch.type_list;
2940 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2942 tree type = TREE_VALUE (tp_node);
2944 if (type == type_thrown
2945 || (lang_eh_type_covers
2946 && (*lang_eh_type_covers) (type, type_thrown)))
2948 add_reachable_handler (info, region, c);
2949 return RNL_CAUGHT;
2953 /* If we have definitive information about a match failure,
2954 the catch won't trigger. */
2955 if (lang_eh_type_covers)
2956 return RNL_NOT_CAUGHT;
2959 /* At this point, we either don't know what type is thrown or
2960 don't have front-end assistance to help deciding if it is
2961 covered by one of the types in the list for this region.
2963 We'd then like to add this region to the list of reachable
2964 handlers since it is indeed potentially reachable based on the
2965 information we have.
2967 Actually, this handler is for sure not reachable if all the
2968 types it matches have already been caught. That is, it is only
2969 potentially reachable if at least one of the types it catches
2970 has not been previously caught. */
2972 if (! info)
2973 ret = RNL_MAYBE_CAUGHT;
2974 else
2976 tree tp_node = c->u.eh_catch.type_list;
2977 bool maybe_reachable = false;
2979 /* Compute the potential reachability of this handler and
2980 update the list of types caught at the same time. */
2981 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2983 tree type = TREE_VALUE (tp_node);
2985 if (! check_handled (info->types_caught, type))
2987 info->types_caught
2988 = tree_cons (NULL, type, info->types_caught);
2990 maybe_reachable = true;
2994 if (maybe_reachable)
2996 add_reachable_handler (info, region, c);
2998 /* ??? If the catch type is a base class of every allowed
2999 type, then we know we can stop the search. */
3000 ret = RNL_MAYBE_CAUGHT;
3005 return ret;
3008 case ERT_ALLOWED_EXCEPTIONS:
3009 /* An empty list of types definitely ends the search. */
3010 if (region->u.allowed.type_list == NULL_TREE)
3012 add_reachable_handler (info, region, region);
3013 return RNL_CAUGHT;
3016 /* Collect a list of lists of allowed types for use in detecting
3017 when a catch may be transformed into a catch-all. */
3018 if (info)
3019 info->types_allowed = tree_cons (NULL_TREE,
3020 region->u.allowed.type_list,
3021 info->types_allowed);
3023 /* If we have definitive information about the type hierarchy,
3024 then we can tell if the thrown type will pass through the
3025 filter. */
3026 if (type_thrown && lang_eh_type_covers)
3028 if (check_handled (region->u.allowed.type_list, type_thrown))
3029 return RNL_NOT_CAUGHT;
3030 else
3032 add_reachable_handler (info, region, region);
3033 return RNL_CAUGHT;
3037 add_reachable_handler (info, region, region);
3038 return RNL_MAYBE_CAUGHT;
3040 case ERT_CATCH:
3041 /* Catch regions are handled by their controlling try region. */
3042 return RNL_NOT_CAUGHT;
3044 case ERT_MUST_NOT_THROW:
3045 /* Here we end our search, since no exceptions may propagate.
3047 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
3048 only via locally handled RESX instructions.
3050 When we inline a function call, we can bring in new handlers. To keep
3051 ERT_MUST_NOT_THROW landing pads from being deleted as unreachable,
3052 assume that such handlers exist for any inlinable call until the
3053 inlining decisions are fixed. */
3055 if (maybe_resx)
3057 add_reachable_handler (info, region, region);
3058 return RNL_CAUGHT;
3060 else
3061 return RNL_BLOCKED;
3063 case ERT_THROW:
3064 case ERT_UNKNOWN:
3065 /* Shouldn't see these here. */
3066 gcc_unreachable ();
3067 break;
3068 default:
3069 gcc_unreachable ();
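/* As an example of how callers walk regions with this function: for a
   throw of exact type T from within a cleanup nested in a try that
   catches T, the walk first sees the cleanup (RNL_MAYBE_CAUGHT; the
   cleanup runs but does not consume the exception) and then the try
   region, where the type match yields RNL_CAUGHT and ends the search.  */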
3073 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
3075 void
3076 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3077 void (*callback) (struct eh_region_d *, void *),
3078 void *callback_data)
3080 struct reachable_info info;
3081 struct eh_region_d *region;
3082 tree type_thrown;
3084 memset (&info, 0, sizeof (info));
3085 info.callback = callback;
3086 info.callback_data = callback_data;
3088 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3089 if (!region)
3090 return;
3092 type_thrown = NULL_TREE;
3093 if (is_resx)
3095 /* A RESX leaves a region instead of entering it. Thus the
3096 region itself may have been deleted out from under us. */
3097 if (region == NULL)
3098 return;
3099 region = region->outer;
3101 else if (region->type == ERT_THROW)
3103 type_thrown = region->u.eh_throw.type;
3104 region = region->outer;
3107 while (region)
3109 if (reachable_next_level (region, type_thrown, &info,
3110 inlinable_call || is_resx) >= RNL_CAUGHT)
3111 break;
3112 /* If we have processed one cleanup, there is no point in
3113 processing any more of them. Each cleanup will have an edge
3114 to the next outer cleanup region, so the flow graph will be
3115 accurate. */
3116 if (region->type == ERT_CLEANUP)
3118 enum reachable_code code = RNL_NOT_CAUGHT;
3119 region = find_prev_try (region->outer);
3120 /* Continue looking for an outer TRY region until we find one
3121 that might catch something. */
3122 while (region
3123 && (code = reachable_next_level (region, type_thrown, &info,
3124 inlinable_call || is_resx))
3125 == RNL_NOT_CAUGHT)
3126 region = find_prev_try (region->outer);
3127 if (code >= RNL_CAUGHT)
3128 break;
3130 if (region)
3131 region = region->outer;
3135 /* Retrieve a list of labels of exception handlers which can be
3136 reached by a given insn. */
3138 static void
3139 arh_to_landing_pad (struct eh_region_d *region, void *data)
3141 rtx *p_handlers = (rtx *) data;
3142 if (! *p_handlers)
3143 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3146 static void
3147 arh_to_label (struct eh_region_d *region, void *data)
3149 rtx *p_handlers = (rtx *) data;
3150 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3153 rtx
3154 reachable_handlers (rtx insn)
3156 bool is_resx = false;
3157 rtx handlers = NULL;
3158 int region_number;
3160 if (JUMP_P (insn)
3161 && GET_CODE (PATTERN (insn)) == RESX)
3163 region_number = XINT (PATTERN (insn), 0);
3164 is_resx = true;
3166 else
3168 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3169 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3170 return NULL;
3171 region_number = INTVAL (XEXP (note, 0));
3174 foreach_reachable_handler (region_number, is_resx, false,
3175 (crtl->eh.built_landing_pads
3176 ? arh_to_landing_pad
3177 : arh_to_label),
3178 &handlers);
3180 return handlers;
3183 /* Determine if the given INSN can throw an exception that is caught
3184 within the function. */
3186 bool
3187 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3189 struct eh_region_d *region;
3190 tree type_thrown;
3192 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3193 if (!region)
3194 return false;
3196 type_thrown = NULL_TREE;
3197 if (is_resx)
3198 region = region->outer;
3199 else if (region->type == ERT_THROW)
3201 type_thrown = region->u.eh_throw.type;
3202 region = region->outer;
3205 /* If this exception is ignored by each and every containing region,
3206 then control passes straight out. The runtime may handle some
3207 regions, which also do not require processing internally. */
3208 for (; region; region = region->outer)
3210 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3211 inlinable_call || is_resx);
3212 if (how == RNL_BLOCKED)
3213 return false;
3214 if (how != RNL_NOT_CAUGHT)
3215 return true;
3218 return false;
3221 bool
3222 can_throw_internal (const_rtx insn)
3224 rtx note;
3226 if (! INSN_P (insn))
3227 return false;
3229 if (JUMP_P (insn)
3230 && GET_CODE (PATTERN (insn)) == RESX
3231 && XINT (PATTERN (insn), 0) > 0)
3232 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
3234 if (NONJUMP_INSN_P (insn)
3235 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3236 insn = XVECEXP (PATTERN (insn), 0, 0);
3238 /* Every insn that might throw has an EH_REGION note. */
3239 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3240 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3241 return false;
3243 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3246 /* Determine if the given INSN can throw an exception that is
3247 visible outside the function. */
3249 bool
3250 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3252 struct eh_region_d *region;
3253 tree type_thrown;
3255 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3256 if (!region)
3257 return true;
3259 type_thrown = NULL_TREE;
3260 if (is_resx)
3261 region = region->outer;
3262 else if (region->type == ERT_THROW)
3264 type_thrown = region->u.eh_throw.type;
3265 region = region->outer;
3268 /* If the exception is caught or blocked by any containing region,
3269 then it is not seen by any calling function. */
3270 for (; region ; region = region->outer)
3271 if (reachable_next_level (region, type_thrown, NULL,
3272 inlinable_call || is_resx) >= RNL_CAUGHT)
3273 return false;
3275 return true;
3278 bool
3279 can_throw_external (const_rtx insn)
3281 rtx note;
3283 if (! INSN_P (insn))
3284 return false;
3286 if (JUMP_P (insn)
3287 && GET_CODE (PATTERN (insn)) == RESX
3288 && XINT (PATTERN (insn), 0) > 0)
3289 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
3291 if (NONJUMP_INSN_P (insn)
3292 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3294 rtx seq = PATTERN (insn);
3295 int i, n = XVECLEN (seq, 0);
3297 for (i = 0; i < n; i++)
3298 if (can_throw_external (XVECEXP (seq, 0, i)))
3299 return true;
3301 return false;
3304 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3305 if (!note)
3307 /* Calls (and trapping insns) without notes are outside any
3308 exception handling region in this function. We have to
3309 assume it might throw. Given that the front end and middle
3310 ends mark known NOTHROW functions, this isn't so wildly
3311 inaccurate. */
3312 return (CALL_P (insn)
3313 || (flag_non_call_exceptions
3314 && may_trap_p (PATTERN (insn))));
3316 if (INTVAL (XEXP (note, 0)) <= 0)
3317 return false;
3319 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3322 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
3324 unsigned int
3325 set_nothrow_function_flags (void)
3327 rtx insn;
3329 crtl->nothrow = 1;
3331 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3332 something that can throw an exception. We specifically exempt
3333 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3334 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3335 is optimistic. */
3337 crtl->all_throwers_are_sibcalls = 1;
3339 /* If we don't know that this implementation of the function will
3340 actually be used, then we must not set TREE_NOTHROW, since
3341 callers must not assume that this function does not throw. */
3342 if (TREE_NOTHROW (current_function_decl))
3343 return 0;
3345 if (! flag_exceptions)
3346 return 0;
3348 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3349 if (can_throw_external (insn))
3351 crtl->nothrow = 0;
3353 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3355 crtl->all_throwers_are_sibcalls = 0;
3356 return 0;
3360 for (insn = crtl->epilogue_delay_list; insn;
3361 insn = XEXP (insn, 1))
3362 if (can_throw_external (insn))
3364 crtl->nothrow = 0;
3366 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3368 crtl->all_throwers_are_sibcalls = 0;
3369 return 0;
3372 if (crtl->nothrow
3373 && (cgraph_function_body_availability (cgraph_node
3374 (current_function_decl))
3375 >= AVAIL_AVAILABLE))
3377 struct cgraph_node *node = cgraph_node (current_function_decl);
3378 struct cgraph_edge *e;
3379 for (e = node->callers; e; e = e->next_caller)
3380 e->can_throw_external = false;
3381 TREE_NOTHROW (current_function_decl) = 1;
3383 if (dump_file)
3384 fprintf (dump_file, "Marking function nothrow: %s\n\n",
3385 current_function_name ());
3387 return 0;
3390 struct rtl_opt_pass pass_set_nothrow_function_flags =
3393 RTL_PASS,
3394 "nothrow", /* name */
3395 NULL, /* gate */
3396 set_nothrow_function_flags, /* execute */
3397 NULL, /* sub */
3398 NULL, /* next */
3399 0, /* static_pass_number */
3400 TV_NONE, /* tv_id */
3401 0, /* properties_required */
3402 0, /* properties_provided */
3403 0, /* properties_destroyed */
3404 0, /* todo_flags_start */
3405 TODO_dump_func, /* todo_flags_finish */
3410 /* Various hooks for unwind library. */
3412 /* Do any necessary initialization to access arbitrary stack frames.
3413 On the SPARC, this means flushing the register windows. */
3415 void
3416 expand_builtin_unwind_init (void)
3418 /* Set this so all the registers get saved in our frame; we need to be
3419 able to copy the saved values for any registers from frames we unwind. */
3420 crtl->saves_all_registers = 1;
3422 #ifdef SETUP_FRAME_ADDRESSES
3423 SETUP_FRAME_ADDRESSES ();
3424 #endif
3427 rtx
3428 expand_builtin_eh_return_data_regno (tree exp)
3430 tree which = CALL_EXPR_ARG (exp, 0);
3431 unsigned HOST_WIDE_INT iwhich;
3433 if (TREE_CODE (which) != INTEGER_CST)
3435 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3436 return constm1_rtx;
3439 iwhich = tree_low_cst (which, 1);
3440 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3441 if (iwhich == INVALID_REGNUM)
3442 return constm1_rtx;
3444 #ifdef DWARF_FRAME_REGNUM
3445 iwhich = DWARF_FRAME_REGNUM (iwhich);
3446 #else
3447 iwhich = DBX_REGISTER_NUMBER (iwhich);
3448 #endif
3450 return GEN_INT (iwhich);
3453 /* Given a value extracted from the return address register or stack slot,
3454 return the actual address encoded in that value. */
3456 rtx
3457 expand_builtin_extract_return_addr (tree addr_tree)
3459 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3461 if (GET_MODE (addr) != Pmode
3462 && GET_MODE (addr) != VOIDmode)
3464 #ifdef POINTERS_EXTEND_UNSIGNED
3465 addr = convert_memory_address (Pmode, addr);
3466 #else
3467 addr = convert_to_mode (Pmode, addr, 0);
3468 #endif
3471 /* First mask out any unwanted bits. */
3472 #ifdef MASK_RETURN_ADDR
3473 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3474 #endif
3476 /* Then adjust to find the real return address. */
3477 #if defined (RETURN_ADDR_OFFSET)
3478 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3479 #endif
3481 return addr;
3484 /* Given an actual address in addr_tree, do any necessary encoding
3485 and return the value to be stored in the return address register or
3486 stack slot so the epilogue will return to that address. */
3488 rtx
3489 expand_builtin_frob_return_addr (tree addr_tree)
3491 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3493 addr = convert_memory_address (Pmode, addr);
3495 #ifdef RETURN_ADDR_OFFSET
3496 addr = force_reg (Pmode, addr);
3497 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3498 #endif
3500 return addr;
3503 /* Set up the epilogue with the magic bits we'll need to return to the
3504 exception handler. */
3506 void
3507 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3508 tree handler_tree)
3510 rtx tmp;
3512 #ifdef EH_RETURN_STACKADJ_RTX
3513 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3514 VOIDmode, EXPAND_NORMAL);
3515 tmp = convert_memory_address (Pmode, tmp);
3516 if (!crtl->eh.ehr_stackadj)
3517 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3518 else if (tmp != crtl->eh.ehr_stackadj)
3519 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3520 #endif
3522 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3523 VOIDmode, EXPAND_NORMAL);
3524 tmp = convert_memory_address (Pmode, tmp);
3525 if (!crtl->eh.ehr_handler)
3526 crtl->eh.ehr_handler = copy_to_reg (tmp);
3527 else if (tmp != crtl->eh.ehr_handler)
3528 emit_move_insn (crtl->eh.ehr_handler, tmp);
3530 if (!crtl->eh.ehr_label)
3531 crtl->eh.ehr_label = gen_label_rtx ();
3532 emit_jump (crtl->eh.ehr_label);
3535 void
3536 expand_eh_return (void)
3538 rtx around_label;
3540 if (! crtl->eh.ehr_label)
3541 return;
3543 crtl->calls_eh_return = 1;
3545 #ifdef EH_RETURN_STACKADJ_RTX
3546 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3547 #endif
3549 around_label = gen_label_rtx ();
3550 emit_jump (around_label);
3552 emit_label (crtl->eh.ehr_label);
3553 clobber_return_register ();
3555 #ifdef EH_RETURN_STACKADJ_RTX
3556 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3557 #endif
3559 #ifdef HAVE_eh_return
3560 if (HAVE_eh_return)
3561 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3562 else
3563 #endif
3565 #ifdef EH_RETURN_HANDLER_RTX
3566 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3567 #else
3568 error ("__builtin_eh_return not supported on this target");
3569 #endif
3572 emit_label (around_label);
3575 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3576 POINTERS_EXTEND_UNSIGNED and return it. */
3578 rtx
3579 expand_builtin_extend_pointer (tree addr_tree)
3581 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3582 int extend;
3584 #ifdef POINTERS_EXTEND_UNSIGNED
3585 extend = POINTERS_EXTEND_UNSIGNED;
3586 #else
3587 /* The previous EH code did an unsigned extend by default, so we do this also
3588 for consistency. */
3589 extend = 1;
3590 #endif
3592 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3595 /* In the following functions, we represent entries in the action table
3596 as 1-based indices. Special cases are:
3598 0: null action record, non-null landing pad; implies cleanups
3599 -1: null action record, null landing pad; implies no action
3600 -2: no call-site entry; implies must_not_throw
3601 -3: we have yet to process outer regions
3603 Further, no special cases apply to the "next" field of the record.
3604 For next, 0 means end of list. */
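/* For instance, a call inside "try { ... } catch (A) catch (B)" with an
   outer cleanup might yield the chain  A -> B -> cleanup(filter 0)  in
   the action table; the catches are processed in reverse so that the
   linked records are tried in declaration order, and the call site's
   action value is the 1-based offset of A's record.  (Sketch only;
   see collect_one_action_chain below.)  */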
3606 struct action_record
3608 int offset;
3609 int filter;
3610 int next;
3613 static int
3614 action_record_eq (const void *pentry, const void *pdata)
3616 const struct action_record *entry = (const struct action_record *) pentry;
3617 const struct action_record *data = (const struct action_record *) pdata;
3618 return entry->filter == data->filter && entry->next == data->next;
3621 static hashval_t
3622 action_record_hash (const void *pentry)
3624 const struct action_record *entry = (const struct action_record *) pentry;
3625 return entry->next * 1009 + entry->filter;
3628 static int
3629 add_action_record (htab_t ar_hash, int filter, int next)
3631 struct action_record **slot, *new_ar, tmp;
3633 tmp.filter = filter;
3634 tmp.next = next;
3635 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3637 if ((new_ar = *slot) == NULL)
3639 new_ar = XNEW (struct action_record);
3640 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3641 new_ar->filter = filter;
3642 new_ar->next = next;
3643 *slot = new_ar;
3645 /* The filter value goes in untouched. The link to the next
3646 record is a "self-relative" byte offset, or zero to indicate
3647 that there is no next record. So convert the absolute 1-based
3648 indices we've been carrying around into a displacement. */
3650 push_sleb128 (&crtl->eh.action_record_data, filter);
3651 if (next)
3652 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3653 push_sleb128 (&crtl->eh.action_record_data, next);
3656 return new_ar->offset;
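/* A worked example of the displacement conversion above, starting from
   an empty action_record_data array and assuming single-byte LEB128
   encodings: the first record (filter F1, no next) occupies bytes 1-2
   as [F1, 0].  A second record chaining to it pushes F2 at byte 3,
   after which the active size is 3, so its next field becomes
   1 - (3 + 1) = -3, stored at byte 4.  The offset is self-relative:
   byte 4 plus -3 lands on byte 1, the head of the first record.  */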
3659 static int
3660 collect_one_action_chain (htab_t ar_hash, struct eh_region_d *region)
3662 struct eh_region_d *c;
3663 int next;
3665 /* If we've reached the top of the region chain, then we have
3666 no actions, and require no landing pad. */
3667 if (region == NULL)
3668 return -1;
3670 switch (region->type)
3672 case ERT_CLEANUP:
3673 /* A cleanup adds a zero filter to the beginning of the chain, but
3674 there are special cases to look out for. If there are *only*
3675 cleanups along a path, then it compresses to a zero action.
3676 Further, if there are multiple cleanups along a path, we only
3677 need to represent one of them, as that is enough to trigger
3678 entry to the landing pad at runtime. */
3679 next = collect_one_action_chain (ar_hash, region->outer);
3680 if (next <= 0)
3681 return 0;
3682 for (c = region->outer; c ; c = c->outer)
3683 if (c->type == ERT_CLEANUP)
3684 return next;
3685 return add_action_record (ar_hash, 0, next);
3687 case ERT_TRY:
3688 /* Process the associated catch regions in reverse order.
3689 If there's a catch-all handler, then we don't need to
3690 search outer regions. Use a magic -3 value to record
3691 that we haven't done the outer search. */
3692 next = -3;
3693 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3695 if (c->u.eh_catch.type_list == NULL)
3697 /* Retrieve the filter from the head of the filter list
3698 where we have stored it (see assign_filter_values). */
3699 int filter
3700 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3702 next = add_action_record (ar_hash, filter, 0);
3704 else
3706 /* Once the outer search is done, trigger an action record for
3707 each filter we have. */
3708 tree flt_node;
3710 if (next == -3)
3712 next = collect_one_action_chain (ar_hash, region->outer);
3714 /* If there is no next action, terminate the chain. */
3715 if (next == -1)
3716 next = 0;
3717 /* If all outer actions are cleanups or must_not_throw,
3718 we'll have no action record for it, since we had wanted
3719 to encode these states in the call-site record directly.
3720 Add a cleanup action to the chain to catch these. */
3721 else if (next <= 0)
3722 next = add_action_record (ar_hash, 0, 0);
3725 flt_node = c->u.eh_catch.filter_list;
3726 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3728 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3729 next = add_action_record (ar_hash, filter, next);
3733 return next;
3735 case ERT_ALLOWED_EXCEPTIONS:
3736 /* An exception specification adds its filter to the
3737 beginning of the chain. */
3738 next = collect_one_action_chain (ar_hash, region->outer);
3740 /* If there is no next action, terminate the chain. */
3741 if (next == -1)
3742 next = 0;
3743 /* If all outer actions are cleanups or must_not_throw,
3744 we'll have no action record for it, since we had wanted
3745 to encode these states in the call-site record directly.
3746 Add a cleanup action to the chain to catch these. */
3747 else if (next <= 0)
3748 next = add_action_record (ar_hash, 0, 0);
3750 return add_action_record (ar_hash, region->u.allowed.filter, next);
3752 case ERT_MUST_NOT_THROW:
3753 /* A must-not-throw region with no inner handlers or cleanups
3754 requires no call-site entry. Note that this differs from
3755 the no handler or cleanup case in that we do require an lsda
3756 to be generated. Return a magic -2 value to record this. */
3757 return -2;
3759 case ERT_CATCH:
3760 case ERT_THROW:
3761 /* CATCH regions are handled in TRY above. THROW regions are
3762 for optimization information only and produce no output. */
3763 return collect_one_action_chain (ar_hash, region->outer);
3765 default:
3766 gcc_unreachable ();
3770 static int
3771 add_call_site (rtx landing_pad, int action)
3773 call_site_record record;
3775 record = GGC_NEW (struct call_site_record_d);
3776 record->landing_pad = landing_pad;
3777 record->action = action;
3779 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3781 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3784 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3785 The new note numbers will not refer to region numbers, but
3786 instead to call site entries. */
3788 unsigned int
3789 convert_to_eh_region_ranges (void)
3791 rtx insn, iter, note;
3792 htab_t ar_hash;
3793 int last_action = -3;
3794 rtx last_action_insn = NULL_RTX;
3795 rtx last_landing_pad = NULL_RTX;
3796 rtx first_no_action_insn = NULL_RTX;
3797 int call_site = 0;
3799 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3800 return 0;
3802 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3804 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3806 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3807 if (INSN_P (iter))
3809 struct eh_region_d *region;
3810 int this_action;
3811 rtx this_landing_pad;
3813 insn = iter;
3814 if (NONJUMP_INSN_P (insn)
3815 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3816 insn = XVECEXP (PATTERN (insn), 0, 0);
3818 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3819 if (!note)
3821 if (! (CALL_P (insn)
3822 || (flag_non_call_exceptions
3823 && may_trap_p (PATTERN (insn)))))
3824 continue;
3825 this_action = -1;
3826 region = NULL;
3828 else
3830 if (INTVAL (XEXP (note, 0)) <= 0)
3831 continue;
3832 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3833 this_action = collect_one_action_chain (ar_hash, region);
3836 /* Existence of catch handlers, or must-not-throw regions
3837 implies that an lsda is needed (even if empty). */
3838 if (this_action != -1)
3839 crtl->uses_eh_lsda = 1;
3841 /* Delay creation of region notes for no-action regions
3842 until we're sure that an lsda will be required. */
3843 else if (last_action == -3)
3845 first_no_action_insn = iter;
3846 last_action = -1;
3849 /* Cleanups and handlers may share action chains but not
3850 landing pads. Collect the landing pad for this region. */
3851 if (this_action >= 0)
3853 struct eh_region_d *o;
3854 for (o = region; ! o->landing_pad ; o = o->outer)
3855 continue;
3856 this_landing_pad = o->landing_pad;
3858 else
3859 this_landing_pad = NULL_RTX;
3861 /* Differing actions or landing pads implies a change in call-site
3862 info, which implies some EH_REGION note should be emitted. */
3863 if (last_action != this_action
3864 || last_landing_pad != this_landing_pad)
3866 /* If we'd not seen a previous action (-3) or the previous
3867 action was must-not-throw (-2), then we do not need an
3868 end note. */
3869 if (last_action >= -1)
3871 /* If we delayed the creation of the begin, do it now. */
3872 if (first_no_action_insn)
3874 call_site = add_call_site (NULL_RTX, 0);
3875 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3876 first_no_action_insn);
3877 NOTE_EH_HANDLER (note) = call_site;
3878 first_no_action_insn = NULL_RTX;
3881 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3882 last_action_insn);
3883 NOTE_EH_HANDLER (note) = call_site;
3886 /* If the new action is must-not-throw, then no region notes
3887 are created. */
3888 if (this_action >= -1)
3890 call_site = add_call_site (this_landing_pad,
3891 this_action < 0 ? 0 : this_action);
3892 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3893 NOTE_EH_HANDLER (note) = call_site;
3896 last_action = this_action;
3897 last_landing_pad = this_landing_pad;
3899 last_action_insn = iter;
3902 if (last_action >= -1 && ! first_no_action_insn)
3904 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3905 NOTE_EH_HANDLER (note) = call_site;
3908 htab_delete (ar_hash);
3909 return 0;
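/* By way of illustration (call-site numbers hypothetical), the pass
   above turns an insn stream whose insns carry REG_EH_REGION notes
   r5 r5 r7, with differing actions, roughly into

     NOTE_INSN_EH_REGION_BEG (call-site 1)
       insn; insn            -- the two r5 insns
     NOTE_INSN_EH_REGION_END (call-site 1)
     NOTE_INSN_EH_REGION_BEG (call-site 2)
       insn                  -- the r7 insn
     NOTE_INSN_EH_REGION_END (call-site 2)

   where the notes carry call-site indices rather than region numbers,
   and runs of no-action insns are only bracketed once it is known an
   lsda will be required.  */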
3912 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3915 RTL_PASS,
3916 "eh_ranges", /* name */
3917 NULL, /* gate */
3918 convert_to_eh_region_ranges, /* execute */
3919 NULL, /* sub */
3920 NULL, /* next */
3921 0, /* static_pass_number */
3922 TV_NONE, /* tv_id */
3923 0, /* properties_required */
3924 0, /* properties_provided */
3925 0, /* properties_destroyed */
3926 0, /* todo_flags_start */
3927 TODO_dump_func, /* todo_flags_finish */
3932 static void
3933 push_uleb128 (varray_type *data_area, unsigned int value)
3937 unsigned char byte = value & 0x7f;
3938 value >>= 7;
3939 if (value)
3940 byte |= 0x80;
3941 VARRAY_PUSH_UCHAR (*data_area, byte);
3943 while (value);
3946 static void
3947 push_sleb128 (varray_type *data_area, int value)
3949 unsigned char byte;
3950 int more;
3954 byte = value & 0x7f;
3955 value >>= 7;
3956 more = ! ((value == 0 && (byte & 0x40) == 0)
3957 || (value == -1 && (byte & 0x40) != 0));
3958 if (more)
3959 byte |= 0x80;
3960 VARRAY_PUSH_UCHAR (*data_area, byte);
3962 while (more);
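/* Worked examples for the two encoders above, using the classic values
   from the DWARF specification: push_uleb128 (..., 624485) appends the
   bytes 0xe5 0x8e 0x26, and push_sleb128 (..., -123456) appends
   0xc0 0xbb 0x78.  In both encodings the high bit of every byte except
   the last marks a continuation.  */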
3966 #ifndef HAVE_AS_LEB128
3967 static int
3968 dw2_size_of_call_site_table (void)
3970 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3971 int size = n * (4 + 4 + 4);
3972 int i;
3974 for (i = 0; i < n; ++i)
3976 struct call_site_record_d *cs =
3977 VEC_index (call_site_record, crtl->eh.call_site_record, i);
3978 size += size_of_uleb128 (cs->action);
3981 return size;
3984 static int
3985 sjlj_size_of_call_site_table (void)
3987 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3988 int size = 0;
3989 int i;
3991 for (i = 0; i < n; ++i)
3993 struct call_site_record_d *cs =
3994 VEC_index (call_site_record, crtl->eh.call_site_record, i);
3995 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3996 size += size_of_uleb128 (cs->action);
3999 return size;
4001 #endif
4003 static void
4004 dw2_output_call_site_table (void)
4006 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
4007 int i;
4009 for (i = 0; i < n; ++i)
4011 struct call_site_record_d *cs =
4012 VEC_index (call_site_record, crtl->eh.call_site_record, i);
4013 char reg_start_lab[32];
4014 char reg_end_lab[32];
4015 char landing_pad_lab[32];
4017 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
4018 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
4020 if (cs->landing_pad)
4021 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
4022 CODE_LABEL_NUMBER (cs->landing_pad));
4024 /* ??? Perhaps use insn length scaling if the assembler supports
4025 generic arithmetic. */
4026 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
4027 data4 if the function is small enough. */
4028 #ifdef HAVE_AS_LEB128
4029 dw2_asm_output_delta_uleb128 (reg_start_lab,
4030 current_function_func_begin_label,
4031 "region %d start", i);
4032 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
4033 "length");
4034 if (cs->landing_pad)
4035 dw2_asm_output_delta_uleb128 (landing_pad_lab,
4036 current_function_func_begin_label,
4037 "landing pad");
4038 else
4039 dw2_asm_output_data_uleb128 (0, "landing pad");
4040 #else
4041 dw2_asm_output_delta (4, reg_start_lab,
4042 current_function_func_begin_label,
4043 "region %d start", i);
4044 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
4045 if (cs->landing_pad)
4046 dw2_asm_output_delta (4, landing_pad_lab,
4047 current_function_func_begin_label,
4048 "landing pad");
4049 else
4050 dw2_asm_output_data (4, 0, "landing pad");
4051 #endif
4052 dw2_asm_output_data_uleb128 (cs->action, "action");
4055 call_site_base += n;
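/* With HAVE_AS_LEB128, each record above comes out roughly as the
   following assembly (label names sketched):

     .uleb128 .LEHB<n>-<func_begin>    -- region start
     .uleb128 .LEHE<n>-.LEHB<n>        -- region length
     .uleb128 <pad_label>-<func_begin> -- landing pad, or 0 if none
     .uleb128 <action>                 -- action record offset

   Without it, the first three fields are emitted as fixed 4-byte
   deltas instead.  */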
4058 static void
4059 sjlj_output_call_site_table (void)
4061 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
4062 int i;
4064 for (i = 0; i < n; ++i)
4066 struct call_site_record_d *cs =
4067 VEC_index (call_site_record, crtl->eh.call_site_record, i);
4069 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
4070 "region %d landing pad", i);
4071 dw2_asm_output_data_uleb128 (cs->action, "action");
4074 call_site_base += n;

#ifndef TARGET_UNWIND_INFO
/* Switch to the section that should be used for exception tables.  */

static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm.have_named_sections)
	{
	  int flags;

	  if (EH_TABLES_CAN_BE_READ_ONLY)
	    {
	      int tt_format =
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	      flags = ((! flag_pic
			|| ((tt_format & 0x70) != DW_EH_PE_absptr
			    && (tt_format & 0x70) != DW_EH_PE_aligned))
		       ? 0 : SECTION_WRITE);
	    }
	  else
	    flags = SECTION_WRITE;

#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections)
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, NULL);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
#endif
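
/* Editor's note: with -ffunction-sections and a linker that can
   garbage-collect EH sections (HAVE_LD_EH_GC_SECTIONS), the code above
   names the table section after the function, e.g.
   ".gcc_except_table.foo" for a function foo, so an unused function's
   table can be discarded along with its code; otherwise one cached
   ".gcc_except_table" section is shared by all functions.  */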

/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct varpool_node *node;

      type = lookup_type_for_runtime (type);
      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    {
	      node = varpool_node (type);
	      if (node)
		varpool_mark_needed_node (node);
	      is_public = TREE_PUBLIC (type);
	    }
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
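
/* Editor's illustration, assuming a C++ front end and Itanium ABI
   mangling: a handler such as "catch (std::bad_alloc &)" reaches this
   function with TYPE resolving to the RTTI object _ZTISt9bad_alloc,
   and the emitted entry is that symbol's address encoded per
   TT_FORMAT; a catch-all handler passes NULL_TREE and a zero word is
   emitted instead.  */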

void
output_function_exception_table (const char * ARG_UNUSED (fnname))
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! crtl->uses_eh_lsda)
    return;

  if (eh_personality_libfunc)
    assemble_external_libcall (eh_personality_libfunc);

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  switch_to_exception_section (fnname);
#endif

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.except_table_label (asm_out_file);

  have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ugh.  Alignment complicates things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
		    + (VEC_length (tree, crtl->eh.ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }
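
/* Editor's note on the do-while above: the uleb128 that holds DISP is
   itself part of the bytes preceding the @TType data, so its encoded
   size feeds back into the alignment pad and hence into DISP.
   Iterating until DISP stops changing finds the fixed point; for
   example (editor's numbers), a displacement of 127 encodes in one
   byte while 128 needs two, which can change PAD on the next pass.  */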

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VEC_length (tree, crtl->eh.ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, crtl->eh.ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
	  output_ttype (type, tt_format, tt_format_size);
	}
      else
	dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
			     (i ? NULL : "Exception specification table"));
    }

  switch_to_section (current_function_section ());
}
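
/* Editor's summary of the LSDA emitted above, in order: a one-byte
   @LPStart format (always DW_EH_PE_omit here); a one-byte @TType
   format; when type data is present, a uleb128 @TType base offset;
   a one-byte call-site format plus the call-site table length and the
   table itself; the action record table; and finally the aligned
   @TType entries followed by the exception specification table.  */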

void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}

/* Dump EH information to OUT.  */

void
dump_eh_tree (FILE * out, struct function *fun)
{
  struct eh_region_d *i;
  int depth = 0;
  static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
					   "allowed_exceptions", "must_not_throw",
					   "throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
	       i->region_number, type_name[(int) i->type]);
      if (i->tree_label)
	{
	  fprintf (out, " tree_label:");
	  print_generic_expr (out, i->tree_label, 0);
	}
      if (i->label)
	fprintf (out, " label:%i", INSN_UID (i->label));
      if (i->landing_pad)
	{
	  fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
	  if (NOTE_P (i->landing_pad))
	    fprintf (out, " (deleted)");
	}
      if (i->post_landing_pad)
	{
	  fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
	  if (NOTE_P (i->post_landing_pad))
	    fprintf (out, " (deleted)");
	}
      if (i->resume)
	{
	  fprintf (out, " resume:%i", INSN_UID (i->resume));
	  if (NOTE_P (i->resume))
	    fprintf (out, " (deleted)");
	}
      if (i->may_contain_throw)
	fprintf (out, " may_contain_throw");
      switch (i->type)
	{
	case ERT_CLEANUP:
	  break;

	case ERT_TRY:
	  {
	    struct eh_region_d *c;
	    fprintf (out, " catch regions:");
	    for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
	      fprintf (out, " %i", c->region_number);
	  }
	  break;

	case ERT_CATCH:
	  if (i->u.eh_catch.prev_catch)
	    fprintf (out, " prev: %i",
		     i->u.eh_catch.prev_catch->region_number);
	  if (i->u.eh_catch.next_catch)
	    fprintf (out, " next: %i",
		     i->u.eh_catch.next_catch->region_number);
	  fprintf (out, " type:");
	  print_generic_expr (out, i->u.eh_catch.type_list, 0);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter:%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list, 0);
	  break;

	case ERT_THROW:
	  fprintf (out, " type:");
	  print_generic_expr (out, i->u.eh_throw.type, 0);
	  break;

	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_UNKNOWN:
	  break;
	}
      if (i->aka)
	{
	  fprintf (out, " also known as:");
	  dump_bitmap (out, i->aka);
	}
      else
	fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
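
/* Editor's note: the loop above (like the one in verify_eh_tree below)
   walks the region tree iteratively -- descend via INNER, advance via
   NEXT_PEER, and climb via OUTER until a peer is found -- so no
   recursion or explicit stack is needed.  */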

/* Dump the EH tree for FN on stderr.  */

void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}

/* Verify EH region invariants.  */

static bool
verify_eh_region (struct eh_region_d *region)
{
  bool found = false;
  if (!region)
    return false;
  switch (region->type)
    {
    case ERT_TRY:
      {
	struct eh_region_d *c, *prev = NULL;
	if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
	  {
	    error ("Try region %i has wrong eh_catch pointer to %i",
		   region->region_number,
		   region->u.eh_try.eh_catch->region_number);
	    found = true;
	  }
	for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
	  {
	    if (c->outer != region->outer)
	      {
		error
		  ("Catch region %i has different outer region than try region %i",
		   c->region_number, region->region_number);
		found = true;
	      }
	    if (c->u.eh_catch.prev_catch != prev)
	      {
		error ("Catch region %i has corrupted catchlist",
		       c->region_number);
		found = true;
	      }
	    prev = c;
	  }
	if (prev != region->u.eh_try.last_catch)
	  {
	    error
	      ("Try region %i has wrong last_catch pointer to %i instead of %i",
	       region->region_number,
	       region->u.eh_try.last_catch->region_number,
	       prev->region_number);
	    found = true;
	  }
      }
      break;
    case ERT_CATCH:
      if (!region->u.eh_catch.prev_catch
	  && (!region->next_peer || region->next_peer->type != ERT_TRY))
	{
	  error ("Catch region %i should be followed by try",
		 region->region_number);
	  found = true;
	}
      break;
    case ERT_CLEANUP:
    case ERT_ALLOWED_EXCEPTIONS:
    case ERT_MUST_NOT_THROW:
    case ERT_THROW:
      break;
    case ERT_UNKNOWN:
      gcc_unreachable ();
    }
  for (region = region->inner; region; region = region->next_peer)
    found |= verify_eh_region (region);
  return found;
}

/* Verify invariants on EH data structures.  */

void
verify_eh_tree (struct function *fun)
{
  struct eh_region_d *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  if (!fun->eh->region_tree)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
      {
	if (i->region_number == j)
	  count++;
	if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
	  {
	    error ("region_array is corrupted for region %i",
		   i->region_number);
	    err = true;
	  }
      }
  i = fun->eh->region_tree;

  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
	{
	  error ("region_array is corrupted for region %i", i->region_number);
	  err = true;
	}
      if (i->outer != outer)
	{
	  error ("outer block of region %i is wrong", i->region_number);
	  err = true;
	}
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
	{
	  error
	    ("region %i may contain throw and is contained in region that may not",
	     i->region_number);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", i->region_number);
	  err = true;
	}
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
	outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		{
		  if (depth != -1)
		    {
		      error ("tree list ends on depth %i", depth + 1);
		      err = true;
		    }
		  if (count != nvisited)
		    {
		      error ("array does not match the region tree");
		      err = true;
		    }
		  if (!err)
		    for (i = fun->eh->region_tree; i; i = i->next_peer)
		      err |= verify_eh_region (i);

		  if (err)
		    {
		      dump_eh_tree (stderr, fun);
		      internal_error ("verify_eh_tree failed");
		    }
		  return;
		}
	      outer = i->outer;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default C++ routines aren't actually C++ specific, so use those.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
		      : "_Unwind_Resume");
}
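
/* Editor's note: _Unwind_Resume re-raises an in-flight exception after
   a cleanup has run; _Unwind_SjLj_Resume is the equivalent entry point
   in the setjmp/longjmp-based unwinder selected by
   USING_SJLJ_EXCEPTIONS.  */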

static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static unsigned int
rest_of_handle_eh (void)
{
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}

struct rtl_opt_pass pass_rtl_eh =
{
 {
  RTL_PASS,
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

#include "gt-except.h"