/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-flow.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct GTY(()) ehl_map_entry {
  rtx label;
  struct eh_region_d *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static void remove_eh_handler (struct eh_region_d *);
static void remove_eh_handler_and_replace (struct eh_region_d *,
                                           struct eh_region_d *, bool);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region_d *, tree,
                                                 struct reachable_info *, bool);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region_d *);
static int add_call_site (rtx, int, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
                         FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
                          FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
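
/* Editorial note (an illustrative sketch, not from the original sources):
   the record type built above mirrors, roughly, this C declaration from
   unwind-sjlj.c; the exact field types depend on the target's unwind
   word mode and jmp_buf layout:

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;  // __prev: chain of contexts
       int call_site;                       // __call_site: active region
       _Unwind_Word data[4];                // __data: EH return data
       _Unwind_Personality_Fn personality;  // __personality
       void *lsda;                          // __lsda: language data area
       void *jbuf[];                        // __jbuf: setjmp buffer
     };

   The sjlj_fc_*_ofs values cached above are the byte offsets of these
   fields, used later when emitting RTL stores into the context.  */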
void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region_d *
gen_eh_region (enum eh_region_type type, struct eh_region_d *outer)
{
  struct eh_region_d *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region_d);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}
struct eh_region_d *
gen_eh_region_cleanup (struct eh_region_d *outer)
{
  struct eh_region_d *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  return cleanup;
}

struct eh_region_d *
gen_eh_region_try (struct eh_region_d *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region_d *
gen_eh_region_catch (struct eh_region_d *t, tree type_or_list)
{
  struct eh_region_d *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
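
/* Editorial note (an illustrative sketch, not from the original sources):
   for a C++ fragment like

     try { ... } catch (A &) { ... } catch (B &) { ... }

   the calls above produce one ERT_TRY region whose eh_catch/last_catch
   fields point at a chain of two ERT_CATCH regions linked through
   u.eh_catch.next_catch/prev_catch.  Note that the CATCH regions are
   peers of the TRY rather than children of it, since gen_eh_region_catch
   passes t->outer, not t, as the outer region.  */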
struct eh_region_d *
gen_eh_region_allowed (struct eh_region_d *outer, tree allowed)
{
  struct eh_region_d *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region_d *
gen_eh_region_must_not_throw (struct eh_region_d *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region_d *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region_d *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region_d *region)
{
  return region->tree_label;
}

tree
get_eh_region_no_tree_label (int region)
{
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
}

void
set_eh_region_tree_label (struct eh_region_d *region, tree lab)
{
  region->tree_label = lab;
}
void
expand_resx_stmt (gimple stmt)
{
  int region_nr = gimple_resx_region (stmt);
  rtx insn;
  struct eh_region_d *reg = VEC_index (eh_region,
                                       cfun->eh->region_array, region_nr);

  do_pending_stack_adjust ();
  insn = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  if (reg->resume)
    reg->resume = gen_rtx_INSN_LIST (VOIDmode, insn, reg->resume);
  else
    reg->resume = insn;
  emit_barrier ();
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region_d *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region_d *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
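
/* Editorial note (a sketch, not from the original sources): the loop
   above is the iterative pre-order walk used repeatedly in this file
   (see also must_not_throw_labels and can_be_reached_by_runtime).
   In pseudocode over the inner/next_peer/outer links:

     visit (i);
     if (i->inner)           // descend into children first
       i = i->inner;
     else if (i->next_peer)  // then move to the next sibling
       i = i->next_peer;
     else                    // otherwise climb until a sibling exists
       do { i = i->outer; if (!i) stop; } while (!i->next_peer);

   Keeping the walk iterative avoids deep recursion on large EH trees.  */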
/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the
   runtime can unwind through it, or else we would eliminate the
   terminate call the runtime would otherwise do.  Return TRUE if R
   contains throwing statements or if some of the exceptions in inner
   regions can be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statement.

   The function looks O(n^3) at first sight.  In fact it is called at
   most once for every MUST_NOT_THROW in the EH tree, from
   remove_unreachable_regions.  Because the outer loop walking the
   subregions does not dive into MUST_NOT_THROW regions, it examines
   every region at most once.  The inner loop does the unwinding from
   the throwing statement the same way as we do during CFG construction,
   so it is O(n^2) in the size of the EH tree, but O(n) in the size of
   the CFG.  In practice EH trees are wide, not deep, so this is not
   a problem.  */

static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region_d *r)
{
  struct eh_region_d *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
        return true;
  if (!i)
    return false;
  while (1)
    {
      /* It is pointless to look into MUST_NOT_THROW
         or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
        {
          bool found = TEST_BIT (contains_stmt, i->region_number);
          if (!found && i->aka)
            EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
              if (TEST_BIT (contains_stmt, n))
                {
                  found = true;
                  break;
                }
          /* We have a nested region that contains a throwing statement.
             See if resuming might lead up to the resx, or whether we get
             locally caught sooner.  If we get locally caught sooner, we
             either know region R is not reachable, or it would have a
             direct edge from the EH resx and thus we would have
             considered the region reachable in the first place.  */
          if (found)
            {
              struct eh_region_d *i1 = i;
              tree type_thrown = NULL_TREE;

              if (i1->type == ERT_THROW)
                {
                  type_thrown = i1->u.eh_throw.type;
                  i1 = i1->outer;
                }
              for (; i1 != r; i1 = i1->outer)
                if (reachable_next_level (i1, type_thrown, NULL,
                                          false) >= RNL_CAUGHT)
                  break;
              if (i1 == r)
                return true;
            }
        }
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == r)
                return false;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
/* Bring region R to the root of the tree.  */

static void
bring_to_root (struct eh_region_d *r)
{
  struct eh_region_d **pp;
  struct eh_region_d *outer = r->outer;
  if (!r->outer)
    return;
  for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
    continue;
  *pp = r->next_peer;
  r->outer = NULL;
  r->next_peer = cfun->eh->region_tree;
  cfun->eh->region_tree = r;
}
/* Return true if region R2 can be replaced by R1.  */

static bool
eh_region_replaceable_by_p (const struct eh_region_d *r1,
                            const struct eh_region_d *r2)
{
  /* Regions are semantically the same if they are of the same type and
     have the same label and type-dependent data.  */
  if (r1->type != r2->type)
    return false;
  if (r1->tree_label != r2->tree_label)
    return false;

  /* Verify that the region type dependent data are also the same.  */
  switch (r1->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region_d *c1, *c2;
        for (c1 = r1->u.eh_try.eh_catch,
             c2 = r2->u.eh_try.eh_catch;
             c1 && c2;
             c1 = c1->u.eh_catch.next_catch,
             c2 = c2->u.eh_catch.next_catch)
          if (!eh_region_replaceable_by_p (c1, c2))
            return false;
        if (c1 || c2)
          return false;
      }
      break;
    case ERT_CATCH:
      if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
        return false;
      if (!list_equal_p (r1->u.eh_catch.filter_list,
                         r2->u.eh_catch.filter_list))
        return false;
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
        return false;
      if (r1->u.allowed.filter != r2->u.allowed.filter)
        return false;
      break;
    case ERT_THROW:
      if (r1->u.eh_throw.type != r2->u.eh_throw.type)
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
             r2->region_number);
  return true;
}
/* Replace region R2 by R1.  */

static void
replace_region (struct eh_region_d *r1, struct eh_region_d *r2)
{
  struct eh_region_d *next1 = r1->u.eh_try.eh_catch;
  struct eh_region_d *next2 = r2->u.eh_try.eh_catch;
  bool is_try = r1->type == ERT_TRY;

  gcc_assert (r1->type != ERT_CATCH);
  remove_eh_handler_and_replace (r2, r1, false);
  if (is_try)
    {
      while (next1)
        {
          r1 = next1;
          r2 = next2;
          gcc_assert (next1->type == ERT_CATCH);
          gcc_assert (next2->type == ERT_CATCH);
          next1 = next1->u.eh_catch.next_catch;
          next2 = next2->u.eh_catch.next_catch;
          remove_eh_handler_and_replace (r2, r1, false);
        }
    }
}
/* Return the hash value of type list T.  */

static hashval_t
hash_type_list (tree t)
{
  hashval_t val = 0;
  for (; t; t = TREE_CHAIN (t))
    val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
  return val;
}
/* Hash EH regions so that semantically equivalent regions get the same
   hash value.  */

static hashval_t
hash_eh_region (const void *r)
{
  const struct eh_region_d *region = (const struct eh_region_d *) r;
  hashval_t val = region->type;

  if (region->tree_label)
    val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
  switch (region->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region_d *c;
        for (c = region->u.eh_try.eh_catch;
             c; c = c->u.eh_catch.next_catch)
          val = iterative_hash_hashval_t (hash_eh_region (c), val);
      }
      break;
    case ERT_CATCH:
      val = iterative_hash_hashval_t (hash_type_list
                                        (region->u.eh_catch.type_list), val);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      val = iterative_hash_hashval_t
              (hash_type_list (region->u.allowed.type_list), val);
      val = iterative_hash_hashval_t (region->u.allowed.filter, val);
      break;
    case ERT_THROW:
      val = iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
      break;
    default:
      gcc_unreachable ();
    }
  return val;
}
/* Return true if regions R1 and R2 are equal.  */

static int
eh_regions_equal_p (const void *r1, const void *r2)
{
  return eh_region_replaceable_by_p ((const struct eh_region_d *) r1,
                                     (const struct eh_region_d *) r2);
}
/* Walk all peers of REGION and try to merge those regions
   that are semantically equivalent.  Look into subregions
   recursively too.  */

static bool
merge_peers (struct eh_region_d *region)
{
  struct eh_region_d *r1, *r2, *outer = NULL, *next;
  bool merged = false;
  int num_regions = 0;
  if (region)
    outer = region->outer;
  else
    return false;

  /* First see if there is an inner region equivalent to the region
     in question.  EH control flow is acyclic so we know we
     can merge them.  */
  if (outer)
    for (r1 = region; r1; r1 = next)
      {
        next = r1->next_peer;
        if (r1->type == ERT_CATCH)
          continue;
        if (eh_region_replaceable_by_p (r1->outer, r1))
          {
            replace_region (r1->outer, r1);
            merged = true;
          }
        else
          num_regions++;
      }

  /* Get the new first region and try to match the peers
     for equivalence.  */
  if (outer)
    region = outer->inner;
  else
    region = cfun->eh->region_tree;

  /* If there are only a few regions to inspect, an N^2 loop matching
     each region with each other region will do the job well.  */
  if (num_regions < 10)
    {
      for (r1 = region; r1; r1 = r1->next_peer)
        {
          if (r1->type == ERT_CATCH)
            continue;
          for (r2 = r1->next_peer; r2; r2 = next)
            {
              next = r2->next_peer;
              if (eh_region_replaceable_by_p (r1, r2))
                {
                  replace_region (r1, r2);
                  merged = true;
                }
            }
        }
    }
  /* Or use a hashtable to avoid N^2 behaviour.  */
  else
    {
      htab_t hash;
      hash = htab_create (num_regions, hash_eh_region,
                          eh_regions_equal_p, NULL);
      for (r1 = region; r1; r1 = next)
        {
          void **slot;

          next = r1->next_peer;
          if (r1->type == ERT_CATCH)
            continue;
          slot = htab_find_slot (hash, r1, INSERT);
          if (!*slot)
            *slot = r1;
          else
            replace_region ((struct eh_region_d *) *slot, r1);
        }
      htab_delete (hash);
    }
  for (r1 = region; r1; r1 = r1->next_peer)
    merged |= merge_peers (r1->inner);
  return merged;
}
/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function.
   CONTAINS_STMT is a bitmap of all regions that contain a stmt (or NULL).  */

void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region_d *r;
  VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
  struct eh_region_d *local_must_not_throw = NULL;
  struct eh_region_d *first_must_not_throw = NULL;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;
      if (!TEST_BIT (reachable, i) && !r->resume)
        {
          bool kill_it = true;

          r->tree_label = NULL;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && TEST_BIT (reachable, r->outer->region_number))
                kill_it = false;
              break;
            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but we need them when inlining a function.

                 Keep them if the outer region is not MUST_NOT_THROW as
                 well and if they contain some statement that might
                 unwind through them.  */
              if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
                  && (!contains_stmt
                      || can_be_reached_by_runtime (contains_stmt, r)))
                kill_it = false;
              break;
            case ERT_TRY:
              {
                /* A TRY region is reachable if any of its CATCH regions
                   is reachable.  */
                struct eh_region_d *c;
                for (c = r->u.eh_try.eh_catch; c;
                     c = c->u.eh_catch.next_catch)
                  if (TEST_BIT (reachable, c->region_number))
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            {
              if (dump_file)
                fprintf (dump_file, "Removing unreachable eh region %i\n",
                         r->region_number);
              remove_eh_handler (r);
            }
          else if (r->type == ERT_MUST_NOT_THROW)
            {
              if (!first_must_not_throw)
                first_must_not_throw = r;
              VEC_safe_push (eh_region, heap, must_not_throws, r);
            }
        }
      else
        if (r->type == ERT_MUST_NOT_THROW)
          {
            if (!local_must_not_throw)
              local_must_not_throw = r;
            if (r->outer)
              VEC_safe_push (eh_region, heap, must_not_throws, r);
          }
    }

  /* MUST_NOT_THROW regions without a local handler are all the same;
     they trigger the terminate call in the runtime.
     MUST_NOT_THROW regions handled locally can differ in the debug info
     associated with the std::terminate () call, or in whether they call
     terminate or abort when one is coming from Java and the other from
     C++.

     We merge all MUST_NOT_THROW regions handled by the runtime into one.
     We also bring all local MUST_NOT_THROW regions to the roots of the EH
     tree (since unwinding never continues to the outer region anyway).
     If a MUST_NOT_THROW with a local handler is present in the tree, we
     use that region to merge into, since it will remain in the tree
     anyway; otherwise we use the first MUST_NOT_THROW.

     Merging of locally handled regions needs changes to the CFG.
     Crossjumping should take care of this, by looking at the actual code
     and ensuring that the cleanup actions are really the same.  */

  if (local_must_not_throw)
    first_must_not_throw = local_must_not_throw;

  for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
    {
      if (!r->label && !r->tree_label && r != first_must_not_throw)
        {
          if (dump_file)
            fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
                     r->region_number,
                     first_must_not_throw->region_number);
          remove_eh_handler_and_replace (r, first_must_not_throw, false);
          first_must_not_throw->may_contain_throw |= r->may_contain_throw;
        }
      else
        bring_to_root (r);
    }
  merge_peers (cfun->eh->region_tree);
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
  VEC_free (eh_region, heap, must_not_throws);
}
/* Return an array mapping LABEL_DECL_UID to the region whose tree_label
   is identical to the label.  */

VEC (int, heap) *
label_to_region_map (void)
{
  VEC (int, heap) * label_to_region = NULL;
  int i;
  int idx;

  VEC_safe_grow_cleared (int, heap, label_to_region,
                         cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
          && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
        {
          if ((idx = VEC_index (int, label_to_region,
                                LABEL_DECL_UID (r->tree_label))) != 0)
            r->next_region_sharing_label =
              VEC_index (eh_region, cfun->eh->region_array, idx);
          else
            r->next_region_sharing_label = NULL;
          VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
                       i);
        }
    }
  return label_to_region;
}
/* Return the number of EH regions.  */
int
num_eh_regions (void)
{
  return cfun->eh->last_region_number + 1;
}

/* Return the next region sharing the same label as REGION.  */

int
get_next_region_sharing_label (int region)
{
  struct eh_region_d *r;
  if (!region)
    return 0;
  r = VEC_index (eh_region, cfun->eh->region_array, region);
  if (!r || !r->next_region_sharing_label)
    return 0;
  return r->next_region_sharing_label->region_number;
}
/* Return a bitmap of all labels that are handlers of must-not-throw
   regions.  */

bitmap
must_not_throw_labels (void)
{
  struct eh_region_d *i;
  bitmap labels = BITMAP_ALLOC (NULL);

  i = cfun->eh->region_tree;
  if (! i)
    return labels;

  while (1)
    {
      if (i->type == ERT_MUST_NOT_THROW && i->tree_label
          && LABEL_DECL_UID (i->tree_label) >= 0)
        bitmap_set_bit (labels, LABEL_DECL_UID (i->tree_label));

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return labels;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region_d *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }
}
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (crtl->eh.built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;
    }
}
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
        *min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
        *max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}
/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like
   pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region_d);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
        {
          bitmap_set_bit (n->aka, i + eh_offset);
          VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
        }
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}
/* Look for the first outer region of R (or R itself) that is
   a TRY region.  Return NULL if none.  */

static struct eh_region_d *
find_prev_try (struct eh_region_d * r)
{
  for (; r && r->type != ERT_TRY; r = r->outer)
    if (r->type == ERT_MUST_NOT_THROW
        || (r->type == ERT_ALLOWED_EXCEPTIONS
            && !r->u.allowed.type_list))
      {
        r = NULL;
        break;
      }
  return r;
}
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function and root the tree below OUTER_REGION.  Remap labels using the
   MAP callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh)
    return 0;
#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    {
      min_region = 1;
      max_region = ifun->eh->last_region_number;
    }
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
                         cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
        splice = &outer->inner;
      else
        splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
        for (i = cfun_last_region_number + 1;
             i <= cfun->eh->last_region_number; i++)
          {
            VEC_replace (eh_region, cfun->eh->region_array, i, outer);
            if (outer->aka == NULL)
              outer->aka = BITMAP_GGC_ALLOC ();
            bitmap_set_bit (outer->aka, i);
          }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH that is toplevel in the input function is now
         in the outer EH of the output function.  */
      if (cur == NULL)
        {
          gcc_assert (VEC_index
                      (eh_region, ifun->eh->region_array,
                       i - eh_offset) == NULL);
          if (outer)
            {
              VEC_replace (eh_region, cfun->eh->region_array, i, outer);
              if (outer->aka == NULL)
                outer->aka = BITMAP_GGC_ALLOC ();
              bitmap_set_bit (outer->aka, i);
            }
          continue;
        }
      if (i != cur->region_number)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.eh_try.eh_catch)
            REMAP (cur->u.eh_try.eh_catch);
          if (cur->u.eh_try.last_catch)
            REMAP (cur->u.eh_try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.eh_catch.next_catch)
            REMAP (cur->u.eh_catch.next_catch);
          if (cur->u.eh_catch.prev_catch)
            REMAP (cur->u.eh_catch.prev_catch);
          break;

        default:
          break;
        }

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}
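
/* Editorial note (an illustrative sketch, not from the original sources):
   the single EH_OFFSET returned above defines the whole renumbering.  If
   the source function's copied regions span numbers [min_region,
   max_region] and the destination already has last_region_number == L,
   then eh_offset = L + 1 - min_region, so a source region K simply
   becomes K + eh_offset in the destination.  For example, copying
   regions 3..7 into a function whose last region is 10 yields
   eh_offset = 8, mapping 3..7 onto 11..15.  */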
/* Return a new copy of eh region OLD inside region NEW_OUTER.
   Do not care about updating the tree otherwise.  */

static struct eh_region_d *
copy_eh_region_1 (struct eh_region_d *old, struct eh_region_d *new_outer)
{
  struct eh_region_d *new_eh = gen_eh_region (old->type, new_outer);

  new_eh->u = old->u;
  new_eh->tree_label = old->tree_label;
  new_eh->may_contain_throw = old->may_contain_throw;
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number,
               new_eh);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying region %i to %i\n", old->region_number,
             new_eh->region_number);
  return new_eh;
}
/* Return a new copy of eh region OLD inside region NEW_OUTER.

   Copy the whole catch-try chain if necessary.  */

static struct eh_region_d *
copy_eh_region (struct eh_region_d *old, struct eh_region_d *new_outer)
{
  struct eh_region_d *r, *n, *old_try, *new_try, *ret = NULL;
  VEC(eh_region,heap) *catch_list = NULL;

  if (old->type != ERT_CATCH)
    {
      gcc_assert (old->type != ERT_TRY);
      r = copy_eh_region_1 (old, new_outer);
      return r;
    }

  /* Locate and copy the corresponding TRY.  */
  for (old_try = old->next_peer; old_try->type == ERT_CATCH;
       old_try = old_try->next_peer)
    continue;
  gcc_assert (old_try->type == ERT_TRY);
  new_try = gen_eh_region_try (new_outer);
  new_try->tree_label = old_try->tree_label;
  new_try->may_contain_throw = old_try->may_contain_throw;
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying try-catch regions.  Try: %i to %i\n",
             old_try->region_number, new_try->region_number);
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number,
               new_try);

  /* In order to keep the CATCH list in order, we need to copy in reverse
     order.  */
  for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH;
       r = r->next_peer)
    VEC_safe_push (eh_region, heap, catch_list, r);

  while (VEC_length (eh_region, catch_list))
    {
      r = VEC_pop (eh_region, catch_list);

      /* Duplicate CATCH.  */
      n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
      n->tree_label = r->tree_label;
      n->may_contain_throw = r->may_contain_throw;
      VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                     cfun->eh->last_region_number + 1);
      VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
      n->tree_label = r->tree_label;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Copying try-catch regions.  Catch: %i to %i\n",
                 r->region_number, n->region_number);
      if (r == old)
        ret = n;
    }
  VEC_free (eh_region, heap, catch_list);
  gcc_assert (ret);
  return ret;
}
/* Callback for foreach_reachable_handler that pushes REGION into the
   vector DATA.  */

static void
push_reachable_handler (struct eh_region_d *region, void *data)
{
  VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
  VEC_safe_push (eh_region, heap, *trace, region);
}
/* Redirect EH edge E to NEW_DEST_LABEL.
   IS_RESX, INLINABLE_CALL and REGION_NUMBER match the parameters of
   foreach_reachable_handler.  */

struct eh_region_d *
redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
                           bool inlinable_call, int region_number)
{
  struct eh_region_d *outer;
  struct eh_region_d *region;
  VEC (eh_region, heap) * trace = NULL;
  int i;
  int start_here = -1;
  basic_block old_bb = e->dest;
  struct eh_region_d *old, *r = NULL;
  bool update_inplace = true;
  edge_iterator ei;
  edge e2;

  /* If there is only one EH edge, we don't need to duplicate;
     just update labels in the tree.  */
  FOR_EACH_EDGE (e2, ei, old_bb->preds)
    if ((e2->flags & EDGE_EH) && e2 != e)
      {
        update_inplace = false;
        break;
      }

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  gcc_assert (region);

  foreach_reachable_handler (region_number, is_resx, inlinable_call,
                             push_reachable_handler, &trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Trace: ");
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        fprintf (dump_file, " %i",
                 VEC_index (eh_region, trace, i)->region_number);
      fprintf (dump_file, " inplace: %i\n", update_inplace);
    }

  if (update_inplace)
    {
      /* In the easy route, just walk the trace and update all occurrences
         of the label.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }
        }
      r = region;
    }
  else
    {
      /* Now look for the outermost handler that refers to the basic block
         in question.  We start our duplication there.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            start_here = i;
        }
      gcc_assert (start_here >= 0);
      outer = VEC_index (eh_region, trace, start_here)->outer;

      /* And now do the dirty job!  */
      for (i = start_here; i >= 0; i--)
        {
          old = VEC_index (eh_region, trace, i);
          gcc_assert (!outer || old->outer != outer->outer);

          /* Copy region and update label.  */
          r = copy_eh_region (old, outer);
          VEC_replace (eh_region, trace, i, r);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }

          /* We got into copying a CATCH.  copy_eh_region already did the
             job of copying all catch blocks corresponding to the try.  Now
             we need to update labels in all of them and in the trace.

             We continue nesting into the TRY region corresponding to the
             CATCH: when duplicating an EH tree containing subregions of
             the CATCH, the CATCH region itself is never inserted into the
             trace, so we never get here anyway.  */
          if (r->type == ERT_CATCH)
            {
              /* Walk the other catch regions we copied and update labels
                 as needed.  */
              for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
                if (r->tree_label && label_to_block (r->tree_label) == old_bb)
                  {
                    r->tree_label = new_dest_label;
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Updating label for region %i\n",
                               r->region_number);
                  }
              gcc_assert (r->type == ERT_TRY);

              /* Skip sibling catch regions from the trace.
                 They are already updated.  */
              while (i > 0
                     && VEC_index (eh_region, trace, i - 1)->outer
                        == old->outer)
                {
                  gcc_assert (VEC_index (eh_region, trace, i - 1)->type
                              == ERT_CATCH);
                  i--;
                }
            }

          outer = r;
        }

      if (is_resx || region->type == ERT_THROW)
        r = copy_eh_region (region, outer);
    }

  VEC_free (eh_region, heap, trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "New region: %i\n", r->region_number);
    }
  return r;
}
/* Return the region number of the region that is outer to both REGION_A
   and REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region_d *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
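
/* Editorial note (not from the original sources): the function above is
   a straightforward lowest-common-ancestor search on the outer chain.
   It marks every ancestor of REGION_B in a bitmap, then walks up from
   REGION_A and returns the first marked region; -1 means the two
   regions share no ancestor, i.e. they sit in disjoint subtrees of the
   EH region forest.  */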
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}
void
add_type_for_runtime (tree type)
{
  tree *slot;

  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct GTY(()) ttypes_filter {
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash,
                                              TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
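
/* Editorial note (an illustrative sketch, not from the original sources):
   for a C++ declaration like `void f () throw (A, B)', the non-ARM path
   above appends to ehspec_data the uleb128-encoded ttype filters of A
   and B followed by a terminating 0 byte, e.g.

     ehspec_data: ... | uleb128(filter_A) | uleb128(filter_B) | 0 | ...

   and the returned filter is the negated 1-based byte offset of the
   start of that run.  This keeps exception-specification filters
   (negative) distinguishable from catch ttype filters (positive) in the
   action table.  */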
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.eh_catch.filter_list = NULL_TREE;

          if (r->u.eh_catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.eh_catch.type_list;

              for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.eh_catch.filter_list
                    = tree_cons (NULL_TREE, flt_node,
                                 r->u.eh_catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will
                 need an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.eh_catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
/* Emit SEQ into the basic block just before INSN (that is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
1953 /* Generate the code to actually handle exceptions, which will follow the
1954 landing pads. */
1956 static void
1957 build_post_landing_pads (void)
1959 int i;
1961 for (i = cfun->eh->last_region_number; i > 0; --i)
1963 struct eh_region_d *region;
1964 rtx seq;
1966 region = VEC_index (eh_region, cfun->eh->region_array, i);
1967 /* Mind we don't process a region more than once. */
1968 if (!region || region->region_number != i)
1969 continue;
1971 switch (region->type)
1973 case ERT_TRY:
1974 /* It is possible that TRY region is kept alive only because some of
1975 contained catch region still have RESX instruction but they are
1976 reached via their copies. In this case we need to do nothing. */
1977 if (!region->u.eh_try.eh_catch->label)
1978 break;
1980 /* ??? Collect the set of all non-overlapping catch handlers
1981 all the way up the chain until blocked by a cleanup. */
1982 /* ??? Outer try regions can share landing pads with inner
1983 try regions if the types are completely non-overlapping,
1984 and there are no intervening cleanups. */
1986 region->post_landing_pad = gen_label_rtx ();
1988 start_sequence ();
1990 emit_label (region->post_landing_pad);
1992 /* ??? It is mighty inconvenient to call back into the
1993 switch statement generation code in expand_end_case.
1994 Rapid prototyping sez a sequence of ifs. */
1996 struct eh_region_d *c;
1997 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1999 if (c->u.eh_catch.type_list == NULL)
2000 emit_jump (c->label);
2001 else
2003 /* Need for one cmp/jump per type caught. Each type
2004 list entry has a matching entry in the filter list
2005 (see assign_filter_values). */
2006 tree tp_node = c->u.eh_catch.type_list;
2007 tree flt_node = c->u.eh_catch.filter_list;
2009 for (; tp_node; )
2011 emit_cmp_and_jump_insns
2012 (crtl->eh.filter,
2013 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
2014 EQ, NULL_RTX,
2015 targetm.eh_return_filter_mode (), 0, c->label);
2017 tp_node = TREE_CHAIN (tp_node);
2018 flt_node = TREE_CHAIN (flt_node);
2024 /* We delay the generation of the _Unwind_Resume until we generate
2025 landing pads. We emit a marker here so as to get good control
2026 flow data in the meantime. */
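/* The marker is the RESX jump emitted just below; connect_post_landing_pads
   later replaces it with either a jump to an outer landing pad or a call
   to _Unwind_Resume.  */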
2027 gcc_assert (!region->resume);
2028 region->resume
2029 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2030 emit_barrier ();
2032 seq = get_insns ();
2033 end_sequence ();
2035 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
2037 break;
2039 case ERT_ALLOWED_EXCEPTIONS:
2040 if (!region->label)
2041 break;
2042 region->post_landing_pad = gen_label_rtx ();
2044 start_sequence ();
2046 emit_label (region->post_landing_pad);
2048 emit_cmp_and_jump_insns (crtl->eh.filter,
2049 GEN_INT (region->u.allowed.filter),
2050 EQ, NULL_RTX,
2051 targetm.eh_return_filter_mode (), 0, region->label);
2053 /* We delay the generation of the _Unwind_Resume until we generate
2054 landing pads. We emit a marker here so as to get good control
2055 flow data in the meantime. */
2056 gcc_assert (!region->resume);
2057 region->resume
2058 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2059 emit_barrier ();
2061 seq = get_insns ();
2062 end_sequence ();
2064 emit_to_new_bb_before (seq, region->label);
2065 break;
2067 case ERT_CLEANUP:
2068 case ERT_MUST_NOT_THROW:
2069 region->post_landing_pad = region->label;
2070 break;
2072 case ERT_CATCH:
2073 case ERT_THROW:
2074 /* Nothing to do. */
2075 break;
2077 default:
2078 gcc_unreachable ();
2083 /* Replace RESX patterns with jumps to the next handler if any, or calls to
2084 _Unwind_Resume otherwise. */
2086 static void
2087 connect_post_landing_pads (void)
2089 int i;
2091 for (i = cfun->eh->last_region_number; i > 0; --i)
2093 struct eh_region_d *region;
2094 struct eh_region_d *outer;
2095 rtx seq;
2096 rtx barrier;
2097 rtx resume_list;
2099 region = VEC_index (eh_region, cfun->eh->region_array, i);
2100 /* Make sure we don't process a region more than once. */
2101 if (!region || region->region_number != i)
2102 continue;
2104 /* If there is no RESX, or it has been deleted by flow, there's
2105 nothing to fix up. */
2106 if (! region->resume)
2107 continue;
2109 /* Search for another landing pad in this function. */
2110 for (outer = region->outer; outer ; outer = outer->outer)
2111 if (outer->post_landing_pad)
2112 break;
2114 for (resume_list = region->resume; resume_list;
2115 resume_list = (GET_CODE (resume_list) == INSN_LIST
2116 ? XEXP (resume_list, 1) : NULL_RTX))
2118 rtx resume = (GET_CODE (resume_list) == INSN_LIST
2119 ? XEXP (resume_list, 0) : resume_list);
2120 if (INSN_DELETED_P (resume))
2121 continue;
2122 start_sequence ();
2124 if (outer)
2126 edge e;
2127 basic_block src, dest;
2129 emit_jump (outer->post_landing_pad);
2130 src = BLOCK_FOR_INSN (resume);
2131 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
2132 while (EDGE_COUNT (src->succs) > 0)
2133 remove_edge (EDGE_SUCC (src, 0));
2134 e = make_edge (src, dest, 0);
2135 e->probability = REG_BR_PROB_BASE;
2136 e->count = src->count;
2138 else
2140 emit_library_call (unwind_resume_libfunc, LCT_THROW,
2141 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
2143 /* What we just emitted was a throwing libcall, so it got a
2144 barrier automatically added after it. If the last insn in
2145 the libcall sequence isn't the barrier, it's because the
2146 target emits multiple insns for a call, and there are insns
2147 after the actual call insn (which are redundant and would be
2148 optimized away). The barrier is inserted exactly after the
2149 call insn, so let's go get that and delete the insns after
2150 it, because below we need the barrier to be the last insn in
2151 the sequence. */
2152 delete_insns_since (NEXT_INSN (last_call_insn ()));
2155 seq = get_insns ();
2156 end_sequence ();
2157 barrier = emit_insn_before (seq, resume);
2158 /* Avoid duplicate barrier. */
2159 gcc_assert (BARRIER_P (barrier));
2160 delete_insn (barrier);
2161 delete_insn (resume);
2164 /* ??? From tree-ssa we can wind up with catch regions whose
2165 label is not instantiated, but whose resx is present. Now
2166 that we've dealt with the resx, kill the region. */
2167 if (region->label == NULL && region->type == ERT_CLEANUP)
2168 remove_eh_handler (region);
2173 static void
2174 dw2_build_landing_pads (void)
2176 int i;
2178 for (i = cfun->eh->last_region_number; i > 0; --i)
2180 struct eh_region_d *region;
2181 rtx seq;
2182 basic_block bb;
2183 edge e;
2185 region = VEC_index (eh_region, cfun->eh->region_array, i);
2186 /* Make sure we don't process a region more than once. */
2187 if (!region || region->region_number != i)
2188 continue;
2190 if (region->type != ERT_CLEANUP
2191 && region->type != ERT_TRY
2192 && region->type != ERT_ALLOWED_EXCEPTIONS)
2193 continue;
2195 if (!region->post_landing_pad)
2196 continue;
2198 start_sequence ();
2200 region->landing_pad = gen_label_rtx ();
2201 emit_label (region->landing_pad);
2203 #ifdef HAVE_exception_receiver
2204 if (HAVE_exception_receiver)
2205 emit_insn (gen_exception_receiver ());
2206 else
2207 #endif
2208 #ifdef HAVE_nonlocal_goto_receiver
2209 if (HAVE_nonlocal_goto_receiver)
2210 emit_insn (gen_nonlocal_goto_receiver ());
2211 else
2212 #endif
2213 { /* Nothing */ }
2215 emit_move_insn (crtl->eh.exc_ptr,
2216 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
2217 emit_move_insn (crtl->eh.filter,
2218 gen_rtx_REG (targetm.eh_return_filter_mode (),
2219 EH_RETURN_DATA_REGNO (1)));
2221 seq = get_insns ();
2222 end_sequence ();
2224 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
2225 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2226 e->count = bb->count;
2227 e->probability = REG_BR_PROB_BASE;
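/* Per-region bookkeeping used while building SJLJ landing pads: whether
   the region is directly reachable, the index of its action chain, the
   dispatch value compared against at the common landing pad, and the
   call-site index stored before potentially-throwing instructions.  */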
2232 struct sjlj_lp_info
2234 int directly_reachable;
2235 int action_index;
2236 int dispatch_index;
2237 int call_site_index;
2240 static bool
2241 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
2243 rtx insn;
2244 bool found_one = false;
2246 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2248 struct eh_region_d *region;
2249 enum reachable_code rc;
2250 tree type_thrown;
2251 rtx note;
2253 if (! INSN_P (insn))
2254 continue;
2256 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2257 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2258 continue;
2260 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2261 if (!region)
2262 continue;
2264 type_thrown = NULL_TREE;
2265 if (region->type == ERT_THROW)
2267 type_thrown = region->u.eh_throw.type;
2268 region = region->outer;
2271 /* Find the first containing region that might handle the exception.
2272 That's the landing pad to which we will transfer control. */
2273 rc = RNL_NOT_CAUGHT;
2274 for (; region; region = region->outer)
2276 rc = reachable_next_level (region, type_thrown, NULL, false);
2277 if (rc != RNL_NOT_CAUGHT)
2278 break;
2280 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2282 lp_info[region->region_number].directly_reachable = 1;
2283 found_one = true;
2287 return found_one;
2290 static void
2291 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2293 htab_t ar_hash;
2294 int i, index;
2296 /* First task: build the action table. */
2298 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
2299 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2301 for (i = cfun->eh->last_region_number; i > 0; --i)
2302 if (lp_info[i].directly_reachable)
2304 struct eh_region_d *r =
2305 VEC_index (eh_region, cfun->eh->region_array, i);
2307 r->landing_pad = dispatch_label;
2308 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2309 if (lp_info[i].action_index != -1)
2310 crtl->uses_eh_lsda = 1;
2313 htab_delete (ar_hash);
2315 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2316 landing pad label for the region. For sjlj though, there is one
2317 common landing pad from which we dispatch to the post-landing pads.
2319 A region receives a dispatch index if it is directly reachable
2320 and requires in-function processing. Regions that share post-landing
2321 pads may share dispatch indices. */
2322 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2323 (see build_post_landing_pads) so we don't bother checking for it. */
2325 index = 0;
2326 for (i = cfun->eh->last_region_number; i > 0; --i)
2327 if (lp_info[i].directly_reachable)
2328 lp_info[i].dispatch_index = index++;
2330 /* Finally: assign call-site values. In dwarf2 terms, this would be
2331 the region number assigned by convert_to_eh_region_ranges, but
2332 handles no-action and must-not-throw differently. */
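/* For illustration (hypothetical values): a must-not-throw region is
   recorded as call-site 0, a no-action region as -1, and regions with
   real actions receive 1, 2, ... from add_call_site, since
   call_site_base is 1 here.  */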
2334 call_site_base = 1;
2335 for (i = cfun->eh->last_region_number; i > 0; --i)
2336 if (lp_info[i].directly_reachable)
2338 int action = lp_info[i].action_index;
2340 /* Map must-not-throw to otherwise unused call-site index 0. */
2341 if (action == -2)
2342 index = 0;
2343 /* Map no-action to otherwise unused call-site index -1. */
2344 else if (action == -1)
2345 index = -1;
2346 /* Otherwise, look it up in the table. */
2347 else
2348 index = add_call_site (GEN_INT (lp_info[i].dispatch_index),
2349 action, 0);
2351 lp_info[i].call_site_index = index;
2355 static void
2356 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2358 int last_call_site = -2;
2359 rtx insn, mem;
2361 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2363 struct eh_region_d *region;
2364 int this_call_site;
2365 rtx note, before, p;
2367 /* Reset value tracking at extended basic block boundaries. */
2368 if (LABEL_P (insn))
2369 last_call_site = -2;
2371 if (! INSN_P (insn))
2372 continue;
2374 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2376 /* Calls that are known to not throw need not be marked. */
2377 if (note && INTVAL (XEXP (note, 0)) <= 0)
2378 continue;
2380 if (note)
2381 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2382 else
2383 region = NULL;
2385 if (!region)
2387 /* Calls (and trapping insns) without notes are outside any
2388 exception handling region in this function. Mark them as
2389 no action. */
2390 if (CALL_P (insn)
2391 || (flag_non_call_exceptions
2392 && may_trap_p (PATTERN (insn))))
2393 this_call_site = -1;
2394 else
2395 continue;
2397 else
2398 this_call_site = lp_info[region->region_number].call_site_index;
2400 if (this_call_site == last_call_site)
2401 continue;
2403 /* Don't separate a call from its argument loads. */
2404 before = insn;
2405 if (CALL_P (insn))
2406 before = find_first_parameter_load (insn, NULL_RTX);
2408 start_sequence ();
2409 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2410 sjlj_fc_call_site_ofs);
2411 emit_move_insn (mem, GEN_INT (this_call_site));
2412 p = get_insns ();
2413 end_sequence ();
2415 emit_insn_before (p, before);
2416 last_call_site = this_call_site;
2420 /* Construct the SjLj_Function_Context. */
2422 static void
2423 sjlj_emit_function_enter (rtx dispatch_label)
2425 rtx fn_begin, fc, mem, seq;
2426 bool fn_begin_outside_block;
2428 fc = crtl->eh.sjlj_fc;
2430 start_sequence ();
2432 /* We're storing this libcall's address into memory instead of
2433 calling it directly. Thus, we must call assemble_external_libcall
2434 here, as we cannot depend on emit_library_call to do it for us. */
2435 assemble_external_libcall (eh_personality_libfunc);
2436 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2437 emit_move_insn (mem, eh_personality_libfunc);
2439 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2440 if (crtl->uses_eh_lsda)
2442 char buf[20];
2443 rtx sym;
2445 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2446 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2447 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2448 emit_move_insn (mem, sym);
2450 else
2451 emit_move_insn (mem, const0_rtx);
2453 #ifdef DONT_USE_BUILTIN_SETJMP
2455 rtx x;
2456 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2457 TYPE_MODE (integer_type_node), 1,
2458 plus_constant (XEXP (fc, 0),
2459 sjlj_fc_jbuf_ofs), Pmode);
2461 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2462 TYPE_MODE (integer_type_node), 0, dispatch_label);
2463 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2465 #else
2466 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2467 dispatch_label);
2468 #endif
2470 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2471 1, XEXP (fc, 0), Pmode);
2473 seq = get_insns ();
2474 end_sequence ();
2476 /* ??? Instead of doing this at the beginning of the function,
2477 do this in a block that is at loop level 0 and dominates all
2478 can_throw_internal instructions. */
2480 fn_begin_outside_block = true;
2481 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2482 if (NOTE_P (fn_begin))
2484 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2485 break;
2486 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2487 fn_begin_outside_block = false;
2490 if (fn_begin_outside_block)
2491 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2492 else
2493 emit_insn_after (seq, fn_begin);
2496 /* Call back from expand_function_end to know where we should put
2497 the call to unwind_sjlj_unregister_libfunc if needed. */
2499 void
2500 sjlj_emit_function_exit_after (rtx after)
2502 crtl->eh.sjlj_exit_after = after;
2505 static void
2506 sjlj_emit_function_exit (void)
2508 rtx seq, insn;
2510 start_sequence ();
2512 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2513 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2515 seq = get_insns ();
2516 end_sequence ();
2518 /* ??? Really this can be done in any block at loop level 0 that
2519 post-dominates all can_throw_internal instructions. This is
2520 the last possible moment. */
2522 insn = crtl->eh.sjlj_exit_after;
2523 if (LABEL_P (insn))
2524 insn = NEXT_INSN (insn);
2526 emit_insn_after (seq, insn);
2529 static void
2530 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2532 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2533 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2534 int i, first_reachable;
2535 rtx mem, dispatch, seq, fc;
2536 rtx before;
2537 basic_block bb;
2538 edge e;
2540 fc = crtl->eh.sjlj_fc;
2542 start_sequence ();
2544 emit_label (dispatch_label);
2546 #ifndef DONT_USE_BUILTIN_SETJMP
2547 expand_builtin_setjmp_receiver (dispatch_label);
2548 #endif
2550 /* Load up dispatch index, exc_ptr and filter values from the
2551 function context. */
2552 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2553 sjlj_fc_call_site_ofs);
2554 dispatch = copy_to_reg (mem);
2556 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2557 if (unwind_word_mode != ptr_mode)
2559 #ifdef POINTERS_EXTEND_UNSIGNED
2560 mem = convert_memory_address (ptr_mode, mem);
2561 #else
2562 mem = convert_to_mode (ptr_mode, mem, 0);
2563 #endif
2565 emit_move_insn (crtl->eh.exc_ptr, mem);
2567 mem = adjust_address (fc, unwind_word_mode,
2568 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2569 if (unwind_word_mode != filter_mode)
2570 mem = convert_to_mode (filter_mode, mem, 0);
2571 emit_move_insn (crtl->eh.filter, mem);
2573 /* Jump to one of the directly reachable regions. */
2574 /* ??? This really ought to be using a switch statement. */
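/* Note that the first directly reachable region found below gets no
   compare: its post-landing pad immediately follows this dispatch
   sequence, so it is reached by falling through.  */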
2576 first_reachable = 0;
2577 for (i = cfun->eh->last_region_number; i > 0; --i)
2579 if (! lp_info[i].directly_reachable)
2580 continue;
2582 if (! first_reachable)
2584 first_reachable = i;
2585 continue;
2588 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2589 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2590 (((struct eh_region_d *)
2591 VEC_index (eh_region,
2592 cfun->eh->region_array, i))
2593 ->post_landing_pad));
2596 seq = get_insns ();
2597 end_sequence ();
2599 before = (((struct eh_region_d *)
2600 VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2601 ->post_landing_pad);
2603 bb = emit_to_new_bb_before (seq, before);
2604 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2605 e->count = bb->count;
2606 e->probability = REG_BR_PROB_BASE;
2609 static void
2610 sjlj_build_landing_pads (void)
2612 struct sjlj_lp_info *lp_info;
2614 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2616 if (sjlj_find_directly_reachable_regions (lp_info))
2618 rtx dispatch_label = gen_label_rtx ();
2619 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2620 TYPE_MODE (sjlj_fc_type_node),
2621 TYPE_ALIGN (sjlj_fc_type_node));
2622 crtl->eh.sjlj_fc
2623 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2624 int_size_in_bytes (sjlj_fc_type_node),
2625 align);
2627 sjlj_assign_call_site_values (dispatch_label, lp_info);
2628 sjlj_mark_call_sites (lp_info);
2630 sjlj_emit_function_enter (dispatch_label);
2631 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2632 sjlj_emit_function_exit ();
2635 free (lp_info);
2638 /* After initial rtl generation, call back to finish generating
2639 exception support code. */
2641 static void
2642 finish_eh_generation (void)
2644 basic_block bb;
2646 /* Nothing to do if no regions created. */
2647 if (cfun->eh->region_tree == NULL)
2648 return;
2650 /* The object here is to provide detailed information (via
2651 reachable_handlers) on how exception control flows within the
2652 function for the CFG construction. In this first pass, we can
2653 include type information garnered from ERT_THROW and
2654 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2655 in deleting unreachable handlers. Subsequently, we will generate
2656 landing pads which will connect many of the handlers, and then
2657 type information will not be effective. Still, this is a win
2658 over previous implementations. */
2660 /* These registers are used by the landing pads. Make sure they
2661 have been generated. */
2662 get_exception_pointer ();
2663 get_exception_filter ();
2665 /* Construct the landing pads. */
2667 assign_filter_values ();
2668 build_post_landing_pads ();
2669 connect_post_landing_pads ();
2670 if (USING_SJLJ_EXCEPTIONS)
2671 sjlj_build_landing_pads ();
2672 else
2673 dw2_build_landing_pads ();
2675 crtl->eh.built_landing_pads = 1;
2677 /* We've totally changed the CFG. Start over. */
2678 find_exception_handler_labels ();
2679 break_superblocks ();
2680 if (USING_SJLJ_EXCEPTIONS
2681 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2682 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2683 commit_edge_insertions ();
2684 FOR_EACH_BB (bb)
2686 edge e;
2687 edge_iterator ei;
2688 bool eh = false;
2689 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2691 if (e->flags & EDGE_EH)
2693 remove_edge (e);
2694 eh = true;
2696 else
2697 ei_next (&ei);
2699 if (eh)
2700 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2704 /* This section handles removing dead code for flow. */
2706 /* Splice REGION out of the region tree and replace it by REPLACE.
2707 When UPDATE_CATCH_TRY is true, also update the links from catch
2708 regions to their containing try region. */
2710 static void
2711 remove_eh_handler_and_replace (struct eh_region_d *region,
2712 struct eh_region_d *replace,
2713 bool update_catch_try)
2715 struct eh_region_d **pp, **pp_start, *p, *outer, *inner;
2716 rtx lab;
2718 outer = region->outer;
2720 /* For the benefit of efficiently handling REG_EH_REGION notes,
2721 replace this region in the region array with its containing
2722 region. Note that previous region deletions may result in
2723 multiple copies of this region in the array, so we have a
2724 list of alternate numbers by which we are known. */
2726 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2727 replace);
2728 if (region->aka)
2730 unsigned i;
2731 bitmap_iterator bi;
2733 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2735 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2739 if (replace)
2741 if (!replace->aka)
2742 replace->aka = BITMAP_GGC_ALLOC ();
2743 if (region->aka)
2744 bitmap_ior_into (replace->aka, region->aka);
2745 bitmap_set_bit (replace->aka, region->region_number);
2748 if (crtl->eh.built_landing_pads)
2749 lab = region->landing_pad;
2750 else
2751 lab = region->label;
2752 if (outer)
2753 pp_start = &outer->inner;
2754 else
2755 pp_start = &cfun->eh->region_tree;
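/* First, unlink REGION from the list of its sibling regions.  */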
2756 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2757 continue;
2758 *pp = region->next_peer;
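/* Then splice REGION's children into REPLACE's list of children (or
   into the root list), updating their outer pointers.  */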
2760 if (replace)
2761 pp_start = &replace->inner;
2762 else
2763 pp_start = &cfun->eh->region_tree;
2764 inner = region->inner;
2765 if (inner)
2767 for (p = inner; p->next_peer ; p = p->next_peer)
2768 p->outer = replace;
2769 p->outer = replace;
2771 p->next_peer = *pp_start;
2772 *pp_start = inner;
2775 if (region->type == ERT_CATCH
2776 && update_catch_try)
2778 struct eh_region_d *eh_try, *next, *prev;
2780 for (eh_try = region->next_peer;
2781 eh_try->type == ERT_CATCH;
2782 eh_try = eh_try->next_peer)
2783 continue;
2784 gcc_assert (eh_try->type == ERT_TRY);
2786 next = region->u.eh_catch.next_catch;
2787 prev = region->u.eh_catch.prev_catch;
2789 if (next)
2790 next->u.eh_catch.prev_catch = prev;
2791 else
2792 eh_try->u.eh_try.last_catch = prev;
2793 if (prev)
2794 prev->u.eh_catch.next_catch = next;
2795 else
2797 eh_try->u.eh_try.eh_catch = next;
2798 if (! next)
2799 remove_eh_handler (eh_try);
2804 /* Splice REGION out of the region tree and replace it by its outer
2805 region. */
2807 static void
2808 remove_eh_handler (struct eh_region_d *region)
2810 remove_eh_handler_and_replace (region, region->outer, true);
2813 /* Remove EH region R that has turned out to have no code in its handler. */
2815 void
2816 remove_eh_region (int r)
2818 struct eh_region_d *region;
2820 region = VEC_index (eh_region, cfun->eh->region_array, r);
2821 remove_eh_handler (region);
2824 /* Remove EH region R that has turned out to have no code in its handler,
2825 and replace it by the outer region of R2. */
2827 void
2828 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2830 struct eh_region_d *region, *region2;
2832 region = VEC_index (eh_region, cfun->eh->region_array, r);
2833 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2834 remove_eh_handler_and_replace (region, region2->outer, true);
2837 /* Invokes CALLBACK for every exception handler label. Only used by old
2838 loop hackery; should not be used by new code. */
2840 void
2841 for_each_eh_label (void (*callback) (rtx))
2843 int i;
2844 for (i = 0; i < cfun->eh->last_region_number; i++)
2846 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
2847 if (r && r->region_number == i && r->label
2848 && LABEL_P (r->label))
2849 (*callback) (r->label);
2853 /* Invoke CALLBACK for every exception region in the current function. */
2855 void
2856 for_each_eh_region (void (*callback) (struct eh_region_d *))
2858 int i, n = cfun->eh->last_region_number;
2859 for (i = 1; i <= n; ++i)
2861 struct eh_region_d *region;
2863 region = VEC_index (eh_region, cfun->eh->region_array, i);
2864 if (region)
2865 (*callback) (region);
2869 /* This section describes CFG exception edges for flow. */
2871 /* For communicating between calls to reachable_next_level. */
2872 struct reachable_info
2874 tree types_caught;
2875 tree types_allowed;
2876 void (*callback) (struct eh_region_d *, void *);
2877 void *callback_data;
2880 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2881 base class of TYPE, is in HANDLED. */
2883 static int
2884 check_handled (tree handled, tree type)
2886 tree t;
2888 /* We can check for exact matches without front-end help. */
2889 if (! lang_eh_type_covers)
2891 for (t = handled; t ; t = TREE_CHAIN (t))
2893 tree t1 = TREE_VALUE (t);
2894 tree t2 = type;
2896 /* If the types have been converted to runtime types (i.e.,
2897 when the IL is being read from disk in an LTO
2898 compilation), then T1 and T2 will be pointers to the
2899 runtime type of the form '(void *) &<runtime_type>' (See
2900 cp/except.c:build_eh_type_type). Strip the conversion
2901 and the address. */
2902 if (CONVERT_EXPR_P (t1))
2904 STRIP_NOPS (t1);
2905 gcc_assert (TREE_CODE (t1) == ADDR_EXPR);
2906 t1 = TREE_OPERAND (t1, 0);
2909 if (CONVERT_EXPR_P (t2))
2911 STRIP_NOPS (t2);
2912 gcc_assert (TREE_CODE (t2) == ADDR_EXPR);
2913 t2 = TREE_OPERAND (t2, 0);
2916 if (t1 == t2)
2917 return 1;
2920 else
2922 for (t = handled; t ; t = TREE_CHAIN (t))
2923 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2924 return 1;
2927 return 0;
2930 /* A subroutine of reachable_next_level. If we are collecting a list
2931 of handlers, add one. After landing pad generation, reference
2932 it instead of the handlers themselves. Further, the handlers are
2933 all wired together, so by referencing one, we've got them all.
2934 Before landing pad generation we reference each handler individually.
2936 LP_REGION contains the landing pad; REGION is the handler. */
2938 static void
2939 add_reachable_handler (struct reachable_info *info,
2940 struct eh_region_d *lp_region,
2941 struct eh_region_d *region)
2943 if (! info)
2944 return;
2946 if (crtl->eh.built_landing_pads)
2947 info->callback (lp_region, info->callback_data);
2948 else
2949 info->callback (region, info->callback_data);
2952 /* Process one level of exception regions for reachability.
2953 If TYPE_THROWN is non-null, then it is the *exact* type being
2954 propagated. If INFO is non-null, then collect handler labels
2955 and caught/allowed type information between invocations. */
2957 static enum reachable_code
2958 reachable_next_level (struct eh_region_d *region, tree type_thrown,
2959 struct reachable_info *info,
2960 bool maybe_resx)
2962 switch (region->type)
2964 case ERT_CLEANUP:
2965 /* Before landing-pad generation, we model control flow
2966 directly to the individual handlers. In this way we can
2967 see that catch handler types may shadow one another. */
2968 add_reachable_handler (info, region, region);
2969 return RNL_MAYBE_CAUGHT;
2971 case ERT_TRY:
2973 struct eh_region_d *c;
2974 enum reachable_code ret = RNL_NOT_CAUGHT;
2976 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2978 /* A catch-all handler ends the search. */
2979 if (c->u.eh_catch.type_list == NULL)
2981 add_reachable_handler (info, region, c);
2982 return RNL_CAUGHT;
2985 if (type_thrown)
2987 /* If we have at least one type match, end the search. */
2988 tree tp_node = c->u.eh_catch.type_list;
2990 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2992 tree type = TREE_VALUE (tp_node);
2994 if (type == type_thrown
2995 || (lang_eh_type_covers
2996 && (*lang_eh_type_covers) (type, type_thrown)))
2998 add_reachable_handler (info, region, c);
2999 return RNL_CAUGHT;
3003 /* If we have definitive information of a match failure,
3004 the catch won't trigger. */
3005 if (lang_eh_type_covers)
3006 return RNL_NOT_CAUGHT;
3009 /* At this point, we either don't know what type is thrown or
3010 don't have front-end assistance to help decide whether it is
3011 covered by one of the types in the list for this region.
3013 We'd then like to add this region to the list of reachable
3014 handlers since it is indeed potentially reachable based on the
3015 information we have.
3017 Actually, this handler is for sure not reachable if all the
3018 types it matches have already been caught. That is, it is only
3019 potentially reachable if at least one of the types it catches
3020 has not been previously caught. */
3022 if (! info)
3023 ret = RNL_MAYBE_CAUGHT;
3024 else
3026 tree tp_node = c->u.eh_catch.type_list;
3027 bool maybe_reachable = false;
3029 /* Compute the potential reachability of this handler and
3030 update the list of types caught at the same time. */
3031 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
3033 tree type = TREE_VALUE (tp_node);
3035 if (! check_handled (info->types_caught, type))
3037 info->types_caught
3038 = tree_cons (NULL, type, info->types_caught);
3040 maybe_reachable = true;
3044 if (maybe_reachable)
3046 add_reachable_handler (info, region, c);
3048 /* ??? If the catch type is a base class of every allowed
3049 type, then we know we can stop the search. */
3050 ret = RNL_MAYBE_CAUGHT;
3055 return ret;
3058 case ERT_ALLOWED_EXCEPTIONS:
3059 /* An empty list of types definitely ends the search. */
3060 if (region->u.allowed.type_list == NULL_TREE)
3062 add_reachable_handler (info, region, region);
3063 return RNL_CAUGHT;
3066 /* Collect a list of lists of allowed types for use in detecting
3067 when a catch may be transformed into a catch-all. */
3068 if (info)
3069 info->types_allowed = tree_cons (NULL_TREE,
3070 region->u.allowed.type_list,
3071 info->types_allowed);
3073 /* If we have definitive information about the type hierarchy,
3074 then we can tell if the thrown type will pass through the
3075 filter. */
3076 if (type_thrown && lang_eh_type_covers)
3078 if (check_handled (region->u.allowed.type_list, type_thrown))
3079 return RNL_NOT_CAUGHT;
3080 else
3082 add_reachable_handler (info, region, region);
3083 return RNL_CAUGHT;
3087 add_reachable_handler (info, region, region);
3088 return RNL_MAYBE_CAUGHT;
3090 case ERT_CATCH:
3091 /* Catch regions are handled by their controlling try region. */
3092 return RNL_NOT_CAUGHT;
3094 case ERT_MUST_NOT_THROW:
3095 /* Here we end our search, since no exceptions may propagate.
3097 Local landing pads of ERT_MUST_NOT_THROW regions are reachable
3098 only via locally handled RESX instructions.
3100 When we inline a function call, we can bring in new handlers. In order
3101 to keep ERT_MUST_NOT_THROW landing pads from being deleted as unreachable,
3102 assume that such handlers exist for any inlinable call until the
3103 inlining decisions are fixed. */
3105 if (maybe_resx)
3107 add_reachable_handler (info, region, region);
3108 return RNL_CAUGHT;
3110 else
3111 return RNL_BLOCKED;
3113 case ERT_THROW:
3114 case ERT_UNKNOWN:
3115 /* Shouldn't see these here. */
3116 gcc_unreachable ();
3117 break;
3118 default:
3119 gcc_unreachable ();
3123 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
3125 void
3126 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3127 void (*callback) (struct eh_region_d *, void *),
3128 void *callback_data)
3130 struct reachable_info info;
3131 struct eh_region_d *region;
3132 tree type_thrown;
3134 memset (&info, 0, sizeof (info));
3135 info.callback = callback;
3136 info.callback_data = callback_data;
3138 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3139 if (!region)
3140 return;
3142 type_thrown = NULL_TREE;
3143 if (is_resx)
3145 /* A RESX leaves a region instead of entering it. Thus the
3146 region itself may have been deleted out from under us. */
3147 if (region == NULL)
3148 return;
3149 region = region->outer;
3151 else if (region->type == ERT_THROW)
3153 type_thrown = region->u.eh_throw.type;
3154 region = region->outer;
3157 while (region)
3159 if (reachable_next_level (region, type_thrown, &info,
3160 inlinable_call || is_resx) >= RNL_CAUGHT)
3161 break;
3162 /* If we have processed one cleanup, there is no point in
3163 processing any more of them. Each cleanup will have an edge
3164 to the next outer cleanup region, so the flow graph will be
3165 accurate. */
3166 if (region->type == ERT_CLEANUP)
3168 enum reachable_code code = RNL_NOT_CAUGHT;
3169 region = find_prev_try (region->outer);
3170 /* Continue looking for an outer TRY region until we find one
3171 that might catch something. */
3172 while (region
3173 && (code = reachable_next_level (region, type_thrown, &info,
3174 inlinable_call || is_resx))
3175 == RNL_NOT_CAUGHT)
3176 region = find_prev_try (region->outer);
3177 if (code >= RNL_CAUGHT)
3178 break;
3180 if (region)
3181 region = region->outer;
3185 /* Retrieve a list of labels of exception handlers which can be
3186 reached by a given insn. */
3188 static void
3189 arh_to_landing_pad (struct eh_region_d *region, void *data)
3191 rtx *p_handlers = (rtx *) data;
3192 if (! *p_handlers)
3193 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3196 static void
3197 arh_to_label (struct eh_region_d *region, void *data)
3199 rtx *p_handlers = (rtx *) data;
3200 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3203 rtx
3204 reachable_handlers (rtx insn)
3206 bool is_resx = false;
3207 rtx handlers = NULL;
3208 int region_number;
3210 if (JUMP_P (insn)
3211 && GET_CODE (PATTERN (insn)) == RESX)
3213 region_number = XINT (PATTERN (insn), 0);
3214 is_resx = true;
3216 else
3218 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3219 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3220 return NULL;
3221 region_number = INTVAL (XEXP (note, 0));
3224 foreach_reachable_handler (region_number, is_resx, false,
3225 (crtl->eh.built_landing_pads
3226 ? arh_to_landing_pad
3227 : arh_to_label),
3228 &handlers);
3230 return handlers;
3233 /* Determine if the given INSN can throw an exception that is caught
3234 within the function. */
3236 bool
3237 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3239 struct eh_region_d *region;
3240 tree type_thrown;
3242 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3243 if (!region)
3244 return false;
3246 type_thrown = NULL_TREE;
3247 if (is_resx)
3248 region = region->outer;
3249 else if (region->type == ERT_THROW)
3251 type_thrown = region->u.eh_throw.type;
3252 region = region->outer;
3255 /* If this exception is ignored by each and every containing region,
3256 then control passes straight out. The runtime may handle some
3257 regions, which also do not require processing internally. */
3258 for (; region; region = region->outer)
3260 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3261 inlinable_call || is_resx);
3262 if (how == RNL_BLOCKED)
3263 return false;
3264 if (how != RNL_NOT_CAUGHT)
3265 return true;
3268 return false;
3271 bool
3272 can_throw_internal (const_rtx insn)
3274 rtx note;
3276 if (! INSN_P (insn))
3277 return false;
3279 if (JUMP_P (insn)
3280 && GET_CODE (PATTERN (insn)) == RESX
3281 && XINT (PATTERN (insn), 0) > 0)
3282 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
3284 if (NONJUMP_INSN_P (insn)
3285 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3286 insn = XVECEXP (PATTERN (insn), 0, 0);
3288 /* Every insn that might throw has an EH_REGION note. */
3289 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3290 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3291 return false;
3293 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3296 /* Determine if the given INSN can throw an exception that is
3297 visible outside the function. */
3299 bool
3300 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3302 struct eh_region_d *region;
3303 tree type_thrown;
3305 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3306 if (!region)
3307 return true;
3309 type_thrown = NULL_TREE;
3310 if (is_resx)
3311 region = region->outer;
3312 else if (region->type == ERT_THROW)
3314 type_thrown = region->u.eh_throw.type;
3315 region = region->outer;
3318 /* If the exception is caught or blocked by any containing region,
3319 then it is not seen by any calling function. */
3320 for (; region ; region = region->outer)
3321 if (reachable_next_level (region, type_thrown, NULL,
3322 inlinable_call || is_resx) >= RNL_CAUGHT)
3323 return false;
3325 return true;
3328 bool
3329 can_throw_external (const_rtx insn)
3331 rtx note;
3333 if (! INSN_P (insn))
3334 return false;
3336 if (JUMP_P (insn)
3337 && GET_CODE (PATTERN (insn)) == RESX
3338 && XINT (PATTERN (insn), 0) > 0)
3339 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
3341 if (NONJUMP_INSN_P (insn)
3342 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3344 rtx seq = PATTERN (insn);
3345 int i, n = XVECLEN (seq, 0);
3347 for (i = 0; i < n; i++)
3348 if (can_throw_external (XVECEXP (seq, 0, i)))
3349 return true;
3351 return false;
3354 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3355 if (!note)
3357 /* Calls (and trapping insns) without notes are outside any
3358 exception handling region in this function. We have to
3359 assume it might throw. Given that the front end and middle
3360 ends mark known NOTHROW functions, this isn't so wildly
3361 inaccurate. */
3362 return (CALL_P (insn)
3363 || (flag_non_call_exceptions
3364 && may_trap_p (PATTERN (insn))));
3366 if (INTVAL (XEXP (note, 0)) <= 0)
3367 return false;
3369 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3372 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
3374 unsigned int
3375 set_nothrow_function_flags (void)
3377 rtx insn;
3379 crtl->nothrow = 1;
3381 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3382 something that can throw an exception. We specifically exempt
3383 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3384 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3385 is optimistic. */
3387 crtl->all_throwers_are_sibcalls = 1;
3389 /* If we don't know that this implementation of the function will
3390 actually be used, then we must not set TREE_NOTHROW, since
3391 callers must not assume that this function does not throw. */
3392 if (TREE_NOTHROW (current_function_decl))
3393 return 0;
3395 if (! flag_exceptions)
3396 return 0;
3398 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3399 if (can_throw_external (insn))
3401 crtl->nothrow = 0;
3403 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3405 crtl->all_throwers_are_sibcalls = 0;
3406 return 0;
3410 for (insn = crtl->epilogue_delay_list; insn;
3411 insn = XEXP (insn, 1))
3412 if (can_throw_external (insn))
3414 crtl->nothrow = 0;
3416 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3418 crtl->all_throwers_are_sibcalls = 0;
3419 return 0;
3422 if (crtl->nothrow
3423 && (cgraph_function_body_availability (cgraph_node
3424 (current_function_decl))
3425 >= AVAIL_AVAILABLE))
3427 struct cgraph_node *node = cgraph_node (current_function_decl);
3428 struct cgraph_edge *e;
3429 for (e = node->callers; e; e = e->next_caller)
3430 e->can_throw_external = false;
3431 TREE_NOTHROW (current_function_decl) = 1;
3433 if (dump_file)
3434 fprintf (dump_file, "Marking function nothrow: %s\n\n",
3435 current_function_name ());
3437 return 0;
3440 struct rtl_opt_pass pass_set_nothrow_function_flags =
3443 RTL_PASS,
3444 "nothrow", /* name */
3445 NULL, /* gate */
3446 set_nothrow_function_flags, /* execute */
3447 NULL, /* sub */
3448 NULL, /* next */
3449 0, /* static_pass_number */
3450 TV_NONE, /* tv_id */
3451 0, /* properties_required */
3452 0, /* properties_provided */
3453 0, /* properties_destroyed */
3454 0, /* todo_flags_start */
3455 TODO_dump_func, /* todo_flags_finish */
3460 /* Various hooks for unwind library. */
3462 /* Do any necessary initialization to access arbitrary stack frames.
3463 On the SPARC, this means flushing the register windows. */
3465 void
3466 expand_builtin_unwind_init (void)
3468 /* Set this so all the registers get saved in our frame; we need to be
3469 able to copy the saved values for any registers from frames we unwind. */
3470 crtl->saves_all_registers = 1;
3472 #ifdef SETUP_FRAME_ADDRESSES
3473 SETUP_FRAME_ADDRESSES ();
3474 #endif
3477 rtx
3478 expand_builtin_eh_return_data_regno (tree exp)
3480 tree which = CALL_EXPR_ARG (exp, 0);
3481 unsigned HOST_WIDE_INT iwhich;
3483 if (TREE_CODE (which) != INTEGER_CST)
3485 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3486 return constm1_rtx;
3489 iwhich = tree_low_cst (which, 1);
3490 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3491 if (iwhich == INVALID_REGNUM)
3492 return constm1_rtx;
3494 #ifdef DWARF_FRAME_REGNUM
3495 iwhich = DWARF_FRAME_REGNUM (iwhich);
3496 #else
3497 iwhich = DBX_REGISTER_NUMBER (iwhich);
3498 #endif
3500 return GEN_INT (iwhich);
3503 /* Given a value extracted from the return address register or stack slot,
3504 return the actual address encoded in that value. */
3506 rtx
3507 expand_builtin_extract_return_addr (tree addr_tree)
3509 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3511 if (GET_MODE (addr) != Pmode
3512 && GET_MODE (addr) != VOIDmode)
3514 #ifdef POINTERS_EXTEND_UNSIGNED
3515 addr = convert_memory_address (Pmode, addr);
3516 #else
3517 addr = convert_to_mode (Pmode, addr, 0);
3518 #endif
3521 /* First mask out any unwanted bits. */
3522 #ifdef MASK_RETURN_ADDR
3523 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3524 #endif
3526 /* Then adjust to find the real return address. */
3527 #if defined (RETURN_ADDR_OFFSET)
3528 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3529 #endif
3531 return addr;
3534 /* Given an actual address in addr_tree, do any necessary encoding
3535 and return the value to be stored in the return address register or
3536 stack slot so the epilogue will return to that address. */
3538 rtx
3539 expand_builtin_frob_return_addr (tree addr_tree)
3541 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3543 addr = convert_memory_address (Pmode, addr);
3545 #ifdef RETURN_ADDR_OFFSET
3546 addr = force_reg (Pmode, addr);
3547 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3548 #endif
3550 return addr;
3553 /* Set up the epilogue with the magic bits we'll need to return to the
3554 exception handler. */
3556 void
3557 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3558 tree handler_tree)
3560 rtx tmp;
3562 #ifdef EH_RETURN_STACKADJ_RTX
3563 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3564 VOIDmode, EXPAND_NORMAL);
3565 tmp = convert_memory_address (Pmode, tmp);
3566 if (!crtl->eh.ehr_stackadj)
3567 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3568 else if (tmp != crtl->eh.ehr_stackadj)
3569 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3570 #endif
3572 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3573 VOIDmode, EXPAND_NORMAL);
3574 tmp = convert_memory_address (Pmode, tmp);
3575 if (!crtl->eh.ehr_handler)
3576 crtl->eh.ehr_handler = copy_to_reg (tmp);
3577 else if (tmp != crtl->eh.ehr_handler)
3578 emit_move_insn (crtl->eh.ehr_handler, tmp);
3580 if (!crtl->eh.ehr_label)
3581 crtl->eh.ehr_label = gen_label_rtx ();
3582 emit_jump (crtl->eh.ehr_label);
3585 void
3586 expand_eh_return (void)
3588 rtx around_label;
3590 if (! crtl->eh.ehr_label)
3591 return;
3593 crtl->calls_eh_return = 1;
3595 #ifdef EH_RETURN_STACKADJ_RTX
3596 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3597 #endif
3599 around_label = gen_label_rtx ();
3600 emit_jump (around_label);
3602 emit_label (crtl->eh.ehr_label);
3603 clobber_return_register ();
3605 #ifdef EH_RETURN_STACKADJ_RTX
3606 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3607 #endif
3609 #ifdef HAVE_eh_return
3610 if (HAVE_eh_return)
3611 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3612 else
3613 #endif
3615 #ifdef EH_RETURN_HANDLER_RTX
3616 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3617 #else
3618 error ("__builtin_eh_return not supported on this target");
3619 #endif
3622 emit_label (around_label);
3625 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3626 POINTERS_EXTEND_UNSIGNED and return it. */
3628 rtx
3629 expand_builtin_extend_pointer (tree addr_tree)
3631 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3632 int extend;
3634 #ifdef POINTERS_EXTEND_UNSIGNED
3635 extend = POINTERS_EXTEND_UNSIGNED;
3636 #else
3637 /* The previous EH code did an unsigned extend by default, so we do this also
3638 for consistency. */
3639 extend = 1;
3640 #endif
3642 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3645 /* In the following functions, we represent entries in the action table
3646 as 1-based indices. Special cases are:
3648 0: null action record, non-null landing pad; implies cleanups
3649 -1: null action record, null landing pad; implies no action
3650 -2: no call-site entry; implies must_not_throw
3651 -3: we have yet to process outer regions
3653 Further, no special cases apply to the "next" field of the record.
3654 For next, 0 means end of list. */
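/* A purely illustrative example: a catch with filter 1 whose only
   outer region is a cleanup produces two records, a cleanup record
   (filter 0, next 0) followed by a catch record (filter 1) whose next
   field links back to the cleanup record.  */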
3656 struct action_record
3658 int offset;
3659 int filter;
3660 int next;
3663 static int
3664 action_record_eq (const void *pentry, const void *pdata)
3666 const struct action_record *entry = (const struct action_record *) pentry;
3667 const struct action_record *data = (const struct action_record *) pdata;
3668 return entry->filter == data->filter && entry->next == data->next;
3671 static hashval_t
3672 action_record_hash (const void *pentry)
3674 const struct action_record *entry = (const struct action_record *) pentry;
3675 return entry->next * 1009 + entry->filter;
3678 static int
3679 add_action_record (htab_t ar_hash, int filter, int next)
3681 struct action_record **slot, *new_ar, tmp;
3683 tmp.filter = filter;
3684 tmp.next = next;
3685 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3687 if ((new_ar = *slot) == NULL)
3689 new_ar = XNEW (struct action_record);
3690 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3691 new_ar->filter = filter;
3692 new_ar->next = next;
3693 *slot = new_ar;
3695 /* The filter value goes in untouched. The link to the next
3696 record is a "self-relative" byte offset, or zero to indicate
3697 that there is no next record. So convert the absolute 1-based
3698 indices we've been carrying around into a displacement. */
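/* E.g. with hypothetical offsets: if this record starts at 1-based
   offset 3 and NEXT is 1, then after pushing the filter byte the
   active size is 3 and we store 1 - (3 + 1) = -3, so the stored link
   plus its own position yields the target offset.  */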
3700 push_sleb128 (&crtl->eh.action_record_data, filter);
3701 if (next)
3702 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3703 push_sleb128 (&crtl->eh.action_record_data, next);
3706 return new_ar->offset;
3709 static int
3710 collect_one_action_chain (htab_t ar_hash, struct eh_region_d *region)
3712 struct eh_region_d *c;
3713 int next;
3715 /* If we've reached the top of the region chain, then we have
3716 no actions, and require no landing pad. */
3717 if (region == NULL)
3718 return -1;
3720 switch (region->type)
3722 case ERT_CLEANUP:
3723 /* A cleanup adds a zero filter to the beginning of the chain, but
3724 there are special cases to look out for. If there are *only*
3725 cleanups along a path, then it compresses to a zero action.
3726 Further, if there are multiple cleanups along a path, we only
3727 need to represent one of them, as that is enough to trigger
3728 entry to the landing pad at runtime. */
3729 next = collect_one_action_chain (ar_hash, region->outer);
3730 if (next <= 0)
3731 return 0;
3732 for (c = region->outer; c ; c = c->outer)
3733 if (c->type == ERT_CLEANUP)
3734 return next;
3735 return add_action_record (ar_hash, 0, next);
3737 case ERT_TRY:
3738 /* Process the associated catch regions in reverse order.
3739 If there's a catch-all handler, then we don't need to
3740 search outer regions. Use a magic -3 value to record
3741 that we haven't done the outer search. */
3742 next = -3;
3743 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3745 if (c->u.eh_catch.type_list == NULL)
3747 /* Retrieve the filter from the head of the filter list
3748 where we have stored it (see assign_filter_values). */
3749 int filter
3750 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3752 next = add_action_record (ar_hash, filter, 0);
3754 else
3756 /* Once the outer search is done, trigger an action record for
3757 each filter we have. */
3758 tree flt_node;
3760 if (next == -3)
3762 next = collect_one_action_chain (ar_hash, region->outer);
3764 /* If there is no next action, terminate the chain. */
3765 if (next == -1)
3766 next = 0;
3767 /* If all outer actions are cleanups or must_not_throw,
3768 we'll have no action record for it, since we had wanted
3769 to encode these states in the call-site record directly.
3770 Add a cleanup action to the chain to catch these. */
3771 else if (next <= 0)
3772 next = add_action_record (ar_hash, 0, 0);
3775 flt_node = c->u.eh_catch.filter_list;
3776 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3778 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3779 next = add_action_record (ar_hash, filter, next);
3783 return next;
3785 case ERT_ALLOWED_EXCEPTIONS:
3786 /* An exception specification adds its filter to the
3787 beginning of the chain. */
3788 next = collect_one_action_chain (ar_hash, region->outer);
3790 /* If there is no next action, terminate the chain. */
3791 if (next == -1)
3792 next = 0;
3793 /* If all outer actions are cleanups or must_not_throw,
3794 we'll have no action record for it, since we had wanted
3795 to encode these states in the call-site record directly.
3796 Add a cleanup action to the chain to catch these. */
3797 else if (next <= 0)
3798 next = add_action_record (ar_hash, 0, 0);
3800 return add_action_record (ar_hash, region->u.allowed.filter, next);
3802 case ERT_MUST_NOT_THROW:
3803 /* A must-not-throw region with no inner handlers or cleanups
3804 requires no call-site entry. Note that this differs from
3805 the no handler or cleanup case in that we do require an lsda
3806 to be generated. Return a magic -2 value to record this. */
3807 return -2;
3809 case ERT_CATCH:
3810 case ERT_THROW:
3811 /* CATCH regions are handled in TRY above. THROW regions are
3812 for optimization information only and produce no output. */
3813 return collect_one_action_chain (ar_hash, region->outer);
3815 default:
3816 gcc_unreachable ();
3820 static int
3821 add_call_site (rtx landing_pad, int action, int section)
3823 call_site_record record;
3825 record = GGC_NEW (struct call_site_record_d);
3826 record->landing_pad = landing_pad;
3827 record->action = action;
3829 VEC_safe_push (call_site_record, gc,
3830 crtl->eh.call_site_record[section], record);
3832 return call_site_base + VEC_length (call_site_record,
3833 crtl->eh.call_site_record[section]) - 1;
3836 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3837 The new note numbers will not refer to region numbers, but
3838 instead to call site entries. */
3840 unsigned int
3841 convert_to_eh_region_ranges (void)
3843 rtx insn, iter, note;
3844 htab_t ar_hash;
3845 int last_action = -3;
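/* LAST_ACTION uses the same encoding as the action table above;
   -3 here additionally means that no call-site region is currently
   open.  */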
3846 rtx last_action_insn = NULL_RTX;
3847 rtx last_landing_pad = NULL_RTX;
3848 rtx first_no_action_insn = NULL_RTX;
3849 int call_site = 0;
3850 int cur_sec = 0;
3851 rtx section_switch_note = NULL_RTX;
3852 rtx first_no_action_insn_before_switch = NULL_RTX;
3853 rtx last_no_action_insn_before_switch = NULL_RTX;
3854 rtx *pad_map = NULL;
3855 sbitmap pad_loc = NULL;
3856 int min_labelno = 0, max_labelno = 0;
3857 int saved_call_site_base = call_site_base;
3859 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3860 return 0;
3862 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3864 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3866 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3867 if (INSN_P (iter))
3869 struct eh_region_d *region;
3870 int this_action;
3871 rtx this_landing_pad;
3873 insn = iter;
3874 if (NONJUMP_INSN_P (insn)
3875 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3876 insn = XVECEXP (PATTERN (insn), 0, 0);
3878 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3879 if (!note)
3881 if (! (CALL_P (insn)
3882 || (flag_non_call_exceptions
3883 && may_trap_p (PATTERN (insn)))))
3884 continue;
3885 this_action = -1;
3886 region = NULL;
3888 else
3890 if (INTVAL (XEXP (note, 0)) <= 0)
3891 continue;
3892 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3893 this_action = collect_one_action_chain (ar_hash, region);
3896 /* Existence of catch handlers, or must-not-throw regions
3897 implies that an lsda is needed (even if empty). */
3898 if (this_action != -1)
3899 crtl->uses_eh_lsda = 1;
3901 /* Delay creation of region notes for no-action regions
3902 until we're sure that an lsda will be required. */
3903 else if (last_action == -3)
3905 first_no_action_insn = iter;
3906 last_action = -1;
3909 /* Cleanups and handlers may share action chains but not
3910 landing pads. Collect the landing pad for this region. */
3911 if (this_action >= 0)
3913 struct eh_region_d *o;
3914 for (o = region; ! o->landing_pad ; o = o->outer)
3915 continue;
3916 this_landing_pad = o->landing_pad;
3918 else
3919 this_landing_pad = NULL_RTX;
3921 /* Differing actions or landing pads implies a change in call-site
3922 info, which implies some EH_REGION note should be emitted. */
3923 if (last_action != this_action
3924 || last_landing_pad != this_landing_pad)
3926 /* If we'd not seen a previous action (-3) or the previous
3927 action was must-not-throw (-2), then we do not need an
3928 end note. */
3929 if (last_action >= -1)
3931 /* If we delayed the creation of the begin, do it now. */
3932 if (first_no_action_insn_before_switch)
3934 call_site = add_call_site (NULL_RTX, 0, 0);
3935 note
3936 = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3937 first_no_action_insn_before_switch);
3938 NOTE_EH_HANDLER (note) = call_site;
3939 if (first_no_action_insn)
3941 note
3942 = emit_note_after (NOTE_INSN_EH_REGION_END,
3943 last_no_action_insn_before_switch);
3944 NOTE_EH_HANDLER (note) = call_site;
3946 else
3947 gcc_assert (last_action_insn
3948 == last_no_action_insn_before_switch);
3950 if (first_no_action_insn)
3952 call_site = add_call_site (NULL_RTX, 0, cur_sec);
3953 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3954 first_no_action_insn);
3955 NOTE_EH_HANDLER (note) = call_site;
3956 first_no_action_insn = NULL_RTX;
3959 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3960 last_action_insn);
3961 NOTE_EH_HANDLER (note) = call_site;
3964 /* If the new action is must-not-throw, then no region notes
3965 are created. */
3966 if (this_action >= -1)
3968 call_site = add_call_site (this_landing_pad,
3969 this_action < 0 ? 0 : this_action,
3970 cur_sec);
3971 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3972 NOTE_EH_HANDLER (note) = call_site;
3975 last_action = this_action;
3976 last_landing_pad = this_landing_pad;
3978 last_action_insn = iter;
3980 else if (NOTE_P (iter)
3981 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
3983 gcc_assert (section_switch_note == NULL_RTX);
3984 gcc_assert (flag_reorder_blocks_and_partition);
3985 section_switch_note = iter;
3986 if (first_no_action_insn)
3988 first_no_action_insn_before_switch = first_no_action_insn;
3989 last_no_action_insn_before_switch = last_action_insn;
3990 first_no_action_insn = NULL_RTX;
3991 gcc_assert (last_action == -1);
3992 last_action = -3;
3994 /* Force closing of current EH region before section switch and
3995 opening a new one afterwards. */
3996 else if (last_action != -3)
3997 last_landing_pad = pc_rtx;
3998 call_site_base += VEC_length (call_site_record,
3999 crtl->eh.call_site_record[cur_sec]);
4000 cur_sec++;
4001 gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL);
4002 crtl->eh.call_site_record[cur_sec]
4003 = VEC_alloc (call_site_record, gc, 10);
4004 max_labelno = max_label_num ();
4005 min_labelno = get_first_label_num ();
4006 pad_map = XCNEWVEC (rtx, max_labelno - min_labelno + 1);
4007 pad_loc = sbitmap_alloc (max_labelno - min_labelno + 1);
4009 else if (LABEL_P (iter) && pad_map)
4010 SET_BIT (pad_loc, CODE_LABEL_NUMBER (iter) - min_labelno);
4012 if (last_action >= -1 && ! first_no_action_insn)
4014 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
4015 NOTE_EH_HANDLER (note) = call_site;
4018 call_site_base = saved_call_site_base;
4020 if (pad_map)
4022 /* When doing hot/cold partitioning, ensure landing pads are
4023 always in the same section as the EH region, since
4024 .gcc_except_table can't express it otherwise. */
      for (cur_sec = 0; cur_sec < 2; cur_sec++)
        {
          int i, idx;
          int n = VEC_length (call_site_record,
                              crtl->eh.call_site_record[cur_sec]);
          basic_block prev_bb = NULL, padbb;

          for (i = 0; i < n; ++i)
            {
              struct call_site_record_d *cs =
                VEC_index (call_site_record,
                           crtl->eh.call_site_record[cur_sec], i);
              rtx jump, note;

              if (cs->landing_pad == NULL_RTX)
                continue;
              idx = CODE_LABEL_NUMBER (cs->landing_pad) - min_labelno;
              /* If the landing pad is in the correct section, nothing
                 is needed.  */
              if (TEST_BIT (pad_loc, idx) ^ (cur_sec == 0))
                continue;
              /* Otherwise, if we haven't seen this pad yet, we need to
                 add a new label and jump to the correct section.  */
              if (pad_map[idx] == NULL_RTX)
                {
                  pad_map[idx] = gen_label_rtx ();
                  if (prev_bb == NULL)
                    for (iter = section_switch_note;
                         iter; iter = PREV_INSN (iter))
                      if (NOTE_INSN_BASIC_BLOCK_P (iter))
                        {
                          prev_bb = NOTE_BASIC_BLOCK (iter);
                          break;
                        }
                  if (cur_sec == 0)
                    {
                      note = emit_label_before (pad_map[idx],
                                                section_switch_note);
                      jump = emit_jump_insn_before (gen_jump (cs->landing_pad),
                                                    section_switch_note);
                    }
                  else
                    {
                      jump = emit_jump_insn_after (gen_jump (cs->landing_pad),
                                                   section_switch_note);
                      note = emit_label_after (pad_map[idx],
                                               section_switch_note);
                    }
                  JUMP_LABEL (jump) = cs->landing_pad;
                  add_reg_note (jump, REG_CROSSING_JUMP, NULL_RTX);
                  iter = NEXT_INSN (cs->landing_pad);
                  if (iter && NOTE_INSN_BASIC_BLOCK_P (iter))
                    padbb = NOTE_BASIC_BLOCK (iter);
                  else
                    padbb = NULL;
                  if (padbb && prev_bb
                      && BB_PARTITION (padbb) != BB_UNPARTITIONED)
                    {
                      basic_block bb;
                      int part
                        = BB_PARTITION (padbb) == BB_COLD_PARTITION
                          ? BB_HOT_PARTITION : BB_COLD_PARTITION;
                      edge_iterator ei;
                      edge e;

                      bb = create_basic_block (note, jump, prev_bb);
                      make_single_succ_edge (bb, padbb, EDGE_CROSSING);
                      BB_SET_PARTITION (bb, part);
                      for (ei = ei_start (padbb->preds);
                           (e = ei_safe_edge (ei)); )
                        {
                          if ((e->flags & (EDGE_EH|EDGE_CROSSING))
                              == (EDGE_EH|EDGE_CROSSING))
                            {
                              redirect_edge_succ (e, bb);
                              e->flags &= ~EDGE_CROSSING;
                            }
                          else
                            ei_next (&ei);
                        }
                      if (cur_sec == 0)
                        prev_bb = bb;
                    }
                }
              cs->landing_pad = pad_map[idx];
            }
        }

      sbitmap_free (pad_loc);
      XDELETEVEC (pad_map);
    }

  htab_delete (ar_hash);
  return 0;
}

struct rtl_opt_pass pass_convert_to_eh_region_ranges =
{
 {
  RTL_PASS,
  "eh_ranges",                          /* name */
  NULL,                                 /* gate */
  convert_to_eh_region_ranges,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};

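/* Push the unsigned LEB128 encoding of VALUE onto DATA_AREA.  LEB128
   stores 7 data bits per byte, least significant group first, with the
   high bit of each byte set on all but the last byte.  For example,
   624485 (0x98765) encodes as the three bytes 0xe5 0x8e 0x26.  */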
static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

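/* Push the signed LEB128 encoding of VALUE onto DATA_AREA.  Encoding
   stops once the remaining value is all sign bits and bit 6 of the
   last byte already carries the sign; e.g. -624485 encodes as the
   three bytes 0x9b 0xf1 0x59.  */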
static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}


#ifndef HAVE_AS_LEB128
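/* Return the size in bytes of the dw2 call-site table for SECTION:
   three 4-byte fields (region start, region length, landing pad) per
   record, plus each record's uleb128-encoded action index.  */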
static int
dw2_size_of_call_site_table (int section)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
        VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

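/* Likewise for the SjLj call-site table, where the landing pad field
   is a dispatch index rather than a label and both fields are
   uleb128-encoded.  */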
static int
sjlj_size_of_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
        VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif

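/* Output the dw2 call-site table for SECTION: for each record, the
   region start and length, the landing pad as an offset from the same
   base (0 if none), and the action index, using CS_FORMAT for the
   offsets.  */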
static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
  int i;
  const char *begin;

  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
        VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
        {
          dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
                                        "region %d start", i);
          dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                        "length");
          if (cs->landing_pad)
            dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
                                          "landing pad");
          else
            dw2_asm_output_data_uleb128 (0, "landing pad");
        }
      else
        {
          dw2_asm_output_delta (4, reg_start_lab, begin,
                                "region %d start", i);
          dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
          if (cs->landing_pad)
            dw2_asm_output_delta (4, landing_pad_lab, begin,
                                  "landing pad");
          else
            dw2_asm_output_data (4, 0, "landing pad");
        }
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

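/* Likewise for SjLj exceptions, where each record is just a dispatch
   index and an action index, both uleb128-encoded.  */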
static void
sjlj_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
        VEC_index (call_site_record, crtl->eh.call_site_record[0], i);

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

#ifndef TARGET_UNWIND_INFO
/* Switch to the section that should be used for exception tables.  */

static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
         unless it depends on the function name.  */
      if (targetm.have_named_sections)
        {
          int flags;

          if (EH_TABLES_CAN_BE_READ_ONLY)
            {
              int tt_format =
                ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
              flags = ((! flag_pic
                        || ((tt_format & 0x70) != DW_EH_PE_absptr
                            && (tt_format & 0x70) != DW_EH_PE_aligned))
                       ? 0 : SECTION_WRITE);
            }
          else
            flags = SECTION_WRITE;

#ifdef HAVE_LD_EH_GC_SECTIONS
          if (flag_function_sections)
            {
              char *section_name = XNEWVEC (char, strlen (fnname) + 32);
              sprintf (section_name, ".gcc_except_table.%s", fnname);
              s = get_section (section_name, flags, NULL);
              free (section_name);
            }
          else
#endif
            exception_section
              = s = get_section (".gcc_except_table", flags, NULL);
        }
      else
        exception_section
          = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
#endif

/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct varpool_node *node;

      type = lookup_type_for_runtime (type);
      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
         paths below go through assemble_integer, which would take
         care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            {
              node = varpool_node (type);
              if (node)
                varpool_mark_needed_node (node);
              is_public = TREE_PUBLIC (type);
            }
        }
      else
        gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
                      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}

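/* Output the LSDA (language-specific data area) for one section of the
   current function.  The layout emitted below is: a header giving the
   encodings of the landing pad base, the @TType table and the
   call-site offsets; the call-site table; the action record table;
   and finally the type table, aligned to its entry size.  */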
static void
output_one_function_exception_table (const char * ARG_UNUSED (fnname),
                                     int section)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  switch_to_exception_section (fnname);
#endif

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.except_table_label (asm_out_file);

  have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label,
                                   section ? "LLSDATTC" : "LLSDATT",
                                   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
                                  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table (section);
#endif

  /* A uleb128 displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
                                   section ? "LLSDATTDC" : "LLSDATTD",
                                   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
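      /* The @TType base offset is itself stored as a uleb128, and its
         encoded size changes the padding needed to align the type table
         that follows the action records -- which in turn can change the
         offset.  The loop below iterates until the value reaches a
         fixed point; a uleb128 grows only when the value crosses a
         7-bit boundary, so this converges quickly.  */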
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
                    + (VEC_length (tree, crtl->eh.ttype_data)
                       * tt_format_size));

      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
                               section ? "LLSDACSBC" : "LLSDACSB",
                               current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
                               section ? "LLSDACSEC" : "LLSDACSE",
                               current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VEC_length (tree, crtl->eh.ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, crtl->eh.ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
        {
          tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
          output_ttype (type, tt_format, tt_format_size);
        }
      else
        dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
                             (i ? NULL : "Exception specification table"));
    }
}

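/* Output the exception table(s) for the current function: one for the
   hot section and, if the function was partitioned, a second one for
   the cold section.  */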
void
output_function_exception_table (const char * ARG_UNUSED (fnname))
{
  /* Not all functions need anything.  */
  if (! crtl->uses_eh_lsda)
    return;

  if (eh_personality_libfunc)
    assemble_external_libcall (eh_personality_libfunc);

  output_one_function_exception_table (fnname, 0);
  if (crtl->eh.call_site_record[1] != NULL)
    output_one_function_exception_table (fnname, 1);

  switch_to_section (current_function_section ());
}

void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}

/* Dump EH information to OUT.  */

void
dump_eh_tree (FILE * out, struct function *fun)
{
  struct eh_region_d *i;
  int depth = 0;
  static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
                                           "allowed_exceptions", "must_not_throw",
                                           "throw"
                                         };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
               i->region_number, type_name[(int) i->type]);
      if (i->tree_label)
        {
          fprintf (out, " tree_label:");
          print_generic_expr (out, i->tree_label, 0);
        }
      if (i->label)
        fprintf (out, " label:%i", INSN_UID (i->label));
      if (i->landing_pad)
        {
          fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
          if (NOTE_P (i->landing_pad))
            fprintf (out, " (deleted)");
        }
      if (i->post_landing_pad)
        {
          fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
          if (NOTE_P (i->post_landing_pad))
            fprintf (out, " (deleted)");
        }
      if (i->resume)
        {
          rtx resume_list = i->resume;
          fprintf (out, " resume:");
          /* The resume field holds either a single insn or an INSN_LIST
             of them; print whichever form is present.  */
          while (GET_CODE (resume_list) == INSN_LIST)
            {
              fprintf (out, "%i,", INSN_UID (XEXP (resume_list, 0)));
              if (NOTE_P (XEXP (resume_list, 0)))
                fprintf (out, " (deleted)");
              resume_list = XEXP (resume_list, 1);
            }
          if (resume_list)
            {
              fprintf (out, "%i", INSN_UID (resume_list));
              if (NOTE_P (resume_list))
                fprintf (out, " (deleted)");
            }
        }
      if (i->may_contain_throw)
        fprintf (out, " may_contain_throw");
      switch (i->type)
        {
        case ERT_CLEANUP:
          break;

        case ERT_TRY:
          {
            struct eh_region_d *c;
            fprintf (out, " catch regions:");
            for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
              fprintf (out, " %i", c->region_number);
          }
          break;

        case ERT_CATCH:
          if (i->u.eh_catch.prev_catch)
            fprintf (out, " prev: %i",
                     i->u.eh_catch.prev_catch->region_number);
          if (i->u.eh_catch.next_catch)
            fprintf (out, " next: %i",
                     i->u.eh_catch.next_catch->region_number);
          fprintf (out, " type:");
          print_generic_expr (out, i->u.eh_catch.type_list, 0);
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          fprintf (out, " filter:%i types:", i->u.allowed.filter);
          print_generic_expr (out, i->u.allowed.type_list, 0);
          break;

        case ERT_THROW:
          fprintf (out, " type:");
          print_generic_expr (out, i->u.eh_throw.type, 0);
          break;

        case ERT_MUST_NOT_THROW:
          break;

        case ERT_UNKNOWN:
          break;
        }
      if (i->aka)
        {
          fprintf (out, " also known as:");
          dump_bitmap (out, i->aka);
        }
      else
        fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              depth--;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Dump the EH tree for FN on stderr.  */

void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}

/* Verify EH region invariants.  */
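/* Recursively check REGION and its subregions; problems are reported
   through error (), and true is returned if any were found.  */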
static bool
verify_eh_region (struct eh_region_d *region)
{
  bool found = false;
  if (!region)
    return false;
  switch (region->type)
    {
    case ERT_TRY:
      {
        struct eh_region_d *c, *prev = NULL;
        if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
          {
            error ("Try region %i has wrong eh_catch pointer to %i",
                   region->region_number,
                   region->u.eh_try.eh_catch->region_number);
            found = true;
          }
        for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
          {
            if (c->outer != region->outer)
              {
                error
                  ("Catch region %i has different outer region than try region %i",
                   c->region_number, region->region_number);
                found = true;
              }
            if (c->u.eh_catch.prev_catch != prev)
              {
                error ("Catch region %i has corrupted catchlist",
                       c->region_number);
                found = true;
              }
            prev = c;
          }
        if (prev != region->u.eh_try.last_catch)
          {
            error
              ("Try region %i has wrong last_catch pointer to %i instead of %i",
               region->region_number,
               region->u.eh_try.last_catch->region_number,
               prev->region_number);
            found = true;
          }
      }
      break;
    case ERT_CATCH:
      if (!region->u.eh_catch.prev_catch
          && (!region->next_peer || region->next_peer->type != ERT_TRY))
        {
          error ("Catch region %i should be followed by try", region->region_number);
          found = true;
        }
      break;
    case ERT_CLEANUP:
    case ERT_ALLOWED_EXCEPTIONS:
    case ERT_MUST_NOT_THROW:
    case ERT_THROW:
      break;
    case ERT_UNKNOWN:
      gcc_unreachable ();
    }
  for (region = region->inner; region; region = region->next_peer)
    found |= verify_eh_region (region);
  return found;
}

/* Verify invariants on EH datastructures.  */

void
verify_eh_tree (struct function *fun)
{
  struct eh_region_d *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  if (!fun->eh->region_tree)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
      {
        if (i->region_number == j)
          count++;
        if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
          {
            error ("region_array is corrupted for region %i",
                   i->region_number);
            err = true;
          }
      }
  i = fun->eh->region_tree;

  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
        {
          error ("region_array is corrupted for region %i", i->region_number);
          err = true;
        }
      if (i->outer != outer)
        {
          error ("outer block of region %i is wrong", i->region_number);
          err = true;
        }
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
        {
          error
            ("region %i may contain throw and is contained in region that may not",
             i->region_number);
          err = true;
        }
      if (depth < 0)
        {
          error ("negative nesting depth of region %i", i->region_number);
          err = true;
        }
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
        outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              depth--;
              if (i == NULL)
                {
                  if (depth != -1)
                    {
                      error ("tree list ends on depth %i", depth + 1);
                      err = true;
                    }
                  if (count != nvisited)
                    {
                      error ("array does not match the region tree");
                      err = true;
                    }
                  if (!err)
                    for (i = fun->eh->region_tree; i; i = i->next_peer)
                      err |= verify_eh_region (i);

                  if (err)
                    {
                      dump_eh_tree (stderr, fun);
                      internal_error ("verify_eh_tree failed");
                    }
                  return;
                }
              outer = i->outer;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default C++ routines aren't actually C++ specific, so use those.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
                      : "_Unwind_Resume");
}

static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static unsigned int
rest_of_handle_eh (void)
{
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}

struct rtl_opt_pass pass_rtl_eh =
{
 {
  RTL_PASS,
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

#include "gt-except.h"