/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
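/* As a rough illustration of the control transfer involved (a sketch
   only; the machinery below is far more involved), the setjmp/longjmp
   based scheme implemented in this file behaves much like the classic
   C idiom:

       #include <setjmp.h>

       static jmp_buf handler;        // stands in for a landing pad

       static void callee (void)
       {
         longjmp (handler, 1);        // "throw": unwind to the caller
       }

       static int caller (void)
       {
         if (setjmp (handler) != 0)   // "catch": control re-enters here
           return -1;
         callee ();
         return 0;
       }

   The dwarf2 scheme achieves the same transfer of control from unwind
   tables instead of an explicit setjmp at every frame.  */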
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number.  */

struct GTY(()) ehl_map_entry {
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
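/* For reference, the runtime structure those offsets index into looks
   roughly like the sketch below (see unwind-sjlj.c for the
   authoritative definition; the field types here are approximations
   matching what init_eh builds):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;  // chain of active frames
         int call_site;                       // current call-site index
         _Unwind_Word data[4];                // exception data "registers"
         void *personality;                   // personality routine
         void *lsda;                          // language-specific data area
         void *jbuf[];                        // jump buffer (size varies)
       };  */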
struct GTY(()) call_site_record
{
  rtx landing_pad;
  int action;
};

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);
static void remove_eh_handler (struct eh_region *);
static void remove_eh_handler_and_replace (struct eh_region *,
                                           struct eh_region *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *, bool);
static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
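/* A short aside on the encoding push_uleb128/push_sleb128 emit: LEB128
   stores an integer seven bits per byte, least significant group
   first, with the high bit of each byte set on all but the last byte.
   A minimal sketch of the unsigned form, using a hypothetical helper
   writing into a plain buffer (the in-tree routines append to a varray
   instead):

       static void
       uleb128_sketch (unsigned char *buf, unsigned int value)
       {
         do
           {
             unsigned char byte = value & 0x7f;  // low seven bits
             value >>= 7;
             if (value)
               byte |= 0x80;                     // continuation flag
             *buf++ = byte;
           }
         while (value);
       }

   So 624485 encodes as the three bytes 0xe5, 0x8e, 0x26.  */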
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

static int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
         jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
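/* A worked example of the offsets cached above, purely illustrative:
   on a hypothetical ILP32 target where __prev is a 4-byte pointer,
   __call_site a 4-byte int and the unwind word 4 bytes, layout_type
   would place __call_site at byte offset 4 and __data at byte offset
   8, so sjlj_fc_call_site_ofs == 4 and sjlj_fc_data_ofs == 8.  Actual
   values depend entirely on the target's type sizes and alignment
   rules.  */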
void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure we always end up with a type list, to normalize further
     processing; then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

tree
get_eh_region_no_tree_label (int region)
{
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
                                     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the
   runtime can unwind through it, or we will eliminate the terminate
   call the runtime would do otherwise.  Return TRUE if R contains
   throwing statements or some of the exceptions in inner regions can
   be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statements.

   The function looks O(n^3) at first sight.  In fact it is called at
   most once for every MUST_NOT_THROW in the EH tree from
   remove_unreachable_regions, and because the outer loop walking
   subregions does not dive into MUST_NOT_THROW regions, that loop
   examines every region at most once.  The inner loop unwinds from the
   throwing statement the same way as we do during CFG construction, so
   the whole thing is O(n^2) in the size of the EH tree, but O(n) in
   the size of the CFG.  In practice EH trees are wide, not deep, so
   this is not a problem.  */
static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
{
  struct eh_region *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
        return true;
  if (!i)
    return false;
  while (1)
    {
      /* It is pointless to look into MUST_NOT_THROW
         or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
        {
          bool found = TEST_BIT (contains_stmt, i->region_number);
          if (!found)
            EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
              if (TEST_BIT (contains_stmt, n))
                {
                  found = true;
                  break;
                }
          /* We have a nested region that contains a throwing statement.
             See if resuming might lead up to the resx, or whether we get
             locally caught sooner.  If we get locally caught sooner, we
             either know region R is not reachable or it would have a
             direct edge from the EH resx and thus we would have
             considered the region reachable in the first place.  */
          if (found)
            {
              struct eh_region *i1 = i;
              tree type_thrown = NULL_TREE;

              if (i1->type == ERT_THROW)
                {
                  type_thrown = i1->u.eh_throw.type;
                  i1 = i1->outer;
                }
              for (; i1 != r; i1 = i1->outer)
                if (reachable_next_level (i1, type_thrown, NULL,
                                          false) >= RNL_CAUGHT)
                  break;
              if (i1 == r)
                return true;
            }
        }
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == r)
                return false;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
/* Bring region R to the root of the tree.  */

static void
bring_to_root (struct eh_region *r)
{
  struct eh_region **pp;
  struct eh_region *outer = r->outer;
  if (!r->outer)
    return;
  for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
    continue;
  *pp = r->next_peer;
  r->outer = NULL;
  r->next_peer = cfun->eh->region_tree;
  cfun->eh->region_tree = r;
}
/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function;
   CONTAINS_STMT is a bitmap of all regions that contain a statement
   (or NULL).  */

void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region *r;
  VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
  struct eh_region *local_must_not_throw = NULL;
  struct eh_region *first_must_not_throw = NULL;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;
      if (!TEST_BIT (reachable, i) && !r->resume)
        {
          bool kill_it = true;

          r->tree_label = NULL;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && TEST_BIT (reachable, r->outer->region_number))
                kill_it = false;
              break;
            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but we need them when inlining a function.

                 Keep them if the outer region is not MUST_NOT_THROW as
                 well and if they contain some statement that might
                 unwind through them.  */
              if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
                  && (!contains_stmt
                      || can_be_reached_by_runtime (contains_stmt, r)))
                kill_it = false;
              break;
            case ERT_TRY:
              {
                /* TRY regions are reachable if any of their CATCH regions
                   are reachable.  */
                struct eh_region *c;
                for (c = r->u.eh_try.eh_catch; c;
                     c = c->u.eh_catch.next_catch)
                  if (TEST_BIT (reachable, c->region_number))
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            {
              if (dump_file)
                fprintf (dump_file, "Removing unreachable eh region %i\n",
                         r->region_number);
              remove_eh_handler (r);
            }
          else if (r->type == ERT_MUST_NOT_THROW)
            {
              if (!first_must_not_throw)
                first_must_not_throw = r;
              VEC_safe_push (eh_region, heap, must_not_throws, r);
            }
        }
      else
        if (r->type == ERT_MUST_NOT_THROW)
          {
            if (!local_must_not_throw)
              local_must_not_throw = r;
            if (r->outer)
              VEC_safe_push (eh_region, heap, must_not_throws, r);
          }
    }

  /* MUST_NOT_THROW regions without a local handler are all the same;
     they trigger a terminate call in the runtime.
     MUST_NOT_THROW regions handled locally can differ in the debug info
     associated with the std::terminate () call, or, if one is coming
     from Java and the other from C++, in whether they call terminate or
     abort.

     We merge all MUST_NOT_THROW regions handled by the runtime into one.
     We also bring all local MUST_NOT_THROW regions to the roots of the
     EH tree (since unwinding never continues to the outer region anyway).
     If a MUST_NOT_THROW with a local handler is present in the tree, we
     use that region to merge into, since it will remain in the tree
     anyway; otherwise we use the first MUST_NOT_THROW.

     Merging of locally handled regions needs changes to the CFG.
     Crossjumping should take care of this, by looking at the actual code
     and ensuring that the cleanup actions are really the same.  */

  if (local_must_not_throw)
    first_must_not_throw = local_must_not_throw;

  for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
    {
      if (!r->label && !r->tree_label && r != first_must_not_throw)
        {
          if (dump_file)
            fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
                     r->region_number,
                     first_must_not_throw->region_number);
          remove_eh_handler_and_replace (r, first_must_not_throw);
          first_must_not_throw->may_contain_throw |= r->may_contain_throw;
        }
      else
        bring_to_root (r);
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
  VEC_free (eh_region, heap, must_not_throws);
}
/* Return an array mapping LABEL_DECL_UID to the region whose tree_label
   is identical to the label.  */

VEC(int,heap) *
label_to_region_map (void)
{
  VEC(int,heap) * label_to_region = NULL;
  int i;

  VEC_safe_grow_cleared (int, heap, label_to_region,
                         cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
          && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
        {
          VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
                       i);
        }
    }
  return label_to_region;
}
/* Return the number of EH regions.  */

int
num_eh_regions (void)
{
  return cfun->eh->last_region_number + 1;
}
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels we
     allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }
}
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (crtl->eh.built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;
    }
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
        *min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
        *max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}
/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
        {
          bitmap_set_bit (n->aka, i + eh_offset);
          VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
        }
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}
/* Return the prev_try pointer that catch subregions of R should
   point to.  */

static struct eh_region *
find_prev_try (struct eh_region * r)
{
  for (; r && r->type != ERT_TRY; r = r->outer)
    if (r->type == ERT_MUST_NOT_THROW
        || (r->type == ERT_ALLOWED_EXCEPTIONS
            && !r->u.allowed.type_list))
      {
        r = NULL;
        break;
      }
  return r;
}
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function and root the tree below OUTER_REGION.  Remap labels using the MAP
   callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, prev_try, old_prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh)
    return 0;
#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      old_prev_try = find_prev_try (cur);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    {
      min_region = 1;
      max_region = ifun->eh->last_region_number;
      old_prev_try = NULL;
    }
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
                         cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
        splice = &outer->inner;
      else
        splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
        for (i = cfun_last_region_number + 1;
             i <= cfun->eh->last_region_number; i++)
          {
            VEC_replace (eh_region, cfun->eh->region_array, i, outer);
            if (outer->aka == NULL)
              outer->aka = BITMAP_GGC_ALLOC ();
            bitmap_set_bit (outer->aka, i);
          }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    prev_try = find_prev_try (VEC_index (eh_region, cfun->eh->region_array, outer_region));

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH that is toplevel in the input function is now
         in the outer EH of the output function.  */
      if (cur == NULL)
        {
          gcc_assert (VEC_index
                      (eh_region, ifun->eh->region_array,
                       i - eh_offset) == NULL);
          if (outer)
            {
              VEC_replace (eh_region, cfun->eh->region_array, i, outer);
              if (outer->aka == NULL)
                outer->aka = BITMAP_GGC_ALLOC ();
              bitmap_set_bit (outer->aka, i);
            }
          continue;
        }
      if (i != cur->region_number)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.eh_try.eh_catch)
            REMAP (cur->u.eh_try.eh_catch);
          if (cur->u.eh_try.last_catch)
            REMAP (cur->u.eh_try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.eh_catch.next_catch)
            REMAP (cur->u.eh_catch.next_catch);
          if (cur->u.eh_catch.prev_catch)
            REMAP (cur->u.eh_catch.prev_catch);
          break;

        case ERT_CLEANUP:
          if (cur->u.cleanup.prev_try != old_prev_try)
            REMAP (cur->u.cleanup.prev_try);
          else
            cur->u.cleanup.prev_try = prev_try;
          break;

        default:
          break;
        }

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}
/* Return the region number of the region that is outer to both REGION_A
   and REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct GTY(()) ttypes_filter {
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */
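/* As a hypothetical example of the numbering scheme (actual values
   depend on insertion order): for a function containing catch (A),
   catch (B) and an exception specification throw (A, C),
   add_ttypes_entry would hand out the positive, 1-based @TTypes
   indices A=1, B=2, C=3, while add_ehspec_entry would give the
   specification list a negative, -1-based byte index (here -1) into
   the uleb128-encoded ehspec buffer holding the sequence 1, 3, 0.  */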
static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.eh_catch.filter_list = NULL_TREE;

          if (r->u.eh_catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.eh_catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.eh_catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.eh_catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
/* Emit SEQ into the basic block just before INSN (that is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* ??? Collect the set of all non-overlapping catch handlers
               all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region *c;
            for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
              {
                if (c->u.eh_catch.type_list == NULL)
                  emit_jump (c->label);
                else
                  {
                    /* We need one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.eh_catch.type_list;
                    tree flt_node = c->u.eh_catch.filter_list;

                    for (; tp_node; )
                      {
                        emit_cmp_and_jump_insns
                          (crtl->eh.filter,
                           GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
                           EQ, NULL_RTX,
                           targetm.eh_return_filter_mode (), 0, c->label);

                        tp_node = TREE_CHAIN (tp_node);
                        flt_node = TREE_CHAIN (flt_node);
                      }
                  }
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (crtl->eh.filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX,
                                   targetm.eh_return_filter_mode (), 0, region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          gcc_unreachable ();
        }
    }
}
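/* To make the dispatch concrete: for a try region catching types A and
   B, the code emitted above behaves like the following sketch, where
   "filter" is the crtl->eh.filter pseudo and FILTER_A/FILTER_B stand
   for the values handed out by assign_filter_values (all names here
   are illustrative only):

       post_landing_pad:
         if (filter == FILTER_A) goto handler_A;
         if (filter == FILTER_B) goto handler_B;
         RESX;  // placeholder; connect_post_landing_pads replaces it
                // with a jump to an outer pad or a _Unwind_Resume call
*/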
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        {
          edge e;
          basic_block src, dest;

          emit_jump (outer->post_landing_pad);
          src = BLOCK_FOR_INSN (region->resume);
          dest = BLOCK_FOR_INSN (outer->post_landing_pad);
          while (EDGE_COUNT (src->succs) > 0)
            remove_edge (EDGE_SUCC (src, 0));
          e = make_edge (src, dest, 0);
          e->probability = REG_BR_PROB_BASE;
          e->count = src->count;
        }
      else
        {
          emit_library_call (unwind_resume_libfunc, LCT_THROW,
                             VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);

          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
        }

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)
        continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
        if (HAVE_nonlocal_goto_receiver)
          emit_insn (gen_nonlocal_goto_receiver ());
        else
#endif
          { /* Nothing */ }

      emit_move_insn (crtl->eh.exc_ptr,
                      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (crtl->eh.filter,
                      gen_rtx_REG (targetm.eh_return_filter_mode (),
                                   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        continue;

      region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
      if (!region)
        continue;

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
        {
          type_thrown = region->u.eh_throw.type;
          region = region->outer;
        }

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
        {
          rc = reachable_next_level (region, type_thrown, NULL, false);
          if (rc != RNL_NOT_CAUGHT)
            break;
        }
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
        {
          lp_info[region->region_number].directly_reachable = 1;
          found_one = true;
        }
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

        r->landing_pad = dispatch_label;
        lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
        if (lp_info[i].action_index != -1)
          crtl->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        int action = lp_info[i].action_index;

        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          index = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          index = -1;
        /* Otherwise, look it up in the table.  */
        else
          index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

        lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

      /* Calls that are known to not throw need not be marked.  */
      if (note && INTVAL (XEXP (note, 0)) <= 0)
        continue;

      if (note)
        region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
      else
        region = NULL;

      if (!region)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          if (CALL_P (insn)
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))))
            this_call_site = -1;
          else
            continue;
        }
      else
        this_call_site = lp_info[region->region_number].call_site_index;

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                 TYPE_MODE (integer_type_node), 1,
                                 plus_constant (XEXP (fc, 0),
                                                sjlj_fc_jbuf_ofs), Pmode);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                             TYPE_MODE (integer_type_node), 0, dispatch_label);
    add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
                               dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq, insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}
1990 static void
1991 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1993 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
1994 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
1995 int i, first_reachable;
1996 rtx mem, dispatch, seq, fc;
1997 rtx before;
1998 basic_block bb;
1999 edge e;
2001 fc = crtl->eh.sjlj_fc;
2003 start_sequence ();
2005 emit_label (dispatch_label);
2007 #ifndef DONT_USE_BUILTIN_SETJMP
2008 expand_builtin_setjmp_receiver (dispatch_label);
2009 #endif
2011 /* Load up dispatch index, exc_ptr and filter values from the
2012 function context. */
2013 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2014 sjlj_fc_call_site_ofs);
2015 dispatch = copy_to_reg (mem);
2017 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2018 if (unwind_word_mode != ptr_mode)
2020 #ifdef POINTERS_EXTEND_UNSIGNED
2021 mem = convert_memory_address (ptr_mode, mem);
2022 #else
2023 mem = convert_to_mode (ptr_mode, mem, 0);
2024 #endif
2026 emit_move_insn (crtl->eh.exc_ptr, mem);
2028 mem = adjust_address (fc, unwind_word_mode,
2029 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2030 if (unwind_word_mode != filter_mode)
2031 mem = convert_to_mode (filter_mode, mem, 0);
2032 emit_move_insn (crtl->eh.filter, mem);
2034 /* Jump to one of the directly reachable regions. */
2035 /* ??? This really ought to be using a switch statement. */
2037 first_reachable = 0;
2038 for (i = cfun->eh->last_region_number; i > 0; --i)
2040 if (! lp_info[i].directly_reachable)
2041 continue;
2043 if (! first_reachable)
2045 first_reachable = i;
2046 continue;
2049 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2050 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2051 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2052 ->post_landing_pad);
2055 seq = get_insns ();
2056 end_sequence ();
2058 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2059 ->post_landing_pad);
2061 bb = emit_to_new_bb_before (seq, before);
2062 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2063 e->count = bb->count;
2064 e->probability = REG_BR_PROB_BASE;
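/* For illustration, a hedged sketch of the dispatch code the loop above
   emits.  Suppose regions 5, 3 and 2 are directly reachable (numbers
   hypothetical); region 5, found first in the downward scan, becomes
   the fall-through target and gets no compare:

       dispatch = fc.call_site;
       if (dispatch == dispatch_index[3]) goto post_landing_pad_3;
       if (dispatch == dispatch_index[2]) goto post_landing_pad_2;
       goto post_landing_pad_5;

   hence the ??? note above about this really being a switch.  */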
2067 static void
2068 sjlj_build_landing_pads (void)
2070 struct sjlj_lp_info *lp_info;
2072 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2074 if (sjlj_find_directly_reachable_regions (lp_info))
2076 rtx dispatch_label = gen_label_rtx ();
2077 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2078 TYPE_MODE (sjlj_fc_type_node),
2079 TYPE_ALIGN (sjlj_fc_type_node));
2080 crtl->eh.sjlj_fc
2081 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2082 int_size_in_bytes (sjlj_fc_type_node),
2083 align);
2085 sjlj_assign_call_site_values (dispatch_label, lp_info);
2086 sjlj_mark_call_sites (lp_info);
2088 sjlj_emit_function_enter (dispatch_label);
2089 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2090 sjlj_emit_function_exit ();
2093 free (lp_info);
2096 /* After initial rtl generation, call back to finish generating
2097 exception support code. */
2099 static void
2100 finish_eh_generation (void)
2102 basic_block bb;
2104 /* Nothing to do if no regions created. */
2105 if (cfun->eh->region_tree == NULL)
2106 return;
2108 /* The object here is to provide detailed information (via
2109 reachable_handlers) on how exception control flows within the
2110 function for the CFG construction. In this first pass, we can
2111 include type information garnered from ERT_THROW and
2112 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2113 in deleting unreachable handlers. Subsequently, we will generate
2114 landing pads which will connect many of the handlers, and then
2115 type information will not be effective. Still, this is a win
2116 over previous implementations. */
2118 /* These registers are used by the landing pads. Make sure they
2119 have been generated. */
2120 get_exception_pointer ();
2121 get_exception_filter ();
2123 /* Construct the landing pads. */
2125 assign_filter_values ();
2126 build_post_landing_pads ();
2127 connect_post_landing_pads ();
2128 if (USING_SJLJ_EXCEPTIONS)
2129 sjlj_build_landing_pads ();
2130 else
2131 dw2_build_landing_pads ();
2133 crtl->eh.built_landing_pads = 1;
2135 /* We've totally changed the CFG. Start over. */
2136 find_exception_handler_labels ();
2137 break_superblocks ();
2138 if (USING_SJLJ_EXCEPTIONS
2139 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2140 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2141 commit_edge_insertions ();
2142 FOR_EACH_BB (bb)
2144 edge e;
2145 edge_iterator ei;
2146 bool eh = false;
2147 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2149 if (e->flags & EDGE_EH)
2151 remove_edge (e);
2152 eh = true;
2154 else
2155 ei_next (&ei);
2157 if (eh)
2158 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2162 /* This section handles removing dead code for flow. */
2164 /* Splice REGION from the region tree and replace it by REPLACE etc. */
2166 static void
2167 remove_eh_handler_and_replace (struct eh_region *region,
2168 struct eh_region *replace)
2170 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2171 rtx lab;
2173 outer = region->outer;
2174 /* For the benefit of efficiently handling REG_EH_REGION notes,
2175 replace this region in the region array with its containing
2176 region. Note that previous region deletions may result in
2177 multiple copies of this region in the array, so we have a
2178 list of alternate numbers by which we are known. */
2180 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2181 replace);
2182 if (region->aka)
2184 unsigned i;
2185 bitmap_iterator bi;
2187 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2189 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2193 if (replace)
2195 if (!replace->aka)
2196 replace->aka = BITMAP_GGC_ALLOC ();
2197 if (region->aka)
2198 bitmap_ior_into (replace->aka, region->aka);
2199 bitmap_set_bit (replace->aka, region->region_number);
2202 if (crtl->eh.built_landing_pads)
2203 lab = region->landing_pad;
2204 else
2205 lab = region->label;
2206 if (outer)
2207 pp_start = &outer->inner;
2208 else
2209 pp_start = &cfun->eh->region_tree;
2210 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2211 continue;
2212 *pp = region->next_peer;
2214 if (replace)
2215 pp_start = &replace->inner;
2216 else
2217 pp_start = &cfun->eh->region_tree;
2218 inner = region->inner;
2219 if (inner)
2221 for (p = inner; p->next_peer ; p = p->next_peer)
2222 p->outer = replace;
2223 p->outer = replace;
2225 p->next_peer = *pp_start;
2226 *pp_start = inner;
2229 if (region->type == ERT_CATCH)
2231 struct eh_region *eh_try, *next, *prev;
2233 for (eh_try = region->next_peer;
2234 eh_try->type == ERT_CATCH;
2235 eh_try = eh_try->next_peer)
2236 continue;
2237 gcc_assert (eh_try->type == ERT_TRY);
2239 next = region->u.eh_catch.next_catch;
2240 prev = region->u.eh_catch.prev_catch;
2242 if (next)
2243 next->u.eh_catch.prev_catch = prev;
2244 else
2245 eh_try->u.eh_try.last_catch = prev;
2246 if (prev)
2247 prev->u.eh_catch.next_catch = next;
2248 else
2250 eh_try->u.eh_try.eh_catch = next;
2251 if (! next)
2252 remove_eh_handler (eh_try);
2257 /* Splice REGION from the region tree and replace it by the outer region
2258 etc. */
2260 static void
2261 remove_eh_handler (struct eh_region *region)
2263 remove_eh_handler_and_replace (region, region->outer);
2266 /* Remove EH region R that has turned out to have no code in its handler. */
2268 void
2269 remove_eh_region (int r)
2271 struct eh_region *region;
2273 region = VEC_index (eh_region, cfun->eh->region_array, r);
2274 remove_eh_handler (region);
2277 /* Invokes CALLBACK for every exception handler label. Only used by old
2278 loop hackery; should not be used by new code. */
2280 void
2281 for_each_eh_label (void (*callback) (rtx))
2283 int i;
2284 for (i = 0; i < cfun->eh->last_region_number; i++)
2286 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2287 if (r && r->region_number == i && r->label
2288 && GET_CODE (r->label) == CODE_LABEL)
2289 (*callback) (r->label);
2293 /* Invoke CALLBACK for every exception region in the current function. */
2295 void
2296 for_each_eh_region (void (*callback) (struct eh_region *))
2298 int i, n = cfun->eh->last_region_number;
2299 for (i = 1; i <= n; ++i)
2301 struct eh_region *region;
2303 region = VEC_index (eh_region, cfun->eh->region_array, i);
2304 if (region)
2305 (*callback) (region);
2309 /* This section describes CFG exception edges for flow. */
2311 /* For communicating between calls to reachable_next_level. */
2312 struct reachable_info
2314 tree types_caught;
2315 tree types_allowed;
2316 void (*callback) (struct eh_region *, void *);
2317 void *callback_data;
2320 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2321 base class of TYPE, is in HANDLED. */
2323 static int
2324 check_handled (tree handled, tree type)
2326 tree t;
2328 /* We can check for exact matches without front-end help. */
2329 if (! lang_eh_type_covers)
2331 for (t = handled; t ; t = TREE_CHAIN (t))
2332 if (TREE_VALUE (t) == type)
2333 return 1;
2335 else
2337 for (t = handled; t ; t = TREE_CHAIN (t))
2338 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2339 return 1;
2342 return 0;
2345 /* A subroutine of reachable_next_level. If we are collecting a list
2346 of handlers, add one. After landing pad generation, reference
2347 it instead of the handlers themselves. Further, the handlers are
2348 all wired together, so by referencing one, we've got them all.
2349 Before landing pad generation we reference each handler individually.
2351 LP_REGION contains the landing pad; REGION is the handler. */
2353 static void
2354 add_reachable_handler (struct reachable_info *info,
2355 struct eh_region *lp_region, struct eh_region *region)
2357 if (! info)
2358 return;
2360 if (crtl->eh.built_landing_pads)
2361 info->callback (lp_region, info->callback_data);
2362 else
2363 info->callback (region, info->callback_data);
2366 /* Process one level of exception regions for reachability.
2367 If TYPE_THROWN is non-null, then it is the *exact* type being
2368 propagated. If INFO is non-null, then collect handler labels
2369 and caught/allowed type information between invocations. */
2371 static enum reachable_code
2372 reachable_next_level (struct eh_region *region, tree type_thrown,
2373 struct reachable_info *info,
2374 bool maybe_resx)
2376 switch (region->type)
2378 case ERT_CLEANUP:
2379 /* Before landing-pad generation, we model control flow
2380 directly to the individual handlers. In this way we can
2381 see that catch handler types may shadow one another. */
2382 add_reachable_handler (info, region, region);
2383 return RNL_MAYBE_CAUGHT;
2385 case ERT_TRY:
2387 struct eh_region *c;
2388 enum reachable_code ret = RNL_NOT_CAUGHT;
2390 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2392 /* A catch-all handler ends the search. */
2393 if (c->u.eh_catch.type_list == NULL)
2395 add_reachable_handler (info, region, c);
2396 return RNL_CAUGHT;
2399 if (type_thrown)
2401 /* If we have at least one type match, end the search. */
2402 tree tp_node = c->u.eh_catch.type_list;
2404 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2406 tree type = TREE_VALUE (tp_node);
2408 if (type == type_thrown
2409 || (lang_eh_type_covers
2410 && (*lang_eh_type_covers) (type, type_thrown)))
2412 add_reachable_handler (info, region, c);
2413 return RNL_CAUGHT;
2417 /* If we have definitive information of a match failure,
2418 the catch won't trigger. */
2419 if (lang_eh_type_covers)
2420 return RNL_NOT_CAUGHT;
2423 /* At this point, we either don't know what type is thrown or
2424 don't have front-end assistance to help decide whether it is
2425 covered by one of the types in the list for this region.
2427 We'd then like to add this region to the list of reachable
2428 handlers since it is indeed potentially reachable based on the
2429 information we have.
2431 Actually, this handler is for sure not reachable if all the
2432 types it matches have already been caught. That is, it is only
2433 potentially reachable if at least one of the types it catches
2434 has not been previously caught. */
2436 if (! info)
2437 ret = RNL_MAYBE_CAUGHT;
2438 else
2440 tree tp_node = c->u.eh_catch.type_list;
2441 bool maybe_reachable = false;
2443 /* Compute the potential reachability of this handler and
2444 update the list of types caught at the same time. */
2445 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2447 tree type = TREE_VALUE (tp_node);
2449 if (! check_handled (info->types_caught, type))
2451 info->types_caught
2452 = tree_cons (NULL, type, info->types_caught);
2454 maybe_reachable = true;
2458 if (maybe_reachable)
2460 add_reachable_handler (info, region, c);
2462 /* ??? If the catch type is a base class of every allowed
2463 type, then we know we can stop the search. */
2464 ret = RNL_MAYBE_CAUGHT;
2469 return ret;
2472 case ERT_ALLOWED_EXCEPTIONS:
2473 /* An empty list of types definitely ends the search. */
2474 if (region->u.allowed.type_list == NULL_TREE)
2476 add_reachable_handler (info, region, region);
2477 return RNL_CAUGHT;
2480 /* Collect a list of lists of allowed types for use in detecting
2481 when a catch may be transformed into a catch-all. */
2482 if (info)
2483 info->types_allowed = tree_cons (NULL_TREE,
2484 region->u.allowed.type_list,
2485 info->types_allowed);
2487 /* If we have definitive information about the type hierarchy,
2488 then we can tell if the thrown type will pass through the
2489 filter. */
2490 if (type_thrown && lang_eh_type_covers)
2492 if (check_handled (region->u.allowed.type_list, type_thrown))
2493 return RNL_NOT_CAUGHT;
2494 else
2496 add_reachable_handler (info, region, region);
2497 return RNL_CAUGHT;
2501 add_reachable_handler (info, region, region);
2502 return RNL_MAYBE_CAUGHT;
2504 case ERT_CATCH:
2505 /* Catch regions are handled by their controlling try region. */
2506 return RNL_NOT_CAUGHT;
2508 case ERT_MUST_NOT_THROW:
2509 /* Here we end our search, since no exceptions may propagate.
2511 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
2512 only via locally handled RESX instructions.
2514 When we inline a function call, we can bring in new handlers.  To keep
2515 ERT_MUST_NOT_THROW landing pads from being deleted as unreachable,
2516 assume that such handlers exist for any inlinable call until the
2517 inlining decisions are fixed. */
2519 if (maybe_resx)
2521 add_reachable_handler (info, region, region);
2522 return RNL_CAUGHT;
2524 else
2525 return RNL_BLOCKED;
2527 case ERT_THROW:
2528 case ERT_UNKNOWN:
2529 /* Shouldn't see these here. */
2530 gcc_unreachable ();
2531 break;
2532 default:
2533 gcc_unreachable ();
2537 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2539 void
2540 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
2541 void (*callback) (struct eh_region *, void *),
2542 void *callback_data)
2544 struct reachable_info info;
2545 struct eh_region *region;
2546 tree type_thrown;
2548 memset (&info, 0, sizeof (info));
2549 info.callback = callback;
2550 info.callback_data = callback_data;
2552 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2553 if (!region)
2554 return;
2556 type_thrown = NULL_TREE;
2557 if (is_resx)
2559 /* A RESX leaves a region instead of entering it. Thus the
2560 region itself may have been deleted out from under us. */
2561 if (region == NULL)
2562 return;
2563 region = region->outer;
2565 else if (region->type == ERT_THROW)
2567 type_thrown = region->u.eh_throw.type;
2568 region = region->outer;
2571 while (region)
2573 if (reachable_next_level (region, type_thrown, &info,
2574 inlinable_call || is_resx) >= RNL_CAUGHT)
2575 break;
2576 /* If we have processed one cleanup, there is no point in
2577 processing any more of them. Each cleanup will have an edge
2578 to the next outer cleanup region, so the flow graph will be
2579 accurate. */
2580 if (region->type == ERT_CLEANUP)
2581 region = region->u.cleanup.prev_try;
2582 else
2583 region = region->outer;
2587 /* Retrieve a list of labels of exception handlers which can be
2588 reached by a given insn. */
2590 static void
2591 arh_to_landing_pad (struct eh_region *region, void *data)
2593 rtx *p_handlers = (rtx *) data;
2594 if (! *p_handlers)
2595 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2598 static void
2599 arh_to_label (struct eh_region *region, void *data)
2601 rtx *p_handlers = (rtx *) data;
2602 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2605 rtx
2606 reachable_handlers (rtx insn)
2608 bool is_resx = false;
2609 rtx handlers = NULL;
2610 int region_number;
2612 if (JUMP_P (insn)
2613 && GET_CODE (PATTERN (insn)) == RESX)
2615 region_number = XINT (PATTERN (insn), 0);
2616 is_resx = true;
2618 else
2620 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2621 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2622 return NULL;
2623 region_number = INTVAL (XEXP (note, 0));
2626 foreach_reachable_handler (region_number, is_resx, false,
2627 (crtl->eh.built_landing_pads
2628 ? arh_to_landing_pad
2629 : arh_to_label),
2630 &handlers);
2632 return handlers;
2635 /* Determine if the given INSN can throw an exception that is caught
2636 within the function. */
2638 bool
2639 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
2641 struct eh_region *region;
2642 tree type_thrown;
2644 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2645 if (!region)
2646 return false;
2648 type_thrown = NULL_TREE;
2649 if (is_resx)
2650 region = region->outer;
2651 else if (region->type == ERT_THROW)
2653 type_thrown = region->u.eh_throw.type;
2654 region = region->outer;
2657 /* If this exception is ignored by each and every containing region,
2658 then control passes straight out. The runtime may handle some
2659 regions, which also do not require processing internally. */
2660 for (; region; region = region->outer)
2662 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
2663 inlinable_call || is_resx);
2664 if (how == RNL_BLOCKED)
2665 return false;
2666 if (how != RNL_NOT_CAUGHT)
2667 return true;
2670 return false;
2673 bool
2674 can_throw_internal (const_rtx insn)
2676 rtx note;
2678 if (! INSN_P (insn))
2679 return false;
2681 if (JUMP_P (insn)
2682 && GET_CODE (PATTERN (insn)) == RESX
2683 && XINT (PATTERN (insn), 0) > 0)
2684 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
2686 if (NONJUMP_INSN_P (insn)
2687 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2688 insn = XVECEXP (PATTERN (insn), 0, 0);
2690 /* Every insn that might throw has an EH_REGION note. */
2691 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2692 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2693 return false;
2695 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
2698 /* Determine if the given INSN can throw an exception that is
2699 visible outside the function. */
2701 bool
2702 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
2704 struct eh_region *region;
2705 tree type_thrown;
2707 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2708 if (!region)
2709 return true;
2711 type_thrown = NULL_TREE;
2712 if (is_resx)
2713 region = region->outer;
2714 else if (region->type == ERT_THROW)
2716 type_thrown = region->u.eh_throw.type;
2717 region = region->outer;
2720 /* If the exception is caught or blocked by any containing region,
2721 then it is not seen by any calling function. */
2722 for (; region ; region = region->outer)
2723 if (reachable_next_level (region, type_thrown, NULL,
2724 inlinable_call || is_resx) >= RNL_CAUGHT)
2725 return false;
2727 return true;
2730 bool
2731 can_throw_external (const_rtx insn)
2733 rtx note;
2735 if (! INSN_P (insn))
2736 return false;
2738 if (JUMP_P (insn)
2739 && GET_CODE (PATTERN (insn)) == RESX
2740 && XINT (PATTERN (insn), 0) > 0)
2741 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
2743 if (NONJUMP_INSN_P (insn)
2744 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2746 rtx seq = PATTERN (insn);
2747 int i, n = XVECLEN (seq, 0);
2749 for (i = 0; i < n; i++)
2750 if (can_throw_external (XVECEXP (seq, 0, i)))
2751 return true;
2753 return false;
2756 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2757 if (!note)
2759 /* Calls (and trapping insns) without notes are outside any
2760 exception handling region in this function. We have to
2761 assume they might throw. Given that the front end and middle
2762 ends mark known NOTHROW functions, this isn't so wildly
2763 inaccurate. */
2764 return (CALL_P (insn)
2765 || (flag_non_call_exceptions
2766 && may_trap_p (PATTERN (insn))));
2768 if (INTVAL (XEXP (note, 0)) <= 0)
2769 return false;
2771 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
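/* A worked example of the note conventions used by the two predicates
   above (region numbers hypothetical): a call insn carrying
   REG_EH_REGION note 4 is looked up in slot 4 of region_array and the
   region chain is walked outward from there; a note value of zero or
   below marks an insn known not to throw; and a call with no note at
   all throws straight out of the function, so can_throw_external
   returns true for it while can_throw_internal returns false.  */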
2774 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
2776 unsigned int
2777 set_nothrow_function_flags (void)
2779 rtx insn;
2781 crtl->nothrow = 1;
2783 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2784 something that can throw an exception. We specifically exempt
2785 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2786 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2787 is optimistic. */
2789 crtl->all_throwers_are_sibcalls = 1;
2791 /* If we don't know that this implementation of the function will
2792 actually be used, then we must not set TREE_NOTHROW, since
2793 callers must not assume that this function does not throw. */
2794 if (TREE_NOTHROW (current_function_decl))
2795 return 0;
2797 if (! flag_exceptions)
2798 return 0;
2800 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2801 if (can_throw_external (insn))
2803 crtl->nothrow = 0;
2805 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2807 crtl->all_throwers_are_sibcalls = 0;
2808 return 0;
2812 for (insn = crtl->epilogue_delay_list; insn;
2813 insn = XEXP (insn, 1))
2814 if (can_throw_external (insn))
2816 crtl->nothrow = 0;
2818 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2820 crtl->all_throwers_are_sibcalls = 0;
2821 return 0;
2824 if (crtl->nothrow
2825 && (cgraph_function_body_availability (cgraph_node
2826 (current_function_decl))
2827 >= AVAIL_AVAILABLE))
2829 struct cgraph_node *node = cgraph_node (current_function_decl);
2830 struct cgraph_edge *e;
2831 for (e = node->callers; e; e = e->next_caller)
2832 e->can_throw_external = false;
2833 TREE_NOTHROW (current_function_decl) = 1;
2835 if (dump_file)
2836 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2837 current_function_name ());
2839 return 0;
2842 struct rtl_opt_pass pass_set_nothrow_function_flags =
2845 RTL_PASS,
2846 "nothrow", /* name */
2847 NULL, /* gate */
2848 set_nothrow_function_flags, /* execute */
2849 NULL, /* sub */
2850 NULL, /* next */
2851 0, /* static_pass_number */
2852 TV_NONE, /* tv_id */
2853 0, /* properties_required */
2854 0, /* properties_provided */
2855 0, /* properties_destroyed */
2856 0, /* todo_flags_start */
2857 TODO_dump_func, /* todo_flags_finish */
2862 /* Various hooks for unwind library. */
2864 /* Do any necessary initialization to access arbitrary stack frames.
2865 On the SPARC, this means flushing the register windows. */
2867 void
2868 expand_builtin_unwind_init (void)
2870 /* Set this so all the registers get saved in our frame; we need to be
2871 able to copy the saved values for any registers from frames we unwind. */
2872 crtl->saves_all_registers = 1;
2874 #ifdef SETUP_FRAME_ADDRESSES
2875 SETUP_FRAME_ADDRESSES ();
2876 #endif
2879 rtx
2880 expand_builtin_eh_return_data_regno (tree exp)
2882 tree which = CALL_EXPR_ARG (exp, 0);
2883 unsigned HOST_WIDE_INT iwhich;
2885 if (TREE_CODE (which) != INTEGER_CST)
2887 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2888 return constm1_rtx;
2891 iwhich = tree_low_cst (which, 1);
2892 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2893 if (iwhich == INVALID_REGNUM)
2894 return constm1_rtx;
2896 #ifdef DWARF_FRAME_REGNUM
2897 iwhich = DWARF_FRAME_REGNUM (iwhich);
2898 #else
2899 iwhich = DBX_REGISTER_NUMBER (iwhich);
2900 #endif
2902 return GEN_INT (iwhich);
2905 /* Given a value extracted from the return address register or stack slot,
2906 return the actual address encoded in that value. */
2908 rtx
2909 expand_builtin_extract_return_addr (tree addr_tree)
2911 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2913 if (GET_MODE (addr) != Pmode
2914 && GET_MODE (addr) != VOIDmode)
2916 #ifdef POINTERS_EXTEND_UNSIGNED
2917 addr = convert_memory_address (Pmode, addr);
2918 #else
2919 addr = convert_to_mode (Pmode, addr, 0);
2920 #endif
2923 /* First mask out any unwanted bits. */
2924 #ifdef MASK_RETURN_ADDR
2925 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2926 #endif
2928 /* Then adjust to find the real return address. */
2929 #if defined (RETURN_ADDR_OFFSET)
2930 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2931 #endif
2933 return addr;
2936 /* Given an actual address in addr_tree, do any necessary encoding
2937 and return the value to be stored in the return address register or
2938 stack slot so the epilogue will return to that address. */
2940 rtx
2941 expand_builtin_frob_return_addr (tree addr_tree)
2943 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2945 addr = convert_memory_address (Pmode, addr);
2947 #ifdef RETURN_ADDR_OFFSET
2948 addr = force_reg (Pmode, addr);
2949 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2950 #endif
2952 return addr;
2955 /* Set up the epilogue with the magic bits we'll need to return to the
2956 exception handler. */
2958 void
2959 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2960 tree handler_tree)
2962 rtx tmp;
2964 #ifdef EH_RETURN_STACKADJ_RTX
2965 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2966 VOIDmode, EXPAND_NORMAL);
2967 tmp = convert_memory_address (Pmode, tmp);
2968 if (!crtl->eh.ehr_stackadj)
2969 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2970 else if (tmp != crtl->eh.ehr_stackadj)
2971 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2972 #endif
2974 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2975 VOIDmode, EXPAND_NORMAL);
2976 tmp = convert_memory_address (Pmode, tmp);
2977 if (!crtl->eh.ehr_handler)
2978 crtl->eh.ehr_handler = copy_to_reg (tmp);
2979 else if (tmp != crtl->eh.ehr_handler)
2980 emit_move_insn (crtl->eh.ehr_handler, tmp);
2982 if (!crtl->eh.ehr_label)
2983 crtl->eh.ehr_label = gen_label_rtx ();
2984 emit_jump (crtl->eh.ehr_label);
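/* For context, a hedged sketch of the source-level usage this expands:
   the libgcc unwinder (e.g. uw_install_context in unwind-dw2.c) ends
   with something like

       __builtin_eh_return (offset, handler);

   which stores the two values into ehr_stackadj and ehr_handler above
   and jumps to ehr_label; the actual transfer of control is emitted by
   expand_eh_return below, in the epilogue.  */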
2987 void
2988 expand_eh_return (void)
2990 rtx around_label;
2992 if (! crtl->eh.ehr_label)
2993 return;
2995 crtl->calls_eh_return = 1;
2997 #ifdef EH_RETURN_STACKADJ_RTX
2998 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2999 #endif
3001 around_label = gen_label_rtx ();
3002 emit_jump (around_label);
3004 emit_label (crtl->eh.ehr_label);
3005 clobber_return_register ();
3007 #ifdef EH_RETURN_STACKADJ_RTX
3008 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3009 #endif
3011 #ifdef HAVE_eh_return
3012 if (HAVE_eh_return)
3013 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3014 else
3015 #endif
3017 #ifdef EH_RETURN_HANDLER_RTX
3018 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3019 #else
3020 error ("__builtin_eh_return not supported on this target");
3021 #endif
3024 emit_label (around_label);
3027 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3028 POINTERS_EXTEND_UNSIGNED and return it. */
3030 rtx
3031 expand_builtin_extend_pointer (tree addr_tree)
3033 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3034 int extend;
3036 #ifdef POINTERS_EXTEND_UNSIGNED
3037 extend = POINTERS_EXTEND_UNSIGNED;
3038 #else
3039 /* The previous EH code did an unsigned extend by default, so we do this also
3040 for consistency. */
3041 extend = 1;
3042 #endif
3044 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3047 /* In the following functions, we represent entries in the action table
3048 as 1-based indices. Special cases are:
3050 0: null action record, non-null landing pad; implies cleanups
3051 -1: null action record, null landing pad; implies no action
3052 -2: no call-site entry; implies must_not_throw
3053 -3: we have yet to process outer regions
3055 Further, no special cases apply to the "next" field of the record.
3056 For next, 0 means end of list. */
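/* A hedged worked example of how records chain (filter values
   hypothetical).  For a catch with filter 1 nested inside a cleanup,
   collect_one_action_chain below first materializes the cleanup as the
   record {filter 0, next 0}; its two sleb128 bytes occupy positions
   1-2 of action_record_data, so its 1-based offset is 1.  The catch
   then becomes {filter 1, next 1} starting at offset 3; after its
   filter byte is pushed, the link is converted to 1 - 4 = -3, a
   self-relative displacement back to the cleanup record (see
   add_action_record).  */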
3058 struct action_record
3060 int offset;
3061 int filter;
3062 int next;
3065 static int
3066 action_record_eq (const void *pentry, const void *pdata)
3068 const struct action_record *entry = (const struct action_record *) pentry;
3069 const struct action_record *data = (const struct action_record *) pdata;
3070 return entry->filter == data->filter && entry->next == data->next;
3073 static hashval_t
3074 action_record_hash (const void *pentry)
3076 const struct action_record *entry = (const struct action_record *) pentry;
3077 return entry->next * 1009 + entry->filter;
3080 static int
3081 add_action_record (htab_t ar_hash, int filter, int next)
3083 struct action_record **slot, *new_ar, tmp;
3085 tmp.filter = filter;
3086 tmp.next = next;
3087 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3089 if ((new_ar = *slot) == NULL)
3091 new_ar = XNEW (struct action_record);
3092 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3093 new_ar->filter = filter;
3094 new_ar->next = next;
3095 *slot = new_ar;
3097 /* The filter value goes in untouched. The link to the next
3098 record is a "self-relative" byte offset, or zero to indicate
3099 that there is no next record. So convert the absolute 1-based
3100 indices we've been carrying around into a displacement. */
3102 push_sleb128 (&crtl->eh.action_record_data, filter);
3103 if (next)
3104 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3105 push_sleb128 (&crtl->eh.action_record_data, next);
3108 return new_ar->offset;
3111 static int
3112 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3114 struct eh_region *c;
3115 int next;
3117 /* If we've reached the top of the region chain, then we have
3118 no actions, and require no landing pad. */
3119 if (region == NULL)
3120 return -1;
3122 switch (region->type)
3124 case ERT_CLEANUP:
3125 /* A cleanup adds a zero filter to the beginning of the chain, but
3126 there are special cases to look out for. If there are *only*
3127 cleanups along a path, then it compresses to a zero action.
3128 Further, if there are multiple cleanups along a path, we only
3129 need to represent one of them, as that is enough to trigger
3130 entry to the landing pad at runtime. */
3131 next = collect_one_action_chain (ar_hash, region->outer);
3132 if (next <= 0)
3133 return 0;
3134 for (c = region->outer; c ; c = c->outer)
3135 if (c->type == ERT_CLEANUP)
3136 return next;
3137 return add_action_record (ar_hash, 0, next);
3139 case ERT_TRY:
3140 /* Process the associated catch regions in reverse order.
3141 If there's a catch-all handler, then we don't need to
3142 search outer regions. Use a magic -3 value to record
3143 that we haven't done the outer search. */
3144 next = -3;
3145 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3147 if (c->u.eh_catch.type_list == NULL)
3149 /* Retrieve the filter from the head of the filter list
3150 where we have stored it (see assign_filter_values). */
3151 int filter
3152 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3154 next = add_action_record (ar_hash, filter, 0);
3156 else
3158 /* Once the outer search is done, trigger an action record for
3159 each filter we have. */
3160 tree flt_node;
3162 if (next == -3)
3164 next = collect_one_action_chain (ar_hash, region->outer);
3166 /* If there is no next action, terminate the chain. */
3167 if (next == -1)
3168 next = 0;
3169 /* If all outer actions are cleanups or must_not_throw,
3170 we'll have no action record for it, since we had wanted
3171 to encode these states in the call-site record directly.
3172 Add a cleanup action to the chain to catch these. */
3173 else if (next <= 0)
3174 next = add_action_record (ar_hash, 0, 0);
3177 flt_node = c->u.eh_catch.filter_list;
3178 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3180 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3181 next = add_action_record (ar_hash, filter, next);
3185 return next;
3187 case ERT_ALLOWED_EXCEPTIONS:
3188 /* An exception specification adds its filter to the
3189 beginning of the chain. */
3190 next = collect_one_action_chain (ar_hash, region->outer);
3192 /* If there is no next action, terminate the chain. */
3193 if (next == -1)
3194 next = 0;
3195 /* If all outer actions are cleanups or must_not_throw,
3196 we'll have no action record for it, since we had wanted
3197 to encode these states in the call-site record directly.
3198 Add a cleanup action to the chain to catch these. */
3199 else if (next <= 0)
3200 next = add_action_record (ar_hash, 0, 0);
3202 return add_action_record (ar_hash, region->u.allowed.filter, next);
3204 case ERT_MUST_NOT_THROW:
3205 /* A must-not-throw region with no inner handlers or cleanups
3206 requires no call-site entry. Note that this differs from
3207 the no handler or cleanup case in that we do require an lsda
3208 to be generated. Return a magic -2 value to record this. */
3209 return -2;
3211 case ERT_CATCH:
3212 case ERT_THROW:
3213 /* CATCH regions are handled in TRY above. THROW regions are
3214 for optimization information only and produce no output. */
3215 return collect_one_action_chain (ar_hash, region->outer);
3217 default:
3218 gcc_unreachable ();
3222 static int
3223 add_call_site (rtx landing_pad, int action)
3225 call_site_record record;
3227 record = GGC_NEW (struct call_site_record);
3228 record->landing_pad = landing_pad;
3229 record->action = action;
3231 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3233 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3236 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3237 The new note numbers will not refer to region numbers, but
3238 instead to call site entries. */
3240 unsigned int
3241 convert_to_eh_region_ranges (void)
3243 rtx insn, iter, note;
3244 htab_t ar_hash;
3245 int last_action = -3;
3246 rtx last_action_insn = NULL_RTX;
3247 rtx last_landing_pad = NULL_RTX;
3248 rtx first_no_action_insn = NULL_RTX;
3249 int call_site = 0;
3251 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3252 return 0;
3254 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3256 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3258 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3259 if (INSN_P (iter))
3261 struct eh_region *region;
3262 int this_action;
3263 rtx this_landing_pad;
3265 insn = iter;
3266 if (NONJUMP_INSN_P (insn)
3267 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3268 insn = XVECEXP (PATTERN (insn), 0, 0);
3270 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3271 if (!note)
3273 if (! (CALL_P (insn)
3274 || (flag_non_call_exceptions
3275 && may_trap_p (PATTERN (insn)))))
3276 continue;
3277 this_action = -1;
3278 region = NULL;
3280 else
3282 if (INTVAL (XEXP (note, 0)) <= 0)
3283 continue;
3284 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3285 this_action = collect_one_action_chain (ar_hash, region);
3288 /* Existence of catch handlers, or must-not-throw regions
3289 implies that an lsda is needed (even if empty). */
3290 if (this_action != -1)
3291 crtl->uses_eh_lsda = 1;
3293 /* Delay creation of region notes for no-action regions
3294 until we're sure that an lsda will be required. */
3295 else if (last_action == -3)
3297 first_no_action_insn = iter;
3298 last_action = -1;
3301 /* Cleanups and handlers may share action chains but not
3302 landing pads. Collect the landing pad for this region. */
3303 if (this_action >= 0)
3305 struct eh_region *o;
3306 for (o = region; ! o->landing_pad ; o = o->outer)
3307 continue;
3308 this_landing_pad = o->landing_pad;
3310 else
3311 this_landing_pad = NULL_RTX;
3313 /* Differing actions or landing pads implies a change in call-site
3314 info, which implies some EH_REGION note should be emitted. */
3315 if (last_action != this_action
3316 || last_landing_pad != this_landing_pad)
3318 /* If we'd not seen a previous action (-3) or the previous
3319 action was must-not-throw (-2), then we do not need an
3320 end note. */
3321 if (last_action >= -1)
3323 /* If we delayed the creation of the begin, do it now. */
3324 if (first_no_action_insn)
3326 call_site = add_call_site (NULL_RTX, 0);
3327 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3328 first_no_action_insn);
3329 NOTE_EH_HANDLER (note) = call_site;
3330 first_no_action_insn = NULL_RTX;
3333 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3334 last_action_insn);
3335 NOTE_EH_HANDLER (note) = call_site;
3338 /* If the new action is must-not-throw, then no region notes
3339 are created. */
3340 if (this_action >= -1)
3342 call_site = add_call_site (this_landing_pad,
3343 this_action < 0 ? 0 : this_action);
3344 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3345 NOTE_EH_HANDLER (note) = call_site;
3348 last_action = this_action;
3349 last_landing_pad = this_landing_pad;
3351 last_action_insn = iter;
3354 if (last_action >= -1 && ! first_no_action_insn)
3356 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3357 NOTE_EH_HANDLER (note) = call_site;
3360 htab_delete (ar_hash);
3361 return 0;
3364 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3367 RTL_PASS,
3368 "eh_ranges", /* name */
3369 NULL, /* gate */
3370 convert_to_eh_region_ranges, /* execute */
3371 NULL, /* sub */
3372 NULL, /* next */
3373 0, /* static_pass_number */
3374 TV_NONE, /* tv_id */
3375 0, /* properties_required */
3376 0, /* properties_provided */
3377 0, /* properties_destroyed */
3378 0, /* todo_flags_start */
3379 TODO_dump_func, /* todo_flags_finish */
3384 static void
3385 push_uleb128 (varray_type *data_area, unsigned int value)
3389 unsigned char byte = value & 0x7f;
3390 value >>= 7;
3391 if (value)
3392 byte |= 0x80;
3393 VARRAY_PUSH_UCHAR (*data_area, byte);
3395 while (value);
3398 static void
3399 push_sleb128 (varray_type *data_area, int value)
3401 unsigned char byte;
3402 int more;
3406 byte = value & 0x7f;
3407 value >>= 7;
3408 more = ! ((value == 0 && (byte & 0x40) == 0)
3409 || (value == -1 && (byte & 0x40) != 0));
3410 if (more)
3411 byte |= 0x80;
3412 VARRAY_PUSH_UCHAR (*data_area, byte);
3414 while (more);
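/* Worked encoding examples (the values are the ones used in the DWARF
   specification): push_uleb128 emits 624485 as the bytes 0xE5 0x8E
   0x26 -- seven payload bits per byte, least significant group first,
   high bit set on every byte but the last.  push_sleb128 emits -624485
   as 0x9B 0xF1 0x59; the loop stops once the remaining value is all
   sign bits and bit 0x40 of the final byte agrees with the sign.  */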
3418 #ifndef HAVE_AS_LEB128
3419 static int
3420 dw2_size_of_call_site_table (void)
3422 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3423 int size = n * (4 + 4 + 4);
3424 int i;
3426 for (i = 0; i < n; ++i)
3428 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3429 size += size_of_uleb128 (cs->action);
3432 return size;
3435 static int
3436 sjlj_size_of_call_site_table (void)
3438 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3439 int size = 0;
3440 int i;
3442 for (i = 0; i < n; ++i)
3444 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3445 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3446 size += size_of_uleb128 (cs->action);
3449 return size;
3451 #endif
3453 static void
3454 dw2_output_call_site_table (void)
3456 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3457 int i;
3459 for (i = 0; i < n; ++i)
3461 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3462 char reg_start_lab[32];
3463 char reg_end_lab[32];
3464 char landing_pad_lab[32];
3466 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3467 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3469 if (cs->landing_pad)
3470 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3471 CODE_LABEL_NUMBER (cs->landing_pad));
3473 /* ??? Perhaps use insn length scaling if the assembler supports
3474 generic arithmetic. */
3475 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3476 data4 if the function is small enough. */
3477 #ifdef HAVE_AS_LEB128
3478 dw2_asm_output_delta_uleb128 (reg_start_lab,
3479 current_function_func_begin_label,
3480 "region %d start", i);
3481 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3482 "length");
3483 if (cs->landing_pad)
3484 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3485 current_function_func_begin_label,
3486 "landing pad");
3487 else
3488 dw2_asm_output_data_uleb128 (0, "landing pad");
3489 #else
3490 dw2_asm_output_delta (4, reg_start_lab,
3491 current_function_func_begin_label,
3492 "region %d start", i);
3493 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3494 if (cs->landing_pad)
3495 dw2_asm_output_delta (4, landing_pad_lab,
3496 current_function_func_begin_label,
3497 "landing pad");
3498 else
3499 dw2_asm_output_data (4, 0, "landing pad");
3500 #endif
3501 dw2_asm_output_data_uleb128 (cs->action, "action");
3504 call_site_base += n;
3507 static void
3508 sjlj_output_call_site_table (void)
3510 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3511 int i;
3513 for (i = 0; i < n; ++i)
3515 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3517 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3518 "region %d landing pad", i);
3519 dw2_asm_output_data_uleb128 (cs->action, "action");
3522 call_site_base += n;
3525 #ifndef TARGET_UNWIND_INFO
3526 /* Switch to the section that should be used for exception tables. */
3528 static void
3529 switch_to_exception_section (const char * ARG_UNUSED (fnname))
3531 section *s;
3533 if (exception_section)
3534 s = exception_section;
3535 else
3537 /* Compute the section and cache it into exception_section,
3538 unless it depends on the function name. */
3539 if (targetm.have_named_sections)
3541 int flags;
3543 if (EH_TABLES_CAN_BE_READ_ONLY)
3545 int tt_format =
3546 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3547 flags = ((! flag_pic
3548 || ((tt_format & 0x70) != DW_EH_PE_absptr
3549 && (tt_format & 0x70) != DW_EH_PE_aligned))
3550 ? 0 : SECTION_WRITE);
3552 else
3553 flags = SECTION_WRITE;
3555 #ifdef HAVE_LD_EH_GC_SECTIONS
3556 if (flag_function_sections)
3558 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3559 sprintf (section_name, ".gcc_except_table.%s", fnname);
3560 s = get_section (section_name, flags, NULL);
3561 free (section_name);
3563 else
3564 #endif
3565 exception_section
3566 = s = get_section (".gcc_except_table", flags, NULL);
3568 else
3569 exception_section
3570 = s = flag_pic ? data_section : readonly_data_section;
3573 switch_to_section (s);
3575 #endif
3578 /* Output a reference from an exception table to the type_info object TYPE.
3579 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3580 the value. */
3582 static void
3583 output_ttype (tree type, int tt_format, int tt_format_size)
3585 rtx value;
3586 bool is_public = true;
3588 if (type == NULL_TREE)
3589 value = const0_rtx;
3590 else
3592 struct varpool_node *node;
3594 type = lookup_type_for_runtime (type);
3595 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3597 /* Let cgraph know that the rtti decl is used. Not all of the
3598 paths below go through assemble_integer, which would take
3599 care of this for us. */
3600 STRIP_NOPS (type);
3601 if (TREE_CODE (type) == ADDR_EXPR)
3603 type = TREE_OPERAND (type, 0);
3604 if (TREE_CODE (type) == VAR_DECL)
3606 node = varpool_node (type);
3607 if (node)
3608 varpool_mark_needed_node (node);
3609 is_public = TREE_PUBLIC (type);
3612 else
3613 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3616 /* Allow the target to override the type table entry format. */
3617 if (targetm.asm_out.ttype (value))
3618 return;
3620 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3621 assemble_integer (value, tt_format_size,
3622 tt_format_size * BITS_PER_UNIT, 1);
3623 else
3624 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
3627 void
3628 output_function_exception_table (const char * ARG_UNUSED (fnname))
3630 int tt_format, cs_format, lp_format, i, n;
3631 #ifdef HAVE_AS_LEB128
3632 char ttype_label[32];
3633 char cs_after_size_label[32];
3634 char cs_end_label[32];
3635 #else
3636 int call_site_len;
3637 #endif
3638 int have_tt_data;
3639 int tt_format_size = 0;
3641 /* Not all functions need anything. */
3642 if (! crtl->uses_eh_lsda)
3643 return;
3645 if (eh_personality_libfunc)
3646 assemble_external_libcall (eh_personality_libfunc);
3648 #ifdef TARGET_UNWIND_INFO
3649 /* TODO: Move this into target file. */
3650 fputs ("\t.personality\t", asm_out_file);
3651 output_addr_const (asm_out_file, eh_personality_libfunc);
3652 fputs ("\n\t.handlerdata\n", asm_out_file);
3653 /* Note that varasm still thinks we're in the function's code section.
3654 The ".endp" directive that will immediately follow will take us back. */
3655 #else
3656 switch_to_exception_section (fnname);
3657 #endif
3659 /* If the target wants a label to begin the table, emit it here. */
3660 targetm.asm_out.except_table_label (asm_out_file);
3662 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3663 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
3665 /* Indicate the format of the @TType entries. */
3666 if (! have_tt_data)
3667 tt_format = DW_EH_PE_omit;
3668 else
3670 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3671 #ifdef HAVE_AS_LEB128
3672 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3673 current_function_funcdef_no);
3674 #endif
3675 tt_format_size = size_of_encoded_value (tt_format);
3677 assemble_align (tt_format_size * BITS_PER_UNIT);
3680 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3681 current_function_funcdef_no);
3683 /* The LSDA header. */
3685 /* Indicate the format of the landing pad start pointer. An omitted
3686 field implies @LPStart == @Start. */
3687 /* Currently we always put @LPStart == @Start. This field would
3688 be most useful in moving the landing pads completely out of
3689 line to another section, but it could also be used to minimize
3690 the size of uleb128 landing pad offsets. */
3691 lp_format = DW_EH_PE_omit;
3692 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3693 eh_data_format_name (lp_format));
3695 /* @LPStart pointer would go here. */
3697 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3698 eh_data_format_name (tt_format));
3700 #ifndef HAVE_AS_LEB128
3701 if (USING_SJLJ_EXCEPTIONS)
3702 call_site_len = sjlj_size_of_call_site_table ();
3703 else
3704 call_site_len = dw2_size_of_call_site_table ();
3705 #endif
3707 /* A pc-relative 4-byte displacement to the @TType data. */
3708 if (have_tt_data)
3710 #ifdef HAVE_AS_LEB128
3711 char ttype_after_disp_label[32];
3712 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3713 current_function_funcdef_no);
3714 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3715 "@TType base offset");
3716 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3717 #else
3718 /* Ugh. Alignment complicates things. */
3719 unsigned int before_disp, after_disp, last_disp, disp;
3721 before_disp = 1 + 1;
3722 after_disp = (1 + size_of_uleb128 (call_site_len)
3723 + call_site_len
3724 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
3725 + (VEC_length (tree, crtl->eh.ttype_data)
3726 * tt_format_size));
3728 disp = after_disp;
3731 unsigned int disp_size, pad;
3733 last_disp = disp;
3734 disp_size = size_of_uleb128 (disp);
3735 pad = before_disp + disp_size + after_disp;
3736 if (pad % tt_format_size)
3737 pad = tt_format_size - (pad % tt_format_size);
3738 else
3739 pad = 0;
3740 disp = after_disp + pad;
3742 while (disp != last_disp);
3744 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3745 #endif
3748 /* Indicate the format of the call-site offsets. */
3749 #ifdef HAVE_AS_LEB128
3750 cs_format = DW_EH_PE_uleb128;
3751 #else
3752 cs_format = DW_EH_PE_udata4;
3753 #endif
3754 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3755 eh_data_format_name (cs_format));
3757 #ifdef HAVE_AS_LEB128
3758 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3759 current_function_funcdef_no);
3760 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3761 current_function_funcdef_no);
3762 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3763 "Call-site table length");
3764 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3765 if (USING_SJLJ_EXCEPTIONS)
3766 sjlj_output_call_site_table ();
3767 else
3768 dw2_output_call_site_table ();
3769 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3770 #else
3771 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3772 if (USING_SJLJ_EXCEPTIONS)
3773 sjlj_output_call_site_table ();
3774 else
3775 dw2_output_call_site_table ();
3776 #endif
3778 /* ??? Decode and interpret the data for flag_debug_asm. */
3779 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
3780 for (i = 0; i < n; ++i)
3781 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
3782 (i ? NULL : "Action record table"));
3784 if (have_tt_data)
3785 assemble_align (tt_format_size * BITS_PER_UNIT);
3787 i = VEC_length (tree, crtl->eh.ttype_data);
3788 while (i-- > 0)
3790 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
3791 output_ttype (type, tt_format, tt_format_size);
3794 #ifdef HAVE_AS_LEB128
3795 if (have_tt_data)
3796 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3797 #endif
3799 /* ??? Decode and interpret the data for flag_debug_asm. */
3800 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
3801 for (i = 0; i < n; ++i)
3803 if (targetm.arm_eabi_unwinder)
3805 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
3806 output_ttype (type, tt_format, tt_format_size);
3808 else
3809 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
3810 (i ? NULL : "Exception specification table"));
3813 switch_to_section (current_function_section ());
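/* For reference, a hedged sketch of the LSDA layout the function above
   emits (exact sizes depend on the encodings chosen):

       @LPStart format byte        (DW_EH_PE_omit: @LPStart == @Start)
       @TType format byte
       uleb128 @TType base offset  (only if there is type data)
       call-site format byte       (uleb128 or udata4)
       uleb128 call-site table length
       call-site records           (dw2 or sjlj flavor)
       action record table         (pairs of sleb128 values)
       type table                  (aligned; indexed backward)
       exception specification table
   */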
3816 void
3817 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3819 fun->eh->throw_stmt_table = table;
3822 htab_t
3823 get_eh_throw_stmt_table (struct function *fun)
3825 return fun->eh->throw_stmt_table;
3828 /* Dump EH information to OUT. */
3830 void
3831 dump_eh_tree (FILE * out, struct function *fun)
3833 struct eh_region *i;
3834 int depth = 0;
3835 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
3836 "allowed_exceptions", "must_not_throw",
3837 "throw"
3840 i = fun->eh->region_tree;
3841 if (!i)
3842 return;
3844 fprintf (out, "Eh tree:\n");
3845 while (1)
3847 fprintf (out, " %*s %i %s", depth * 2, "",
3848 i->region_number, type_name[(int) i->type]);
3849 if (i->tree_label)
3851 fprintf (out, " tree_label:");
3852 print_generic_expr (out, i->tree_label, 0);
3854 if (i->label)
3855 fprintf (out, " label:%i", INSN_UID (i->label));
3856 if (i->landing_pad)
3858 fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
3859 if (GET_CODE (i->landing_pad) == NOTE)
3860 fprintf (out, " (deleted)");
3862 if (i->post_landing_pad)
3864 fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
3865 if (GET_CODE (i->post_landing_pad) == NOTE)
3866 fprintf (out, " (deleted)");
3868 if (i->resume)
3870 fprintf (out, " resume:%i", INSN_UID (i->resume));
3871 if (GET_CODE (i->resume) == NOTE)
3872 fprintf (out, " (deleted)");
3874 if (i->may_contain_throw)
3875 fprintf (out, " may_contain_throw");
3876 switch (i->type)
3878 case ERT_CLEANUP:
3879 if (i->u.cleanup.prev_try)
3880 fprintf (out, " prev try:%i",
3881 i->u.cleanup.prev_try->region_number);
3882 break;
3884 case ERT_TRY:
3886 struct eh_region *c;
3887 fprintf (out, " catch regions:");
3888 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
3889 fprintf (out, " %i", c->region_number);
3891 break;
3893 case ERT_CATCH:
3894 if (i->u.eh_catch.prev_catch)
3895 fprintf (out, " prev: %i",
3896 i->u.eh_catch.prev_catch->region_number);
3897 if (i->u.eh_catch.next_catch)
3898 fprintf (out, " next %i",
3899 i->u.eh_catch.next_catch->region_number);
3900 fprintf (out, " type:");
3901 print_generic_expr (out, i->u.eh_catch.type_list, 0);
3902 break;
3904 case ERT_ALLOWED_EXCEPTIONS:
3905 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3906 print_generic_expr (out, i->u.allowed.type_list, 0);
3907 break;
3909 case ERT_THROW:
3910 fprintf (out, " type:");
3911 print_generic_expr (out, i->u.eh_throw.type, 0);
3912 break;
3914 case ERT_MUST_NOT_THROW:
3915 break;
3917 case ERT_UNKNOWN:
3918 break;
3920 if (i->aka)
3922 fprintf (out, " also known as:");
3923 dump_bitmap (out, i->aka);
3925 else
3926 fprintf (out, "\n");
3927 /* If there are sub-regions, process them. */
3928 if (i->inner)
3929 i = i->inner, depth++;
3930 /* If there are peers, process them. */
3931 else if (i->next_peer)
3932 i = i->next_peer;
3933 /* Otherwise, step back up the tree to the next peer. */
3934 else
3938 i = i->outer;
3939 depth--;
3940 if (i == NULL)
3941 return;
3943 while (i->next_peer == NULL);
3944 i = i->next_peer;
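/* A hypothetical fragment of the dump produced above (region numbers,
   types and nesting invented), showing the two-spaces-per-depth
   indentation:

       Eh tree:
        1 cleanup may_contain_throw
          2 try catch regions: 3
            3 catch type: A
   */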
3949 /* Dump the EH tree for FN on stderr. */
3951 void
3952 debug_eh_tree (struct function *fn)
3954 dump_eh_tree (stderr, fn);
3958 /* Verify EH region invariants. */
3960 static bool
3961 verify_eh_region (struct eh_region *region, struct eh_region *prev_try)
3963 bool found = false;
3964 if (!region)
3965 return false;
3966 switch (region->type)
3968 case ERT_CLEANUP:
3969 if (region->u.cleanup.prev_try != prev_try)
3971 error ("Wrong prev_try pointer in EH region %i",
3972 region->region_number);
3973 found = true;
3975 break;
3976 case ERT_TRY:
3978 struct eh_region *c, *prev = NULL;
3979 if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
3981 error ("Try region %i has wrong eh_catch pointer to %i",
3982 region->region_number,
3983 region->u.eh_try.eh_catch->region_number);
3984 found = true;
3986 for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
3988 if (c->outer != region->outer)
3990 error
3991 ("Catch region %i has different outer region than try region %i",
3992 c->region_number, region->region_number);
3993 found = true;
3995 if (c->u.eh_catch.prev_catch != prev)
3997 error ("Catch region %i has corrupted catchlist",
3998 c->region_number);
3999 found = true;
4001 prev = c;
4003 if (prev != region->u.eh_try.last_catch)
4005 error
4006 ("Try region %i has wrong last_catch pointer to %i instead of %i",
4007 region->region_number,
4008 region->u.eh_try.last_catch->region_number,
4009 prev->region_number);
4010 found = true;
4013 break;
4014 case ERT_CATCH:
4015 if (!region->u.eh_catch.prev_catch
4016 && (!region->next_peer || region->next_peer->type != ERT_TRY))
4018 error ("Catch region %i should be followed by try", region->region_number);
4019 found = true;
4021 break;
4022 case ERT_ALLOWED_EXCEPTIONS:
4023 case ERT_MUST_NOT_THROW:
4024 case ERT_THROW:
4025 break;
4026 case ERT_UNKNOWN:
4027 gcc_unreachable ();
4029 if (region->type == ERT_TRY)
4030 prev_try = region;
4031 else if (region->type == ERT_MUST_NOT_THROW
4032 || (region->type == ERT_ALLOWED_EXCEPTIONS
4033 && !region->u.allowed.type_list))
4034 prev_try = NULL;
4035 for (region = region->inner; region; region = region->next_peer)
4036 found |= verify_eh_region (region, prev_try);
4037 return found;
4040 /* Verify invariants on EH datastructures. */
4042 void
4043 verify_eh_tree (struct function *fun)
4045 struct eh_region *i, *outer = NULL;
4046 bool err = false;
4047 int nvisited = 0;
4048 int count = 0;
4049 int j;
4050 int depth = 0;
4052 if (!fun->eh->region_tree)
4053 return;
4054 for (j = fun->eh->last_region_number; j > 0; --j)
4055 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4057 if (i->region_number == j)
4058 count++;
4059 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4061 error ("region_array is corrupted for region %i",
4062 i->region_number);
4063 err = true;
4066 i = fun->eh->region_tree;
4068 while (1)
4070 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4072 error ("region_array is corrupted for region %i", i->region_number);
4073 err = true;
4075 if (i->outer != outer)
4077 error ("outer block of region %i is wrong", i->region_number);
4078 err = true;
4080 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4082 error
4083 ("region %i may contain throw and is contained in region that may not",
4084 i->region_number);
4085 err = true;
4087 if (depth < 0)
4089 error ("negative nesting depth of region %i", i->region_number);
4090 err = true;
4092 nvisited++;
4093 /* If there are sub-regions, process them. */
4094 if (i->inner)
4095 outer = i, i = i->inner, depth++;
4096 /* If there are peers, process them. */
4097 else if (i->next_peer)
4098 i = i->next_peer;
4099 /* Otherwise, step back up the tree to the next peer. */
4100 else
4104 i = i->outer;
4105 depth--;
4106 if (i == NULL)
4108 if (depth != -1)
4110 error ("tree list ends on depth %i", depth + 1);
4111 err = true;
4113 if (count != nvisited)
4115 error ("array does not match the region tree");
4116 err = true;
4118 if (!err)
4119 for (i = fun->eh->region_tree; i; i = i->next_peer)
4120 err |= verify_eh_region (i, NULL);
4122 if (err)
4124 dump_eh_tree (stderr, fun);
4125 internal_error ("verify_eh_tree failed");
4127 return;
4129 outer = i->outer;
4131 while (i->next_peer == NULL);
4132 i = i->next_peer;
4137 /* Initialize unwind_resume_libfunc. */
4139 void
4140 default_init_unwind_resume_libfunc (void)
4142 /* The default C++ routines aren't actually C++ specific, so use those. */
4143 unwind_resume_libfunc =
4144 init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4145 : "_Unwind_Resume");
4149 static bool
4150 gate_handle_eh (void)
4152 return doing_eh (0);
4155 /* Complete generation of exception handling code. */
4156 static unsigned int
4157 rest_of_handle_eh (void)
4159 finish_eh_generation ();
4160 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4161 return 0;
4164 struct rtl_opt_pass pass_rtl_eh =
4167 RTL_PASS,
4168 "eh", /* name */
4169 gate_handle_eh, /* gate */
4170 rest_of_handle_eh, /* execute */
4171 NULL, /* sub */
4172 NULL, /* next */
4173 0, /* static_pass_number */
4174 TV_JUMP, /* tv_id */
4175 0, /* properties_required */
4176 0, /* properties_provided */
4177 0, /* properties_destroyed */
4178 0, /* todo_flags_start */
4179 TODO_dump_func /* todo_flags_finish */
4183 #include "gt-except.h"