/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is an error that occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */
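
/* An illustrative sketch (not from the original sources) of the C++
   control flow this machinery implements.  The names widget_error,
   make_widget, build and run are hypothetical:

	struct widget_error {};

	void make_widget () { throw widget_error (); }
	void build ()       { make_widget (); }

	void run ()
	{
	  try { build (); }
	  catch (widget_error &) { }   // control lands here
	}

   The throw in make_widget transfers control past build, up the call
   stack, to the matching handler in run; no error codes or global
   state need be threaded through the intermediate frames.  */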
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *eh_catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) eh_try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) eh_catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) eh_throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};

typedef struct eh_region *eh_region;
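
/* An illustrative sketch (not from the original sources) of how nested
   source constructs map onto the outer/inner/next_peer links above.
   For C++ input like

	try { try { A; } catch (T1) { B; } } catch (T2) { C; }

   the inner ERT_TRY and its ERT_CATCH for T1 end up as peers, both
   children (via `inner') of the outer ERT_TRY, while the ERT_CATCH for
   T2 is a peer of the outer ERT_TRY itself; gen_eh_region_catch below
   links each catch at its try's `outer' level.  */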
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

DEF_VEC_P(eh_region);
DEF_VEC_ALLOC_P(eh_region, gc);
DEF_VEC_ALLOC_P(eh_region, heap);

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  VEC(eh_region,gc) *region_array;
  int last_region_number;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static void remove_eh_handler (struct eh_region *);
static void remove_eh_handler_and_replace (struct eh_region *,
					   struct eh_region *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *, bool);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
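  /* For orientation, the runtime-side layout is roughly the following
     (a from-memory paraphrase, not a verbatim copy of unwind-sjlj.c;
     the authoritative definition lives there):

	 struct SjLj_Function_Context
	 {
	   struct SjLj_Function_Context *prev;
	   int call_site;
	   _Unwind_Word data[4];
	   _Unwind_Personality_Fn personality;
	   void *lsda;
	   void *jbuf[];   // target-dependent setjmp buffer
	 };

     The FIELD_DECLs built below mirror these fields one for one.  */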
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
			      (targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

tree
get_eh_region_no_tree_label (int region)
{
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
				     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
		 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the
   runtime can unwind through it, or we would eliminate the terminate
   call the runtime would otherwise make.  Return TRUE if R contains
   throwing statements or some of the exceptions in inner regions can
   be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statements.

   The function looks O(n^3) at first sight.  In fact it is called at
   most once for every MUST_NOT_THROW in the EH tree from
   remove_unreachable_regions, and because the outer loop walking
   subregions does not dive into MUST_NOT_THROW, it examines every
   region at most once.  The inner loop does the unwinding from the
   throwing statement the same way as we do during CFG construction, so
   it is O(n^2) in the size of the EH tree, but O(n) in the size of the
   CFG.  In practice EH trees are wide, not deep, so this is not
   a problem.  */
static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
{
  struct eh_region *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
	return true;
  if (!i)
    return false;
  while (1)
    {
      /* It is pointless to look into MUST_NOT_THROW
	 or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
	{
	  bool found = TEST_BIT (contains_stmt, i->region_number);
	  if (!found)
	    EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
	      if (TEST_BIT (contains_stmt, n))
		{
		  found = true;
		  break;
		}
	  /* We have a nested region that contains a throwing statement.
	     See if resuming might lead up to the resx, or whether the
	     exception gets caught locally sooner.  If it is caught
	     locally, either region R is not reachable, or it would have
	     a direct edge from the EH resx and thus would have been
	     considered reachable in the first place.  */
	  if (found)
	    {
	      struct eh_region *i1 = i;
	      tree type_thrown = NULL_TREE;

	      if (i1->type == ERT_THROW)
		{
		  type_thrown = i1->u.eh_throw.type;
		  i1 = i1->outer;
		}
	      for (; i1 != r; i1 = i1->outer)
		if (reachable_next_level (i1, type_thrown, NULL,
					  false) >= RNL_CAUGHT)
		  break;
	      if (i1 == r)
		return true;
	    }
	}
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      if (i == r)
		return false;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
/* Bring region R to the root of the tree.  */

static void
bring_to_root (struct eh_region *r)
{
  struct eh_region **pp;
  struct eh_region *outer = r->outer;
  if (!r->outer)
    return;
  for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
    continue;
  *pp = r->next_peer;
  r->outer = NULL;
  r->next_peer = cfun->eh->region_tree;
  cfun->eh->region_tree = r;
}
/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function.
   CONTAINS_STMT is a bitmap of all regions that contain statements
   (or NULL).  */

void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region *r;
  VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
  struct eh_region *local_must_not_throw = NULL;
  struct eh_region *first_must_not_throw = NULL;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
	continue;
      if (!TEST_BIT (reachable, i) && !r->resume)
	{
	  bool kill_it = true;

	  r->tree_label = NULL;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && TEST_BIT (reachable, r->outer->region_number))
		kill_it = false;
	      break;
	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implemented solely in the
		 runtime, but we need them when inlining a function.

		 Keep them if the outer region is not MUST_NOT_THROW as
		 well and if they contain a statement that might unwind
		 through them.  */
	      if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
		  && (!contains_stmt
		      || can_be_reached_by_runtime (contains_stmt, r)))
		kill_it = false;
	      break;
	    case ERT_TRY:
	      {
		/* TRY regions are reachable if any of their CATCH regions
		   are reachable.  */
		struct eh_region *c;
		for (c = r->u.eh_try.eh_catch; c;
		     c = c->u.eh_catch.next_catch)
		  if (TEST_BIT (reachable, c->region_number))
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    {
	      if (dump_file)
		fprintf (dump_file, "Removing unreachable eh region %i\n",
			 r->region_number);
	      remove_eh_handler (r);
	    }
	  else if (r->type == ERT_MUST_NOT_THROW)
	    {
	      if (!first_must_not_throw)
		first_must_not_throw = r;
	      VEC_safe_push (eh_region, heap, must_not_throws, r);
	    }
	}
      else
	if (r->type == ERT_MUST_NOT_THROW)
	  {
	    if (!local_must_not_throw)
	      local_must_not_throw = r;
	    if (r->outer)
	      VEC_safe_push (eh_region, heap, must_not_throws, r);
	  }
    }

  /* MUST_NOT_THROW regions without a local handler are all the same;
     they trigger a terminate call in the runtime.
     Locally handled MUST_NOT_THROW regions can differ in the debug info
     associated with the std::terminate () call, or, if one comes from
     Java and another from C++, in whether they call terminate or abort.

     We merge all MUST_NOT_THROW regions handled by the runtime into one.
     We also bring all local MUST_NOT_THROW regions to the roots of the EH
     tree (since unwinding never continues to the outer region anyway).
     If a MUST_NOT_THROW with a local handler is present in the tree, we
     use that region to merge into, since it will remain in the tree
     anyway; otherwise we use the first MUST_NOT_THROW.

     Merging of locally handled regions needs changes to the CFG.
     Crossjumping should take care of this, by looking at the actual code
     and ensuring that the cleanup actions are really the same.  */

  if (local_must_not_throw)
    first_must_not_throw = local_must_not_throw;

  for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
    {
      if (!r->label && !r->tree_label && r != first_must_not_throw)
	{
	  if (dump_file)
	    fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
		     r->region_number,
		     first_must_not_throw->region_number);
	  remove_eh_handler_and_replace (r, first_must_not_throw);
	  first_must_not_throw->may_contain_throw |= r->may_contain_throw;
	}
      else
	bring_to_root (r);
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
  VEC_free (eh_region, heap, must_not_throws);
}
/* Return an array mapping LABEL_DECL_UID to the region whose tree_label
   is identical to the label.  */

VEC(int,heap) *
label_to_region_map (void)
{
  VEC(int,heap) * label_to_region = NULL;
  int i;

  VEC_safe_grow_cleared (int, heap, label_to_region,
			 cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
	  && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
	VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
		     i);
    }
  return label_to_region;
}

/* Return the number of EH regions.  */
int
num_eh_regions (void)
{
  return cfun->eh->last_region_number + 1;
}
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels we
     allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
	region->label = DECL_RTL_IF_SET (region->tree_label);
    }
}
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
	continue;
      if (crtl->eh.built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;
    }
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
	  && region->region_number == i
	  && region->type != ERT_THROW)
	return true;
    }

  return false;
}
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
	*min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
	*max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
	{
	  o = o->next_peer;
	  duplicate_eh_regions_0 (o, min, max);
	}
    }
}

/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
	{
	  bitmap_set_bit (n->aka, i + eh_offset);
	  VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
	}
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
	{
	  old = old->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
	}
    }

  return ret;
}
/* Return the prev_try pointer that catch subregions of R should
   point to.  */

static struct eh_region *
find_prev_try (struct eh_region * r)
{
  for (; r && r->type != ERT_TRY; r = r->outer)
    if (r->type == ERT_MUST_NOT_THROW
	|| (r->type == ERT_ALLOWED_EXCEPTIONS
	    && !r->u.allowed.type_list))
      {
	r = NULL;
	break;
      }
  return r;
}
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function and root the tree below OUTER_REGION.  Remap labels using the MAP
   callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
		      void *data, int copy_region, int outer_region)
{
  eh_region cur, prev_try, old_prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh)
    return 0;
#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      old_prev_try = find_prev_try (cur);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    {
      min_region = 1;
      max_region = ifun->eh->last_region_number;
      old_prev_try = NULL;
    }
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
			 cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
	splice = &outer->inner;
      else
	splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
	for (i = cfun_last_region_number + 1;
	     i <= cfun->eh->last_region_number; i++)
	  {
	    VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	    if (outer->aka == NULL)
	      outer->aka = BITMAP_GGC_ALLOC ();
	    bitmap_set_bit (outer->aka, i);
	  }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
	{
	  cur = cur->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
	}
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    prev_try = find_prev_try (VEC_index (eh_region, cfun->eh->region_array,
					 outer_region));

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* Any region that was at the top level of the input function is
	 now in the outer region of the output function.  */
      if (cur == NULL)
	{
	  gcc_assert (VEC_index
		      (eh_region, ifun->eh->region_array,
		       i - eh_offset) == NULL);
	  if (outer)
	    {
	      VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	      if (outer->aka == NULL)
		outer->aka = BITMAP_GGC_ALLOC ();
	      bitmap_set_bit (outer->aka, i);
	    }
	  continue;
	}
      if (i != cur->region_number)
	continue;

#define REMAP(REG) \
	(REG) = VEC_index (eh_region, cfun->eh->region_array, \
			   (REG)->region_number + eh_offset)

      switch (cur->type)
	{
	case ERT_TRY:
	  if (cur->u.eh_try.eh_catch)
	    REMAP (cur->u.eh_try.eh_catch);
	  if (cur->u.eh_try.last_catch)
	    REMAP (cur->u.eh_try.last_catch);
	  break;

	case ERT_CATCH:
	  if (cur->u.eh_catch.next_catch)
	    REMAP (cur->u.eh_catch.next_catch);
	  if (cur->u.eh_catch.prev_catch)
	    REMAP (cur->u.eh_catch.prev_catch);
	  break;

	case ERT_CLEANUP:
	  if (cur->u.cleanup.prev_try != old_prev_try)
	    REMAP (cur->u.cleanup.prev_try);
	  else
	    cur->u.cleanup.prev_try = prev_try;
	  break;

	default:
	  break;
	}

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}
/* Return true if REGION_A is outer to REGION_B in IFUN.  */

bool
eh_region_outer_p (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  do
    {
      if (rp_a == rp_b)
	return true;
      rp_b = rp_b->outer;
    }
  while (rp_b);

  return false;
}
/* Return the region number of the region that is outer to both REGION_A
   and REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
	{
	  sbitmap_free (b_outer);
	  return rp_a->region_number;
	}
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&crtl->eh.ehspec_data,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
	VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
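
/* A worked example (not from the original sources) of the uleb128
   encoding used above: each byte holds seven value bits, low-order
   group first, with the high bit set on every byte except the last.
   The value 624485 (0x98765) therefore encodes as the three bytes

	0xE5 0x8E 0x26

   since 624485 == 0x65 + (0x0E << 7) + (0x26 << 14).  The small
   1-based ttypes indices pushed above normally fit in a single byte.  */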
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.eh_catch.filter_list = NULL_TREE;

	  if (r->u.eh_catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.eh_catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.eh_catch.filter_list
		    = tree_cons (NULL_TREE, flt_node,
				 r->u.eh_catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.eh_catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
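
/* An illustrative example (not from the original sources): given a
   hypothetical function with a handler "catch (A)" and an exception
   specification "throw (B, C)", the loop above hands A a positive,
   1-based ttypes filter (say 1) via add_ttypes_entry, while the
   allowed-exceptions list {B, C} receives a negative, -1-based filter
   (say -1) from add_ehspec_entry, whose ehspec_data entry lists the
   ttypes indices of B and C followed by a 0 terminator.  */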
/* Emit SEQ into the basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into a newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.eh_try.eh_catch; c ;
		 c = c->u.eh_catch.next_catch)
	      {
		if (c->u.eh_catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.eh_catch.type_list;
		    tree flt_node = c->u.eh_catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (crtl->eh.filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (crtl->eh.filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0,
				   region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      emit_move_insn (crtl->eh.exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (crtl->eh.filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = VEC_index (eh_region, cfun->eh->region_array,
			  INTVAL (XEXP (note, 0)));
      if (!region)
	continue;

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.eh_throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL, false);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  crtl->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

      /* Calls that are known to not throw need not be marked.  */
      if (note && INTVAL (XEXP (note, 0)) <= 0)
	continue;

      if (note)
	region = VEC_index (eh_region, cfun->eh->region_array,
			    INTVAL (XEXP (note, 0)));
      else
	region = NULL;

      if (!region)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	this_call_site = lp_info[region->region_number].call_site_index;

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
2013 /* Construct the SjLj_Function_Context. */
2015 static void
2016 sjlj_emit_function_enter (rtx dispatch_label)
2018 rtx fn_begin, fc, mem, seq;
2019 bool fn_begin_outside_block;
2021 fc = crtl->eh.sjlj_fc;
2023 start_sequence ();
2025 /* We're storing this libcall's address into memory instead of
2026 calling it directly. Thus, we must call assemble_external_libcall
2027 here, as we can not depend on emit_library_call to do it for us. */
2028 assemble_external_libcall (eh_personality_libfunc);
2029 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2030 emit_move_insn (mem, eh_personality_libfunc);
2032 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2033 if (crtl->uses_eh_lsda)
2035 char buf[20];
2036 rtx sym;
2038 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2039 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2040 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2041 emit_move_insn (mem, sym);
2043 else
2044 emit_move_insn (mem, const0_rtx);
2046 #ifdef DONT_USE_BUILTIN_SETJMP
2048 rtx x;
2049 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2050 TYPE_MODE (integer_type_node), 1,
2051 plus_constant (XEXP (fc, 0),
2052 sjlj_fc_jbuf_ofs), Pmode);
2054 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2055 TYPE_MODE (integer_type_node), 0, dispatch_label);
2056 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2058 #else
2059 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2060 dispatch_label);
2061 #endif
2063 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2064 1, XEXP (fc, 0), Pmode);
2066 seq = get_insns ();
2067 end_sequence ();
2069 /* ??? Instead of doing this at the beginning of the function,
2070 do this in a block that is at loop level 0 and dominates all
2071 can_throw_internal instructions. */
2073 fn_begin_outside_block = true;
2074 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2075 if (NOTE_P (fn_begin))
2077 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2078 break;
2079 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2080 fn_begin_outside_block = false;
2083 if (fn_begin_outside_block)
2084 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2085 else
2086 emit_insn_after (seq, fn_begin);
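/* A rough source-level picture of the prologue emitted above
   (illustrative sketch only; the personality routine depends on the
   language, e.g. __gxx_personality_sj0 for C++, and "fc" stands for
   the crtl->eh.sjlj_fc stack slot):

       struct SjLj_Function_Context fc;

       fc.personality = personality_routine;
       fc.lsda = &LLSDAnnn;              // if crtl->uses_eh_lsda
       if (__builtin_setjmp (fc.jbuf))   // or setjmp_libfunc
         goto dispatch_label;
       _Unwind_SjLj_Register (&fc);

   inserted on the entry edge, or after NOTE_INSN_FUNCTION_BEG when
   that note already sits inside a basic block.  */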
2089 /* Call back from expand_function_end to know where we should put
2090 the call to unwind_sjlj_unregister_libfunc if needed. */
2092 void
2093 sjlj_emit_function_exit_after (rtx after)
2095 crtl->eh.sjlj_exit_after = after;
2098 static void
2099 sjlj_emit_function_exit (void)
2101 rtx seq;
2102 edge e;
2103 edge_iterator ei;
2105 start_sequence ();
2107 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2108 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2110 seq = get_insns ();
2111 end_sequence ();
2113 /* ??? Really this can be done in any block at loop level 0 that
2114 post-dominates all can_throw_internal instructions. This is
2115 the last possible moment. */
2117 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
2118 if (e->flags & EDGE_FALLTHRU)
2119 break;
2120 if (e)
2122 rtx insn;
2124 /* Figure out whether the place where we are supposed to insert the
2125 libcall is inside the last basic block or after it. In the latter
2126 case we need to emit the insns on the edge. */
2127 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
2128 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
2130 if (insn == crtl->eh.sjlj_exit_after)
2132 if (LABEL_P (insn))
2133 insn = NEXT_INSN (insn);
2134 emit_insn_after (seq, insn);
2135 return;
2137 if (insn == BB_END (e->src))
2138 break;
2140 insert_insn_on_edge (seq, e);
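/* The matching exit sequence amounts to (sketch, same "fc" as above)

       _Unwind_SjLj_Unregister (&fc);

   emitted after crtl->eh.sjlj_exit_after when that insn lies in the
   final basic block, and on the fallthru edge to the exit block
   otherwise.  */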
2144 static void
2145 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2147 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2148 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2149 int i, first_reachable;
2150 rtx mem, dispatch, seq, fc;
2151 rtx before;
2152 basic_block bb;
2153 edge e;
2155 fc = crtl->eh.sjlj_fc;
2157 start_sequence ();
2159 emit_label (dispatch_label);
2161 #ifndef DONT_USE_BUILTIN_SETJMP
2162 expand_builtin_setjmp_receiver (dispatch_label);
2163 #endif
2165 /* Load up dispatch index, exc_ptr and filter values from the
2166 function context. */
2167 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2168 sjlj_fc_call_site_ofs);
2169 dispatch = copy_to_reg (mem);
2171 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2172 if (unwind_word_mode != ptr_mode)
2174 #ifdef POINTERS_EXTEND_UNSIGNED
2175 mem = convert_memory_address (ptr_mode, mem);
2176 #else
2177 mem = convert_to_mode (ptr_mode, mem, 0);
2178 #endif
2180 emit_move_insn (crtl->eh.exc_ptr, mem);
2182 mem = adjust_address (fc, unwind_word_mode,
2183 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2184 if (unwind_word_mode != filter_mode)
2185 mem = convert_to_mode (filter_mode, mem, 0);
2186 emit_move_insn (crtl->eh.filter, mem);
2188 /* Jump to one of the directly reachable regions. */
2189 /* ??? This really ought to be using a switch statement. */
2191 first_reachable = 0;
2192 for (i = cfun->eh->last_region_number; i > 0; --i)
2194 if (! lp_info[i].directly_reachable)
2195 continue;
2197 if (! first_reachable)
2199 first_reachable = i;
2200 continue;
2203 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2204 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2205 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2206 ->post_landing_pad);
2209 seq = get_insns ();
2210 end_sequence ();
2212 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2213 ->post_landing_pad);
2215 bb = emit_to_new_bb_before (seq, before);
2216 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2217 e->count = bb->count;
2218 e->probability = REG_BR_PROB_BASE;
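/* Sketch of the dispatch block built above (a compare chain, not yet
   the switch statement the ??? note asks for):

       dispatch_label:
         d       = fc.call_site;   // dispatch index written back by the unwinder
         exc_ptr = fc.data[0];
         filter  = fc.data[1];
         if (d == dispatch_index_B) goto post_landing_pad_B;
         if (d == dispatch_index_C) goto post_landing_pad_C;
         goto post_landing_pad_A;   // fallthru to the first reachable region

   where A is the first directly reachable region found and B, C, ...
   are the remaining ones.  */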
2221 static void
2222 sjlj_build_landing_pads (void)
2224 struct sjlj_lp_info *lp_info;
2226 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2228 if (sjlj_find_directly_reachable_regions (lp_info))
2230 rtx dispatch_label = gen_label_rtx ();
2231 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2232 TYPE_MODE (sjlj_fc_type_node),
2233 TYPE_ALIGN (sjlj_fc_type_node));
2234 crtl->eh.sjlj_fc
2235 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2236 int_size_in_bytes (sjlj_fc_type_node),
2237 align);
2239 sjlj_assign_call_site_values (dispatch_label, lp_info);
2240 sjlj_mark_call_sites (lp_info);
2242 sjlj_emit_function_enter (dispatch_label);
2243 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2244 sjlj_emit_function_exit ();
2247 free (lp_info);
2250 void
2251 finish_eh_generation (void)
2253 basic_block bb;
2255 /* Nothing to do if no regions created. */
2256 if (cfun->eh->region_tree == NULL)
2257 return;
2259 /* The object here is to provide find_basic_blocks with detailed
2260 information (via reachable_handlers) on how exception control
2261 flows within the function. In this first pass, we can include
2262 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2263 regions, and hope that it will be useful in deleting unreachable
2264 handlers. Subsequently, we will generate landing pads which will
2265 connect many of the handlers, and then type information will not
2266 be effective. Still, this is a win over previous implementations. */
2268 /* These registers are used by the landing pads. Make sure they
2269 have been generated. */
2270 get_exception_pointer ();
2271 get_exception_filter ();
2273 /* Construct the landing pads. */
2275 assign_filter_values ();
2276 build_post_landing_pads ();
2277 connect_post_landing_pads ();
2278 if (USING_SJLJ_EXCEPTIONS)
2279 sjlj_build_landing_pads ();
2280 else
2281 dw2_build_landing_pads ();
2283 crtl->eh.built_landing_pads = 1;
2285 /* We've totally changed the CFG. Start over. */
2286 find_exception_handler_labels ();
2287 break_superblocks ();
2288 if (USING_SJLJ_EXCEPTIONS
2289 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2290 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2291 commit_edge_insertions ();
2292 FOR_EACH_BB (bb)
2294 edge e;
2295 edge_iterator ei;
2296 bool eh = false;
2297 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2299 if (e->flags & EDGE_EH)
2301 remove_edge (e);
2302 eh = true;
2304 else
2305 ei_next (&ei);
2307 if (eh)
2308 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2312 /* This section handles removing dead code for flow. */
2314 /* Splice REGION from the region tree and replace it by REPLACE etc. */
2316 static void
2317 remove_eh_handler_and_replace (struct eh_region *region,
2318 struct eh_region *replace)
2320 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2321 rtx lab;
2323 outer = region->outer;
2324 /* For the benefit of efficiently handling REG_EH_REGION notes,
2325 replace this region in the region array with its containing
2326 region. Note that previous region deletions may result in
2327 multiple copies of this region in the array, so we have a
2328 list of alternate numbers by which we are known. */
2330 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2331 replace);
2332 if (region->aka)
2334 unsigned i;
2335 bitmap_iterator bi;
2337 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2339 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2343 if (replace)
2345 if (!replace->aka)
2346 replace->aka = BITMAP_GGC_ALLOC ();
2347 if (region->aka)
2348 bitmap_ior_into (replace->aka, region->aka);
2349 bitmap_set_bit (replace->aka, region->region_number);
2352 if (crtl->eh.built_landing_pads)
2353 lab = region->landing_pad;
2354 else
2355 lab = region->label;
2356 if (outer)
2357 pp_start = &outer->inner;
2358 else
2359 pp_start = &cfun->eh->region_tree;
2360 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2361 continue;
2362 *pp = region->next_peer;
2364 if (replace)
2365 pp_start = &replace->inner;
2366 else
2367 pp_start = &cfun->eh->region_tree;
2368 inner = region->inner;
2369 if (inner)
2371 for (p = inner; p->next_peer ; p = p->next_peer)
2372 p->outer = replace;
2373 p->outer = replace;
2375 p->next_peer = *pp_start;
2376 *pp_start = inner;
2379 if (region->type == ERT_CATCH)
2381 struct eh_region *eh_try, *next, *prev;
2383 for (eh_try = region->next_peer;
2384 eh_try->type == ERT_CATCH;
2385 eh_try = eh_try->next_peer)
2386 continue;
2387 gcc_assert (eh_try->type == ERT_TRY);
2389 next = region->u.eh_catch.next_catch;
2390 prev = region->u.eh_catch.prev_catch;
2392 if (next)
2393 next->u.eh_catch.prev_catch = prev;
2394 else
2395 eh_try->u.eh_try.last_catch = prev;
2396 if (prev)
2397 prev->u.eh_catch.next_catch = next;
2398 else
2400 eh_try->u.eh_try.eh_catch = next;
2401 if (! next)
2402 remove_eh_handler (eh_try);
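/* For instance (illustrative), splicing out region R and replacing it
   with its outer region P rewires the peer/inner chains like so:

       P                              P
       `-- R -- S        ==>          `-- C1 -- C2 -- S
           `-- C1 -- C2

   R's entries in region_array are redirected to P, P->aka accumulates
   R's region numbers, and R's children C1 and C2 get outer = P and are
   prepended to P's list of inner regions.  */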
2407 /* Splice REGION from the region tree and replace it by the outer region
2408 etc. */
2410 static void
2411 remove_eh_handler (struct eh_region *region)
2413 remove_eh_handler_and_replace (region, region->outer);
2416 /* Remove EH region R that has turned out to have no code in its handler. */
2418 void
2419 remove_eh_region (int r)
2421 struct eh_region *region;
2423 region = VEC_index (eh_region, cfun->eh->region_array, r);
2424 remove_eh_handler (region);
2427 /* Invokes CALLBACK for every exception handler label. Only used by old
2428 loop hackery; should not be used by new code. */
2430 void
2431 for_each_eh_label (void (*callback) (rtx))
2433 int i;
2434 for (i = 0; i < cfun->eh->last_region_number; i++)
2436 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2437 if (r && r->region_number == i && r->label
2438 && GET_CODE (r->label) == CODE_LABEL)
2439 (*callback) (r->label);
2443 /* Invoke CALLBACK for every exception region in the current function. */
2445 void
2446 for_each_eh_region (void (*callback) (struct eh_region *))
2448 int i, n = cfun->eh->last_region_number;
2449 for (i = 1; i <= n; ++i)
2451 struct eh_region *region;
2453 region = VEC_index (eh_region, cfun->eh->region_array, i);
2454 if (region)
2455 (*callback) (region);
2459 /* This section describes CFG exception edges for flow. */
2461 /* For communicating between calls to reachable_next_level. */
2462 struct reachable_info
2464 tree types_caught;
2465 tree types_allowed;
2466 void (*callback) (struct eh_region *, void *);
2467 void *callback_data;
2470 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2471 base class of TYPE, is in HANDLED. */
2473 static int
2474 check_handled (tree handled, tree type)
2476 tree t;
2478 /* We can check for exact matches without front-end help. */
2479 if (! lang_eh_type_covers)
2481 for (t = handled; t ; t = TREE_CHAIN (t))
2482 if (TREE_VALUE (t) == type)
2483 return 1;
2485 else
2487 for (t = handled; t ; t = TREE_CHAIN (t))
2488 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2489 return 1;
2492 return 0;
2495 /* A subroutine of reachable_next_level. If we are collecting a list
2496 of handlers, add one. After landing pad generation, reference
2497 it instead of the handlers themselves. Further, the handlers are
2498 all wired together, so by referencing one, we've got them all.
2499 Before landing pad generation we reference each handler individually.
2501 LP_REGION contains the landing pad; REGION is the handler. */
2503 static void
2504 add_reachable_handler (struct reachable_info *info,
2505 struct eh_region *lp_region, struct eh_region *region)
2507 if (! info)
2508 return;
2510 if (crtl->eh.built_landing_pads)
2511 info->callback (lp_region, info->callback_data);
2512 else
2513 info->callback (region, info->callback_data);
2516 /* Process one level of exception regions for reachability.
2517 If TYPE_THROWN is non-null, then it is the *exact* type being
2518 propagated. If INFO is non-null, then collect handler labels
2519 and caught/allowed type information between invocations. */
2521 static enum reachable_code
2522 reachable_next_level (struct eh_region *region, tree type_thrown,
2523 struct reachable_info *info,
2524 bool maybe_resx)
2526 switch (region->type)
2528 case ERT_CLEANUP:
2529 /* Before landing-pad generation, we model control flow
2530 directly to the individual handlers. In this way we can
2531 see that catch handler types may shadow one another. */
2532 add_reachable_handler (info, region, region);
2533 return RNL_MAYBE_CAUGHT;
2535 case ERT_TRY:
2537 struct eh_region *c;
2538 enum reachable_code ret = RNL_NOT_CAUGHT;
2540 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2542 /* A catch-all handler ends the search. */
2543 if (c->u.eh_catch.type_list == NULL)
2545 add_reachable_handler (info, region, c);
2546 return RNL_CAUGHT;
2549 if (type_thrown)
2551 /* If we have at least one type match, end the search. */
2552 tree tp_node = c->u.eh_catch.type_list;
2554 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2556 tree type = TREE_VALUE (tp_node);
2558 if (type == type_thrown
2559 || (lang_eh_type_covers
2560 && (*lang_eh_type_covers) (type, type_thrown)))
2562 add_reachable_handler (info, region, c);
2563 return RNL_CAUGHT;
2567 /* If we have definitive information of a match failure,
2568 the catch won't trigger. */
2569 if (lang_eh_type_covers)
2570 return RNL_NOT_CAUGHT;
2573 /* At this point, we either don't know what type is thrown or
2574 don't have front-end assistance to help deciding if it is
2575 covered by one of the types in the list for this region.
2577 We'd then like to add this region to the list of reachable
2578 handlers since it is indeed potentially reachable based on the
2579 information we have.
2581 Actually, this handler is for sure not reachable if all the
2582 types it matches have already been caught. That is, it is only
2583 potentially reachable if at least one of the types it catches
2584 has not been previously caught. */
2586 if (! info)
2587 ret = RNL_MAYBE_CAUGHT;
2588 else
2590 tree tp_node = c->u.eh_catch.type_list;
2591 bool maybe_reachable = false;
2593 /* Compute the potential reachability of this handler and
2594 update the list of types caught at the same time. */
2595 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2597 tree type = TREE_VALUE (tp_node);
2599 if (! check_handled (info->types_caught, type))
2601 info->types_caught
2602 = tree_cons (NULL, type, info->types_caught);
2604 maybe_reachable = true;
2608 if (maybe_reachable)
2610 add_reachable_handler (info, region, c);
2612 /* ??? If the catch type is a base class of every allowed
2613 type, then we know we can stop the search. */
2614 ret = RNL_MAYBE_CAUGHT;
2619 return ret;
2622 case ERT_ALLOWED_EXCEPTIONS:
2623 /* An empty list of types definitely ends the search. */
2624 if (region->u.allowed.type_list == NULL_TREE)
2626 add_reachable_handler (info, region, region);
2627 return RNL_CAUGHT;
2630 /* Collect a list of lists of allowed types for use in detecting
2631 when a catch may be transformed into a catch-all. */
2632 if (info)
2633 info->types_allowed = tree_cons (NULL_TREE,
2634 region->u.allowed.type_list,
2635 info->types_allowed);
2637 /* If we have definitive information about the type hierarchy,
2638 then we can tell if the thrown type will pass through the
2639 filter. */
2640 if (type_thrown && lang_eh_type_covers)
2642 if (check_handled (region->u.allowed.type_list, type_thrown))
2643 return RNL_NOT_CAUGHT;
2644 else
2646 add_reachable_handler (info, region, region);
2647 return RNL_CAUGHT;
2651 add_reachable_handler (info, region, region);
2652 return RNL_MAYBE_CAUGHT;
2654 case ERT_CATCH:
2655 /* Catch regions are handled by their controlling try region. */
2656 return RNL_NOT_CAUGHT;
2658 case ERT_MUST_NOT_THROW:
2659 /* Here we end our search, since no exceptions may propagate.
2661 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
2662 only via locally handled RESX instructions.
2664 When we inline a function call, we can bring in new handlers. To keep
2665 ERT_MUST_NOT_THROW landing pads from being deleted as unreachable,
2666 assume that such handlers exist for any inlinable call until the
2667 inlining decisions are fixed. */
2669 if (maybe_resx)
2671 add_reachable_handler (info, region, region);
2672 return RNL_CAUGHT;
2674 else
2675 return RNL_BLOCKED;
2677 case ERT_THROW:
2678 case ERT_UNKNOWN:
2679 /* Shouldn't see these here. */
2680 gcc_unreachable ();
2681 break;
2682 default:
2683 gcc_unreachable ();
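/* Worked example (hypothetical C++ source):

       try {
         may_throw ();        // throws D, derived from B
       } catch (B &) { ... }

   Walking outward from the throw site reaches the ERT_TRY region; if
   the front end can tell that lang_eh_type_covers (B, D) holds, the
   catch is recorded via add_reachable_handler and RNL_CAUGHT ends the
   search.  Without that knowledge the catch is only RNL_MAYBE_CAUGHT
   and the walk continues to the outer regions.  */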
2687 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2689 void
2690 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
2691 void (*callback) (struct eh_region *, void *),
2692 void *callback_data)
2694 struct reachable_info info;
2695 struct eh_region *region;
2696 tree type_thrown;
2698 memset (&info, 0, sizeof (info));
2699 info.callback = callback;
2700 info.callback_data = callback_data;
2702 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2703 if (!region)
2704 return;
2706 type_thrown = NULL_TREE;
2707 if (is_resx)
2709 /* A RESX leaves a region instead of entering it. Thus the
2710 region itself may have been deleted out from under us. */
2711 if (region == NULL)
2712 return;
2713 region = region->outer;
2715 else if (region->type == ERT_THROW)
2717 type_thrown = region->u.eh_throw.type;
2718 region = region->outer;
2721 while (region)
2723 if (reachable_next_level (region, type_thrown, &info,
2724 inlinable_call || is_resx) >= RNL_CAUGHT)
2725 break;
2726 /* If we have processed one cleanup, there is no point in
2727 processing any more of them. Each cleanup will have an edge
2728 to the next outer cleanup region, so the flow graph will be
2729 accurate. */
2730 if (region->type == ERT_CLEANUP)
2731 region = region->u.cleanup.prev_try;
2732 else
2733 region = region->outer;
2737 /* Retrieve a list of labels of exception handlers which can be
2738 reached by a given insn. */
2740 static void
2741 arh_to_landing_pad (struct eh_region *region, void *data)
2743 rtx *p_handlers = (rtx *) data;
2744 if (! *p_handlers)
2745 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2748 static void
2749 arh_to_label (struct eh_region *region, void *data)
2751 rtx *p_handlers = (rtx *) data;
2752 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
rtx
2756 reachable_handlers (rtx insn)
2758 bool is_resx = false;
2759 rtx handlers = NULL;
2760 int region_number;
2762 if (JUMP_P (insn)
2763 && GET_CODE (PATTERN (insn)) == RESX)
2765 region_number = XINT (PATTERN (insn), 0);
2766 is_resx = true;
2768 else
2770 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2771 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2772 return NULL;
2773 region_number = INTVAL (XEXP (note, 0));
2776 foreach_reachable_handler (region_number, is_resx, false,
2777 (crtl->eh.built_landing_pads
2778 ? arh_to_landing_pad
2779 : arh_to_label),
2780 &handlers);
2782 return handlers;
2785 /* Determine if the given INSN can throw an exception that is caught
2786 within the function. */
2788 bool
2789 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
2791 struct eh_region *region;
2792 tree type_thrown;
2794 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2795 if (!region)
2796 return false;
2798 type_thrown = NULL_TREE;
2799 if (is_resx)
2800 region = region->outer;
2801 else if (region->type == ERT_THROW)
2803 type_thrown = region->u.eh_throw.type;
2804 region = region->outer;
2807 /* If this exception is ignored by each and every containing region,
2808 then control passes straight out. The runtime may handle some
2809 regions, which also do not require processing internally. */
2810 for (; region; region = region->outer)
2812 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
2813 inlinable_call || is_resx);
2814 if (how == RNL_BLOCKED)
2815 return false;
2816 if (how != RNL_NOT_CAUGHT)
2817 return true;
2820 return false;
2823 bool
2824 can_throw_internal (const_rtx insn)
2826 rtx note;
2828 if (! INSN_P (insn))
2829 return false;
2831 if (JUMP_P (insn)
2832 && GET_CODE (PATTERN (insn)) == RESX
2833 && XINT (PATTERN (insn), 0) > 0)
2834 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
2836 if (NONJUMP_INSN_P (insn)
2837 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2838 insn = XVECEXP (PATTERN (insn), 0, 0);
2840 /* Every insn that might throw has an EH_REGION note. */
2841 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2842 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2843 return false;
2845 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
2848 /* Determine if the given INSN can throw an exception that is
2849 visible outside the function. */
2851 bool
2852 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
2854 struct eh_region *region;
2855 tree type_thrown;
2857 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2858 if (!region)
2859 return true;
2861 type_thrown = NULL_TREE;
2862 if (is_resx)
2863 region = region->outer;
2864 else if (region->type == ERT_THROW)
2866 type_thrown = region->u.eh_throw.type;
2867 region = region->outer;
2870 /* If the exception is caught or blocked by any containing region,
2871 then it is not seen by any calling function. */
2872 for (; region ; region = region->outer)
2873 if (reachable_next_level (region, type_thrown, NULL,
2874 inlinable_call || is_resx) >= RNL_CAUGHT)
2875 return false;
2877 return true;
2880 bool
2881 can_throw_external (const_rtx insn)
2883 rtx note;
2885 if (! INSN_P (insn))
2886 return false;
2888 if (JUMP_P (insn)
2889 && GET_CODE (PATTERN (insn)) == RESX
2890 && XINT (PATTERN (insn), 0) > 0)
2891 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
2893 if (NONJUMP_INSN_P (insn)
2894 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2896 rtx seq = PATTERN (insn);
2897 int i, n = XVECLEN (seq, 0);
2899 for (i = 0; i < n; i++)
2900 if (can_throw_external (XVECEXP (seq, 0, i)))
2901 return true;
2903 return false;
2906 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2907 if (!note)
2909 /* Calls (and trapping insns) without notes are outside any
2910 exception handling region in this function. We have to
2911 assume it might throw. Given that the front end and middle
2912 ends mark known NOTHROW functions, this isn't so wildly
2913 inaccurate. */
2914 return (CALL_P (insn)
2915 || (flag_non_call_exceptions
2916 && may_trap_p (PATTERN (insn))));
2918 if (INTVAL (XEXP (note, 0)) <= 0)
2919 return false;
2921 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
2924 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
2926 unsigned int
2927 set_nothrow_function_flags (void)
2929 rtx insn;
2931 crtl->nothrow = 1;
2933 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2934 something that can throw an exception. We specifically exempt
2935 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2936 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2937 is optimistic. */
2939 crtl->all_throwers_are_sibcalls = 1;
2941 /* If we don't know that this implementation of the function will
2942 actually be used, then we must not set TREE_NOTHROW, since
2943 callers must not assume that this function does not throw. */
2944 if (TREE_NOTHROW (current_function_decl))
2945 return 0;
2947 if (! flag_exceptions)
2948 return 0;
2950 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2951 if (can_throw_external (insn))
2953 crtl->nothrow = 0;
2955 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2957 crtl->all_throwers_are_sibcalls = 0;
2958 return 0;
2962 for (insn = crtl->epilogue_delay_list; insn;
2963 insn = XEXP (insn, 1))
2964 if (can_throw_external (insn))
2966 crtl->nothrow = 0;
2968 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2970 crtl->all_throwers_are_sibcalls = 0;
2971 return 0;
2974 if (crtl->nothrow
2975 && (cgraph_function_body_availability (cgraph_node
2976 (current_function_decl))
2977 >= AVAIL_AVAILABLE))
2979 TREE_NOTHROW (current_function_decl) = 1;
2981 if (dump_file)
2982 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2983 current_function_name ());
2985 return 0;
2988 struct rtl_opt_pass pass_set_nothrow_function_flags =
2991 RTL_PASS,
2992 "nothrow", /* name */
2993 NULL, /* gate */
2994 set_nothrow_function_flags, /* execute */
2995 NULL, /* sub */
2996 NULL, /* next */
2997 0, /* static_pass_number */
2998 0, /* tv_id */
2999 0, /* properties_required */
3000 0, /* properties_provided */
3001 0, /* properties_destroyed */
3002 0, /* todo_flags_start */
3003 TODO_dump_func, /* todo_flags_finish */
3008 /* Various hooks for unwind library. */
3010 /* Do any necessary initialization to access arbitrary stack frames.
3011 On the SPARC, this means flushing the register windows. */
3013 void
3014 expand_builtin_unwind_init (void)
3016 /* Set this so all the registers get saved in our frame; we need to be
3017 able to copy the saved values for any registers from frames we unwind. */
3018 crtl->saves_all_registers = 1;
3020 #ifdef SETUP_FRAME_ADDRESSES
3021 SETUP_FRAME_ADDRESSES ();
3022 #endif
rtx
3026 expand_builtin_eh_return_data_regno (tree exp)
3028 tree which = CALL_EXPR_ARG (exp, 0);
3029 unsigned HOST_WIDE_INT iwhich;
3031 if (TREE_CODE (which) != INTEGER_CST)
3033 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3034 return constm1_rtx;
3037 iwhich = tree_low_cst (which, 1);
3038 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3039 if (iwhich == INVALID_REGNUM)
3040 return constm1_rtx;
3042 #ifdef DWARF_FRAME_REGNUM
3043 iwhich = DWARF_FRAME_REGNUM (iwhich);
3044 #else
3045 iwhich = DBX_REGISTER_NUMBER (iwhich);
3046 #endif
3048 return GEN_INT (iwhich);
3051 /* Given a value extracted from the return address register or stack slot,
3052 return the actual address encoded in that value. */
rtx
3055 expand_builtin_extract_return_addr (tree addr_tree)
3057 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3059 if (GET_MODE (addr) != Pmode
3060 && GET_MODE (addr) != VOIDmode)
3062 #ifdef POINTERS_EXTEND_UNSIGNED
3063 addr = convert_memory_address (Pmode, addr);
3064 #else
3065 addr = convert_to_mode (Pmode, addr, 0);
3066 #endif
3069 /* First mask out any unwanted bits. */
3070 #ifdef MASK_RETURN_ADDR
3071 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3072 #endif
3074 /* Then adjust to find the real return address. */
3075 #if defined (RETURN_ADDR_OFFSET)
3076 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3077 #endif
3079 return addr;
3082 /* Given an actual address in addr_tree, do any necessary encoding
3083 and return the value to be stored in the return address register or
3084 stack slot so the epilogue will return to that address. */
rtx
3087 expand_builtin_frob_return_addr (tree addr_tree)
3089 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3091 addr = convert_memory_address (Pmode, addr);
3093 #ifdef RETURN_ADDR_OFFSET
3094 addr = force_reg (Pmode, addr);
3095 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3096 #endif
3098 return addr;
3101 /* Set up the epilogue with the magic bits we'll need to return to the
3102 exception handler. */
3104 void
3105 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3106 tree handler_tree)
3108 rtx tmp;
3110 #ifdef EH_RETURN_STACKADJ_RTX
3111 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3112 VOIDmode, EXPAND_NORMAL);
3113 tmp = convert_memory_address (Pmode, tmp);
3114 if (!crtl->eh.ehr_stackadj)
3115 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3116 else if (tmp != crtl->eh.ehr_stackadj)
3117 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3118 #endif
3120 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3121 VOIDmode, EXPAND_NORMAL);
3122 tmp = convert_memory_address (Pmode, tmp);
3123 if (!crtl->eh.ehr_handler)
3124 crtl->eh.ehr_handler = copy_to_reg (tmp);
3125 else if (tmp != crtl->eh.ehr_handler)
3126 emit_move_insn (crtl->eh.ehr_handler, tmp);
3128 if (!crtl->eh.ehr_label)
3129 crtl->eh.ehr_label = gen_label_rtx ();
3130 emit_jump (crtl->eh.ehr_label);
3133 void
3134 expand_eh_return (void)
3136 rtx around_label;
3138 if (! crtl->eh.ehr_label)
3139 return;
3141 crtl->calls_eh_return = 1;
3143 #ifdef EH_RETURN_STACKADJ_RTX
3144 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3145 #endif
3147 around_label = gen_label_rtx ();
3148 emit_jump (around_label);
3150 emit_label (crtl->eh.ehr_label);
3151 clobber_return_register ();
3153 #ifdef EH_RETURN_STACKADJ_RTX
3154 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3155 #endif
3157 #ifdef HAVE_eh_return
3158 if (HAVE_eh_return)
3159 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3160 else
3161 #endif
3163 #ifdef EH_RETURN_HANDLER_RTX
3164 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3165 #else
3166 error ("__builtin_eh_return not supported on this target");
3167 #endif
3170 emit_label (around_label);
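/* Shape of the code emitted here, as a sketch:

         goto around;                           // normal paths skip the stub
       ehr_label:                               // reached from __builtin_eh_return
         clobber_return_register ();
         EH_RETURN_STACKADJ_RTX = ehr_stackadj; // if the target defines it
         eh_return (ehr_handler);               // or EH_RETURN_HANDLER_RTX = ...
       around:
         ... normal epilogue ...  */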
3173 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3174 POINTERS_EXTEND_UNSIGNED and return it. */
rtx
3177 expand_builtin_extend_pointer (tree addr_tree)
3179 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3180 int extend;
3182 #ifdef POINTERS_EXTEND_UNSIGNED
3183 extend = POINTERS_EXTEND_UNSIGNED;
3184 #else
3185 /* The previous EH code did an unsigned extend by default, so we do this also
3186 for consistency. */
3187 extend = 1;
3188 #endif
3190 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3193 /* In the following functions, we represent entries in the action table
3194 as 1-based indices. Special cases are:
3196 0: null action record, non-null landing pad; implies cleanups
3197 -1: null action record, null landing pad; implies no action
3198 -2: no call-site entry; implies must_not_throw
3199 -3: we have yet to process outer regions
3201 Further, no special cases apply to the "next" field of the record.
3202 For next, 0 means end of list. */
3204 struct action_record
3206 int offset;
3207 int filter;
3208 int next;
3211 static int
3212 action_record_eq (const void *pentry, const void *pdata)
3214 const struct action_record *entry = (const struct action_record *) pentry;
3215 const struct action_record *data = (const struct action_record *) pdata;
3216 return entry->filter == data->filter && entry->next == data->next;
3219 static hashval_t
3220 action_record_hash (const void *pentry)
3222 const struct action_record *entry = (const struct action_record *) pentry;
3223 return entry->next * 1009 + entry->filter;
3226 static int
3227 add_action_record (htab_t ar_hash, int filter, int next)
3229 struct action_record **slot, *new_ar, tmp;
3231 tmp.filter = filter;
3232 tmp.next = next;
3233 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3235 if ((new_ar = *slot) == NULL)
3237 new_ar = XNEW (struct action_record);
3238 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3239 new_ar->filter = filter;
3240 new_ar->next = next;
3241 *slot = new_ar;
3243 /* The filter value goes in untouched. The link to the next
3244 record is a "self-relative" byte offset, or zero to indicate
3245 that there is no next record. So convert the absolute 1-based
3246 indices we've been carrying around into a displacement. */
3248 push_sleb128 (&crtl->eh.action_record_data, filter);
3249 if (next)
3250 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3251 push_sleb128 (&crtl->eh.action_record_data, next);
3254 return new_ar->offset;
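/* Worked example (illustrative): say record #1 is {filter 0, next 0}
   and occupies offsets 1-2 (one sleb128 byte each).  Adding {filter 3,
   next 1} creates record #2 at offset 3; after its filter byte the
   active size is 3, so the stored link is 1 - (3 + 1) = -3, a
   self-relative displacement back to record #1, while a stored 0
   would mean "no next record".  */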
3257 static int
3258 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3260 struct eh_region *c;
3261 int next;
3263 /* If we've reached the top of the region chain, then we have
3264 no actions, and require no landing pad. */
3265 if (region == NULL)
3266 return -1;
3268 switch (region->type)
3270 case ERT_CLEANUP:
3271 /* A cleanup adds a zero filter to the beginning of the chain, but
3272 there are special cases to look out for. If there are *only*
3273 cleanups along a path, then it compresses to a zero action.
3274 Further, if there are multiple cleanups along a path, we only
3275 need to represent one of them, as that is enough to trigger
3276 entry to the landing pad at runtime. */
3277 next = collect_one_action_chain (ar_hash, region->outer);
3278 if (next <= 0)
3279 return 0;
3280 for (c = region->outer; c ; c = c->outer)
3281 if (c->type == ERT_CLEANUP)
3282 return next;
3283 return add_action_record (ar_hash, 0, next);
3285 case ERT_TRY:
3286 /* Process the associated catch regions in reverse order.
3287 If there's a catch-all handler, then we don't need to
3288 search outer regions. Use a magic -3 value to record
3289 that we haven't done the outer search. */
3290 next = -3;
3291 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3293 if (c->u.eh_catch.type_list == NULL)
3295 /* Retrieve the filter from the head of the filter list
3296 where we have stored it (see assign_filter_values). */
3297 int filter
3298 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3300 next = add_action_record (ar_hash, filter, 0);
3302 else
3304 /* Once the outer search is done, trigger an action record for
3305 each filter we have. */
3306 tree flt_node;
3308 if (next == -3)
3310 next = collect_one_action_chain (ar_hash, region->outer);
3312 /* If there is no next action, terminate the chain. */
3313 if (next == -1)
3314 next = 0;
3315 /* If all outer actions are cleanups or must_not_throw,
3316 we'll have no action record for it, since we had wanted
3317 to encode these states in the call-site record directly.
3318 Add a cleanup action to the chain to catch these. */
3319 else if (next <= 0)
3320 next = add_action_record (ar_hash, 0, 0);
3323 flt_node = c->u.eh_catch.filter_list;
3324 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3326 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3327 next = add_action_record (ar_hash, filter, next);
3331 return next;
3333 case ERT_ALLOWED_EXCEPTIONS:
3334 /* An exception specification adds its filter to the
3335 beginning of the chain. */
3336 next = collect_one_action_chain (ar_hash, region->outer);
3338 /* If there is no next action, terminate the chain. */
3339 if (next == -1)
3340 next = 0;
3341 /* If all outer actions are cleanups or must_not_throw,
3342 we'll have no action record for it, since we had wanted
3343 to encode these states in the call-site record directly.
3344 Add a cleanup action to the chain to catch these. */
3345 else if (next <= 0)
3346 next = add_action_record (ar_hash, 0, 0);
3348 return add_action_record (ar_hash, region->u.allowed.filter, next);
3350 case ERT_MUST_NOT_THROW:
3351 /* A must-not-throw region with no inner handlers or cleanups
3352 requires no call-site entry. Note that this differs from
3353 the no handler or cleanup case in that we do require an lsda
3354 to be generated. Return a magic -2 value to record this. */
3355 return -2;
3357 case ERT_CATCH:
3358 case ERT_THROW:
3359 /* CATCH regions are handled in TRY above. THROW regions are
3360 for optimization information only and produce no output. */
3361 return collect_one_action_chain (ar_hash, region->outer);
3363 default:
3364 gcc_unreachable ();
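/* Worked example (hypothetical source):

       try { f (); } catch (A &) { } catch (...) { }

   The loop visits the catches last-to-first.  The catch-all yields a
   record {filter(...), next = 0} and, since a catch-all ends the
   search, the outer regions are never consulted (next never stays -3).
   The A handler then prepends {filter(A), next = catch-all record}, so
   the call site's chain tries A first, then the catch-all.  */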
3368 static int
3369 add_call_site (rtx landing_pad, int action)
3371 call_site_record record;
3373 record = GGC_NEW (struct call_site_record);
3374 record->landing_pad = landing_pad;
3375 record->action = action;
3377 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3379 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3382 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3383 The new note numbers will not refer to region numbers, but
3384 instead to call site entries. */
3386 unsigned int
3387 convert_to_eh_region_ranges (void)
3389 rtx insn, iter, note;
3390 htab_t ar_hash;
3391 int last_action = -3;
3392 rtx last_action_insn = NULL_RTX;
3393 rtx last_landing_pad = NULL_RTX;
3394 rtx first_no_action_insn = NULL_RTX;
3395 int call_site = 0;
3397 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3398 return 0;
3400 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3402 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3404 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3405 if (INSN_P (iter))
3407 struct eh_region *region;
3408 int this_action;
3409 rtx this_landing_pad;
3411 insn = iter;
3412 if (NONJUMP_INSN_P (insn)
3413 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3414 insn = XVECEXP (PATTERN (insn), 0, 0);
3416 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3417 if (!note)
3419 if (! (CALL_P (insn)
3420 || (flag_non_call_exceptions
3421 && may_trap_p (PATTERN (insn)))))
3422 continue;
3423 this_action = -1;
3424 region = NULL;
3426 else
3428 if (INTVAL (XEXP (note, 0)) <= 0)
3429 continue;
3430 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3431 this_action = collect_one_action_chain (ar_hash, region);
3434 /* The existence of catch handlers or must-not-throw regions
3435 implies that an lsda is needed (even if empty). */
3436 if (this_action != -1)
3437 crtl->uses_eh_lsda = 1;
3439 /* Delay creation of region notes for no-action regions
3440 until we're sure that an lsda will be required. */
3441 else if (last_action == -3)
3443 first_no_action_insn = iter;
3444 last_action = -1;
3447 /* Cleanups and handlers may share action chains but not
3448 landing pads. Collect the landing pad for this region. */
3449 if (this_action >= 0)
3451 struct eh_region *o;
3452 for (o = region; ! o->landing_pad ; o = o->outer)
3453 continue;
3454 this_landing_pad = o->landing_pad;
3456 else
3457 this_landing_pad = NULL_RTX;
3459 /* Differing actions or landing pads implies a change in call-site
3460 info, which implies some EH_REGION note should be emitted. */
3461 if (last_action != this_action
3462 || last_landing_pad != this_landing_pad)
3464 /* If we'd not seen a previous action (-3) or the previous
3465 action was must-not-throw (-2), then we do not need an
3466 end note. */
3467 if (last_action >= -1)
3469 /* If we delayed the creation of the begin, do it now. */
3470 if (first_no_action_insn)
3472 call_site = add_call_site (NULL_RTX, 0);
3473 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3474 first_no_action_insn);
3475 NOTE_EH_HANDLER (note) = call_site;
3476 first_no_action_insn = NULL_RTX;
3479 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3480 last_action_insn);
3481 NOTE_EH_HANDLER (note) = call_site;
3484 /* If the new action is must-not-throw, then no region notes
3485 are created. */
3486 if (this_action >= -1)
3488 call_site = add_call_site (this_landing_pad,
3489 this_action < 0 ? 0 : this_action);
3490 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3491 NOTE_EH_HANDLER (note) = call_site;
3494 last_action = this_action;
3495 last_landing_pad = this_landing_pad;
3497 last_action_insn = iter;
3500 if (last_action >= -1 && ! first_no_action_insn)
3502 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3503 NOTE_EH_HANDLER (note) = call_site;
3506 htab_delete (ar_hash);
3507 return 0;
3510 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3513 RTL_PASS,
3514 "eh_ranges", /* name */
3515 NULL, /* gate */
3516 convert_to_eh_region_ranges, /* execute */
3517 NULL, /* sub */
3518 NULL, /* next */
3519 0, /* static_pass_number */
3520 0, /* tv_id */
3521 0, /* properties_required */
3522 0, /* properties_provided */
3523 0, /* properties_destroyed */
3524 0, /* todo_flags_start */
3525 TODO_dump_func, /* todo_flags_finish */
3530 static void
3531 push_uleb128 (varray_type *data_area, unsigned int value)
3535 unsigned char byte = value & 0x7f;
3536 value >>= 7;
3537 if (value)
3538 byte |= 0x80;
3539 VARRAY_PUSH_UCHAR (*data_area, byte);
3541 while (value);
3544 static void
3545 push_sleb128 (varray_type *data_area, int value)
3547 unsigned char byte;
3548 int more;
3552 byte = value & 0x7f;
3553 value >>= 7;
3554 more = ! ((value == 0 && (byte & 0x40) == 0)
3555 || (value == -1 && (byte & 0x40) != 0));
3556 if (more)
3557 byte |= 0x80;
3558 VARRAY_PUSH_UCHAR (*data_area, byte);
3560 while (more);
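/* Encoding examples (standard LEB128, for reference):

       push_uleb128 (&a, 624485);   // appends 0xe5 0x8e 0x26
       push_sleb128 (&a, -2);       // appends 0x7e
       push_sleb128 (&a, 64);       // appends 0xc0 0x00

   Seven payload bits per byte, least significant group first, with the
   high bit set on all but the final byte.  The signed form must emit
   an extra byte whenever bit 6 of the last payload byte disagrees with
   the sign, as in the 64 case above.  */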
3564 #ifndef HAVE_AS_LEB128
3565 static int
3566 dw2_size_of_call_site_table (void)
3568 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3569 int size = n * (4 + 4 + 4);
3570 int i;
3572 for (i = 0; i < n; ++i)
3574 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3575 size += size_of_uleb128 (cs->action);
3578 return size;
3581 static int
3582 sjlj_size_of_call_site_table (void)
3584 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3585 int size = 0;
3586 int i;
3588 for (i = 0; i < n; ++i)
3590 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3591 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3592 size += size_of_uleb128 (cs->action);
3595 return size;
3597 #endif
3599 static void
3600 dw2_output_call_site_table (void)
3602 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3603 int i;
3605 for (i = 0; i < n; ++i)
3607 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3608 char reg_start_lab[32];
3609 char reg_end_lab[32];
3610 char landing_pad_lab[32];
3612 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3613 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3615 if (cs->landing_pad)
3616 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3617 CODE_LABEL_NUMBER (cs->landing_pad));
3619 /* ??? Perhaps use insn length scaling if the assembler supports
3620 generic arithmetic. */
3621 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3622 data4 if the function is small enough. */
3623 #ifdef HAVE_AS_LEB128
3624 dw2_asm_output_delta_uleb128 (reg_start_lab,
3625 current_function_func_begin_label,
3626 "region %d start", i);
3627 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3628 "length");
3629 if (cs->landing_pad)
3630 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3631 current_function_func_begin_label,
3632 "landing pad");
3633 else
3634 dw2_asm_output_data_uleb128 (0, "landing pad");
3635 #else
3636 dw2_asm_output_delta (4, reg_start_lab,
3637 current_function_func_begin_label,
3638 "region %d start", i);
3639 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3640 if (cs->landing_pad)
3641 dw2_asm_output_delta (4, landing_pad_lab,
3642 current_function_func_begin_label,
3643 "landing pad");
3644 else
3645 dw2_asm_output_data (4, 0, "landing pad");
3646 #endif
3647 dw2_asm_output_data_uleb128 (cs->action, "action");
3650 call_site_base += n;
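/* Roughly, the table printed here looks like (sketch of the
   HAVE_AS_LEB128 flavor; label names as generated above, with .LFB
   standing for current_function_func_begin_label):

       .uleb128 .LEHB0-.LFB2        # region 0 start
       .uleb128 .LEHE0-.LEHB0       # length
       .uleb128 .L7-.LFB2           # landing pad (or 0 if none)
       .uleb128 0x1                 # action

   with one such entry per call-site record.  */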
3653 static void
3654 sjlj_output_call_site_table (void)
3656 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3657 int i;
3659 for (i = 0; i < n; ++i)
3661 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3663 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3664 "region %d landing pad", i);
3665 dw2_asm_output_data_uleb128 (cs->action, "action");
3668 call_site_base += n;
3671 #ifndef TARGET_UNWIND_INFO
3672 /* Switch to the section that should be used for exception tables. */
3674 static void
3675 switch_to_exception_section (const char * ARG_UNUSED (fnname))
3677 section *s;
3679 if (exception_section)
3680 s = exception_section;
3681 else
3683 /* Compute the section and cache it into exception_section,
3684 unless it depends on the function name. */
3685 if (targetm.have_named_sections)
3687 int flags;
3689 if (EH_TABLES_CAN_BE_READ_ONLY)
3691 int tt_format =
3692 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3693 flags = ((! flag_pic
3694 || ((tt_format & 0x70) != DW_EH_PE_absptr
3695 && (tt_format & 0x70) != DW_EH_PE_aligned))
3696 ? 0 : SECTION_WRITE);
3698 else
3699 flags = SECTION_WRITE;
3701 #ifdef HAVE_LD_EH_GC_SECTIONS
3702 if (flag_function_sections)
3704 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3705 sprintf (section_name, ".gcc_except_table.%s", fnname);
3706 s = get_section (section_name, flags, NULL);
3707 free (section_name);
3709 else
3710 #endif
3711 exception_section
3712 = s = get_section (".gcc_except_table", flags, NULL);
3714 else
3715 exception_section
3716 = s = flag_pic ? data_section : readonly_data_section;
3719 switch_to_section (s);
3721 #endif
3724 /* Output a reference from an exception table to the type_info object TYPE.
3725 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3726 the value. */
3728 static void
3729 output_ttype (tree type, int tt_format, int tt_format_size)
3731 rtx value;
3732 bool is_public = true;
3734 if (type == NULL_TREE)
3735 value = const0_rtx;
3736 else
3738 struct varpool_node *node;
3740 type = lookup_type_for_runtime (type);
3741 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3743 /* Let cgraph know that the rtti decl is used. Not all of the
3744 paths below go through assemble_integer, which would take
3745 care of this for us. */
3746 STRIP_NOPS (type);
3747 if (TREE_CODE (type) == ADDR_EXPR)
3749 type = TREE_OPERAND (type, 0);
3750 if (TREE_CODE (type) == VAR_DECL)
3752 node = varpool_node (type);
3753 if (node)
3754 varpool_mark_needed_node (node);
3755 is_public = TREE_PUBLIC (type);
3758 else
3759 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3762 /* Allow the target to override the type table entry format. */
3763 if (targetm.asm_out.ttype (value))
3764 return;
3766 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3767 assemble_integer (value, tt_format_size,
3768 tt_format_size * BITS_PER_UNIT, 1);
3769 else
3770 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
3773 void
3774 output_function_exception_table (const char * ARG_UNUSED (fnname))
3776 int tt_format, cs_format, lp_format, i, n;
3777 #ifdef HAVE_AS_LEB128
3778 char ttype_label[32];
3779 char cs_after_size_label[32];
3780 char cs_end_label[32];
3781 #else
3782 int call_site_len;
3783 #endif
3784 int have_tt_data;
3785 int tt_format_size = 0;
3787 /* Not all functions need anything. */
3788 if (! crtl->uses_eh_lsda)
3789 return;
3791 if (eh_personality_libfunc)
3792 assemble_external_libcall (eh_personality_libfunc);
3794 #ifdef TARGET_UNWIND_INFO
3795 /* TODO: Move this into target file. */
3796 fputs ("\t.personality\t", asm_out_file);
3797 output_addr_const (asm_out_file, eh_personality_libfunc);
3798 fputs ("\n\t.handlerdata\n", asm_out_file);
3799 /* Note that varasm still thinks we're in the function's code section.
3800 The ".endp" directive that will immediately follow will take us back. */
3801 #else
3802 switch_to_exception_section (fnname);
3803 #endif
3805 /* If the target wants a label to begin the table, emit it here. */
3806 targetm.asm_out.except_table_label (asm_out_file);
3808 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3809 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
3811 /* Indicate the format of the @TType entries. */
3812 if (! have_tt_data)
3813 tt_format = DW_EH_PE_omit;
3814 else
3816 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3817 #ifdef HAVE_AS_LEB128
3818 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3819 current_function_funcdef_no);
3820 #endif
3821 tt_format_size = size_of_encoded_value (tt_format);
3823 assemble_align (tt_format_size * BITS_PER_UNIT);
3826 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3827 current_function_funcdef_no);
3829 /* The LSDA header. */
3831 /* Indicate the format of the landing pad start pointer. An omitted
3832 field implies @LPStart == @Start. */
3833 /* Currently we always put @LPStart == @Start. This field would
3834 be most useful in moving the landing pads completely out of
3835 line to another section, but it could also be used to minimize
3836 the size of uleb128 landing pad offsets. */
3837 lp_format = DW_EH_PE_omit;
3838 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3839 eh_data_format_name (lp_format));
3841 /* @LPStart pointer would go here. */
3843 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3844 eh_data_format_name (tt_format));
3846 #ifndef HAVE_AS_LEB128
3847 if (USING_SJLJ_EXCEPTIONS)
3848 call_site_len = sjlj_size_of_call_site_table ();
3849 else
3850 call_site_len = dw2_size_of_call_site_table ();
3851 #endif
3853 /* A pc-relative 4-byte displacement to the @TType data. */
3854 if (have_tt_data)
3856 #ifdef HAVE_AS_LEB128
3857 char ttype_after_disp_label[32];
3858 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3859 current_function_funcdef_no);
3860 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3861 "@TType base offset");
3862 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3863 #else
3864 /* Ugh. Alignment complicates things. */
3865 unsigned int before_disp, after_disp, last_disp, disp;
3867 before_disp = 1 + 1;
3868 after_disp = (1 + size_of_uleb128 (call_site_len)
3869 + call_site_len
3870 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
3871 + (VEC_length (tree, crtl->eh.ttype_data)
3872 * tt_format_size));
3874 disp = after_disp;
3877 unsigned int disp_size, pad;
3879 last_disp = disp;
3880 disp_size = size_of_uleb128 (disp);
3881 pad = before_disp + disp_size + after_disp;
3882 if (pad % tt_format_size)
3883 pad = tt_format_size - (pad % tt_format_size);
3884 else
3885 pad = 0;
3886 disp = after_disp + pad;
3888 while (disp != last_disp);
3890 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3891 #endif
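/* The fixed-point loop above is needed because the @TType base offset
   is itself uleb128-encoded, so its own size feeds back into the
   padding.  Illustrative numbers: with tt_format_size 4, before_disp 2
   and after_disp 120, disp starts at 120 (one uleb128 byte); the total
   2 + 1 + 120 = 123 needs one pad byte, giving disp = 121, which still
   fits in one byte, so the loop converges.  Had padding pushed disp
   past 127, its encoding would grow to two bytes and another iteration
   would be required.  */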
3894 /* Indicate the format of the call-site offsets. */
3895 #ifdef HAVE_AS_LEB128
3896 cs_format = DW_EH_PE_uleb128;
3897 #else
3898 cs_format = DW_EH_PE_udata4;
3899 #endif
3900 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3901 eh_data_format_name (cs_format));
3903 #ifdef HAVE_AS_LEB128
3904 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3905 current_function_funcdef_no);
3906 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3907 current_function_funcdef_no);
3908 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3909 "Call-site table length");
3910 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3911 if (USING_SJLJ_EXCEPTIONS)
3912 sjlj_output_call_site_table ();
3913 else
3914 dw2_output_call_site_table ();
3915 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3916 #else
3917 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3918 if (USING_SJLJ_EXCEPTIONS)
3919 sjlj_output_call_site_table ();
3920 else
3921 dw2_output_call_site_table ();
3922 #endif
3924 /* ??? Decode and interpret the data for flag_debug_asm. */
3925 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
3926 for (i = 0; i < n; ++i)
3927 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
3928 (i ? NULL : "Action record table"));
3930 if (have_tt_data)
3931 assemble_align (tt_format_size * BITS_PER_UNIT);
3933 i = VEC_length (tree, crtl->eh.ttype_data);
3934 while (i-- > 0)
3936 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
3937 output_ttype (type, tt_format, tt_format_size);
3940 #ifdef HAVE_AS_LEB128
3941 if (have_tt_data)
3942 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3943 #endif
3945 /* ??? Decode and interpret the data for flag_debug_asm. */
3946 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
3947 for (i = 0; i < n; ++i)
3949 if (targetm.arm_eabi_unwinder)
3951 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
3952 output_ttype (type, tt_format, tt_format_size);
3954 else
3955 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
3956 (i ? NULL : "Exception specification table"));
3959 switch_to_section (current_function_section ());
3962 void
3963 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3965 fun->eh->throw_stmt_table = table;
3968 htab_t
3969 get_eh_throw_stmt_table (struct function *fun)
3971 return fun->eh->throw_stmt_table;
3974 /* Dump EH information to OUT. */
3976 void
3977 dump_eh_tree (FILE * out, struct function *fun)
3979 struct eh_region *i;
3980 int depth = 0;
3981 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
3982 "allowed_exceptions", "must_not_throw",
3983 "throw"
3986 i = fun->eh->region_tree;
3987 if (!i)
3988 return;
3990 fprintf (out, "Eh tree:\n");
3991 while (1)
3993 fprintf (out, " %*s %i %s", depth * 2, "",
3994 i->region_number, type_name[(int) i->type]);
3995 if (i->tree_label)
3997 fprintf (out, " tree_label:");
3998 print_generic_expr (out, i->tree_label, 0);
4000 if (i->label)
4001 fprintf (out, " label:%i", INSN_UID (i->label));
4002 if (i->landing_pad)
4004 fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
4005 if (GET_CODE (i->landing_pad) == NOTE)
4006 fprintf (out, " (deleted)");
4008 if (i->post_landing_pad)
4010 fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
4011 if (GET_CODE (i->post_landing_pad) == NOTE)
4012 fprintf (out, " (deleted)");
4014 if (i->resume)
4016 fprintf (out, " resume:%i", INSN_UID (i->resume));
4017 if (GET_CODE (i->resume) == NOTE)
4018 fprintf (out, " (deleted)");
4020 if (i->may_contain_throw)
4021 fprintf (out, " may_contain_throw");
4022 switch (i->type)
4024 case ERT_CLEANUP:
4025 if (i->u.cleanup.prev_try)
4026 fprintf (out, " prev try:%i",
4027 i->u.cleanup.prev_try->region_number);
4028 break;
4030 case ERT_TRY:
4032 struct eh_region *c;
4033 fprintf (out, " catch regions:");
4034 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4035 fprintf (out, " %i", c->region_number);
4037 break;
4039 case ERT_CATCH:
4040 if (i->u.eh_catch.prev_catch)
4041 fprintf (out, " prev: %i",
4042 i->u.eh_catch.prev_catch->region_number);
4043 if (i->u.eh_catch.next_catch)
4044 fprintf (out, " next: %i",
4045 i->u.eh_catch.next_catch->region_number);
4046 fprintf (out, " type:");
4047 print_generic_expr (out, i->u.eh_catch.type_list, 0);
4048 break;
4050 case ERT_ALLOWED_EXCEPTIONS:
4051 fprintf (out, " filter:%i types:", i->u.allowed.filter);
4052 print_generic_expr (out, i->u.allowed.type_list, 0);
4053 break;
4055 case ERT_THROW:
4056 fprintf (out, " type:");
4057 print_generic_expr (out, i->u.eh_throw.type, 0);
4058 break;
4060 case ERT_MUST_NOT_THROW:
4061 break;
4063 case ERT_UNKNOWN:
4064 break;
4066 if (i->aka)
4068 fprintf (out, " also known as:");
4069 dump_bitmap (out, i->aka);
4071 else
4072 fprintf (out, "\n");
4073 /* If there are sub-regions, process them. */
4074 if (i->inner)
4075 i = i->inner, depth++;
4076 /* If there are peers, process them. */
4077 else if (i->next_peer)
4078 i = i->next_peer;
4079 /* Otherwise, step back up the tree to the next peer. */
4080 else
4084 i = i->outer;
4085 depth--;
4086 if (i == NULL)
4087 return;
4089 while (i->next_peer == NULL);
4090 i = i->next_peer;
4095 /* Verify EH region invariants. */
4097 static bool
4098 verify_eh_region (struct eh_region *region, struct eh_region *prev_try)
4100 bool found = false;
4101 if (!region)
4102 return false;
4103 switch (region->type)
4105 case ERT_CLEANUP:
4106 if (region->u.cleanup.prev_try != prev_try)
4108 error ("Wrong prev_try pointer in EH region %i",
4109 region->region_number);
4110 found = true;
4112 break;
4113 case ERT_TRY:
4115 struct eh_region *c, *prev = NULL;
4116 if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
4118 error ("Try region %i has wrong eh_catch pointer to %i",
4119 region->region_number,
4120 region->u.eh_try.eh_catch->region_number);
4121 found = true;
4123 for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4125 if (c->outer != region->outer)
4127 error
4128 ("Catch region %i has different outer region than try region %i",
4129 c->region_number, region->region_number);
4130 found = true;
4132 if (c->u.eh_catch.prev_catch != prev)
4134 error ("Catch region %i has corrupted catchlist",
4135 c->region_number);
4136 found = true;
4138 prev = c;
4140 if (prev != region->u.eh_try.last_catch)
4142 error
4143 ("Try region %i has wrong last_catch pointer to %i instead of %i",
4144 region->region_number,
4145 region->u.eh_try.last_catch->region_number,
4146 prev->region_number);
4147 found = true;
4150 break;
4151 case ERT_CATCH:
4152 if (!region->u.eh_catch.prev_catch
4153 && (!region->next_peer || region->next_peer->type != ERT_TRY))
4155 error ("Catch region %i should be followed by try", region->region_number);
4156 found = true;
4158 break;
4159 case ERT_ALLOWED_EXCEPTIONS:
4160 case ERT_MUST_NOT_THROW:
4161 case ERT_THROW:
4162 break;
4163 case ERT_UNKNOWN:
4164 gcc_unreachable ();
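
  /* A cleanup's prev_try must name the innermost enclosing try region.
     Must-not-throw regions (and allowed-exceptions regions with an
     empty type list, which behave the same way) stop exceptions from
     propagating to outer try regions, so they reset the expected
     pointer to NULL for everything nested inside them.  */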
  if (region->type == ERT_TRY)
    prev_try = region;
  else if (region->type == ERT_MUST_NOT_THROW
	   || (region->type == ERT_ALLOWED_EXCEPTIONS
	       && !region->u.allowed.type_list))
    prev_try = NULL;
  for (region = region->inner; region; region = region->next_peer)
    found |= verify_eh_region (region, prev_try);
  return found;
}

/* Verify invariants on EH datastructures.  */

void
verify_eh_tree (struct function *fun)
{
  struct eh_region *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  if (!fun->eh->region_tree)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
      {
	if (i->region_number == j)
	  count++;
	if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
	  {
	    error ("region_array is corrupted for region %i",
		   i->region_number);
	    err = true;
	  }
      }
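
  /* Now walk the tree itself, checking each region's array slot,
     outer pointer, may_contain_throw flag, and nesting depth.
     NVISITED counts the regions the walk reaches; at the end it must
     match COUNT from the array scan above, or the array and the tree
     disagree about which regions exist.  */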
  i = fun->eh->region_tree;
  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
	{
	  error ("region_array is corrupted for region %i", i->region_number);
	  err = true;
	}
      if (i->outer != outer)
	{
	  error ("outer block of region %i is wrong", i->region_number);
	  err = true;
	}
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
	{
	  error
	    ("region %i may contain throw and is contained in region that may not",
	     i->region_number);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", i->region_number);
	  err = true;
	}
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
	outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		{
		  if (depth != -1)
		    {
		      error ("tree list ends on depth %i", depth + 1);
		      err = true;
		    }
		  if (count != nvisited)
		    {
		      error ("array does not match the region tree");
		      err = true;
		    }
		  if (!err)
		    for (i = fun->eh->region_tree; i; i = i->next_peer)
		      err |= verify_eh_region (i, NULL);

		  if (err)
		    {
		      dump_eh_tree (stderr, fun);
		      internal_error ("verify_eh_tree failed");
		    }
		  return;
		}
	      outer = i->outer;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
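
/* A pass that rewrites EH regions can check its work with a
   checking-only call such as the following (illustrative; the actual
   call sites live elsewhere in GCC):

     #ifdef ENABLE_CHECKING
     verify_eh_tree (cfun);
     #endif
*/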

/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default C++ routines aren't actually C++-specific, so use them.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
		      : "_Unwind_Resume");
}
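
/* Gate for the RTL EH pass: run it only when exception handling is
   enabled for this compilation.  Passing zero asks doing_eh not to
   emit its "exception handling disabled" diagnostic.  */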
static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static unsigned int
rest_of_handle_eh (void)
{
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}
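
/* Pass descriptor for the RTL EH generation pass; the pass manager
   runs it via the pass list in passes.c.  TODO_dump_func requests a
   dump of the function once the pass finishes.  */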
struct rtl_opt_pass pass_rtl_eh =
{
 {
  RTL_PASS,
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
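
/* Pull in the garbage-collection root tables that gengtype generates
   for this file.  */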
#include "gt-except.h"