1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurred without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
77 #include "diagnostic.h"
79 /* Provide defaults for stuff that may not be defined when using
80 sjlj exceptions. */
81 #ifndef EH_RETURN_DATA_REGNO
82 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
83 #endif
86 /* Protect cleanup actions with must-not-throw regions, with a call
87 to the given failure handler. */
88 tree (*lang_protect_cleanup_actions) (void);
90 /* Return true if type A catches type B. */
91 int (*lang_eh_type_covers) (tree a, tree b);
93 /* Map a type to a runtime object to match type. */
94 tree (*lang_eh_runtime_type) (tree);
96 /* A hash table of label to region number. */
98 struct ehl_map_entry GTY(())
100 rtx label;
101 struct eh_region *region;
104 static GTY(()) int call_site_base;
105 static GTY ((param_is (union tree_node)))
106 htab_t type_to_runtime_map;
108 /* Describe the SjLj_Function_Context structure. */
109 static GTY(()) tree sjlj_fc_type_node;
110 static int sjlj_fc_call_site_ofs;
111 static int sjlj_fc_data_ofs;
112 static int sjlj_fc_personality_ofs;
113 static int sjlj_fc_lsda_ofs;
114 static int sjlj_fc_jbuf_ofs;
116 /* Describes one exception region. */
117 struct eh_region GTY(())
119 /* The immediately surrounding region. */
120 struct eh_region *outer;
122 /* The list of immediately contained regions. */
123 struct eh_region *inner;
124 struct eh_region *next_peer;
126 /* An identifier for this region. */
127 int region_number;
129 /* When a region is deleted, its parents inherit the REG_EH_REGION
130 numbers already assigned. */
131 bitmap aka;
133 /* Each region does exactly one thing. */
134 enum eh_region_type
136 ERT_UNKNOWN = 0,
137 ERT_CLEANUP,
138 ERT_TRY,
139 ERT_CATCH,
140 ERT_ALLOWED_EXCEPTIONS,
141 ERT_MUST_NOT_THROW,
142 ERT_THROW,
143 ERT_FIXUP
144 } type;
146 /* Holds the action to perform based on the preceding type. */
147 union eh_region_u {
148 /* A list of catch blocks, a surrounding try block,
149 and the label for continuing after a catch. */
150 struct eh_region_u_try {
151 struct eh_region *catch;
152 struct eh_region *last_catch;
153 struct eh_region *prev_try;
154 rtx continue_label;
155 } GTY ((tag ("ERT_TRY"))) try;
157 /* The list through the catch handlers, the list of type objects
158 matched, and the list of associated filters. */
159 struct eh_region_u_catch {
160 struct eh_region *next_catch;
161 struct eh_region *prev_catch;
162 tree type_list;
163 tree filter_list;
164 } GTY ((tag ("ERT_CATCH"))) catch;
166 /* A tree_list of allowed types. */
167 struct eh_region_u_allowed {
168 tree type_list;
169 int filter;
170 } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
172 /* The type given by a call to "throw foo();", or discovered
173 for a throw. */
174 struct eh_region_u_throw {
175 tree type;
176 } GTY ((tag ("ERT_THROW"))) throw;
178 /* Retain the cleanup expression even after expansion so that
179 we can match up fixup regions. */
180 struct eh_region_u_cleanup {
181 struct eh_region *prev_try;
182 } GTY ((tag ("ERT_CLEANUP"))) cleanup;
184 /* The real region (by expression and by pointer) that fixup code
185 should live in. */
186 struct eh_region_u_fixup {
187 struct eh_region *real_region;
188 bool resolved;
189 } GTY ((tag ("ERT_FIXUP"))) fixup;
190 } GTY ((desc ("%0.type"))) u;
192 /* Entry point for this region's handler before landing pads are built. */
193 rtx label;
194 tree tree_label;
196 /* Entry point for this region's handler from the runtime eh library. */
197 rtx landing_pad;
199 /* Entry point for this region's handler from an inner region. */
200 rtx post_landing_pad;
202 /* The RESX insn for handing off control to the next outermost handler,
203 if appropriate. */
204 rtx resume;
206 /* True if something in this region may throw. */
207 unsigned may_contain_throw : 1;
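/* Illustration (not part of the original source): for a C++ fragment
   such as

	try { f (); } catch (A &) { } catch (B &) { }

   the region tree gains one ERT_TRY region whose u.try.catch chain
   links two ERT_CATCH peers, one per handler; a destructor that must
   run during unwinding would appear as an ERT_CLEANUP region nested
   appropriately. */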
210 struct call_site_record GTY(())
212 rtx landing_pad;
213 int action;
216 /* Used to save exception status for each function. */
217 struct eh_status GTY(())
219 /* The tree of all regions for this function. */
220 struct eh_region *region_tree;
222 /* The same information as an indexable array. */
223 struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;
225 /* The most recently open region. */
226 struct eh_region *cur_region;
228 /* This is the region for which we are processing catch blocks. */
229 struct eh_region *try_region;
231 rtx filter;
232 rtx exc_ptr;
234 int built_landing_pads;
235 int last_region_number;
237 VEC(tree,gc) *ttype_data;
238 varray_type ehspec_data;
239 varray_type action_record_data;
241 htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;
243 struct call_site_record * GTY ((length ("%h.call_site_data_used")))
244 call_site_data;
245 int call_site_data_used;
246 int call_site_data_size;
248 rtx ehr_stackadj;
249 rtx ehr_handler;
250 rtx ehr_label;
252 rtx sjlj_fc;
253 rtx sjlj_exit_after;
255 htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
259 static int t2r_eq (const void *, const void *);
260 static hashval_t t2r_hash (const void *);
261 static void add_type_for_runtime (tree);
262 static tree lookup_type_for_runtime (tree);
264 static void remove_unreachable_regions (rtx);
266 static int ttypes_filter_eq (const void *, const void *);
267 static hashval_t ttypes_filter_hash (const void *);
268 static int ehspec_filter_eq (const void *, const void *);
269 static hashval_t ehspec_filter_hash (const void *);
270 static int add_ttypes_entry (htab_t, tree);
271 static int add_ehspec_entry (htab_t, htab_t, tree);
272 static void assign_filter_values (void);
273 static void build_post_landing_pads (void);
274 static void connect_post_landing_pads (void);
275 static void dw2_build_landing_pads (void);
277 struct sjlj_lp_info;
278 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
279 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
280 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
281 static void sjlj_emit_function_enter (rtx);
282 static void sjlj_emit_function_exit (void);
283 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
284 static void sjlj_build_landing_pads (void);
286 static hashval_t ehl_hash (const void *);
287 static int ehl_eq (const void *, const void *);
288 static void add_ehl_entry (rtx, struct eh_region *);
289 static void remove_exception_handler_label (rtx);
290 static void remove_eh_handler (struct eh_region *);
291 static int for_each_eh_label_1 (void **, void *);
293 /* The return value of reachable_next_level. */
294 enum reachable_code
296 /* The given exception is not processed by the given region. */
297 RNL_NOT_CAUGHT,
298 /* The given exception may need processing by the given region. */
299 RNL_MAYBE_CAUGHT,
300 /* The given exception is completely processed by the given region. */
301 RNL_CAUGHT,
302 /* The given exception is completely processed by the runtime. */
303 RNL_BLOCKED
306 struct reachable_info;
307 static enum reachable_code reachable_next_level (struct eh_region *, tree,
308 struct reachable_info *);
310 static int action_record_eq (const void *, const void *);
311 static hashval_t action_record_hash (const void *);
312 static int add_action_record (htab_t, int, int);
313 static int collect_one_action_chain (htab_t, struct eh_region *);
314 static int add_call_site (rtx, int);
316 static void push_uleb128 (varray_type *, unsigned int);
317 static void push_sleb128 (varray_type *, int);
318 #ifndef HAVE_AS_LEB128
319 static int dw2_size_of_call_site_table (void);
320 static int sjlj_size_of_call_site_table (void);
321 #endif
322 static void dw2_output_call_site_table (void);
323 static void sjlj_output_call_site_table (void);
326 /* Routine to see if exception handling is turned on.
327 DO_WARN is nonzero if we want to inform the user that exception
328 handling is turned off.
330 This is used to ensure that -fexceptions has been specified if the
331 compiler tries to use any exception-specific functions. */
334 doing_eh (int do_warn)
336 if (! flag_exceptions)
338 static int warned = 0;
339 if (! warned && do_warn)
341 error ("exception handling disabled, use -fexceptions to enable");
342 warned = 1;
344 return 0;
346 return 1;
350 void
351 init_eh (void)
353 if (! flag_exceptions)
354 return;
356 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
358 /* Create the SjLj_Function_Context structure. This should match
359 the definition in unwind-sjlj.c. */
360 if (USING_SJLJ_EXCEPTIONS)
362 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
364 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
366 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
367 build_pointer_type (sjlj_fc_type_node));
368 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
370 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
371 integer_type_node);
372 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
374 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
375 tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
376 tmp);
377 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
378 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
380 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
381 ptr_type_node);
382 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
384 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
385 ptr_type_node);
386 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
388 #ifdef DONT_USE_BUILTIN_SETJMP
389 #ifdef JMP_BUF_SIZE
390 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
391 #else
392 /* Should be large enough for most systems; if it is not,
393 JMP_BUF_SIZE should be defined with the proper value. It will
394 also tend to be larger than necessary for most systems; a more
395 optimal port will define JMP_BUF_SIZE. */
396 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
397 #endif
398 #else
399 /* builtin_setjmp takes a pointer to 5 words. */
400 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
401 #endif
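/* (Illustrative note, not in the original: when BITS_PER_WORD equals
   POINTER_SIZE, the expression above is 5 * 1 - 1 == 4, so the index
   type built below is [0 .. 4] and __jbuf holds exactly the five
   words that __builtin_setjmp expects; on targets whose pointers are
   narrower than a word, proportionally more pointer-sized slots are
   allocated to cover the same five words.) */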
402 tmp = build_index_type (tmp);
403 tmp = build_array_type (ptr_type_node, tmp);
404 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
405 #ifdef DONT_USE_BUILTIN_SETJMP
406 /* We don't know what alignment requirements the runtime's
407 jmp_buf has. Overestimate. */
408 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
409 DECL_USER_ALIGN (f_jbuf) = 1;
410 #endif
411 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
413 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
414 TREE_CHAIN (f_prev) = f_cs;
415 TREE_CHAIN (f_cs) = f_data;
416 TREE_CHAIN (f_data) = f_per;
417 TREE_CHAIN (f_per) = f_lsda;
418 TREE_CHAIN (f_lsda) = f_jbuf;
420 layout_type (sjlj_fc_type_node);
422 /* Cache the interesting field offsets so that we have
423 easy access from rtl. */
424 sjlj_fc_call_site_ofs
425 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
426 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
427 sjlj_fc_data_ofs
428 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
429 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
430 sjlj_fc_personality_ofs
431 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
432 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
433 sjlj_fc_lsda_ofs
434 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
435 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
436 sjlj_fc_jbuf_ofs
437 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
438 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
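/* For orientation only (not part of the original file): a rough
   sketch of the runtime structure that the record type built above
   must lay out compatibly, assuming the builtin-setjmp configuration.
   The authoritative definition lives in unwind-sjlj.c; the field
   types here (e.g. unsigned long for word_mode) are approximations.  */
#if 0
struct SjLj_Function_Context
{
  struct SjLj_Function_Context *prev;	/* __prev: chain of live contexts.  */
  int call_site;			/* __call_site: current call-site index.  */
  unsigned long data[4];		/* __data: word-mode values filled in
					   by the personality routine.  */
  void *personality;			/* __personality: personality routine.  */
  void *lsda;				/* __lsda: language-specific data area.  */
  void *jbuf[5];			/* __jbuf: __builtin_setjmp buffer.  */
};
#endif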
442 void
443 init_eh_for_function (void)
445 cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
448 /* Routines to generate the exception tree somewhat directly.
449 These are used from tree-eh.c when processing exception related
450 nodes during tree optimization. */
452 static struct eh_region *
453 gen_eh_region (enum eh_region_type type, struct eh_region *outer)
455 struct eh_region *new;
457 #ifdef ENABLE_CHECKING
458 gcc_assert (doing_eh (0));
459 #endif
461 /* Insert a new blank region as a leaf in the tree. */
462 new = ggc_alloc_cleared (sizeof (*new));
463 new->type = type;
464 new->outer = outer;
465 if (outer)
467 new->next_peer = outer->inner;
468 outer->inner = new;
470 else
472 new->next_peer = cfun->eh->region_tree;
473 cfun->eh->region_tree = new;
476 new->region_number = ++cfun->eh->last_region_number;
478 return new;
481 struct eh_region *
482 gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
484 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
485 cleanup->u.cleanup.prev_try = prev_try;
486 return cleanup;
489 struct eh_region *
490 gen_eh_region_try (struct eh_region *outer)
492 return gen_eh_region (ERT_TRY, outer);
495 struct eh_region *
496 gen_eh_region_catch (struct eh_region *t, tree type_or_list)
498 struct eh_region *c, *l;
499 tree type_list, type_node;
501 /* Ensure to always end up with a type list to normalize further
502 processing, then register each type against the runtime types map. */
503 type_list = type_or_list;
504 if (type_or_list)
506 if (TREE_CODE (type_or_list) != TREE_LIST)
507 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
509 type_node = type_list;
510 for (; type_node; type_node = TREE_CHAIN (type_node))
511 add_type_for_runtime (TREE_VALUE (type_node));
514 c = gen_eh_region (ERT_CATCH, t->outer);
515 c->u.catch.type_list = type_list;
516 l = t->u.try.last_catch;
517 c->u.catch.prev_catch = l;
518 if (l)
519 l->u.catch.next_catch = c;
520 else
521 t->u.try.catch = c;
522 t->u.try.last_catch = c;
524 return c;
527 struct eh_region *
528 gen_eh_region_allowed (struct eh_region *outer, tree allowed)
530 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
531 region->u.allowed.type_list = allowed;
533 for (; allowed ; allowed = TREE_CHAIN (allowed))
534 add_type_for_runtime (TREE_VALUE (allowed));
536 return region;
539 struct eh_region *
540 gen_eh_region_must_not_throw (struct eh_region *outer)
542 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
546 get_eh_region_number (struct eh_region *region)
548 return region->region_number;
551 bool
552 get_eh_region_may_contain_throw (struct eh_region *region)
554 return region->may_contain_throw;
557 tree
558 get_eh_region_tree_label (struct eh_region *region)
560 return region->tree_label;
563 void
564 set_eh_region_tree_label (struct eh_region *region, tree lab)
566 region->tree_label = lab;
569 void
570 expand_resx_expr (tree exp)
572 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
573 struct eh_region *reg = cfun->eh->region_array[region_nr];
575 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
576 emit_barrier ();
579 /* Note that the current EH region (if any) may contain a throw, or a
580 call to a function which itself may contain a throw. */
582 void
583 note_eh_region_may_contain_throw (struct eh_region *region)
585 while (region && !region->may_contain_throw)
587 region->may_contain_throw = 1;
588 region = region->outer;
592 void
593 note_current_region_may_contain_throw (void)
595 note_eh_region_may_contain_throw (cfun->eh->cur_region);
599 /* Return an rtl expression for a pointer to the exception object
600 within a handler. */
603 get_exception_pointer (struct function *fun)
605 rtx exc_ptr = fun->eh->exc_ptr;
606 if (fun == cfun && ! exc_ptr)
608 exc_ptr = gen_reg_rtx (ptr_mode);
609 fun->eh->exc_ptr = exc_ptr;
611 return exc_ptr;
614 /* Return an rtl expression for the exception dispatch filter
615 within a handler. */
618 get_exception_filter (struct function *fun)
620 rtx filter = fun->eh->filter;
621 if (fun == cfun && ! filter)
623 filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
624 fun->eh->filter = filter;
626 return filter;
629 /* This section is for the exception handling specific optimization pass. */
631 /* Random access the exception region tree. */
633 void
634 collect_eh_region_array (void)
636 struct eh_region **array, *i;
638 i = cfun->eh->region_tree;
639 if (! i)
640 return;
642 array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
643 * sizeof (*array));
644 cfun->eh->region_array = array;
646 while (1)
648 array[i->region_number] = i;
650 /* If there are sub-regions, process them. */
651 if (i->inner)
652 i = i->inner;
653 /* If there are peers, process them. */
654 else if (i->next_peer)
655 i = i->next_peer;
656 /* Otherwise, step back up the tree to the next peer. */
657 else
659 do {
660 i = i->outer;
661 if (i == NULL)
662 return;
663 } while (i->next_peer == NULL);
664 i = i->next_peer;
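/* (Explanatory note, not in the original: the loop above is a
   stackless depth-first, preorder walk of the region tree. It records
   each region in the array, descends via INNER, advances via
   NEXT_PEER, and otherwise climbs OUTER links until it finds an
   ancestor with an unvisited peer, returning once it runs off the
   root.) */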
669 /* Remove all regions whose labels are not reachable from insns. */
671 static void
672 remove_unreachable_regions (rtx insns)
674 int i, *uid_region_num;
675 bool *reachable;
676 struct eh_region *r;
677 rtx insn;
679 uid_region_num = xcalloc (get_max_uid (), sizeof(int));
680 reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
682 for (i = cfun->eh->last_region_number; i > 0; --i)
684 r = cfun->eh->region_array[i];
685 if (!r || r->region_number != i)
686 continue;
688 if (r->resume)
690 gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
691 uid_region_num[INSN_UID (r->resume)] = i;
693 if (r->label)
695 gcc_assert (!uid_region_num[INSN_UID (r->label)]);
696 uid_region_num[INSN_UID (r->label)] = i;
700 for (insn = insns; insn; insn = NEXT_INSN (insn))
701 reachable[uid_region_num[INSN_UID (insn)]] = true;
703 for (i = cfun->eh->last_region_number; i > 0; --i)
705 r = cfun->eh->region_array[i];
706 if (r && r->region_number == i && !reachable[i])
708 bool kill_it = true;
709 switch (r->type)
711 case ERT_THROW:
712 /* Don't remove ERT_THROW regions if their outer region
713 is reachable. */
714 if (r->outer && reachable[r->outer->region_number])
715 kill_it = false;
716 break;
718 case ERT_MUST_NOT_THROW:
719 /* MUST_NOT_THROW regions are implementable solely in the
720 runtime, but their existence continues to affect calls
721 within that region. Never delete them here. */
722 kill_it = false;
723 break;
725 case ERT_TRY:
727 /* TRY regions are reachable if any of their CATCH regions
728 are reachable. */
729 struct eh_region *c;
730 for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
731 if (reachable[c->region_number])
733 kill_it = false;
734 break;
736 break;
739 default:
740 break;
743 if (kill_it)
744 remove_eh_handler (r);
748 free (reachable);
749 free (uid_region_num);
752 /* Set up EH labels for RTL. */
754 void
755 convert_from_eh_region_ranges (void)
757 rtx insns = get_insns ();
758 int i, n = cfun->eh->last_region_number;
760 /* Most of the work is already done at the tree level. All we need to
761 do is collect the rtl labels that correspond to the tree labels
763 we allocated earlier. */
764 for (i = 1; i <= n; ++i)
766 struct eh_region *region = cfun->eh->region_array[i];
767 if (region && region->tree_label)
768 region->label = DECL_RTL_IF_SET (region->tree_label);
771 remove_unreachable_regions (insns);
774 static void
775 add_ehl_entry (rtx label, struct eh_region *region)
777 struct ehl_map_entry **slot, *entry;
779 LABEL_PRESERVE_P (label) = 1;
781 entry = ggc_alloc (sizeof (*entry));
782 entry->label = label;
783 entry->region = region;
785 slot = (struct ehl_map_entry **)
786 htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);
788 /* Before landing pad creation, each exception handler has its own
789 label. After landing pad creation, the exception handlers may
790 share landing pads. This is ok, since maybe_remove_eh_handler
791 only requires the 1-1 mapping before landing pad creation. */
792 gcc_assert (!*slot || cfun->eh->built_landing_pads);
794 *slot = entry;
797 void
798 find_exception_handler_labels (void)
800 int i;
802 if (cfun->eh->exception_handler_label_map)
803 htab_empty (cfun->eh->exception_handler_label_map);
804 else
806 /* ??? The expansion factor here (3/2) must be greater than the htab
807 occupancy factor (4/3) to avoid unnecessary resizing. */
808 cfun->eh->exception_handler_label_map
809 = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
810 ehl_hash, ehl_eq, NULL);
813 if (cfun->eh->region_tree == NULL)
814 return;
816 for (i = cfun->eh->last_region_number; i > 0; --i)
818 struct eh_region *region = cfun->eh->region_array[i];
819 rtx lab;
821 if (! region || region->region_number != i)
822 continue;
823 if (cfun->eh->built_landing_pads)
824 lab = region->landing_pad;
825 else
826 lab = region->label;
828 if (lab)
829 add_ehl_entry (lab, region);
832 /* For sjlj exceptions, need the return label to remain live until
833 after landing pad generation. */
834 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
835 add_ehl_entry (return_label, NULL);
838 bool
839 current_function_has_exception_handlers (void)
841 int i;
843 for (i = cfun->eh->last_region_number; i > 0; --i)
845 struct eh_region *region = cfun->eh->region_array[i];
847 if (! region || region->region_number != i)
848 continue;
849 if (region->type != ERT_THROW)
850 return true;
853 return false;
856 static struct eh_region *
857 duplicate_eh_region_1 (struct eh_region *o)
859 struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));
861 *n = *o;
863 n->region_number = o->region_number + cfun->eh->last_region_number;
864 gcc_assert (!o->aka);
866 return n;
869 static void
870 duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array,
871 struct eh_region *prev_try)
873 struct eh_region *n = n_array[o->region_number];
875 switch (n->type)
877 case ERT_TRY:
878 if (o->u.try.catch)
879 n->u.try.catch = n_array[o->u.try.catch->region_number];
880 if (o->u.try.last_catch)
881 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
882 break;
884 case ERT_CATCH:
885 if (o->u.catch.next_catch)
886 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
887 if (o->u.catch.prev_catch)
888 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
889 break;
891 case ERT_CLEANUP:
892 if (o->u.cleanup.prev_try)
893 n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number];
894 else
895 n->u.cleanup.prev_try = prev_try;
896 break;
898 default:
899 break;
902 if (o->outer)
903 n->outer = n_array[o->outer->region_number];
904 if (o->inner)
905 n->inner = n_array[o->inner->region_number];
906 if (o->next_peer)
907 n->next_peer = n_array[o->next_peer->region_number];
910 /* Duplicate the EH regions of IFUN into current function, root the tree in
911 OUTER_REGION and remap labels using MAP callback. */
913 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
914 void *data, int outer_region)
916 int ifun_last_region_number = ifun->eh->last_region_number;
917 struct eh_region **n_array, *root, *cur, *prev_try;
918 int i;
920 if (ifun_last_region_number == 0 || !ifun->eh->region_tree)
921 return 0;
923 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
925 /* Search for the containing ERT_TRY region to fix up
926 the prev_try short-cuts for ERT_CLEANUP regions. */
927 prev_try = NULL;
928 if (outer_region > 0)
929 for (prev_try = cfun->eh->region_array[outer_region];
930 prev_try && prev_try->type != ERT_TRY;
931 prev_try = prev_try->outer)
934 for (i = 1; i <= ifun_last_region_number; ++i)
936 cur = ifun->eh->region_array[i];
937 if (!cur || cur->region_number != i)
938 continue;
939 n_array[i] = duplicate_eh_region_1 (cur);
940 if (cur->tree_label)
942 tree newlabel = map (cur->tree_label, data);
943 n_array[i]->tree_label = newlabel;
945 else
946 n_array[i]->tree_label = NULL;
948 for (i = 1; i <= ifun_last_region_number; ++i)
950 cur = ifun->eh->region_array[i];
951 if (!cur || cur->region_number != i)
952 continue;
953 duplicate_eh_region_2 (cur, n_array, prev_try);
956 root = n_array[ifun->eh->region_tree->region_number];
957 gcc_assert (root->outer == NULL);
958 if (outer_region > 0)
960 struct eh_region *cur = cfun->eh->region_array[outer_region];
961 struct eh_region *p = cur->inner;
963 if (p)
965 while (p->next_peer)
966 p = p->next_peer;
967 p->next_peer = root;
969 else
970 cur->inner = root;
971 for (i = 1; i <= ifun_last_region_number; ++i)
972 if (n_array[i] && n_array[i]->outer == NULL)
973 n_array[i]->outer = cur;
975 else
977 struct eh_region *p = cfun->eh->region_tree;
978 if (p)
980 while (p->next_peer)
981 p = p->next_peer;
982 p->next_peer = root;
984 else
985 cfun->eh->region_tree = root;
988 free (n_array);
990 i = cfun->eh->last_region_number;
991 cfun->eh->last_region_number = i + ifun_last_region_number;
993 collect_eh_region_array ();
995 return i;
998 static int
999 t2r_eq (const void *pentry, const void *pdata)
1001 tree entry = (tree) pentry;
1002 tree data = (tree) pdata;
1004 return TREE_PURPOSE (entry) == data;
1007 static hashval_t
1008 t2r_hash (const void *pentry)
1010 tree entry = (tree) pentry;
1011 return TREE_HASH (TREE_PURPOSE (entry));
1014 static void
1015 add_type_for_runtime (tree type)
1017 tree *slot;
1019 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1020 TREE_HASH (type), INSERT);
1021 if (*slot == NULL)
1023 tree runtime = (*lang_eh_runtime_type) (type);
1024 *slot = tree_cons (type, runtime, NULL_TREE);
1028 static tree
1029 lookup_type_for_runtime (tree type)
1031 tree *slot;
1033 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1034 TREE_HASH (type), NO_INSERT);
1036 /* We should have always inserted the data earlier. */
1037 return TREE_VALUE (*slot);
1041 /* Represent an entry in @TTypes for either catch actions
1042 or exception filter actions. */
1043 struct ttypes_filter GTY(())
1045 tree t;
1046 int filter;
1049 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1050 (a tree) for a @TTypes type node we are thinking about adding. */
1052 static int
1053 ttypes_filter_eq (const void *pentry, const void *pdata)
1055 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1056 tree data = (tree) pdata;
1058 return entry->t == data;
1061 static hashval_t
1062 ttypes_filter_hash (const void *pentry)
1064 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1065 return TREE_HASH (entry->t);
1068 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1069 exception specification list we are thinking about adding. */
1070 /* ??? Currently we use the type lists in the order given. Someone
1071 should put these in some canonical order. */
1073 static int
1074 ehspec_filter_eq (const void *pentry, const void *pdata)
1076 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1077 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1079 return type_list_equal (entry->t, data->t);
1082 /* Hash function for exception specification lists. */
1084 static hashval_t
1085 ehspec_filter_hash (const void *pentry)
1087 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1088 hashval_t h = 0;
1089 tree list;
1091 for (list = entry->t; list ; list = TREE_CHAIN (list))
1092 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1093 return h;
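/* (Explanatory note, not in the original: on a 32-bit hashval_t,
   (h << 5) + (h >> 27) behaves like a 5-bit left rotation of H, using
   addition instead of OR, so each type hash in the list lands in a
   different bit position of the accumulator.) */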
1096 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
1097 to speed up the search. Return the filter value to be used. */
1099 static int
1100 add_ttypes_entry (htab_t ttypes_hash, tree type)
1102 struct ttypes_filter **slot, *n;
1104 slot = (struct ttypes_filter **)
1105 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1107 if ((n = *slot) == NULL)
1109 /* Filter value is a 1 based table index. */
1111 n = xmalloc (sizeof (*n));
1112 n->t = type;
1113 n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
1114 *slot = n;
1116 VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
1119 return n->filter;
1122 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1123 to speed up the search. Return the filter value to be used. */
1125 static int
1126 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1128 struct ttypes_filter **slot, *n;
1129 struct ttypes_filter dummy;
1131 dummy.t = list;
1132 slot = (struct ttypes_filter **)
1133 htab_find_slot (ehspec_hash, &dummy, INSERT);
1135 if ((n = *slot) == NULL)
1137 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1139 n = xmalloc (sizeof (*n));
1140 n->t = list;
1141 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1142 *slot = n;
1144 /* Look up each type in the list and encode its filter
1145 value as a uleb128. Terminate the list with 0. */
1146 for (; list ; list = TREE_CHAIN (list))
1147 push_uleb128 (&cfun->eh->ehspec_data,
1148 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1149 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1152 return n->filter;
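/* Illustrative sketch (not in the original file) of the encoding that
   push_uleb128 performs on ehspec_data above: unsigned LEB128 stores
   seven payload bits per byte, with the high bit set on every byte
   except the last.  */
#if 0
static void
encode_uleb128_example (unsigned char **p, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;	/* Low seven bits.  */
      value >>= 7;
      if (value != 0)
	byte |= 0x80;				/* More bytes follow.  */
      *(*p)++ = byte;
    }
  while (value != 0);
}
#endif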
1155 /* Generate the action filter values to be used for CATCH and
1156 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1157 we use lots of landing pads, and so every type or list can share
1158 the same filter value, which saves table space. */
1160 static void
1161 assign_filter_values (void)
1163 int i;
1164 htab_t ttypes, ehspec;
1166 cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
1167 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1169 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1170 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1172 for (i = cfun->eh->last_region_number; i > 0; --i)
1174 struct eh_region *r = cfun->eh->region_array[i];
1176 /* Mind we don't process a region more than once. */
1177 if (!r || r->region_number != i)
1178 continue;
1180 switch (r->type)
1182 case ERT_CATCH:
1183 /* Whatever type_list is (NULL or true list), we build a list
1184 of filters for the region. */
1185 r->u.catch.filter_list = NULL_TREE;
1187 if (r->u.catch.type_list != NULL)
1189 /* Get a filter value for each of the types caught and store
1190 them in the region's dedicated list. */
1191 tree tp_node = r->u.catch.type_list;
1193 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1195 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1196 tree flt_node = build_int_cst (NULL_TREE, flt);
1198 r->u.catch.filter_list
1199 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1202 else
1204 /* Get a filter value for the NULL list also since it will need
1205 an action record anyway. */
1206 int flt = add_ttypes_entry (ttypes, NULL);
1207 tree flt_node = build_int_cst (NULL_TREE, flt);
1209 r->u.catch.filter_list
1210 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1213 break;
1215 case ERT_ALLOWED_EXCEPTIONS:
1216 r->u.allowed.filter
1217 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1218 break;
1220 default:
1221 break;
1225 htab_delete (ttypes);
1226 htab_delete (ehspec);
1229 /* Emit SEQ into the basic block just before INSN (which is assumed to
1230 be the first instruction of some existing BB) and return the newly
1231 produced block. */
1232 static basic_block
1233 emit_to_new_bb_before (rtx seq, rtx insn)
1235 rtx last;
1236 basic_block bb;
1237 edge e;
1238 edge_iterator ei;
1240 /* If there happens to be a fallthru edge (possibly created by a
1241 cleanup_cfg call), we don't want it to go into the newly created
1242 landing pad or other EH construct. */
1243 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1244 if (e->flags & EDGE_FALLTHRU)
1245 force_nonfallthru (e);
1246 else
1247 ei_next (&ei);
1248 last = emit_insn_before (seq, insn);
1249 if (BARRIER_P (last))
1250 last = PREV_INSN (last);
1251 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1252 update_bb_for_insn (bb);
1253 bb->flags |= BB_SUPERBLOCK;
1254 return bb;
1257 /* Generate the code to actually handle exceptions, which will follow the
1258 landing pads. */
1260 static void
1261 build_post_landing_pads (void)
1263 int i;
1265 for (i = cfun->eh->last_region_number; i > 0; --i)
1267 struct eh_region *region = cfun->eh->region_array[i];
1268 rtx seq;
1270 /* Mind we don't process a region more than once. */
1271 if (!region || region->region_number != i)
1272 continue;
1274 switch (region->type)
1276 case ERT_TRY:
1277 /* ??? Collect the set of all non-overlapping catch handlers
1278 all the way up the chain until blocked by a cleanup. */
1279 /* ??? Outer try regions can share landing pads with inner
1280 try regions if the types are completely non-overlapping,
1281 and there are no intervening cleanups. */
1283 region->post_landing_pad = gen_label_rtx ();
1285 start_sequence ();
1287 emit_label (region->post_landing_pad);
1289 /* ??? It is mighty inconvenient to call back into the
1290 switch statement generation code in expand_end_case.
1291 Rapid prototyping sez a sequence of ifs. */
1293 struct eh_region *c;
1294 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1296 if (c->u.catch.type_list == NULL)
1297 emit_jump (c->label);
1298 else
1300 /* We need one cmp/jump per type caught. Each type
1301 list entry has a matching entry in the filter list
1302 (see assign_filter_values). */
1303 tree tp_node = c->u.catch.type_list;
1304 tree flt_node = c->u.catch.filter_list;
1306 for (; tp_node; )
1308 emit_cmp_and_jump_insns
1309 (cfun->eh->filter,
1310 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1311 EQ, NULL_RTX,
1312 targetm.eh_return_filter_mode (), 0, c->label);
1314 tp_node = TREE_CHAIN (tp_node);
1315 flt_node = TREE_CHAIN (flt_node);
1321 /* We delay the generation of the _Unwind_Resume until we generate
1322 landing pads. We emit a marker here so as to get good control
1323 flow data in the meantime. */
1324 region->resume
1325 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1326 emit_barrier ();
1328 seq = get_insns ();
1329 end_sequence ();
1331 emit_to_new_bb_before (seq, region->u.try.catch->label);
1333 break;
1335 case ERT_ALLOWED_EXCEPTIONS:
1336 region->post_landing_pad = gen_label_rtx ();
1338 start_sequence ();
1340 emit_label (region->post_landing_pad);
1342 emit_cmp_and_jump_insns (cfun->eh->filter,
1343 GEN_INT (region->u.allowed.filter),
1344 EQ, NULL_RTX,
1345 targetm.eh_return_filter_mode (), 0, region->label);
1347 /* We delay the generation of the _Unwind_Resume until we generate
1348 landing pads. We emit a marker here so as to get good control
1349 flow data in the meantime. */
1350 region->resume
1351 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1352 emit_barrier ();
1354 seq = get_insns ();
1355 end_sequence ();
1357 emit_to_new_bb_before (seq, region->label);
1358 break;
1360 case ERT_CLEANUP:
1361 case ERT_MUST_NOT_THROW:
1362 region->post_landing_pad = region->label;
1363 break;
1365 case ERT_CATCH:
1366 case ERT_THROW:
1367 /* Nothing to do. */
1368 break;
1370 default:
1371 gcc_unreachable ();
1376 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1377 _Unwind_Resume otherwise. */
1379 static void
1380 connect_post_landing_pads (void)
1382 int i;
1384 for (i = cfun->eh->last_region_number; i > 0; --i)
1386 struct eh_region *region = cfun->eh->region_array[i];
1387 struct eh_region *outer;
1388 rtx seq;
1389 rtx barrier;
1391 /* Mind we don't process a region more than once. */
1392 if (!region || region->region_number != i)
1393 continue;
1395 /* If there is no RESX, or it has been deleted by flow, there's
1396 nothing to fix up. */
1397 if (! region->resume || INSN_DELETED_P (region->resume))
1398 continue;
1400 /* Search for another landing pad in this function. */
1401 for (outer = region->outer; outer ; outer = outer->outer)
1402 if (outer->post_landing_pad)
1403 break;
1405 start_sequence ();
1407 if (outer)
1409 edge e;
1410 basic_block src, dest;
1412 emit_jump (outer->post_landing_pad);
1413 src = BLOCK_FOR_INSN (region->resume);
1414 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
1415 while (EDGE_COUNT (src->succs) > 0)
1416 remove_edge (EDGE_SUCC (src, 0));
1417 e = make_edge (src, dest, 0);
1418 e->probability = REG_BR_PROB_BASE;
1419 e->count = src->count;
1421 else
1423 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1424 VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
1426 /* What we just emitted was a throwing libcall, so it got a
1427 barrier automatically added after it. If the last insn in
1428 the libcall sequence isn't the barrier, it's because the
1429 target emits multiple insns for a call, and there are insns
1430 after the actual call insn (which are redundant and would be
1431 optimized away). The barrier is inserted exactly after the
1432 call insn, so let's go get that and delete the insns after
1433 it, because below we need the barrier to be the last insn in
1434 the sequence. */
1435 delete_insns_since (NEXT_INSN (last_call_insn ()));
1438 seq = get_insns ();
1439 end_sequence ();
1440 barrier = emit_insn_before (seq, region->resume);
1441 /* Avoid duplicate barrier. */
1442 gcc_assert (BARRIER_P (barrier));
1443 delete_insn (barrier);
1444 delete_insn (region->resume);
1446 /* ??? From tree-ssa we can wind up with catch regions whose
1447 label is not instantiated, but whose resx is present. Now
1448 that we've dealt with the resx, kill the region. */
1449 if (region->label == NULL && region->type == ERT_CLEANUP)
1450 remove_eh_handler (region);
1455 static void
1456 dw2_build_landing_pads (void)
1458 int i;
1459 unsigned int j;
1461 for (i = cfun->eh->last_region_number; i > 0; --i)
1463 struct eh_region *region = cfun->eh->region_array[i];
1464 rtx seq;
1465 basic_block bb;
1466 bool clobbers_hard_regs = false;
1467 edge e;
1469 /* Mind we don't process a region more than once. */
1470 if (!region || region->region_number != i)
1471 continue;
1473 if (region->type != ERT_CLEANUP
1474 && region->type != ERT_TRY
1475 && region->type != ERT_ALLOWED_EXCEPTIONS)
1476 continue;
1478 start_sequence ();
1480 region->landing_pad = gen_label_rtx ();
1481 emit_label (region->landing_pad);
1483 #ifdef HAVE_exception_receiver
1484 if (HAVE_exception_receiver)
1485 emit_insn (gen_exception_receiver ());
1486 else
1487 #endif
1488 #ifdef HAVE_nonlocal_goto_receiver
1489 if (HAVE_nonlocal_goto_receiver)
1490 emit_insn (gen_nonlocal_goto_receiver ());
1491 else
1492 #endif
1493 { /* Nothing */ }
1495 /* If the eh_return data registers are call-saved, then we
1496 won't have considered them clobbered from the call that
1497 threw. Kill them now. */
1498 for (j = 0; ; ++j)
1500 unsigned r = EH_RETURN_DATA_REGNO (j);
1501 if (r == INVALID_REGNUM)
1502 break;
1503 if (! call_used_regs[r])
1505 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1506 clobbers_hard_regs = true;
1510 if (clobbers_hard_regs)
1512 /* @@@ This is a kludge. Not all machine descriptions define a
1513 blockage insn, but we must not allow the code we just generated
1514 to be reordered by scheduling. So emit an ASM_INPUT to act as a
1515 blockage insn. */
1516 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
1519 emit_move_insn (cfun->eh->exc_ptr,
1520 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1521 emit_move_insn (cfun->eh->filter,
1522 gen_rtx_REG (targetm.eh_return_filter_mode (),
1523 EH_RETURN_DATA_REGNO (1)));
1525 seq = get_insns ();
1526 end_sequence ();
1528 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1529 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1530 e->count = bb->count;
1531 e->probability = REG_BR_PROB_BASE;
1536 struct sjlj_lp_info
1538 int directly_reachable;
1539 int action_index;
1540 int dispatch_index;
1541 int call_site_index;
1544 static bool
1545 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
1547 rtx insn;
1548 bool found_one = false;
1550 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1552 struct eh_region *region;
1553 enum reachable_code rc;
1554 tree type_thrown;
1555 rtx note;
1557 if (! INSN_P (insn))
1558 continue;
1560 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1561 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1562 continue;
1564 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1566 type_thrown = NULL_TREE;
1567 if (region->type == ERT_THROW)
1569 type_thrown = region->u.throw.type;
1570 region = region->outer;
1573 /* Find the first containing region that might handle the exception.
1574 That's the landing pad to which we will transfer control. */
1575 rc = RNL_NOT_CAUGHT;
1576 for (; region; region = region->outer)
1578 rc = reachable_next_level (region, type_thrown, NULL);
1579 if (rc != RNL_NOT_CAUGHT)
1580 break;
1582 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1584 lp_info[region->region_number].directly_reachable = 1;
1585 found_one = true;
1589 return found_one;
1592 static void
1593 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1595 htab_t ar_hash;
1596 int i, index;
1598 /* First task: build the action table. */
1600 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1601 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1603 for (i = cfun->eh->last_region_number; i > 0; --i)
1604 if (lp_info[i].directly_reachable)
1606 struct eh_region *r = cfun->eh->region_array[i];
1607 r->landing_pad = dispatch_label;
1608 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1609 if (lp_info[i].action_index != -1)
1610 cfun->uses_eh_lsda = 1;
1613 htab_delete (ar_hash);
1615 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1616 landing pad label for the region. For sjlj though, there is one
1617 common landing pad from which we dispatch to the post-landing pads.
1619 A region receives a dispatch index if it is directly reachable
1620 and requires in-function processing. Regions that share post-landing
1621 pads may share dispatch indices. */
1622 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1623 (see build_post_landing_pads) so we don't bother checking for it. */
1625 index = 0;
1626 for (i = cfun->eh->last_region_number; i > 0; --i)
1627 if (lp_info[i].directly_reachable)
1628 lp_info[i].dispatch_index = index++;
1630 /* Finally: assign call-site values. In dwarf2 terms, this would be
1631 the region number assigned by convert_to_eh_region_ranges, but
1632 this handles no-action and must-not-throw differently. */
1634 call_site_base = 1;
1635 for (i = cfun->eh->last_region_number; i > 0; --i)
1636 if (lp_info[i].directly_reachable)
1638 int action = lp_info[i].action_index;
1640 /* Map must-not-throw to otherwise unused call-site index 0. */
1641 if (action == -2)
1642 index = 0;
1643 /* Map no-action to otherwise unused call-site index -1. */
1644 else if (action == -1)
1645 index = -1;
1646 /* Otherwise, look it up in the table. */
1647 else
1648 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1650 lp_info[i].call_site_index = index;
1654 static void
1655 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
1657 int last_call_site = -2;
1658 rtx insn, mem;
1660 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1662 struct eh_region *region;
1663 int this_call_site;
1664 rtx note, before, p;
1666 /* Reset value tracking at extended basic block boundaries. */
1667 if (LABEL_P (insn))
1668 last_call_site = -2;
1670 if (! INSN_P (insn))
1671 continue;
1673 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1674 if (!note)
1676 /* Calls (and trapping insns) without notes are outside any
1677 exception handling region in this function. Mark them as
1678 no action. */
1679 if (CALL_P (insn)
1680 || (flag_non_call_exceptions
1681 && may_trap_p (PATTERN (insn))))
1682 this_call_site = -1;
1683 else
1684 continue;
1686 else
1688 /* Calls that are known to not throw need not be marked. */
1689 if (INTVAL (XEXP (note, 0)) <= 0)
1690 continue;
1692 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1693 this_call_site = lp_info[region->region_number].call_site_index;
1696 if (this_call_site == last_call_site)
1697 continue;
1699 /* Don't separate a call from its argument loads. */
1700 before = insn;
1701 if (CALL_P (insn))
1702 before = find_first_parameter_load (insn, NULL_RTX);
1704 start_sequence ();
1705 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
1706 sjlj_fc_call_site_ofs);
1707 emit_move_insn (mem, GEN_INT (this_call_site));
1708 p = get_insns ();
1709 end_sequence ();
1711 emit_insn_before (p, before);
1712 last_call_site = this_call_site;
1716 /* Construct the SjLj_Function_Context. */
1718 static void
1719 sjlj_emit_function_enter (rtx dispatch_label)
1721 rtx fn_begin, fc, mem, seq;
1723 fc = cfun->eh->sjlj_fc;
1725 start_sequence ();
1727 /* We're storing this libcall's address into memory instead of
1728 calling it directly. Thus, we must call assemble_external_libcall
1729 here, as we cannot depend on emit_library_call to do it for us. */
1730 assemble_external_libcall (eh_personality_libfunc);
1731 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
1732 emit_move_insn (mem, eh_personality_libfunc);
1734 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
1735 if (cfun->uses_eh_lsda)
1737 char buf[20];
1738 rtx sym;
1740 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
1741 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1742 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1743 emit_move_insn (mem, sym);
1745 else
1746 emit_move_insn (mem, const0_rtx);
1748 #ifdef DONT_USE_BUILTIN_SETJMP
1750 rtx x, note;
1751 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
1752 TYPE_MODE (integer_type_node), 1,
1753 plus_constant (XEXP (fc, 0),
1754 sjlj_fc_jbuf_ofs), Pmode);
1756 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
1757 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
1759 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
1760 TYPE_MODE (integer_type_node), 0, dispatch_label);
1762 #else
1763 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
1764 dispatch_label);
1765 #endif
1767 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1768 1, XEXP (fc, 0), Pmode);
1770 seq = get_insns ();
1771 end_sequence ();
1773 /* ??? Instead of doing this at the beginning of the function,
1774 do this in a block that is at loop level 0 and dominates all
1775 can_throw_internal instructions. */
1777 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
1778 if (NOTE_P (fn_begin)
1779 && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
1780 || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
1781 break;
1782 if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
1783 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
1784 else
1786 rtx last = BB_END (single_succ (ENTRY_BLOCK_PTR));
1787 for (; ; fn_begin = NEXT_INSN (fn_begin))
1788 if ((NOTE_P (fn_begin)
1789 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
1790 || fn_begin == last)
1791 break;
1792 emit_insn_after (seq, fn_begin);
1796 /* Call back from expand_function_end to know where we should put
1797 the call to unwind_sjlj_unregister_libfunc if needed. */
1799 void
1800 sjlj_emit_function_exit_after (rtx after)
1802 cfun->eh->sjlj_exit_after = after;
1805 static void
1806 sjlj_emit_function_exit (void)
1808 rtx seq;
1809 edge e;
1810 edge_iterator ei;
1812 start_sequence ();
1814 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1815 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
1817 seq = get_insns ();
1818 end_sequence ();
1820 /* ??? Really this can be done in any block at loop level 0 that
1821 post-dominates all can_throw_internal instructions. This is
1822 the last possible moment. */
1824 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1825 if (e->flags & EDGE_FALLTHRU)
1826 break;
1827 if (e)
1829 rtx insn;
1831 /* Figure out whether the place we are supposed to insert the libcall
1832 is inside the last basic block or after it. In the latter case
1833 we need to emit it on the edge. */
1834 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
1835 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
1837 if (insn == cfun->eh->sjlj_exit_after)
1839 if (LABEL_P (insn))
1840 insn = NEXT_INSN (insn);
1841 emit_insn_after (seq, insn);
1842 return;
1844 if (insn == BB_END (e->src))
1845 break;
1847 insert_insn_on_edge (seq, e);
1851 static void
1852 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1854 int i, first_reachable;
1855 rtx mem, dispatch, seq, fc;
1856 rtx before;
1857 basic_block bb;
1858 edge e;
1860 fc = cfun->eh->sjlj_fc;
1862 start_sequence ();
1864 emit_label (dispatch_label);
1866 #ifndef DONT_USE_BUILTIN_SETJMP
1867 expand_builtin_setjmp_receiver (dispatch_label);
1868 #endif
1870 /* Load up dispatch index, exc_ptr and filter values from the
1871 function context. */
1872 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
1873 sjlj_fc_call_site_ofs);
1874 dispatch = copy_to_reg (mem);
1876 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
1877 if (word_mode != ptr_mode)
1879 #ifdef POINTERS_EXTEND_UNSIGNED
1880 mem = convert_memory_address (ptr_mode, mem);
1881 #else
1882 mem = convert_to_mode (ptr_mode, mem, 0);
1883 #endif
1885 emit_move_insn (cfun->eh->exc_ptr, mem);
1887 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
1888 emit_move_insn (cfun->eh->filter, mem);
1890 /* Jump to one of the directly reachable regions. */
1891 /* ??? This really ought to be using a switch statement. */
1893 first_reachable = 0;
1894 for (i = cfun->eh->last_region_number; i > 0; --i)
1896 if (! lp_info[i].directly_reachable)
1897 continue;
1899 if (! first_reachable)
1901 first_reachable = i;
1902 continue;
1905 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
1906 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
1907 cfun->eh->region_array[i]->post_landing_pad);
1910 seq = get_insns ();
1911 end_sequence ();
1913 before = cfun->eh->region_array[first_reachable]->post_landing_pad;
1915 bb = emit_to_new_bb_before (seq, before);
1916 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1917 e->count = bb->count;
1918 e->probability = REG_BR_PROB_BASE;
1921 static void
1922 sjlj_build_landing_pads (void)
1924 struct sjlj_lp_info *lp_info;
1926 lp_info = xcalloc (cfun->eh->last_region_number + 1,
1927 sizeof (struct sjlj_lp_info));
1929 if (sjlj_find_directly_reachable_regions (lp_info))
1931 rtx dispatch_label = gen_label_rtx ();
1933 cfun->eh->sjlj_fc
1934 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1935 int_size_in_bytes (sjlj_fc_type_node),
1936 TYPE_ALIGN (sjlj_fc_type_node));
1938 sjlj_assign_call_site_values (dispatch_label, lp_info);
1939 sjlj_mark_call_sites (lp_info);
1941 sjlj_emit_function_enter (dispatch_label);
1942 sjlj_emit_dispatch_table (dispatch_label, lp_info);
1943 sjlj_emit_function_exit ();
1946 free (lp_info);
1949 void
1950 finish_eh_generation (void)
1952 basic_block bb;
1954 /* Nothing to do if no regions created. */
1955 if (cfun->eh->region_tree == NULL)
1956 return;
1958 /* The object here is to provide find_basic_blocks with detailed
1959 information (via reachable_handlers) on how exception control
1960 flows within the function. In this first pass, we can include
1961 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
1962 regions, and hope that it will be useful in deleting unreachable
1963 handlers. Subsequently, we will generate landing pads which will
1964 connect many of the handlers, and then type information will not
1965 be effective. Still, this is a win over previous implementations. */
1967 /* These registers are used by the landing pads. Make sure they
1968 have been generated. */
1969 get_exception_pointer (cfun);
1970 get_exception_filter (cfun);
1972 /* Construct the landing pads. */
1974 assign_filter_values ();
1975 build_post_landing_pads ();
1976 connect_post_landing_pads ();
1977 if (USING_SJLJ_EXCEPTIONS)
1978 sjlj_build_landing_pads ();
1979 else
1980 dw2_build_landing_pads ();
1982 cfun->eh->built_landing_pads = 1;
1984 /* We've totally changed the CFG. Start over. */
1985 find_exception_handler_labels ();
1986 break_superblocks ();
1987 if (USING_SJLJ_EXCEPTIONS)
1988 commit_edge_insertions ();
1989 FOR_EACH_BB (bb)
1991 edge e;
1992 edge_iterator ei;
1993 bool eh = false;
1994 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1996 if (e->flags & EDGE_EH)
1998 remove_edge (e);
1999 eh = true;
2001 else
2002 ei_next (&ei);
2004 if (eh)
2005 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2009 static hashval_t
2010 ehl_hash (const void *pentry)
2012 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2014 /* 2^32 * ((sqrt(5) - 1) / 2) */
2015 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2016 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
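/* (Explanatory note, not in the original: this is Knuth's
   multiplicative hashing. Multiplying the label number by the golden
   ratio scaled to 2^32 scatters consecutive CODE_LABEL_NUMBER values
   across the full hash range.) */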
2019 static int
2020 ehl_eq (const void *pentry, const void *pdata)
2022 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2023 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2025 return entry->label == data->label;
2028 /* This section handles removing dead code for flow. */
2030 /* Remove LABEL from exception_handler_label_map. */
2032 static void
2033 remove_exception_handler_label (rtx label)
2035 struct ehl_map_entry **slot, tmp;
2037 /* If exception_handler_label_map was not built yet,
2038 there is nothing to do. */
2039 if (cfun->eh->exception_handler_label_map == NULL)
2040 return;
2042 tmp.label = label;
2043 slot = (struct ehl_map_entry **)
2044 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2045 gcc_assert (slot);
2047 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
        {
          cfun->eh->region_array[i] = outer;
        }
    }

  if (outer)
    {
      if (!outer->aka)
        outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
        bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
        p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
           try->type == ERT_CATCH;
           try = try->next_peer)
        continue;
      gcc_assert (try->type == ERT_TRY);

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
        next->u.catch.prev_catch = prev;
      else
        try->u.try.last_catch = prev;
      if (prev)
        prev->u.catch.next_catch = next;
      else
        {
          try->u.try.catch = next;
          if (! next)
            remove_eh_handler (try);
        }
    }
}

/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}

/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
                 (void *) &callback);
}

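/* Traversal helper for for_each_eh_label: unpack one hash table entry
   and hand its label to the user's callback.  */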
static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}

/* Invoke CALLBACK for every exception region in the current function.  */

void
for_each_eh_region (void (*callback) (struct eh_region *))
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region)
        (*callback) (region);
    }
}

/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
  bool saw_any_handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (tree handled, tree type)
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if (TREE_VALUE (t) == type)
          return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
        if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
          return 1;
    }

  return 0;
}

/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (struct reachable_info *info,
                       struct eh_region *lp_region, struct eh_region *region)
{
  if (! info)
    return;

  info->saw_any_handlers = true;

  if (cfun->eh->built_landing_pads)
    info->callback (lp_region, info->callback_data);
  else
    info->callback (region, info->callback_data);
}

/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
                      struct reachable_info *info)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
         directly to the individual handlers.  In this way we can
         see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
        struct eh_region *c;
        enum reachable_code ret = RNL_NOT_CAUGHT;

        for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
          {
            /* A catch-all handler ends the search.  */
            if (c->u.catch.type_list == NULL)
              {
                add_reachable_handler (info, region, c);
                return RNL_CAUGHT;
              }

            if (type_thrown)
              {
                /* If we have at least one type match, end the search.  */
                tree tp_node = c->u.catch.type_list;

                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (type == type_thrown
                        || (lang_eh_type_covers
                            && (*lang_eh_type_covers) (type, type_thrown)))
                      {
                        add_reachable_handler (info, region, c);
                        return RNL_CAUGHT;
                      }
                  }

                /* If we have definitive information of a match failure,
                   the catch won't trigger.  */
                if (lang_eh_type_covers)
                  return RNL_NOT_CAUGHT;
              }

            /* At this point, we either don't know what type is thrown or
               don't have front-end assistance to help deciding if it is
               covered by one of the types in the list for this region.

               We'd then like to add this region to the list of reachable
               handlers since it is indeed potentially reachable based on the
               information we have.

               Actually, this handler is for sure not reachable if all the
               types it matches have already been caught.  That is, it is only
               potentially reachable if at least one of the types it catches
               has not been previously caught.  */

            if (! info)
              ret = RNL_MAYBE_CAUGHT;
            else
              {
                tree tp_node = c->u.catch.type_list;
                bool maybe_reachable = false;

                /* Compute the potential reachability of this handler and
                   update the list of types caught at the same time.  */
                for (; tp_node; tp_node = TREE_CHAIN (tp_node))
                  {
                    tree type = TREE_VALUE (tp_node);

                    if (! check_handled (info->types_caught, type))
                      {
                        info->types_caught
                          = tree_cons (NULL, type, info->types_caught);

                        maybe_reachable = true;
                      }
                  }

                if (maybe_reachable)
                  {
                    add_reachable_handler (info, region, c);

                    /* ??? If the catch type is a base class of every allowed
                       type, then we know we can stop the search.  */
                    ret = RNL_MAYBE_CAUGHT;
                  }
              }
          }

        return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }

      /* Collect a list of lists of allowed types for use in detecting
         when a catch may be transformed into a catch-all.  */
      if (info)
        info->types_allowed = tree_cons (NULL_TREE,
                                         region->u.allowed.type_list,
                                         info->types_allowed);

      /* If we have definitive information about the type hierarchy,
         then we can tell if the thrown type will pass through the
         filter.  */
      if (type_thrown && lang_eh_type_covers)
        {
          if (check_handled (region->u.allowed.type_list, type_thrown))
            return RNL_NOT_CAUGHT;
          else
            {
              add_reachable_handler (info, region, region);
              return RNL_CAUGHT;
            }
        }

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
         If we've touched down at some landing pad previously, then the
         explicit function call we generated may be used.  Otherwise
         the call is made by the runtime.

         Before inlining, do not perform this optimization.  We may
         inline a subroutine that contains handlers, and that will
         change the value of saw_any_handlers.  */

      if ((info && info->saw_any_handlers) || !cfun->after_inlining)
        {
          add_reachable_handler (info, region, region);
          return RNL_CAUGHT;
        }
      else
        return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      gcc_unreachable ();
      break;
    default:
      gcc_unreachable ();
    }
}

/* Invoke CALLBACK on each region reachable from REGION_NUMBER.  */

void
foreach_reachable_handler (int region_number, bool is_resx,
                           void (*callback) (struct eh_region *, void *),
                           void *callback_data)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;

  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
         region itself may have been deleted out from under us.  */
      if (region == NULL)
        return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
        break;
      /* If we have processed one cleanup, there is no point in
         processing any more of them.  Each cleanup will have an edge
         to the next outer cleanup region, so the flow graph will be
         accurate.  */
      if (region->type == ERT_CLEANUP)
        region = region->u.cleanup.prev_try;
      else
        region = region->outer;
    }
}

/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

static void
arh_to_landing_pad (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  if (! *p_handlers)
    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
}

static void
arh_to_label (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
}

rtx
reachable_handlers (rtx insn)
{
  bool is_resx = false;
  rtx handlers = NULL;
  int region_number;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      region_number = XINT (PATTERN (insn), 0);
      is_resx = true;
    }
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  foreach_reachable_handler (region_number, is_resx,
                             (cfun->eh->built_landing_pads
                              ? arh_to_landing_pad
                              : arh_to_label),
                             &handlers);

  return handlers;
}

/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal_1 (int region_number)
{
  struct eh_region *region;
  tree type_thrown;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
        return false;
      if (how != RNL_NOT_CAUGHT)
        return true;
    }

  return false;
}

bool
can_throw_internal (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_internal_1 (XINT (PATTERN (insn), 0));

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
}

/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external_1 (int region_number)
{
  struct eh_region *region;
  tree type_thrown;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}

bool
can_throw_external (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
         exception handling region in this function.  We have to
         assume it might throw.  Given that the front end and middle
         ends mark known NOTHROW functions, this isn't so wildly
         inaccurate.  */
      return (CALL_P (insn)
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)));
}

/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls.  */

void
set_nothrow_function_flags (void)
{
  rtx insn;

  TREE_NOTHROW (current_function_decl) = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        TREE_NOTHROW (current_function_decl) = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
        TREE_NOTHROW (current_function_decl) = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            cfun->all_throwers_are_sibcalls = 0;
            return;
          }
      }
}

/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}

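/* Expand __builtin_eh_return_data_regno: map the constant argument
   through EH_RETURN_DATA_REGNO and the debugging register numbering,
   returning -1 for an invalid index.  */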
rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}

/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}

/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}

/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
                          tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}

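/* Emit the epilogue code needed when __builtin_eh_return was used in
   this function: restore the saved stack adjustment and transfer to the
   handler, via either the eh_return pattern or EH_RETURN_HANDLER_RTX.  */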
void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}

/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do
     this also for consistency.  */
  extend = 1;
#endif

  return convert_modes (word_mode, ptr_mode, addr, extend);
}

/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

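/* Equality function for the action record hash table: records match
   when both the filter value and the next index are equal.  */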
static int
action_record_eq (const void *pentry, const void *pdata)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

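/* Hash function for the action record hash table, mixing the filter
   value and the next index.  */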
static hashval_t
action_record_hash (const void *pentry)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}

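/* Enter an action record for (FILTER, NEXT) into AR_HASH, appending it
   to action_record_data if it is not already present, and return its
   1-based offset in the action table.  */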
static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
         record is a "self-relative" byte offset, or zero to indicate
         that there is no next record.  So convert the absolute 1 based
         indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
        next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}

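/* Walk outward from REGION, building (via add_action_record) the chain
   of action records that applies to a throw from within it.  Return the
   1-based index of the first record, or one of the special negative
   values documented above.  */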
static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
         there are special cases to look out for.  If there are *only*
         cleanups along a path, then it compresses to a zero action.
         Further, if there are multiple cleanups along a path, we only
         need to represent one of them, as that is enough to trigger
         entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
        return 0;
      for (c = region->outer; c ; c = c->outer)
        if (c->type == ERT_CLEANUP)
          return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
         If there's a catch-all handler, then we don't need to
         search outer regions.  Use a magic -3 value to record
         that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
        {
          if (c->u.catch.type_list == NULL)
            {
              /* Retrieve the filter from the head of the filter list
                 where we have stored it (see assign_filter_values).  */
              int filter
                = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

              next = add_action_record (ar_hash, filter, 0);
            }
          else
            {
              /* Once the outer search is done, trigger an action record for
                 each filter we have.  */
              tree flt_node;

              if (next == -3)
                {
                  next = collect_one_action_chain (ar_hash, region->outer);

                  /* If there is no next action, terminate the chain.  */
                  if (next == -1)
                    next = 0;
                  /* If all outer actions are cleanups or must_not_throw,
                     we'll have no action record for it, since we had wanted
                     to encode these states in the call-site record directly.
                     Add a cleanup action to the chain to catch these.  */
                  else if (next <= 0)
                    next = add_action_record (ar_hash, 0, 0);
                }

              flt_node = c->u.catch.filter_list;
              for (; flt_node; flt_node = TREE_CHAIN (flt_node))
                {
                  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
                  next = add_action_record (ar_hash, filter, next);
                }
            }
        }
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
         beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
        next = 0;
      /* If all outer actions are cleanups or must_not_throw,
         we'll have no action record for it, since we had wanted
         to encode these states in the call-site record directly.
         Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
        next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
         requires no call-site entry.  Note that this differs from
         the no handler or cleanup case in that we do require an lsda
         to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
         for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}

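/* Append a (LANDING_PAD, ACTION) pair to the call-site data for the
   current function, growing the array as needed, and return the index
   of the new entry biased by call_site_base.  */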
static int
add_call_site (rtx landing_pad, int action)
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}

/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        struct eh_region *region;
        int this_action;
        rtx this_landing_pad;

        insn = iter;
        if (NONJUMP_INSN_P (insn)
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
        if (!note)
          {
            if (! (CALL_P (insn)
                   || (flag_non_call_exceptions
                       && may_trap_p (PATTERN (insn)))))
              continue;
            this_action = -1;
            region = NULL;
          }
        else
          {
            if (INTVAL (XEXP (note, 0)) <= 0)
              continue;
            region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
            this_action = collect_one_action_chain (ar_hash, region);
          }

        /* Existence of catch handlers, or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          cfun->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
          }

        /* Cleanups and handlers may share action chains but not
           landing pads.  Collect the landing pad for this region.  */
        if (this_action >= 0)
          {
            struct eh_region *o;
            for (o = region; ! o->landing_pad ; o = o->outer)
              continue;
            this_landing_pad = o->landing_pad;
          }
        else
          this_landing_pad = NULL_RTX;

        /* Differing actions or landing pads implies a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL_RTX;
                  }

                note = emit_note_after (NOTE_INSN_EH_REGION_END,
                                        last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }
        last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}

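/* Append VALUE to *DATA_AREA in unsigned LEB128 encoding: seven bits
   per byte, low-order first, high bit set on all but the last byte.  */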
static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

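/* Append VALUE to *DATA_AREA in signed LEB128 encoding; emission stops
   once the remaining bits are all copies of the sign bit and the sign
   has been encoded.  */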
static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}

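/* Compute the byte size of the call-site table that the matching output
   routine below will emit; only needed when the assembler cannot emit
   .uleb128 directives and the LSDA header must state the length itself.  */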
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif

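/* Emit the call-site table used with DWARF2 unwinding: for each entry,
   the region start, length and landing pad (all relative to the function
   begin label) followed by the action record offset.  */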
static void
dw2_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
                                    current_function_func_begin_label,
                                    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                    "length");
      if (cs->landing_pad)
        dw2_asm_output_delta_uleb128 (landing_pad_lab,
                                      current_function_func_begin_label,
                                      "landing pad");
      else
        dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
                            current_function_func_begin_label,
                            "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta (4, landing_pad_lab,
                              current_function_func_begin_label,
                              "landing pad");
      else
        dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

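/* Emit the call-site table used with setjmp/longjmp exceptions: only
   the landing pad value (stored as a constant) and the action record
   offset are needed per entry.  */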
static void
sjlj_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
{
  if (targetm.have_named_sections)
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
        {
          int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

          flags = (! flag_pic
                   || ((tt_format & 0x70) != DW_EH_PE_absptr
                       && (tt_format & 0x70) != DW_EH_PE_aligned))
                  ? 0 : SECTION_WRITE;
        }
      else
        flags = SECTION_WRITE;
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}

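/* Output the LSDA (language-specific data area) for the current
   function: the header, the call-site table, the action record table,
   and the type (@TType) and exception specification tables.  */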
void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  assemble_external_libcall (eh_personality_libfunc);
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  targetm.asm_out.exception_section ();
#endif

  have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
                                   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
                                  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
                                   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
                    + (VEC_length (tree, cfun->eh->ttype_data)
                       * tt_format_size));

      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
                               current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
                               current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VEC_length (tree, cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
        value = const0_rtx;
      else
        {
          struct cgraph_varpool_node *node;

          type = lookup_type_for_runtime (type);
          value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

          /* Let cgraph know that the rtti decl is used.  Not all of the
             paths below go through assemble_integer, which would take
             care of this for us.  */
          STRIP_NOPS (type);
          if (TREE_CODE (type) == ADDR_EXPR)
            {
              type = TREE_OPERAND (type, 0);
              if (TREE_CODE (type) == VAR_DECL)
                {
                  node = cgraph_varpool_node (type);
                  if (node)
                    cgraph_varpool_mark_needed_node (node);
                }
            }
          else
            gcc_assert (TREE_CODE (type) == INTEGER_CST);
        }

      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
        assemble_integer (value, tt_format_size,
                          tt_format_size * BITS_PER_UNIT, 1);
      else
        dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
                         (i ? NULL : "Exception specification table"));

  current_function_section (current_function_decl);
}

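/* Accessors for the per-function hash table mapping throwing
   statements to EH region numbers.  */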
void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}

/* Dump EH information to OUT.  */
void
dump_eh_tree (FILE *out, struct function *fun)
{
  struct eh_region *i;
  int depth = 0;
  static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
                                           "allowed_exceptions", "must_not_throw",
                                           "throw", "fixup"};

  i = fun->eh->region_tree;
  if (! i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
               i->region_number, type_name [(int) i->type]);
      if (i->tree_label)
        {
          fprintf (out, " tree_label:");
          print_generic_expr (out, i->tree_label, 0);
        }
      fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            depth--;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Verify some basic invariants on EH datastructures.  Could be extended to
   catch more.  */
void
verify_eh_tree (struct function *fun)
{
  struct eh_region *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  i = fun->eh->region_tree;
  if (! i)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if (fun->eh->region_array[j])
      {
        count++;
        if (fun->eh->region_array[j]->region_number != j)
          {
            error ("region_array is corrupted for region %i", j);
            err = true;
          }
      }

  while (1)
    {
      if (fun->eh->region_array[i->region_number] != i)
        {
          error ("region_array is corrupted for region %i", i->region_number);
          err = true;
        }
      if (i->outer != outer)
        {
          error ("outer block of region %i is wrong", i->region_number);
          err = true;
        }
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
        {
          error ("region %i may contain throw and is contained in region that may not",
                 i->region_number);
          err = true;
        }
      if (depth < 0)
        {
          error ("negative nesting depth of region %i", i->region_number);
          err = true;
        }
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
        outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            depth--;
            if (i == NULL)
              {
                if (depth != -1)
                  {
                    error ("Tree list ends on depth %i", depth + 1);
                    err = true;
                  }
                if (count != nvisited)
                  {
                    error ("array does not match the region tree");
                    err = true;
                  }
                if (err)
                  {
                    dump_eh_tree (stderr, fun);
                    internal_error ("verify_eh_tree failed.");
                  }
                return;
              }
            outer = i->outer;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

#include "gt-except.h"