/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
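
/* As a rough sketch only, the source-level construct this machinery
   compiles looks, in C++ terms, like

	try { may_throw (); }		// becomes an ERT_TRY region
	catch (const E &e) { ... }	// becomes an ERT_CATCH region

   where a throw out of may_throw transfers control to the matching
   catch clause, possibly several frames up the stack.  The names
   may_throw and E are of course hypothetical.  */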

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
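
/* As a sketch (unwind-sjlj.c holds the authoritative definition), the
   structure these offsets describe is roughly

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;
	  int call_site;
	  _Unwind_Word data[4];
	  _Unwind_Personality_Fn personality;
	  void *lsda;
	  jmp_buf jbuf;
	};

   and the sjlj_fc_*_ofs variables above cache the byte offsets of
   these fields so the rtl expanders can address them directly.  */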

/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
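
/* A sketch of how the pieces connect for a single C++ try statement

	try { ... } catch (A a) { ... } catch (B b) { ... }

   gen_eh_region_try creates one ERT_TRY region, and each handler gets
   an ERT_CATCH region created as a *peer* of the try (both share the
   try's outer region; see gen_eh_region_catch below).  The catches
   are linked among themselves via u.catch.next_catch/prev_catch, and
   the try's u.try.catch points at the first handler in source order.  */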

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);

/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know the alignment requirements of the runtime's
	 jmp_buf.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
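
/* As a purely illustrative example: on an ILP32 target these offsets
   would typically come out as __call_site at 4 (just past the 4-byte
   __prev pointer), __data at 8, __personality at 24, __lsda at 28 and
   __jbuf at 32 -- though nothing here depends on any particular
   values, which is exactly why they are computed from the layout.  */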

void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}

struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = cfun->eh->region_array[region_nr];

  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
      fun->eh->filter = filter;
    }
  return filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
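
/* A small worked example of the walk above: for a region tree

	1
	+-- 2
	|   +-- 3
	+-- 4

   (2 and 4 children of 1, 3 a child of 2), the visit order is 1, 2, 3,
   then back out through 2 to its peer 4.  That is, a depth-first
   left-to-right traversal done without recursion.  */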

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* A TRY region is reachable if any of its CATCH regions
		   is reachable.  */
		struct eh_region *c;
		for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need
     to do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->tree_label)
	region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}

static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}
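
/* So the first distinct type registered gets filter value 1, the
   second 2, and so on; value 0 is never handed out here, matching its
   conventional use in EH tables to mean "no type, cleanup only".  */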

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
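
/* For instance (a sketch): an exception specification `throw (A, B)'
   whose types were assigned ttype filters 1 and 2 appends the bytes
   0x01 0x02 0x00 to ehspec_data -- each filter as a uleb128, then the
   terminating zero.  Filters small enough to fit in 7 bits occupy a
   single byte each.  */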

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	     all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0,
				   region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}

static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}


struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin)
	&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
	    || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
      break;
  if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    {
      rtx last = BB_END (single_succ (ENTRY_BLOCK_PTR));
      for (; ; fn_begin = NEXT_INSN (fn_begin))
	if ((NOTE_P (fn_begin)
	     && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	    || fn_begin == last)
	  break;
      emit_insn_after (seq, fn_begin);
    }
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
	 libcall is inside the last basic block or after it.  In the
	 latter case we need to emit the sequence on the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
	{
	  if (insn == cfun->eh->sjlj_exit_after)
	    {
	      if (LABEL_P (insn))
		insn = NEXT_INSN (insn);
	      emit_insn_after (seq, insn);
	      return;
	    }
	  if (insn == BB_END (e->src))
	    break;
	}
      insert_insn_on_edge (seq, e);
    }
}

static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = cfun->eh->region_array[first_reachable]->post_landing_pad;

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}

static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = xcalloc (cfun->eh->last_region_number + 1,
		     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (eh)
	rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}

static hashval_t
ehl_hash (const void *pentry)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}

static int
ehl_eq (const void *pentry, const void *pdata)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}

/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}

/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
	{
	  cfun->eh->region_array[i] = outer;
	}
    }

  if (outer)
    {
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      gcc_assert (try->type == ERT_TRY);

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}

/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}

/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
		 (void *) &callback);
}

static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}

/* Invoke CALLBACK for every exception region in the current function.  */

void
for_each_eh_region (void (*callback) (struct eh_region *))
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region)
	(*callback) (region);
    }
}
2071 /* This section describes CFG exception edges for flow. */
2073 /* For communicating between calls to reachable_next_level. */
2074 struct reachable_info
2076 tree types_caught;
2077 tree types_allowed;
2078 void (*callback) (struct eh_region *, void *);
2079 void *callback_data;
2080 bool saw_any_handlers;
2083 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2084 base class of TYPE, is in HANDLED. */
2086 static int
2087 check_handled (tree handled, tree type)
2089 tree t;
2091 /* We can check for exact matches without front-end help. */
2092 if (! lang_eh_type_covers)
2094 for (t = handled; t ; t = TREE_CHAIN (t))
2095 if (TREE_VALUE (t) == type)
2096 return 1;
2098 else
2100 for (t = handled; t ; t = TREE_CHAIN (t))
2101 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2102 return 1;
2105 return 0;
2108 /* A subroutine of reachable_next_level. If we are collecting a list
2109 of handlers, add one. After landing pad generation, reference
2110 it instead of the handlers themselves. Further, the handlers are
2111 all wired together, so by referencing one, we've got them all.
2112 Before landing pad generation we reference each handler individually.
2114 LP_REGION contains the landing pad; REGION is the handler. */
2116 static void
2117 add_reachable_handler (struct reachable_info *info,
2118 struct eh_region *lp_region, struct eh_region *region)
2120 if (! info)
2121 return;
2123 info->saw_any_handlers = true;
2125 if (cfun->eh->built_landing_pads)
2126 info->callback (lp_region, info->callback_data);
2127 else
2128 info->callback (region, info->callback_data);
2131 /* Process one level of exception regions for reachability.
2132 If TYPE_THROWN is non-null, then it is the *exact* type being
2133 propagated. If INFO is non-null, then collect handler labels
2134 and caught/allowed type information between invocations. */
2136 static enum reachable_code
2137 reachable_next_level (struct eh_region *region, tree type_thrown,
2138 struct reachable_info *info)
2140 switch (region->type)
2142 case ERT_CLEANUP:
2143 /* Before landing-pad generation, we model control flow
2144 directly to the individual handlers. In this way we can
2145 see that catch handler types may shadow one another. */
2146 add_reachable_handler (info, region, region);
2147 return RNL_MAYBE_CAUGHT;
2149 case ERT_TRY:
2151 struct eh_region *c;
2152 enum reachable_code ret = RNL_NOT_CAUGHT;
2154 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2156 /* A catch-all handler ends the search. */
2157 if (c->u.catch.type_list == NULL)
2159 add_reachable_handler (info, region, c);
2160 return RNL_CAUGHT;
2163 if (type_thrown)
2165 /* If we have at least one type match, end the search. */
2166 tree tp_node = c->u.catch.type_list;
2168 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2170 tree type = TREE_VALUE (tp_node);
2172 if (type == type_thrown
2173 || (lang_eh_type_covers
2174 && (*lang_eh_type_covers) (type, type_thrown)))
2176 add_reachable_handler (info, region, c);
2177 return RNL_CAUGHT;
2181 /* If we have definitive information of a match failure,
2182 the catch won't trigger. */
2183 if (lang_eh_type_covers)
2184 return RNL_NOT_CAUGHT;
2187 /* At this point, we either don't know what type is thrown or
2188 don't have front-end assistance to help deciding if it is
2189 covered by one of the types in the list for this region.
2191 We'd then like to add this region to the list of reachable
2192 handlers since it is indeed potentially reachable based on the
2193 information we have.
2195 Actually, this handler is for sure not reachable if all the
2196 types it matches have already been caught. That is, it is only
2197 potentially reachable if at least one of the types it catches
2198 has not been previously caught. */
2200 if (! info)
2201 ret = RNL_MAYBE_CAUGHT;
2202 else
2204 tree tp_node = c->u.catch.type_list;
2205 bool maybe_reachable = false;
2207 /* Compute the potential reachability of this handler and
2208 update the list of types caught at the same time. */
2209 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2211 tree type = TREE_VALUE (tp_node);
2213 if (! check_handled (info->types_caught, type))
2215 info->types_caught
2216 = tree_cons (NULL, type, info->types_caught);
2218 maybe_reachable = true;
2222 if (maybe_reachable)
2224 add_reachable_handler (info, region, c);
2226 /* ??? If the catch type is a base class of every allowed
2227 type, then we know we can stop the search. */
2228 ret = RNL_MAYBE_CAUGHT;
2233 return ret;
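/* (Illustration, assuming a C++-style front end that supplies
lang_eh_type_covers: after "catch (Derived)", a second
"catch (Derived)" finds its only type already in types_caught and is
dropped from the CFG, while a following "catch (Base)" remains
reachable, because catching Derived does not cover a thrown Base.)  */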
2236 case ERT_ALLOWED_EXCEPTIONS:
2237 /* An empty list of types definitely ends the search. */
2238 if (region->u.allowed.type_list == NULL_TREE)
2240 add_reachable_handler (info, region, region);
2241 return RNL_CAUGHT;
2244 /* Collect a list of lists of allowed types for use in detecting
2245 when a catch may be transformed into a catch-all. */
2246 if (info)
2247 info->types_allowed = tree_cons (NULL_TREE,
2248 region->u.allowed.type_list,
2249 info->types_allowed);
2251 /* If we have definitive information about the type hierarchy,
2252 then we can tell if the thrown type will pass through the
2253 filter. */
2254 if (type_thrown && lang_eh_type_covers)
2256 if (check_handled (region->u.allowed.type_list, type_thrown))
2257 return RNL_NOT_CAUGHT;
2258 else
2260 add_reachable_handler (info, region, region);
2261 return RNL_CAUGHT;
2265 add_reachable_handler (info, region, region);
2266 return RNL_MAYBE_CAUGHT;
2268 case ERT_CATCH:
2269 /* Catch regions are handled by their controlling try region. */
2270 return RNL_NOT_CAUGHT;
2272 case ERT_MUST_NOT_THROW:
2273 /* Here we end our search, since no exceptions may propagate.
2274 If we've touched down at some landing pad previously, then the
2275 explicit function call we generated may be used. Otherwise
2276 the call is made by the runtime. */
2277 if (info && info->saw_any_handlers)
2279 add_reachable_handler (info, region, region);
2280 return RNL_CAUGHT;
2282 else
2283 return RNL_BLOCKED;
2285 case ERT_THROW:
2286 case ERT_FIXUP:
2287 case ERT_UNKNOWN:
2288 /* Shouldn't see these here. */
2289 gcc_unreachable ();
2290 break;
2291 default:
2292 gcc_unreachable ();
2296 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2298 void
2299 foreach_reachable_handler (int region_number, bool is_resx,
2300 void (*callback) (struct eh_region *, void *),
2301 void *callback_data)
2303 struct reachable_info info;
2304 struct eh_region *region;
2305 tree type_thrown;
2307 memset (&info, 0, sizeof (info));
2308 info.callback = callback;
2309 info.callback_data = callback_data;
2311 region = cfun->eh->region_array[region_number];
2313 type_thrown = NULL_TREE;
2314 if (is_resx)
2316 /* A RESX leaves a region instead of entering it. Thus the
2317 region itself may have been deleted out from under us. */
2318 if (region == NULL)
2319 return;
2320 region = region->outer;
2322 else if (region->type == ERT_THROW)
2324 type_thrown = region->u.throw.type;
2325 region = region->outer;
2328 while (region)
2330 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2331 break;
2332 /* If we have processed one cleanup, there is no point in
2333 processing any more of them. Each cleanup will have an edge
2334 to the next outer cleanup region, so the flow graph will be
2335 accurate. */
2336 if (region->type == ERT_CLEANUP)
2337 region = region->u.cleanup.prev_try;
2338 else
2339 region = region->outer;
2343 /* Retrieve a list of labels of exception handlers which can be
2344 reached by a given insn. */
2346 static void
2347 arh_to_landing_pad (struct eh_region *region, void *data)
2349 rtx *p_handlers = data;
2350 if (! *p_handlers)
2351 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2354 static void
2355 arh_to_label (struct eh_region *region, void *data)
2357 rtx *p_handlers = data;
2358 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2361 rtx
2362 reachable_handlers (rtx insn)
2364 bool is_resx = false;
2365 rtx handlers = NULL;
2366 int region_number;
2368 if (JUMP_P (insn)
2369 && GET_CODE (PATTERN (insn)) == RESX)
2371 region_number = XINT (PATTERN (insn), 0);
2372 is_resx = true;
2374 else
2376 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2377 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2378 return NULL;
2379 region_number = INTVAL (XEXP (note, 0));
2382 foreach_reachable_handler (region_number, is_resx,
2383 (cfun->eh->built_landing_pads
2384 ? arh_to_landing_pad
2385 : arh_to_label),
2386 &handlers);
2388 return handlers;
2391 /* Determine if the given INSN can throw an exception that is caught
2392 within the function. */
2394 bool
2395 can_throw_internal_1 (int region_number)
2397 struct eh_region *region;
2398 tree type_thrown;
2400 region = cfun->eh->region_array[region_number];
2402 type_thrown = NULL_TREE;
2403 if (region->type == ERT_THROW)
2405 type_thrown = region->u.throw.type;
2406 region = region->outer;
2409 /* If this exception is ignored by each and every containing region,
2410 then control passes straight out. The runtime may handle some
2411 regions, which also do not require processing internally. */
2412 for (; region; region = region->outer)
2414 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2415 if (how == RNL_BLOCKED)
2416 return false;
2417 if (how != RNL_NOT_CAUGHT)
2418 return true;
2421 return false;
2424 bool
2425 can_throw_internal (rtx insn)
2427 rtx note;
2429 if (! INSN_P (insn))
2430 return false;
2432 if (JUMP_P (insn)
2433 && GET_CODE (PATTERN (insn)) == RESX
2434 && XINT (PATTERN (insn), 0) > 0)
2435 return can_throw_internal_1 (XINT (PATTERN (insn), 0));
2437 if (NONJUMP_INSN_P (insn)
2438 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2439 insn = XVECEXP (PATTERN (insn), 0, 0);
2441 /* Every insn that might throw has an EH_REGION note. */
2442 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2443 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2444 return false;
2446 return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
2449 /* Determine if the given INSN can throw an exception that is
2450 visible outside the function. */
2452 bool
2453 can_throw_external_1 (int region_number)
2455 struct eh_region *region;
2456 tree type_thrown;
2458 region = cfun->eh->region_array[region_number];
2460 type_thrown = NULL_TREE;
2461 if (region->type == ERT_THROW)
2463 type_thrown = region->u.throw.type;
2464 region = region->outer;
2467 /* If the exception is caught or blocked by any containing region,
2468 then it is not seen by any calling function. */
2469 for (; region ; region = region->outer)
2470 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2471 return false;
2473 return true;
2476 bool
2477 can_throw_external (rtx insn)
2479 rtx note;
2481 if (! INSN_P (insn))
2482 return false;
2484 if (NONJUMP_INSN_P (insn)
2485 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2486 insn = XVECEXP (PATTERN (insn), 0, 0);
2488 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2489 if (!note)
2491 /* Calls (and trapping insns) without notes are outside any
2492 exception handling region in this function. We have to
2493 assume it might throw. Given that the front end and middle
2494 ends mark known NOTHROW functions, this isn't so wildly
2495 inaccurate. */
2496 return (CALL_P (insn)
2497 || (flag_non_call_exceptions
2498 && may_trap_p (PATTERN (insn))));
2500 if (INTVAL (XEXP (note, 0)) <= 0)
2501 return false;
2503 return can_throw_external_1 (INTVAL (XEXP (note, 0)));
2506 /* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
2508 void
2509 set_nothrow_function_flags (void)
2511 rtx insn;
2513 TREE_NOTHROW (current_function_decl) = 1;
2515 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2516 something that can throw an exception. We specifically exempt
2517 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2518 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2519 is optimistic. */
2521 cfun->all_throwers_are_sibcalls = 1;
2523 if (! flag_exceptions)
2524 return;
2526 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2527 if (can_throw_external (insn))
2529 TREE_NOTHROW (current_function_decl) = 0;
2531 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2533 cfun->all_throwers_are_sibcalls = 0;
2534 return;
2538 for (insn = current_function_epilogue_delay_list; insn;
2539 insn = XEXP (insn, 1))
2540 if (can_throw_external (insn))
2542 TREE_NOTHROW (current_function_decl) = 0;
2544 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2546 cfun->all_throwers_are_sibcalls = 0;
2547 return;
2553 /* Various hooks for unwind library. */
2555 /* Do any necessary initialization to access arbitrary stack frames.
2556 On the SPARC, this means flushing the register windows. */
2558 void
2559 expand_builtin_unwind_init (void)
2561 /* Set this so all the registers get saved in our frame; we need to be
2562 able to copy the saved values for any registers from frames we unwind. */
2563 current_function_has_nonlocal_label = 1;
2565 #ifdef SETUP_FRAME_ADDRESSES
2566 SETUP_FRAME_ADDRESSES ();
2567 #endif
2570 rtx
2571 expand_builtin_eh_return_data_regno (tree arglist)
2573 tree which = TREE_VALUE (arglist);
2574 unsigned HOST_WIDE_INT iwhich;
2576 if (TREE_CODE (which) != INTEGER_CST)
2578 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2579 return constm1_rtx;
2582 iwhich = tree_low_cst (which, 1);
2583 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2584 if (iwhich == INVALID_REGNUM)
2585 return constm1_rtx;
2587 #ifdef DWARF_FRAME_REGNUM
2588 iwhich = DWARF_FRAME_REGNUM (iwhich);
2589 #else
2590 iwhich = DBX_REGISTER_NUMBER (iwhich);
2591 #endif
2593 return GEN_INT (iwhich);
2596 /* Given a value extracted from the return address register or stack slot,
2597 return the actual address encoded in that value. */
2599 rtx
2600 expand_builtin_extract_return_addr (tree addr_tree)
2602 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2604 if (GET_MODE (addr) != Pmode
2605 && GET_MODE (addr) != VOIDmode)
2607 #ifdef POINTERS_EXTEND_UNSIGNED
2608 addr = convert_memory_address (Pmode, addr);
2609 #else
2610 addr = convert_to_mode (Pmode, addr, 0);
2611 #endif
2614 /* First mask out any unwanted bits. */
2615 #ifdef MASK_RETURN_ADDR
2616 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2617 #endif
2619 /* Then adjust to find the real return address. */
2620 #if defined (RETURN_ADDR_OFFSET)
2621 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2622 #endif
2624 return addr;
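/* Illustrative note (added; see the target headers for the real
definitions): a port that keeps status bits in the return-address
register defines MASK_RETURN_ADDR to strip them -- 31-bit s390 masks
off the high bit this way -- while a port whose saved return address
sits a fixed distance from the real one uses RETURN_ADDR_OFFSET to
correct it.  Both hooks are optional, hence the #ifdefs above.  */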
2627 /* Given an actual address in addr_tree, do any necessary encoding
2628 and return the value to be stored in the return address register or
2629 stack slot so the epilogue will return to that address. */
2631 rtx
2632 expand_builtin_frob_return_addr (tree addr_tree)
2634 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2636 addr = convert_memory_address (Pmode, addr);
2638 #ifdef RETURN_ADDR_OFFSET
2639 addr = force_reg (Pmode, addr);
2640 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2641 #endif
2643 return addr;
2646 /* Set up the epilogue with the magic bits we'll need to return to the
2647 exception handler. */
2649 void
2650 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2651 tree handler_tree)
2653 rtx tmp;
2655 #ifdef EH_RETURN_STACKADJ_RTX
2656 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2657 tmp = convert_memory_address (Pmode, tmp);
2658 if (!cfun->eh->ehr_stackadj)
2659 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
2660 else if (tmp != cfun->eh->ehr_stackadj)
2661 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
2662 #endif
2664 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2665 tmp = convert_memory_address (Pmode, tmp);
2666 if (!cfun->eh->ehr_handler)
2667 cfun->eh->ehr_handler = copy_to_reg (tmp);
2668 else if (tmp != cfun->eh->ehr_handler)
2669 emit_move_insn (cfun->eh->ehr_handler, tmp);
2671 if (!cfun->eh->ehr_label)
2672 cfun->eh->ehr_label = gen_label_rtx ();
2673 emit_jump (cfun->eh->ehr_label);
2676 void
2677 expand_eh_return (void)
2679 rtx around_label;
2681 if (! cfun->eh->ehr_label)
2682 return;
2684 current_function_calls_eh_return = 1;
2686 #ifdef EH_RETURN_STACKADJ_RTX
2687 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2688 #endif
2690 around_label = gen_label_rtx ();
2691 emit_jump (around_label);
2693 emit_label (cfun->eh->ehr_label);
2694 clobber_return_register ();
2696 #ifdef EH_RETURN_STACKADJ_RTX
2697 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
2698 #endif
2700 #ifdef HAVE_eh_return
2701 if (HAVE_eh_return)
2702 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
2703 else
2704 #endif
2706 #ifdef EH_RETURN_HANDLER_RTX
2707 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
2708 #else
2709 error ("__builtin_eh_return not supported on this target");
2710 #endif
2713 emit_label (around_label);
2716 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2717 POINTERS_EXTEND_UNSIGNED and return it. */
2719 rtx
2720 expand_builtin_extend_pointer (tree addr_tree)
2722 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2723 int extend;
2725 #ifdef POINTERS_EXTEND_UNSIGNED
2726 extend = POINTERS_EXTEND_UNSIGNED;
2727 #else
2728 /* The previous EH code did an unsigned extend by default, so we do this also
2729 for consistency. */
2730 extend = 1;
2731 #endif
2733 return convert_modes (word_mode, ptr_mode, addr, extend);
2736 /* In the following functions, we represent entries in the action table
2737 as 1-based indices. Special cases are:
2739 0: null action record, non-null landing pad; implies cleanups
2740 -1: null action record, null landing pad; implies no action
2741 -2: no call-site entry; implies must_not_throw
2742 -3: we have yet to process outer regions
2744 Further, no special cases apply to the "next" field of the record.
2745 For next, 0 means end of list. */
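/* A minimal sketch (not part of the original file; the function name
is hypothetical) of how a consumer might interpret these indices:  */
#if 0
static const char *
describe_action_index (int index)
{
  if (index > 0)
    return "1-based offset of an action record";
  switch (index)
    {
    case 0:  return "cleanup only: null action record, non-null landing pad";
    case -1: return "no action: null action record, null landing pad";
    case -2: return "must_not_throw: no call-site entry at all";
    case -3: return "sentinel: outer regions not yet processed";
    default: gcc_unreachable ();
    }
}
#endif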
2747 struct action_record
2749 int offset;
2750 int filter;
2751 int next;
2754 static int
2755 action_record_eq (const void *pentry, const void *pdata)
2757 const struct action_record *entry = (const struct action_record *) pentry;
2758 const struct action_record *data = (const struct action_record *) pdata;
2759 return entry->filter == data->filter && entry->next == data->next;
2762 static hashval_t
2763 action_record_hash (const void *pentry)
2765 const struct action_record *entry = (const struct action_record *) pentry;
2766 return entry->next * 1009 + entry->filter;
2769 static int
2770 add_action_record (htab_t ar_hash, int filter, int next)
2772 struct action_record **slot, *new, tmp;
2774 tmp.filter = filter;
2775 tmp.next = next;
2776 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
2778 if ((new = *slot) == NULL)
2780 new = xmalloc (sizeof (*new));
2781 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2782 new->filter = filter;
2783 new->next = next;
2784 *slot = new;
2786 /* The filter value goes in untouched. The link to the next
2787 record is a "self-relative" byte offset, or zero to indicate
2788 that there is no next record. So convert the absolute 1-based
2789 indices we've been carrying around into a displacement. */
2791 push_sleb128 (&cfun->eh->action_record_data, filter);
2792 if (next)
2793 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2794 push_sleb128 (&cfun->eh->action_record_data, next);
2797 return new->offset;
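/* Worked example (added for illustration; the bytes follow from the
code above, assuming an initially empty action_record_data):

  add_action_record (h, 1, 0)  returns offset 1, appending  01 00
  add_action_record (h, 2, 1)  returns offset 3, appending  02 7d

The second record's next field lives at absolute 1-based index 4 and
stores the sleb128 displacement 1 - 4 = -3 (byte 0x7d); a reader
positioned on that field adds -3 to land on the record at index 1.  */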
2800 static int
2801 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
2803 struct eh_region *c;
2804 int next;
2806 /* If we've reached the top of the region chain, then we have
2807 no actions, and require no landing pad. */
2808 if (region == NULL)
2809 return -1;
2811 switch (region->type)
2813 case ERT_CLEANUP:
2814 /* A cleanup adds a zero filter to the beginning of the chain, but
2815 there are special cases to look out for. If there are *only*
2816 cleanups along a path, then it compresses to a zero action.
2817 Further, if there are multiple cleanups along a path, we only
2818 need to represent one of them, as that is enough to trigger
2819 entry to the landing pad at runtime. */
2820 next = collect_one_action_chain (ar_hash, region->outer);
2821 if (next <= 0)
2822 return 0;
2823 for (c = region->outer; c ; c = c->outer)
2824 if (c->type == ERT_CLEANUP)
2825 return next;
2826 return add_action_record (ar_hash, 0, next);
2828 case ERT_TRY:
2829 /* Process the associated catch regions in reverse order.
2830 If there's a catch-all handler, then we don't need to
2831 search outer regions. Use a magic -3 value to record
2832 that we haven't done the outer search. */
2833 next = -3;
2834 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
2836 if (c->u.catch.type_list == NULL)
2838 /* Retrieve the filter from the head of the filter list
2839 where we have stored it (see assign_filter_values). */
2840 int filter
2841 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
2843 next = add_action_record (ar_hash, filter, 0);
2845 else
2847 /* Once the outer search is done, trigger an action record for
2848 each filter we have. */
2849 tree flt_node;
2851 if (next == -3)
2853 next = collect_one_action_chain (ar_hash, region->outer);
2855 /* If there is no next action, terminate the chain. */
2856 if (next == -1)
2857 next = 0;
2858 /* If all outer actions are cleanups or must_not_throw,
2859 we'll have no action record for it, since we had wanted
2860 to encode these states in the call-site record directly.
2861 Add a cleanup action to the chain to catch these. */
2862 else if (next <= 0)
2863 next = add_action_record (ar_hash, 0, 0);
2866 flt_node = c->u.catch.filter_list;
2867 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2869 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2870 next = add_action_record (ar_hash, filter, next);
2874 return next;
2876 case ERT_ALLOWED_EXCEPTIONS:
2877 /* An exception specification adds its filter to the
2878 beginning of the chain. */
2879 next = collect_one_action_chain (ar_hash, region->outer);
2881 /* If there is no next action, terminate the chain. */
2882 if (next == -1)
2883 next = 0;
2884 /* If all outer actions are cleanups or must_not_throw,
2885 we'll have no action record for it, since we had wanted
2886 to encode these states in the call-site record directly.
2887 Add a cleanup action to the chain to catch these. */
2888 else if (next <= 0)
2889 next = add_action_record (ar_hash, 0, 0);
2891 return add_action_record (ar_hash, region->u.allowed.filter, next);
2893 case ERT_MUST_NOT_THROW:
2894 /* A must-not-throw region with no inner handlers or cleanups
2895 requires no call-site entry. Note that this differs from
2896 the no handler or cleanup case in that we do require an lsda
2897 to be generated. Return a magic -2 value to record this. */
2898 return -2;
2900 case ERT_CATCH:
2901 case ERT_THROW:
2902 /* CATCH regions are handled in TRY above. THROW regions are
2903 for optimization information only and produce no output. */
2904 return collect_one_action_chain (ar_hash, region->outer);
2906 default:
2907 gcc_unreachable ();
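/* Worked example (added for illustration, assuming an initially empty
action table): take an insn inside a TRY region with one catch of
filter value 1, the try nested inside an outermost CLEANUP region.
collect_one_action_chain on the try recurses outward:

  the cleanup's own outer chain is -1 (top of tree), so the cleanup
  compresses to 0;
  back in the try, next == 0, so a zero-filter cleanup record is
  added first (offset 1), and the catch's record (filter 1, next 1)
  becomes offset 3, which is returned.

The table then holds 00 00 01 7d: "filter 1, then cleanups, then end
of list".  */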
2911 static int
2912 add_call_site (rtx landing_pad, int action)
2914 struct call_site_record *data = cfun->eh->call_site_data;
2915 int used = cfun->eh->call_site_data_used;
2916 int size = cfun->eh->call_site_data_size;
2918 if (used >= size)
2920 size = (size ? size * 2 : 64);
2921 data = ggc_realloc (data, sizeof (*data) * size);
2922 cfun->eh->call_site_data = data;
2923 cfun->eh->call_site_data_size = size;
2926 data[used].landing_pad = landing_pad;
2927 data[used].action = action;
2929 cfun->eh->call_site_data_used = used + 1;
2931 return used + call_site_base;
2934 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2935 The new note numbers will not refer to region numbers, but
2936 instead to call site entries. */
2938 void
2939 convert_to_eh_region_ranges (void)
2941 rtx insn, iter, note;
2942 htab_t ar_hash;
2943 int last_action = -3;
2944 rtx last_action_insn = NULL_RTX;
2945 rtx last_landing_pad = NULL_RTX;
2946 rtx first_no_action_insn = NULL_RTX;
2947 int call_site = 0;
2949 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
2950 return;
2952 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2954 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2956 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2957 if (INSN_P (iter))
2959 struct eh_region *region;
2960 int this_action;
2961 rtx this_landing_pad;
2963 insn = iter;
2964 if (NONJUMP_INSN_P (insn)
2965 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2966 insn = XVECEXP (PATTERN (insn), 0, 0);
2968 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2969 if (!note)
2971 if (! (CALL_P (insn)
2972 || (flag_non_call_exceptions
2973 && may_trap_p (PATTERN (insn)))))
2974 continue;
2975 this_action = -1;
2976 region = NULL;
2978 else
2980 if (INTVAL (XEXP (note, 0)) <= 0)
2981 continue;
2982 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2983 this_action = collect_one_action_chain (ar_hash, region);
2986 /* Existence of catch handlers or must-not-throw regions
2987 implies that an lsda is needed (even if empty). */
2988 if (this_action != -1)
2989 cfun->uses_eh_lsda = 1;
2991 /* Delay creation of region notes for no-action regions
2992 until we're sure that an lsda will be required. */
2993 else if (last_action == -3)
2995 first_no_action_insn = iter;
2996 last_action = -1;
2999 /* Cleanups and handlers may share action chains but not
3000 landing pads. Collect the landing pad for this region. */
3001 if (this_action >= 0)
3003 struct eh_region *o;
3004 for (o = region; ! o->landing_pad ; o = o->outer)
3005 continue;
3006 this_landing_pad = o->landing_pad;
3008 else
3009 this_landing_pad = NULL_RTX;
3011 /* Differing actions or landing pads implies a change in call-site
3012 info, which implies some EH_REGION note should be emitted. */
3013 if (last_action != this_action
3014 || last_landing_pad != this_landing_pad)
3016 /* If we'd not seen a previous action (-3) or the previous
3017 action was must-not-throw (-2), then we do not need an
3018 end note. */
3019 if (last_action >= -1)
3021 /* If we delayed the creation of the begin, do it now. */
3022 if (first_no_action_insn)
3024 call_site = add_call_site (NULL_RTX, 0);
3025 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3026 first_no_action_insn);
3027 NOTE_EH_HANDLER (note) = call_site;
3028 first_no_action_insn = NULL_RTX;
3031 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3032 last_action_insn);
3033 NOTE_EH_HANDLER (note) = call_site;
3036 /* If the new action is must-not-throw, then no region notes
3037 are created. */
3038 if (this_action >= -1)
3040 call_site = add_call_site (this_landing_pad,
3041 this_action < 0 ? 0 : this_action);
3042 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3043 NOTE_EH_HANDLER (note) = call_site;
3046 last_action = this_action;
3047 last_landing_pad = this_landing_pad;
3049 last_action_insn = iter;
3052 if (last_action >= -1 && ! first_no_action_insn)
3054 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3055 NOTE_EH_HANDLER (note) = call_site;
3058 htab_delete (ar_hash);
3062 static void
3063 push_uleb128 (varray_type *data_area, unsigned int value)
3065 do
3067 unsigned char byte = value & 0x7f;
3068 value >>= 7;
3069 if (value)
3070 byte |= 0x80;
3071 VARRAY_PUSH_UCHAR (*data_area, byte);
3073 while (value);
3076 static void
3077 push_sleb128 (varray_type *data_area, int value)
3079 unsigned char byte;
3080 int more;
3082 do
3084 byte = value & 0x7f;
3085 value >>= 7;
3086 more = ! ((value == 0 && (byte & 0x40) == 0)
3087 || (value == -1 && (byte & 0x40) != 0));
3088 if (more)
3089 byte |= 0x80;
3090 VARRAY_PUSH_UCHAR (*data_area, byte);
3092 while (more);
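/* Decoding sketch (added; not used by the compiler itself).  This is
how a consumer such as the unwinder reads the values back; libgcc's
unwind-pe.h contains the real routines, these hypothetical ones
merely mirror the two encoders above.  */
#if 0
static unsigned int
read_uleb128 (const unsigned char **p)
{
  unsigned int result = 0;
  int shift = 0;
  unsigned char byte;

  do
    {
      byte = *(*p)++;
      result |= (unsigned int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  return result;
}

static int
read_sleb128 (const unsigned char **p)
{
  int result = 0;
  int shift = 0;
  unsigned char byte;

  do
    {
      byte = *(*p)++;
      result |= (int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  /* Sign extend if the last byte had its sign bit (0x40) set.  */
  if (shift < (int) (8 * sizeof (int)) && (byte & 0x40))
    result |= - (int) (1U << shift);

  return result;
}
#endif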
3096 #ifndef HAVE_AS_LEB128
3097 static int
3098 dw2_size_of_call_site_table (void)
3100 int n = cfun->eh->call_site_data_used;
3101 int size = n * (4 + 4 + 4);
3102 int i;
3104 for (i = 0; i < n; ++i)
3106 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3107 size += size_of_uleb128 (cs->action);
3110 return size;
3113 static int
3114 sjlj_size_of_call_site_table (void)
3116 int n = cfun->eh->call_site_data_used;
3117 int size = 0;
3118 int i;
3120 for (i = 0; i < n; ++i)
3122 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3123 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3124 size += size_of_uleb128 (cs->action);
3127 return size;
3129 #endif
3131 static void
3132 dw2_output_call_site_table (void)
3134 int n = cfun->eh->call_site_data_used;
3135 int i;
3137 for (i = 0; i < n; ++i)
3139 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3140 char reg_start_lab[32];
3141 char reg_end_lab[32];
3142 char landing_pad_lab[32];
3144 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3145 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3147 if (cs->landing_pad)
3148 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3149 CODE_LABEL_NUMBER (cs->landing_pad));
3151 /* ??? Perhaps use insn length scaling if the assembler supports
3152 generic arithmetic. */
3153 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3154 data4 if the function is small enough. */
3155 #ifdef HAVE_AS_LEB128
3156 dw2_asm_output_delta_uleb128 (reg_start_lab,
3157 current_function_func_begin_label,
3158 "region %d start", i);
3159 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3160 "length");
3161 if (cs->landing_pad)
3162 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3163 current_function_func_begin_label,
3164 "landing pad");
3165 else
3166 dw2_asm_output_data_uleb128 (0, "landing pad");
3167 #else
3168 dw2_asm_output_delta (4, reg_start_lab,
3169 current_function_func_begin_label,
3170 "region %d start", i);
3171 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3172 if (cs->landing_pad)
3173 dw2_asm_output_delta (4, landing_pad_lab,
3174 current_function_func_begin_label,
3175 "landing pad");
3176 else
3177 dw2_asm_output_data (4, 0, "landing pad");
3178 #endif
3179 dw2_asm_output_data_uleb128 (cs->action, "action");
3182 call_site_base += n;
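/* With HAVE_AS_LEB128, the loop above emits assembly along these
lines (label spellings are illustrative; they come from
ASM_GENERATE_INTERNAL_LABEL and the function begin label):

	.uleb128 .LEHB0-.LFB1	# region 0 start
	.uleb128 .LEHE0-.LEHB0	# length
	.uleb128 .L5-.LFB1	# landing pad (0 if none)
	.uleb128 0x1		# action  */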
3185 static void
3186 sjlj_output_call_site_table (void)
3188 int n = cfun->eh->call_site_data_used;
3189 int i;
3191 for (i = 0; i < n; ++i)
3193 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3195 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3196 "region %d landing pad", i);
3197 dw2_asm_output_data_uleb128 (cs->action, "action");
3200 call_site_base += n;
3203 /* Tell assembler to switch to the section for the exception handling
3204 table. */
3206 void
3207 default_exception_section (void)
3209 if (targetm.have_named_sections)
3211 int flags;
3213 if (EH_TABLES_CAN_BE_READ_ONLY)
3215 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3217 flags = (! flag_pic
3218 || ((tt_format & 0x70) != DW_EH_PE_absptr
3219 && (tt_format & 0x70) != DW_EH_PE_aligned))
3220 ? 0 : SECTION_WRITE;
3222 else
3223 flags = SECTION_WRITE;
3224 named_section_flags (".gcc_except_table", flags);
3226 else if (flag_pic)
3227 data_section ();
3228 else
3229 readonly_data_section ();
3232 void
3233 output_function_exception_table (void)
3235 int tt_format, cs_format, lp_format, i, n;
3236 #ifdef HAVE_AS_LEB128
3237 char ttype_label[32];
3238 char cs_after_size_label[32];
3239 char cs_end_label[32];
3240 #else
3241 int call_site_len;
3242 #endif
3243 int have_tt_data;
3244 int tt_format_size = 0;
3246 /* Not all functions need anything. */
3247 if (! cfun->uses_eh_lsda)
3248 return;
3250 #ifdef TARGET_UNWIND_INFO
3251 /* TODO: Move this into target file. */
3252 assemble_external_libcall (eh_personality_libfunc);
3253 fputs ("\t.personality\t", asm_out_file);
3254 output_addr_const (asm_out_file, eh_personality_libfunc);
3255 fputs ("\n\t.handlerdata\n", asm_out_file);
3256 /* Note that varasm still thinks we're in the function's code section.
3257 The ".endp" directive that will immediately follow will take us back. */
3258 #else
3259 targetm.asm_out.exception_section ();
3260 #endif
3262 have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
3263 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3265 /* Indicate the format of the @TType entries. */
3266 if (! have_tt_data)
3267 tt_format = DW_EH_PE_omit;
3268 else
3270 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3271 #ifdef HAVE_AS_LEB128
3272 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3273 current_function_funcdef_no);
3274 #endif
3275 tt_format_size = size_of_encoded_value (tt_format);
3277 assemble_align (tt_format_size * BITS_PER_UNIT);
3280 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3281 current_function_funcdef_no);
3283 /* The LSDA header. */
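/* (Summary added for orientation; it restates what the emission code
below produces, matching the Itanium C++ ABI LSDA layout:

  1 byte    @LPStart encoding     (omitted here: @LPStart == @Start)
  1 byte    @TType encoding       (omitted when there is no type data)
  uleb128   @TType base offset    (only when type data is present)
  1 byte    call-site entry encoding
  uleb128   call-site table length
            call-site table, action record table,
            padding and @TType table (emitted in reverse order),
            exception specification tables.)  */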
3285 /* Indicate the format of the landing pad start pointer. An omitted
3286 field implies @LPStart == @Start. */
3287 /* Currently we always put @LPStart == @Start. This field would
3288 be most useful in moving the landing pads completely out of
3289 line to another section, but it could also be used to minimize
3290 the size of uleb128 landing pad offsets. */
3291 lp_format = DW_EH_PE_omit;
3292 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3293 eh_data_format_name (lp_format));
3295 /* @LPStart pointer would go here. */
3297 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3298 eh_data_format_name (tt_format));
3300 #ifndef HAVE_AS_LEB128
3301 if (USING_SJLJ_EXCEPTIONS)
3302 call_site_len = sjlj_size_of_call_site_table ();
3303 else
3304 call_site_len = dw2_size_of_call_site_table ();
3305 #endif
3307 /* A pc-relative 4-byte displacement to the @TType data. */
3308 if (have_tt_data)
3310 #ifdef HAVE_AS_LEB128
3311 char ttype_after_disp_label[32];
3312 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3313 current_function_funcdef_no);
3314 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3315 "@TType base offset");
3316 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3317 #else
3318 /* Ug. Alignment queers things. */
3319 unsigned int before_disp, after_disp, last_disp, disp;
3321 before_disp = 1 + 1;
3322 after_disp = (1 + size_of_uleb128 (call_site_len)
3323 + call_site_len
3324 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3325 + (VEC_length (tree, cfun->eh->ttype_data)
3326 * tt_format_size));
3328 disp = after_disp;
3329 do
3331 unsigned int disp_size, pad;
3333 last_disp = disp;
3334 disp_size = size_of_uleb128 (disp);
3335 pad = before_disp + disp_size + after_disp;
3336 if (pad % tt_format_size)
3337 pad = tt_format_size - (pad % tt_format_size);
3338 else
3339 pad = 0;
3340 disp = after_disp + pad;
3342 while (disp != last_disp);
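/* Example of the fixed point above (illustrative numbers): with
tt_format_size == 4, before_disp == 2 and after_disp == 130, the
first pass tries disp == 130, a 2-byte uleb128; 2 + 2 + 130 == 134
leaves the @TType data two bytes short of 4-byte alignment, so two
bytes of pad push disp to 132.  That still encodes in 2 bytes, so the
next pass reproduces disp == 132 and the loop stops.  */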
3344 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3345 #endif
3348 /* Indicate the format of the call-site offsets. */
3349 #ifdef HAVE_AS_LEB128
3350 cs_format = DW_EH_PE_uleb128;
3351 #else
3352 cs_format = DW_EH_PE_udata4;
3353 #endif
3354 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3355 eh_data_format_name (cs_format));
3357 #ifdef HAVE_AS_LEB128
3358 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3359 current_function_funcdef_no);
3360 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3361 current_function_funcdef_no);
3362 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3363 "Call-site table length");
3364 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3365 if (USING_SJLJ_EXCEPTIONS)
3366 sjlj_output_call_site_table ();
3367 else
3368 dw2_output_call_site_table ();
3369 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3370 #else
3371 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3372 if (USING_SJLJ_EXCEPTIONS)
3373 sjlj_output_call_site_table ();
3374 else
3375 dw2_output_call_site_table ();
3376 #endif
3378 /* ??? Decode and interpret the data for flag_debug_asm. */
3379 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3380 for (i = 0; i < n; ++i)
3381 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3382 (i ? NULL : "Action record table"));
3384 if (have_tt_data)
3385 assemble_align (tt_format_size * BITS_PER_UNIT);
3387 i = VEC_length (tree, cfun->eh->ttype_data);
3388 while (i-- > 0)
3390 tree type = VEC_index (tree, cfun->eh->ttype_data, i);
3391 rtx value;
3393 if (type == NULL_TREE)
3394 value = const0_rtx;
3395 else
3397 struct cgraph_varpool_node *node;
3399 type = lookup_type_for_runtime (type);
3400 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3402 /* Let cgraph know that the rtti decl is used. Not all of the
3403 paths below go through assemble_integer, which would take
3404 care of this for us. */
3405 STRIP_NOPS (type);
3406 if (TREE_CODE (type) == ADDR_EXPR)
3408 type = TREE_OPERAND (type, 0);
3409 if (TREE_CODE (type) == VAR_DECL)
3411 node = cgraph_varpool_node (type);
3412 if (node)
3413 cgraph_varpool_mark_needed_node (node);
3416 else
3417 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3420 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3421 assemble_integer (value, tt_format_size,
3422 tt_format_size * BITS_PER_UNIT, 1);
3423 else
3424 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3427 #ifdef HAVE_AS_LEB128
3428 if (have_tt_data)
3429 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3430 #endif
3432 /* ??? Decode and interpret the data for flag_debug_asm. */
3433 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3434 for (i = 0; i < n; ++i)
3435 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3436 (i ? NULL : "Exception specification table"));
3438 current_function_section (current_function_decl);
3441 void
3442 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3444 fun->eh->throw_stmt_table = table;
3447 htab_t
3448 get_eh_throw_stmt_table (struct function *fun)
3450 return fun->eh->throw_stmt_table;
3453 /* Dump EH information to OUT. */
3454 void
3455 dump_eh_tree (FILE *out, struct function *fun)
3457 struct eh_region *i;
3458 int depth = 0;
3459 static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
3460 "allowed_exceptions", "must_not_throw",
3461 "throw", "fixup"};
3463 i = fun->eh->region_tree;
3464 if (! i)
3465 return;
3467 fprintf (out, "Eh tree:\n");
3468 while (1)
3470 fprintf (out, " %*s %i %s", depth * 2, "",
3471 i->region_number, type_name [(int)i->type]);
3472 if (i->tree_label)
3474 fprintf (out, " tree_label:");
3475 print_generic_expr (out, i->tree_label, 0);
3477 fprintf (out, "\n");
3478 /* If there are sub-regions, process them. */
3479 if (i->inner)
3480 i = i->inner, depth++;
3481 /* If there are peers, process them. */
3482 else if (i->next_peer)
3483 i = i->next_peer;
3484 /* Otherwise, step back up the tree to the next peer. */
3485 else
3487 do {
3488 i = i->outer;
3489 depth--;
3490 if (i == NULL)
3491 return;
3492 } while (i->next_peer == NULL);
3493 i = i->next_peer;
3498 /* Verify some basic invariants on EH data structures. Could be extended to
3499 catch more. */
3500 void
3501 verify_eh_tree (struct function *fun)
3503 struct eh_region *i, *outer = NULL;
3504 bool err = false;
3505 int nvisited = 0;
3506 int count = 0;
3507 int j;
3508 int depth = 0;
3510 i = fun->eh->region_tree;
3511 if (! i)
3512 return;
3513 for (j = fun->eh->last_region_number; j > 0; --j)
3514 if (fun->eh->region_array[j])
3516 count++;
3517 if (fun->eh->region_array[j]->region_number != j)
3519 error ("region_array is corrupted for region %i", j);
3520 err = true;
3524 while (1)
3526 if (fun->eh->region_array[i->region_number] != i)
3528 error ("region_array is corrupted for region %i", i->region_number);
3529 err = true;
3531 if (i->outer != outer)
3533 error ("outer block of region %i is wrong", i->region_number);
3534 err = true;
3536 if (i->may_contain_throw && outer && !outer->may_contain_throw)
3538 error ("region %i may contain throw and is contained in region that may not",
3539 i->region_number);
3540 err = true;
3542 if (depth < 0)
3544 error ("negative nesting depth of region %i", i->region_number);
3545 err = true;
3547 nvisited ++;
3548 /* If there are sub-regions, process them. */
3549 if (i->inner)
3550 outer = i, i = i->inner, depth++;
3551 /* If there are peers, process them. */
3552 else if (i->next_peer)
3553 i = i->next_peer;
3554 /* Otherwise, step back up the tree to the next peer. */
3555 else
3557 do {
3558 i = i->outer;
3559 depth--;
3560 if (i == NULL)
3562 if (depth != -1)
3564 error ("tree list ends on depth %i", depth + 1);
3565 err = true;
3567 if (count != nvisited)
3569 error ("array does not match the region tree");
3570 err = true;
3572 if (err)
3574 dump_eh_tree (stderr, fun);
3575 internal_error ("verify_eh_tree failed");
3577 return;
3579 outer = i->outer;
3580 } while (i->next_peer == NULL);
3581 i = i->next_peer;
3585 #include "gt-except.h"