/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
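/* Purely for orientation (an illustrative sketch, not part of the
   implementation): in C++ source terms the mechanism looks like

	void build (void)
	{
	  throw 42;			<- the "throw"
	}

	void caller (void)
	{
	  try { build (); }
	  catch (int) { }		<- control lands here, one frame up
	}

   The rest of this file is the compiler-side bookkeeping -- regions,
   landing pads and call-site tables -- that makes such transfers
   work.  */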
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
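/* A rough illustration (a sketch, not normative): the C++ fragment

	try { f (); }
	catch (A &) { }
	catch (B &) { }

   produces an ERT_TRY region for the protected statement plus two
   ERT_CATCH regions, chained through u.catch.next_catch/prev_catch
   and found from the try via u.try.catch and u.try.last_catch.
   Destructors that must run while unwinding get ERT_CLEANUP regions,
   and a C++ throw specification becomes ERT_ALLOWED_EXCEPTIONS.  */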
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
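/* For reference, the runtime structure mirrored above looks roughly
   as follows (a sketch only; the authoritative definition lives in
   unwind-sjlj.c, and the jbuf size is target-dependent, as computed
   above):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;	     <- __prev
	  int call_site;			     <- __call_site
	  _Unwind_Word data[4];			     <- __data
	  _Unwind_Personality_Fn personality;	     <- __personality
	  void *lsda;				     <- __lsda
	  void *jbuf[];				     <- __jbuf
	};  */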
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = cfun->eh->region_array[region_nr];

  gcc_assert (!reg->resume);
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
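/* Traversal order, illustrated (a sketch): for a region tree in which
   region 1 contains peers 2 and 4, and 2 contains 3, the loop above
   fills the array in the order 1, 2, 3, 4 -- a pre-order walk that
   follows inner first, then next_peer, and otherwise climbs through
   outer until another peer is found.  */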
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* TRY regions are reachable if any of their CATCH regions
		   are reachable.  */
		struct eh_region *c;
		for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->tree_label)
	region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (region
	  && region->region_number == i
	  && region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  *n = *o;

  n->region_number = o->region_number + cfun->eh->last_region_number;
  gcc_assert (!o->aka);

  return n;
}

static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array,
		       struct eh_region *prev_try)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      if (o->u.try.catch)
	n->u.try.catch = n_array[o->u.try.catch->region_number];
      if (o->u.try.last_catch)
	n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    case ERT_CLEANUP:
      if (o->u.cleanup.prev_try)
	n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number];
      else
	n->u.cleanup.prev_try = prev_try;
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
/* Duplicate the EH regions of IFUN into the current function, root the
   tree in OUTER_REGION, and remap labels using the MAP callback.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
		      void *data, int outer_region)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur, *prev_try;
  int i;

  if (ifun_last_region_number == 0 || !ifun->eh->region_tree)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try = cfun->eh->region_array[outer_region];
	 prev_try && prev_try->type != ERT_TRY;
	 prev_try = prev_try->outer)
      ;

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur);
      if (cur->tree_label)
	{
	  tree newlabel = map (cur->tree_label, data);
	  n_array[i]->tree_label = newlabel;
	}
      else
	n_array[i]->tree_label = NULL;
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array, prev_try);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  gcc_assert (root->outer == NULL);
  if (outer_region > 0)
    {
      struct eh_region *cur = cfun->eh->region_array[outer_region];
      struct eh_region *p = cur->inner;

      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;
      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;

  collect_eh_region_array ();

  return i;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
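/* Worked example (illustrative only): for a declaration such as
   "void f () throw (A, B)", where A and B were assigned ttype filter
   values 1 and 2 by add_ttypes_entry, the bytes appended to
   ehspec_data are 0x01 0x02 0x00, and the value returned is the
   negative, -1 based index of the first of those bytes.  Filter
   values below 128 fit in a single uleb128 byte; larger values use
   continuation bytes.  */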
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
/* Emit SEQ into the basic block just before INSN (which is assumed to be
   the first instruction of some existing BB), and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into a newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0,
				   region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
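/* Summarizing the mapping just computed (illustration only):

	action_index	call_site_index
	    -2		      0		  must-not-throw
	    -1		     -1		  no action
	  >= 0		a fresh 1-based index from add_call_site
			(call_site_base is 1)  */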
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin)
	&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
	    || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
      break;
  if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    {
      rtx last = BB_END (single_succ (ENTRY_BLOCK_PTR));
      for (; ; fn_begin = NEXT_INSN (fn_begin))
	if ((NOTE_P (fn_begin)
	     && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	    || fn_begin == last)
	  break;
      emit_insn_after (seq, fn_begin);
    }
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
	 libcall is inside the last basic block or after it.  In the
	 latter case we need to emit it on the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
	{
	  if (insn == cfun->eh->sjlj_exit_after)
	    {
	      if (LABEL_P (insn))
		insn = NEXT_INSN (insn);
	      emit_insn_after (seq, insn);
	      return;
	    }
	  if (insn == BB_END (e->src))
	    break;
	}
      insert_insn_on_edge (seq, e);
    }
}
static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = cfun->eh->region_array[first_reachable]->post_landing_pad;

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}

static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = xcalloc (cfun->eh->last_region_number + 1,
		     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (eh)
	rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}
static hashval_t
ehl_hash (const void *pentry)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}
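/* Aside (illustrative): 0x9e3779b9 is 2^32 divided by the golden
   ratio, i.e. Knuth's multiplicative hash constant.  Multiplying
   small sequential keys such as CODE_LABEL_NUMBERs by it scatters
   them across the hash space: keys 1, 2, 3 map to 0x9e3779b9,
   0x3c6ef372, 0xdaa66d2b respectively (mod 2^32).  */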
static int
ehl_eq (const void *pentry, const void *pdata)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
2043 /* Splice REGION from the region tree etc. */
2045 static void
2046 remove_eh_handler (struct eh_region *region)
2048 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2049 rtx lab;
2051 /* For the benefit of efficiently handling REG_EH_REGION notes,
2052 replace this region in the region array with its containing
2053 region. Note that previous region deletions may result in
2054 multiple copies of this region in the array, so we have a
2055 list of alternate numbers by which we are known. */
2057 outer = region->outer;
2058 cfun->eh->region_array[region->region_number] = outer;
2059 if (region->aka)
2061 unsigned i;
2062 bitmap_iterator bi;
2064 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2066 cfun->eh->region_array[i] = outer;
2070 if (outer)
2072 if (!outer->aka)
2073 outer->aka = BITMAP_GGC_ALLOC ();
2074 if (region->aka)
2075 bitmap_ior_into (outer->aka, region->aka);
2076 bitmap_set_bit (outer->aka, region->region_number);
2079 if (cfun->eh->built_landing_pads)
2080 lab = region->landing_pad;
2081 else
2082 lab = region->label;
2083 if (lab)
2084 remove_exception_handler_label (lab);
2086 if (outer)
2087 pp_start = &outer->inner;
2088 else
2089 pp_start = &cfun->eh->region_tree;
2090 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2091 continue;
2092 *pp = region->next_peer;
2094 inner = region->inner;
2095 if (inner)
2097 for (p = inner; p->next_peer ; p = p->next_peer)
2098 p->outer = outer;
2099 p->outer = outer;
2101 p->next_peer = *pp_start;
2102 *pp_start = inner;
2105 if (region->type == ERT_CATCH)
2107 struct eh_region *try, *next, *prev;
2109 for (try = region->next_peer;
2110 try->type == ERT_CATCH;
2111 try = try->next_peer)
2112 continue;
2113 gcc_assert (try->type == ERT_TRY);
2115 next = region->u.catch.next_catch;
2116 prev = region->u.catch.prev_catch;
2118 if (next)
2119 next->u.catch.prev_catch = prev;
2120 else
2121 try->u.try.last_catch = prev;
2122 if (prev)
2123 prev->u.catch.next_catch = next;
2124 else
2126 try->u.try.catch = next;
2127 if (! next)
2128 remove_eh_handler (try);
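/* For illustration: if a TRY region owns catches C1 <-> C2 <-> C3 and
   C2 is removed, the unlinking above yields C1 <-> C3 with u.try.catch
   and u.try.last_catch untouched; removing the only remaining catch
   also removes the now-handlerless TRY region itself.  */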
2133 /* LABEL heads a basic block that is about to be deleted. If this
2134 label corresponds to an exception region, we may be able to
2135 delete the region. */
2137 void
2138 maybe_remove_eh_handler (rtx label)
2140 struct ehl_map_entry **slot, tmp;
2141 struct eh_region *region;
2143 /* ??? After generating landing pads, it's not so simple to determine
2144 if the region data is completely unused. One must examine the
2145 landing pad and the post landing pad, and whether an inner try block
2146 is referencing the catch handlers directly. */
2147 if (cfun->eh->built_landing_pads)
2148 return;
2150 tmp.label = label;
2151 slot = (struct ehl_map_entry **)
2152 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2153 if (! slot)
2154 return;
2155 region = (*slot)->region;
2156 if (! region)
2157 return;
2159 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2160 because there is no path to the fallback call to terminate.
2161 But the region continues to affect call-site data until there
2162 are no more contained calls, which we don't see here. */
2163 if (region->type == ERT_MUST_NOT_THROW)
2165 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2166 region->label = NULL_RTX;
2168 else
2169 remove_eh_handler (region);
2172 /* Invokes CALLBACK for every exception handler label. Only used by old
2173 loop hackery; should not be used by new code. */
2175 void
2176 for_each_eh_label (void (*callback) (rtx))
2178 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2179 (void *) &callback);
2182 static int
2183 for_each_eh_label_1 (void **pentry, void *data)
2185 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2186 void (*callback) (rtx) = *(void (**) (rtx)) data;
2188 (*callback) (entry->label);
2189 return 1;
2192 /* Invoke CALLBACK for every exception region in the current function. */
2194 void
2195 for_each_eh_region (void (*callback) (struct eh_region *))
2197 int i, n = cfun->eh->last_region_number;
2198 for (i = 1; i <= n; ++i)
2200 struct eh_region *region = cfun->eh->region_array[i];
2201 if (region)
2202 (*callback) (region);
2206 /* This section describes CFG exception edges for flow. */
2208 /* For communicating between calls to reachable_next_level. */
2209 struct reachable_info
2211 tree types_caught;
2212 tree types_allowed;
2213 void (*callback) (struct eh_region *, void *);
2214 void *callback_data;
2215 bool saw_any_handlers;
2218 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2219 base class of TYPE, is in HANDLED. */
2221 static int
2222 check_handled (tree handled, tree type)
2224 tree t;
2226 /* We can check for exact matches without front-end help. */
2227 if (! lang_eh_type_covers)
2229 for (t = handled; t ; t = TREE_CHAIN (t))
2230 if (TREE_VALUE (t) == type)
2231 return 1;
2233 else
2235 for (t = handled; t ; t = TREE_CHAIN (t))
2236 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2237 return 1;
2240 return 0;
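/* For illustration: with a front end that supplies lang_eh_type_covers
   (e.g. C++), a HANDLED list containing "Base" covers a thrown
   "Derived"; without that hook, only pointer-identical TYPE entries
   match.  */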
2243 /* A subroutine of reachable_next_level. If we are collecting a list
2244 of handlers, add one. After landing pad generation, reference
2245 the landing pad instead of the handlers themselves. Further, the
2246 handlers are all wired together, so by referencing one, we've got them all.
2247 Before landing pad generation we reference each handler individually.
2249 LP_REGION contains the landing pad; REGION is the handler. */
2251 static void
2252 add_reachable_handler (struct reachable_info *info,
2253 struct eh_region *lp_region, struct eh_region *region)
2255 if (! info)
2256 return;
2258 info->saw_any_handlers = true;
2260 if (cfun->eh->built_landing_pads)
2261 info->callback (lp_region, info->callback_data);
2262 else
2263 info->callback (region, info->callback_data);
2266 /* Process one level of exception regions for reachability.
2267 If TYPE_THROWN is non-null, then it is the *exact* type being
2268 propagated. If INFO is non-null, then collect handler labels
2269 and caught/allowed type information between invocations. */
2271 static enum reachable_code
2272 reachable_next_level (struct eh_region *region, tree type_thrown,
2273 struct reachable_info *info)
2275 switch (region->type)
2277 case ERT_CLEANUP:
2278 /* Before landing-pad generation, we model control flow
2279 directly to the individual handlers. In this way we can
2280 see that catch handler types may shadow one another. */
2281 add_reachable_handler (info, region, region);
2282 return RNL_MAYBE_CAUGHT;
2284 case ERT_TRY:
2286 struct eh_region *c;
2287 enum reachable_code ret = RNL_NOT_CAUGHT;
2289 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2291 /* A catch-all handler ends the search. */
2292 if (c->u.catch.type_list == NULL)
2294 add_reachable_handler (info, region, c);
2295 return RNL_CAUGHT;
2298 if (type_thrown)
2300 /* If we have at least one type match, end the search. */
2301 tree tp_node = c->u.catch.type_list;
2303 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2305 tree type = TREE_VALUE (tp_node);
2307 if (type == type_thrown
2308 || (lang_eh_type_covers
2309 && (*lang_eh_type_covers) (type, type_thrown)))
2311 add_reachable_handler (info, region, c);
2312 return RNL_CAUGHT;
2316 /* If we have definitive information of a match failure,
2317 the catch won't trigger. */
2318 if (lang_eh_type_covers)
2319 return RNL_NOT_CAUGHT;
2322 /* At this point, we either don't know what type is thrown or
2323 don't have front-end assistance to help deciding if it is
2324 covered by one of the types in the list for this region.
2326 We'd then like to add this region to the list of reachable
2327 handlers since it is indeed potentially reachable based on the
2328 information we have.
2330 Actually, this handler is for sure not reachable if all the
2331 types it matches have already been caught. That is, it is only
2332 potentially reachable if at least one of the types it catches
2333 has not been previously caught. */
2335 if (! info)
2336 ret = RNL_MAYBE_CAUGHT;
2337 else
2339 tree tp_node = c->u.catch.type_list;
2340 bool maybe_reachable = false;
2342 /* Compute the potential reachability of this handler and
2343 update the list of types caught at the same time. */
2344 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2346 tree type = TREE_VALUE (tp_node);
2348 if (! check_handled (info->types_caught, type))
2350 info->types_caught
2351 = tree_cons (NULL, type, info->types_caught);
2353 maybe_reachable = true;
2357 if (maybe_reachable)
2359 add_reachable_handler (info, region, c);
2361 /* ??? If the catch type is a base class of every allowed
2362 type, then we know we can stop the search. */
2363 ret = RNL_MAYBE_CAUGHT;
2368 return ret;
2371 case ERT_ALLOWED_EXCEPTIONS:
2372 /* An empty list of types definitely ends the search. */
2373 if (region->u.allowed.type_list == NULL_TREE)
2375 add_reachable_handler (info, region, region);
2376 return RNL_CAUGHT;
2379 /* Collect a list of lists of allowed types for use in detecting
2380 when a catch may be transformed into a catch-all. */
2381 if (info)
2382 info->types_allowed = tree_cons (NULL_TREE,
2383 region->u.allowed.type_list,
2384 info->types_allowed);
2386 /* If we have definitive information about the type hierarchy,
2387 then we can tell if the thrown type will pass through the
2388 filter. */
2389 if (type_thrown && lang_eh_type_covers)
2391 if (check_handled (region->u.allowed.type_list, type_thrown))
2392 return RNL_NOT_CAUGHT;
2393 else
2395 add_reachable_handler (info, region, region);
2396 return RNL_CAUGHT;
2400 add_reachable_handler (info, region, region);
2401 return RNL_MAYBE_CAUGHT;
2403 case ERT_CATCH:
2404 /* Catch regions are handled by their controlling try region. */
2405 return RNL_NOT_CAUGHT;
2407 case ERT_MUST_NOT_THROW:
2408 /* Here we end our search, since no exceptions may propagate.
2409 If we've touched down at some landing pad previously, then the
2410 explicit function call we generated may be used. Otherwise
2411 the call is made by the runtime.
2413 Before inlining, do not perform this optimization. We may
2414 inline a subroutine that contains handlers, and that will
2415 change the value of saw_any_handlers. */
2417 if ((info && info->saw_any_handlers) || !cfun->after_inlining)
2419 add_reachable_handler (info, region, region);
2420 return RNL_CAUGHT;
2422 else
2423 return RNL_BLOCKED;
2425 case ERT_THROW:
2426 case ERT_UNKNOWN:
2427 /* Shouldn't see these here. */
2428 gcc_unreachable ();
2429 break;
2430 default:
2431 gcc_unreachable ();
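/* A worked example of the level-by-level walk: for a throw of exact
   type T from within  cleanup -> try { catch (T) },  the CLEANUP
   level returns RNL_MAYBE_CAUGHT (the cleanup runs but cannot stop T),
   then the TRY level matches T and returns RNL_CAUGHT, ending the
   search. A MUST_NOT_THROW level returns RNL_BLOCKED only when, after
   inlining, no inner handler has been seen; otherwise its failure
   call is itself the reachable handler.  */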
2435 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2437 void
2438 foreach_reachable_handler (int region_number, bool is_resx,
2439 void (*callback) (struct eh_region *, void *),
2440 void *callback_data)
2442 struct reachable_info info;
2443 struct eh_region *region;
2444 tree type_thrown;
2446 memset (&info, 0, sizeof (info));
2447 info.callback = callback;
2448 info.callback_data = callback_data;
2450 region = cfun->eh->region_array[region_number];
2452 type_thrown = NULL_TREE;
2453 if (is_resx)
2455 /* A RESX leaves a region instead of entering it. Thus the
2456 region itself may have been deleted out from under us. */
2457 if (region == NULL)
2458 return;
2459 region = region->outer;
2461 else if (region->type == ERT_THROW)
2463 type_thrown = region->u.throw.type;
2464 region = region->outer;
2467 while (region)
2469 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2470 break;
2471 /* If we have processed one cleanup, there is no point in
2472 processing any more of them. Each cleanup will have an edge
2473 to the next outer cleanup region, so the flow graph will be
2474 accurate. */
2475 if (region->type == ERT_CLEANUP)
2476 region = region->u.cleanup.prev_try;
2477 else
2478 region = region->outer;
2482 /* Retrieve a list of labels of exception handlers which can be
2483 reached by a given insn. */
2485 static void
2486 arh_to_landing_pad (struct eh_region *region, void *data)
2488 rtx *p_handlers = data;
2489 if (! *p_handlers)
2490 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2493 static void
2494 arh_to_label (struct eh_region *region, void *data)
2496 rtx *p_handlers = data;
2497 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2500 rtx
2501 reachable_handlers (rtx insn)
2503 bool is_resx = false;
2504 rtx handlers = NULL;
2505 int region_number;
2507 if (JUMP_P (insn)
2508 && GET_CODE (PATTERN (insn)) == RESX)
2510 region_number = XINT (PATTERN (insn), 0);
2511 is_resx = true;
2513 else
2515 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2516 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2517 return NULL;
2518 region_number = INTVAL (XEXP (note, 0));
2521 foreach_reachable_handler (region_number, is_resx,
2522 (cfun->eh->built_landing_pads
2523 ? arh_to_landing_pad
2524 : arh_to_label),
2525 &handlers);
2527 return handlers;
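/* The result is a chain of INSN_LIST nodes; a caller might walk it
   along these lines (handle_label is a placeholder):

       rtx x;
       for (x = reachable_handlers (insn); x; x = XEXP (x, 1))
         handle_label (XEXP (x, 0));

   where each XEXP (x, 0) is a handler label before landing-pad
   generation, and a landing-pad label afterward.  */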
2530 /* Determine if the given INSN can throw an exception that is caught
2531 within the function. */
2533 bool
2534 can_throw_internal_1 (int region_number, bool is_resx)
2536 struct eh_region *region;
2537 tree type_thrown;
2539 region = cfun->eh->region_array[region_number];
2541 type_thrown = NULL_TREE;
2542 if (is_resx)
2543 region = region->outer;
2544 else if (region->type == ERT_THROW)
2546 type_thrown = region->u.throw.type;
2547 region = region->outer;
2550 /* If this exception is ignored by each and every containing region,
2551 then control passes straight out. The runtime may handle some
2552 regions, which also do not require processing internally. */
2553 for (; region; region = region->outer)
2555 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2556 if (how == RNL_BLOCKED)
2557 return false;
2558 if (how != RNL_NOT_CAUGHT)
2559 return true;
2562 return false;
2565 bool
2566 can_throw_internal (rtx insn)
2568 rtx note;
2570 if (! INSN_P (insn))
2571 return false;
2573 if (JUMP_P (insn)
2574 && GET_CODE (PATTERN (insn)) == RESX
2575 && XINT (PATTERN (insn), 0) > 0)
2576 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);
2578 if (NONJUMP_INSN_P (insn)
2579 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2580 insn = XVECEXP (PATTERN (insn), 0, 0);
2582 /* Every insn that might throw has an EH_REGION note. */
2583 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2584 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2585 return false;
2587 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
2590 /* Determine if the given INSN can throw an exception that is
2591 visible outside the function. */
2593 bool
2594 can_throw_external_1 (int region_number, bool is_resx)
2596 struct eh_region *region;
2597 tree type_thrown;
2599 region = cfun->eh->region_array[region_number];
2601 type_thrown = NULL_TREE;
2602 if (is_resx)
2603 region = region->outer;
2604 else if (region->type == ERT_THROW)
2606 type_thrown = region->u.throw.type;
2607 region = region->outer;
2610 /* If the exception is caught or blocked by any containing region,
2611 then it is not seen by any calling function. */
2612 for (; region ; region = region->outer)
2613 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2614 return false;
2616 return true;
2619 bool
2620 can_throw_external (rtx insn)
2622 rtx note;
2624 if (! INSN_P (insn))
2625 return false;
2627 if (JUMP_P (insn)
2628 && GET_CODE (PATTERN (insn)) == RESX
2629 && XINT (PATTERN (insn), 0) > 0)
2630 return can_throw_external_1 (XINT (PATTERN (insn), 0), true);
2632 if (NONJUMP_INSN_P (insn)
2633 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2634 insn = XVECEXP (PATTERN (insn), 0, 0);
2636 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2637 if (!note)
2639 /* Calls (and trapping insns) without notes are outside any
2640 exception handling region in this function. We have to
2641 assume it might throw. Given that the front end and middle
2642 ends mark known NOTHROW functions, this isn't so wildly
2643 inaccurate. */
2644 return (CALL_P (insn)
2645 || (flag_non_call_exceptions
2646 && may_trap_p (PATTERN (insn))));
2648 if (INTVAL (XEXP (note, 0)) <= 0)
2649 return false;
2651 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
2654 /* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
2656 void
2657 set_nothrow_function_flags (void)
2659 rtx insn;
2661 TREE_NOTHROW (current_function_decl) = 1;
2663 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2664 something that can throw an exception. We specifically exempt
2665 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2666 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2667 is optimistic. */
2669 cfun->all_throwers_are_sibcalls = 1;
2671 if (! flag_exceptions)
2672 return;
2674 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2675 if (can_throw_external (insn))
2677 TREE_NOTHROW (current_function_decl) = 0;
2679 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2681 cfun->all_throwers_are_sibcalls = 0;
2682 return;
2686 for (insn = current_function_epilogue_delay_list; insn;
2687 insn = XEXP (insn, 1))
2688 if (can_throw_external (insn))
2690 TREE_NOTHROW (current_function_decl) = 0;
2692 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2694 cfun->all_throwers_are_sibcalls = 0;
2695 return;
2701 /* Various hooks for unwind library. */
2703 /* Do any necessary initialization to access arbitrary stack frames.
2704 On the SPARC, this means flushing the register windows. */
2706 void
2707 expand_builtin_unwind_init (void)
2709 /* Set this so all the registers get saved in our frame; we need to be
2710 able to copy the saved values for any registers from frames we unwind. */
2711 current_function_has_nonlocal_label = 1;
2713 #ifdef SETUP_FRAME_ADDRESSES
2714 SETUP_FRAME_ADDRESSES ();
2715 #endif
2718 rtx
2719 expand_builtin_eh_return_data_regno (tree arglist)
2721 tree which = TREE_VALUE (arglist);
2722 unsigned HOST_WIDE_INT iwhich;
2724 if (TREE_CODE (which) != INTEGER_CST)
2726 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2727 return constm1_rtx;
2730 iwhich = tree_low_cst (which, 1);
2731 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2732 if (iwhich == INVALID_REGNUM)
2733 return constm1_rtx;
2735 #ifdef DWARF_FRAME_REGNUM
2736 iwhich = DWARF_FRAME_REGNUM (iwhich);
2737 #else
2738 iwhich = DBX_REGISTER_NUMBER (iwhich);
2739 #endif
2741 return GEN_INT (iwhich);
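/* For example, __builtin_eh_return_data_regno (0) expands here to a
   constant: the DWARF column number of the first exception-data
   register, or -1 if the target defines none. Unwinder code can use
   the result to locate the registers that carry the exception pointer
   and filter value to a landing pad.  */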
2744 /* Given a value extracted from the return address register or stack slot,
2745 return the actual address encoded in that value. */
2747 rtx
2748 expand_builtin_extract_return_addr (tree addr_tree)
2750 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2752 if (GET_MODE (addr) != Pmode
2753 && GET_MODE (addr) != VOIDmode)
2755 #ifdef POINTERS_EXTEND_UNSIGNED
2756 addr = convert_memory_address (Pmode, addr);
2757 #else
2758 addr = convert_to_mode (Pmode, addr, 0);
2759 #endif
2762 /* First mask out any unwanted bits. */
2763 #ifdef MASK_RETURN_ADDR
2764 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2765 #endif
2767 /* Then adjust to find the real return address. */
2768 #if defined (RETURN_ADDR_OFFSET)
2769 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2770 #endif
2772 return addr;
2775 /* Given an actual address in addr_tree, do any necessary encoding
2776 and return the value to be stored in the return address register or
2777 stack slot so the epilogue will return to that address. */
2779 rtx
2780 expand_builtin_frob_return_addr (tree addr_tree)
2782 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2784 addr = convert_memory_address (Pmode, addr);
2786 #ifdef RETURN_ADDR_OFFSET
2787 addr = force_reg (Pmode, addr);
2788 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2789 #endif
2791 return addr;
2794 /* Set up the epilogue with the magic bits we'll need to return to the
2795 exception handler. */
2797 void
2798 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2799 tree handler_tree)
2801 rtx tmp;
2803 #ifdef EH_RETURN_STACKADJ_RTX
2804 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2805 tmp = convert_memory_address (Pmode, tmp);
2806 if (!cfun->eh->ehr_stackadj)
2807 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
2808 else if (tmp != cfun->eh->ehr_stackadj)
2809 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
2810 #endif
2812 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2813 tmp = convert_memory_address (Pmode, tmp);
2814 if (!cfun->eh->ehr_handler)
2815 cfun->eh->ehr_handler = copy_to_reg (tmp);
2816 else if (tmp != cfun->eh->ehr_handler)
2817 emit_move_insn (cfun->eh->ehr_handler, tmp);
2819 if (!cfun->eh->ehr_label)
2820 cfun->eh->ehr_label = gen_label_rtx ();
2821 emit_jump (cfun->eh->ehr_label);
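/* A sketch of the intended use, roughly as an unwinder's
   context-install routine would do it (names hypothetical):

       static void
       install_context (long stack_adjust, void *handler)
       {
         __builtin_eh_return (stack_adjust, handler);
       }

   The builtin stores its operands as above and jumps to ehr_label;
   expand_eh_return below then materializes the real return path in
   the epilogue.  */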
2824 void
2825 expand_eh_return (void)
2827 rtx around_label;
2829 if (! cfun->eh->ehr_label)
2830 return;
2832 current_function_calls_eh_return = 1;
2834 #ifdef EH_RETURN_STACKADJ_RTX
2835 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2836 #endif
2838 around_label = gen_label_rtx ();
2839 emit_jump (around_label);
2841 emit_label (cfun->eh->ehr_label);
2842 clobber_return_register ();
2844 #ifdef EH_RETURN_STACKADJ_RTX
2845 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
2846 #endif
2848 #ifdef HAVE_eh_return
2849 if (HAVE_eh_return)
2850 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
2851 else
2852 #endif
2854 #ifdef EH_RETURN_HANDLER_RTX
2855 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
2856 #else
2857 error ("__builtin_eh_return not supported on this target");
2858 #endif
2861 emit_label (around_label);
2864 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2865 POINTERS_EXTEND_UNSIGNED and return it. */
2867 rtx
2868 expand_builtin_extend_pointer (tree addr_tree)
2870 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2871 int extend;
2873 #ifdef POINTERS_EXTEND_UNSIGNED
2874 extend = POINTERS_EXTEND_UNSIGNED;
2875 #else
2876 /* The previous EH code did an unsigned extend by default, so we do this also
2877 for consistency. */
2878 extend = 1;
2879 #endif
2881 return convert_modes (word_mode, ptr_mode, addr, extend);
2884 /* In the following functions, we represent entries in the action table
2885 as 1-based indices. Special cases are:
2887 0: null action record, non-null landing pad; implies cleanups
2888 -1: null action record, null landing pad; implies no action
2889 -2: no call-site entry; implies must_not_throw
2890 -3: we have yet to process outer regions
2892 Further, no special cases apply to the "next" field of the record.
2893 For next, 0 means end of list. */
2895 struct action_record
2897 int offset;
2898 int filter;
2899 int next;
2902 static int
2903 action_record_eq (const void *pentry, const void *pdata)
2905 const struct action_record *entry = (const struct action_record *) pentry;
2906 const struct action_record *data = (const struct action_record *) pdata;
2907 return entry->filter == data->filter && entry->next == data->next;
2910 static hashval_t
2911 action_record_hash (const void *pentry)
2913 const struct action_record *entry = (const struct action_record *) pentry;
2914 return entry->next * 1009 + entry->filter;
2917 static int
2918 add_action_record (htab_t ar_hash, int filter, int next)
2920 struct action_record **slot, *new, tmp;
2922 tmp.filter = filter;
2923 tmp.next = next;
2924 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
2926 if ((new = *slot) == NULL)
2928 new = xmalloc (sizeof (*new));
2929 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2930 new->filter = filter;
2931 new->next = next;
2932 *slot = new;
2934 /* The filter value goes in untouched. The link to the next
2935 record is a "self-relative" byte offset, or zero to indicate
2936 that there is no next record. So convert the absolute 1-based
2937 indices we've been carrying around into a displacement. */
2939 push_sleb128 (&cfun->eh->action_record_data, filter);
2940 if (next)
2941 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2942 push_sleb128 (&cfun->eh->action_record_data, next);
2945 return new->offset;
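/* A worked example of the displacement conversion: starting from an
   empty varray, add_action_record (ar_hash, 5, 0) stores the bytes
   {5, 0} and returns offset 1; a following add_action_record
   (ar_hash, 7, 1) stores its filter at 1-based position 3 and then
   rewrites next = 1 - (3 + 1) = -3, a self-relative hop from the
   "next" byte back to the first record.  */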
2948 static int
2949 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
2951 struct eh_region *c;
2952 int next;
2954 /* If we've reached the top of the region chain, then we have
2955 no actions, and require no landing pad. */
2956 if (region == NULL)
2957 return -1;
2959 switch (region->type)
2961 case ERT_CLEANUP:
2962 /* A cleanup adds a zero filter to the beginning of the chain, but
2963 there are special cases to look out for. If there are *only*
2964 cleanups along a path, then it compresses to a zero action.
2965 Further, if there are multiple cleanups along a path, we only
2966 need to represent one of them, as that is enough to trigger
2967 entry to the landing pad at runtime. */
2968 next = collect_one_action_chain (ar_hash, region->outer);
2969 if (next <= 0)
2970 return 0;
2971 for (c = region->outer; c ; c = c->outer)
2972 if (c->type == ERT_CLEANUP)
2973 return next;
2974 return add_action_record (ar_hash, 0, next);
2976 case ERT_TRY:
2977 /* Process the associated catch regions in reverse order.
2978 If there's a catch-all handler, then we don't need to
2979 search outer regions. Use a magic -3 value to record
2980 that we haven't done the outer search. */
2981 next = -3;
2982 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
2984 if (c->u.catch.type_list == NULL)
2986 /* Retrieve the filter from the head of the filter list
2987 where we have stored it (see assign_filter_values). */
2988 int filter
2989 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
2991 next = add_action_record (ar_hash, filter, 0);
2993 else
2995 /* Once the outer search is done, trigger an action record for
2996 each filter we have. */
2997 tree flt_node;
2999 if (next == -3)
3001 next = collect_one_action_chain (ar_hash, region->outer);
3003 /* If there is no next action, terminate the chain. */
3004 if (next == -1)
3005 next = 0;
3006 /* If all outer actions are cleanups or must_not_throw,
3007 we'll have no action record for it, since we had wanted
3008 to encode these states in the call-site record directly.
3009 Add a cleanup action to the chain to catch these. */
3010 else if (next <= 0)
3011 next = add_action_record (ar_hash, 0, 0);
3014 flt_node = c->u.catch.filter_list;
3015 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3017 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3018 next = add_action_record (ar_hash, filter, next);
3022 return next;
3024 case ERT_ALLOWED_EXCEPTIONS:
3025 /* An exception specification adds its filter to the
3026 beginning of the chain. */
3027 next = collect_one_action_chain (ar_hash, region->outer);
3029 /* If there is no next action, terminate the chain. */
3030 if (next == -1)
3031 next = 0;
3032 /* If all outer actions are cleanups or must_not_throw,
3033 we'll have no action record for it, since we had wanted
3034 to encode these states in the call-site record directly.
3035 Add a cleanup action to the chain to catch these. */
3036 else if (next <= 0)
3037 next = add_action_record (ar_hash, 0, 0);
3039 return add_action_record (ar_hash, region->u.allowed.filter, next);
3041 case ERT_MUST_NOT_THROW:
3042 /* A must-not-throw region with no inner handlers or cleanups
3043 requires no call-site entry. Note that this differs from
3044 the no handler or cleanup case in that we do require an lsda
3045 to be generated. Return a magic -2 value to record this. */
3046 return -2;
3048 case ERT_CATCH:
3049 case ERT_THROW:
3050 /* CATCH regions are handled in TRY above. THROW regions are
3051 for optimization information only and produce no output. */
3052 return collect_one_action_chain (ar_hash, region->outer);
3054 default:
3055 gcc_unreachable ();
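/* A worked example: for an insn whose innermost region is a TRY with
   a single catch of filter 1, itself nested in one cleanup, the
   recursion above first collapses the outer cleanup chain to the zero
   action, then records {filter 0, next 0} for the cleanup and
   {filter 1, next -> cleanup record} for the catch, and returns the
   catch record's 1-based offset.  */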
3059 static int
3060 add_call_site (rtx landing_pad, int action)
3062 struct call_site_record *data = cfun->eh->call_site_data;
3063 int used = cfun->eh->call_site_data_used;
3064 int size = cfun->eh->call_site_data_size;
3066 if (used >= size)
3068 size = (size ? size * 2 : 64);
3069 data = ggc_realloc (data, sizeof (*data) * size);
3070 cfun->eh->call_site_data = data;
3071 cfun->eh->call_site_data_size = size;
3074 data[used].landing_pad = landing_pad;
3075 data[used].action = action;
3077 cfun->eh->call_site_data_used = used + 1;
3079 return used + call_site_base;
3082 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3083 The new note numbers will not refer to region numbers, but
3084 instead to call site entries. */
3086 void
3087 convert_to_eh_region_ranges (void)
3089 rtx insn, iter, note;
3090 htab_t ar_hash;
3091 int last_action = -3;
3092 rtx last_action_insn = NULL_RTX;
3093 rtx last_landing_pad = NULL_RTX;
3094 rtx first_no_action_insn = NULL_RTX;
3095 int call_site = 0;
3097 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3098 return;
3100 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3102 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3104 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3105 if (INSN_P (iter))
3107 struct eh_region *region;
3108 int this_action;
3109 rtx this_landing_pad;
3111 insn = iter;
3112 if (NONJUMP_INSN_P (insn)
3113 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3114 insn = XVECEXP (PATTERN (insn), 0, 0);
3116 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3117 if (!note)
3119 if (! (CALL_P (insn)
3120 || (flag_non_call_exceptions
3121 && may_trap_p (PATTERN (insn)))))
3122 continue;
3123 this_action = -1;
3124 region = NULL;
3126 else
3128 if (INTVAL (XEXP (note, 0)) <= 0)
3129 continue;
3130 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3131 this_action = collect_one_action_chain (ar_hash, region);
3134 /* Existence of catch handlers, or must-not-throw regions
3135 implies that an lsda is needed (even if empty). */
3136 if (this_action != -1)
3137 cfun->uses_eh_lsda = 1;
3139 /* Delay creation of region notes for no-action regions
3140 until we're sure that an lsda will be required. */
3141 else if (last_action == -3)
3143 first_no_action_insn = iter;
3144 last_action = -1;
3147 /* Cleanups and handlers may share action chains but not
3148 landing pads. Collect the landing pad for this region. */
3149 if (this_action >= 0)
3151 struct eh_region *o;
3152 for (o = region; ! o->landing_pad ; o = o->outer)
3153 continue;
3154 this_landing_pad = o->landing_pad;
3156 else
3157 this_landing_pad = NULL_RTX;
3159 /* Differing actions or landing pads implies a change in call-site
3160 info, which implies some EH_REGION note should be emitted. */
3161 if (last_action != this_action
3162 || last_landing_pad != this_landing_pad)
3164 /* If we'd not seen a previous action (-3) or the previous
3165 action was must-not-throw (-2), then we do not need an
3166 end note. */
3167 if (last_action >= -1)
3169 /* If we delayed the creation of the begin, do it now. */
3170 if (first_no_action_insn)
3172 call_site = add_call_site (NULL_RTX, 0);
3173 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3174 first_no_action_insn);
3175 NOTE_EH_HANDLER (note) = call_site;
3176 first_no_action_insn = NULL_RTX;
3179 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3180 last_action_insn);
3181 NOTE_EH_HANDLER (note) = call_site;
3184 /* If the new action is must-not-throw, then no region notes
3185 are created. */
3186 if (this_action >= -1)
3188 call_site = add_call_site (this_landing_pad,
3189 this_action < 0 ? 0 : this_action);
3190 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3191 NOTE_EH_HANDLER (note) = call_site;
3194 last_action = this_action;
3195 last_landing_pad = this_landing_pad;
3197 last_action_insn = iter;
3200 if (last_action >= -1 && ! first_no_action_insn)
3202 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3203 NOTE_EH_HANDLER (note) = call_site;
3206 htab_delete (ar_hash);
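/* For illustration: a run of consecutive insns whose REG_EH_REGION
   notes resolve to the same action chain and landing pad becomes one
   call-site entry; the loop above brackets that run with
   NOTE_INSN_EH_REGION_BEG/END notes whose NOTE_EH_HANDLER is the
   index returned by add_call_site, and a change in either the action
   or the landing pad starts a new range.  */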
3210 static void
3211 push_uleb128 (varray_type *data_area, unsigned int value)
3213 do
3215 unsigned char byte = value & 0x7f;
3216 value >>= 7;
3217 if (value)
3218 byte |= 0x80;
3219 VARRAY_PUSH_UCHAR (*data_area, byte);
3221 while (value);
3224 static void
3225 push_sleb128 (varray_type *data_area, int value)
3227 unsigned char byte;
3228 int more;
3230 do
3232 byte = value & 0x7f;
3233 value >>= 7;
3234 more = ! ((value == 0 && (byte & 0x40) == 0)
3235 || (value == -1 && (byte & 0x40) != 0));
3236 if (more)
3237 byte |= 0x80;
3238 VARRAY_PUSH_UCHAR (*data_area, byte);
3240 while (more);
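/* Worked encodings, matching the DWARF LEB128 rules implemented above:
   push_uleb128 emits the unsigned value 624485 as the three bytes
   0xe5 0x8e 0x26, while push_sleb128 emits -2 as the single byte 0x7e
   (bit 6 already carries the sign, so no continuation is needed).  */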
3244 #ifndef HAVE_AS_LEB128
3245 static int
3246 dw2_size_of_call_site_table (void)
3248 int n = cfun->eh->call_site_data_used;
3249 int size = n * (4 + 4 + 4);
3250 int i;
3252 for (i = 0; i < n; ++i)
3254 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3255 size += size_of_uleb128 (cs->action);
3258 return size;
3261 static int
3262 sjlj_size_of_call_site_table (void)
3264 int n = cfun->eh->call_site_data_used;
3265 int size = 0;
3266 int i;
3268 for (i = 0; i < n; ++i)
3270 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3271 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3272 size += size_of_uleb128 (cs->action);
3275 return size;
3277 #endif
3279 static void
3280 dw2_output_call_site_table (void)
3282 int n = cfun->eh->call_site_data_used;
3283 int i;
3285 for (i = 0; i < n; ++i)
3287 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3288 char reg_start_lab[32];
3289 char reg_end_lab[32];
3290 char landing_pad_lab[32];
3292 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3293 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3295 if (cs->landing_pad)
3296 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3297 CODE_LABEL_NUMBER (cs->landing_pad));
3299 /* ??? Perhaps use insn length scaling if the assembler supports
3300 generic arithmetic. */
3301 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3302 data4 if the function is small enough. */
3303 #ifdef HAVE_AS_LEB128
3304 dw2_asm_output_delta_uleb128 (reg_start_lab,
3305 current_function_func_begin_label,
3306 "region %d start", i);
3307 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3308 "length");
3309 if (cs->landing_pad)
3310 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3311 current_function_func_begin_label,
3312 "landing pad");
3313 else
3314 dw2_asm_output_data_uleb128 (0, "landing pad");
3315 #else
3316 dw2_asm_output_delta (4, reg_start_lab,
3317 current_function_func_begin_label,
3318 "region %d start", i);
3319 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3320 if (cs->landing_pad)
3321 dw2_asm_output_delta (4, landing_pad_lab,
3322 current_function_func_begin_label,
3323 "landing pad");
3324 else
3325 dw2_asm_output_data (4, 0, "landing pad");
3326 #endif
3327 dw2_asm_output_data_uleb128 (cs->action, "action");
3330 call_site_base += n;
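/* With a LEB128-capable assembler, one loop iteration above emits
   something along these lines (label spellings are target-dependent):

       .uleb128 .LEHB0-func_begin   # region 0 start
       .uleb128 .LEHE0-.LEHB0       # length
       .uleb128 .L5-func_begin      # landing pad
       .uleb128 0x1                 # action

   where func_begin stands for current_function_func_begin_label and
   .L5 for the landing pad's code label.  */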
3333 static void
3334 sjlj_output_call_site_table (void)
3336 int n = cfun->eh->call_site_data_used;
3337 int i;
3339 for (i = 0; i < n; ++i)
3341 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3343 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3344 "region %d landing pad", i);
3345 dw2_asm_output_data_uleb128 (cs->action, "action");
3348 call_site_base += n;
3351 /* Tell assembler to switch to the section for the exception handling
3352 table. */
3354 void
3355 default_exception_section (void)
3357 if (targetm.have_named_sections)
3359 int flags;
3361 if (EH_TABLES_CAN_BE_READ_ONLY)
3363 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3365 flags = (! flag_pic
3366 || ((tt_format & 0x70) != DW_EH_PE_absptr
3367 && (tt_format & 0x70) != DW_EH_PE_aligned))
3368 ? 0 : SECTION_WRITE;
3370 else
3371 flags = SECTION_WRITE;
3372 named_section_flags (".gcc_except_table", flags);
3374 else if (flag_pic)
3375 data_section ();
3376 else
3377 readonly_data_section ();
3380 void
3381 output_function_exception_table (void)
3383 int tt_format, cs_format, lp_format, i, n;
3384 #ifdef HAVE_AS_LEB128
3385 char ttype_label[32];
3386 char cs_after_size_label[32];
3387 char cs_end_label[32];
3388 #else
3389 int call_site_len;
3390 #endif
3391 int have_tt_data;
3392 int tt_format_size = 0;
3394 /* Not all functions need anything. */
3395 if (! cfun->uses_eh_lsda)
3396 return;
3398 #ifdef TARGET_UNWIND_INFO
3399 /* TODO: Move this into target file. */
3400 assemble_external_libcall (eh_personality_libfunc);
3401 fputs ("\t.personality\t", asm_out_file);
3402 output_addr_const (asm_out_file, eh_personality_libfunc);
3403 fputs ("\n\t.handlerdata\n", asm_out_file);
3404 /* Note that varasm still thinks we're in the function's code section.
3405 The ".endp" directive that will immediately follow will take us back. */
3406 #else
3407 targetm.asm_out.exception_section ();
3408 #endif
3410 have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
3411 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3413 /* Indicate the format of the @TType entries. */
3414 if (! have_tt_data)
3415 tt_format = DW_EH_PE_omit;
3416 else
3418 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3419 #ifdef HAVE_AS_LEB128
3420 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3421 current_function_funcdef_no);
3422 #endif
3423 tt_format_size = size_of_encoded_value (tt_format);
3425 assemble_align (tt_format_size * BITS_PER_UNIT);
3428 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3429 current_function_funcdef_no);
3431 /* The LSDA header. */
3433 /* Indicate the format of the landing pad start pointer. An omitted
3434 field implies @LPStart == @Start. */
3435 /* Currently we always put @LPStart == @Start. This field would
3436 be most useful in moving the landing pads completely out of
3437 line to another section, but it could also be used to minimize
3438 the size of uleb128 landing pad offsets. */
3439 lp_format = DW_EH_PE_omit;
3440 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3441 eh_data_format_name (lp_format));
3443 /* @LPStart pointer would go here. */
3445 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3446 eh_data_format_name (tt_format));
3448 #ifndef HAVE_AS_LEB128
3449 if (USING_SJLJ_EXCEPTIONS)
3450 call_site_len = sjlj_size_of_call_site_table ();
3451 else
3452 call_site_len = dw2_size_of_call_site_table ();
3453 #endif
3455 /* A pc-relative 4-byte displacement to the @TType data. */
3456 if (have_tt_data)
3458 #ifdef HAVE_AS_LEB128
3459 char ttype_after_disp_label[32];
3460 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3461 current_function_funcdef_no);
3462 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3463 "@TType base offset");
3464 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3465 #else
3466 /* Ugh. Alignment complicates things. */
3467 unsigned int before_disp, after_disp, last_disp, disp;
3469 before_disp = 1 + 1;
3470 after_disp = (1 + size_of_uleb128 (call_site_len)
3471 + call_site_len
3472 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3473 + (VEC_length (tree, cfun->eh->ttype_data)
3474 * tt_format_size));
3476 disp = after_disp;
3477 do
3479 unsigned int disp_size, pad;
3481 last_disp = disp;
3482 disp_size = size_of_uleb128 (disp);
3483 pad = before_disp + disp_size + after_disp;
3484 if (pad % tt_format_size)
3485 pad = tt_format_size - (pad % tt_format_size);
3486 else
3487 pad = 0;
3488 disp = after_disp + pad;
3490 while (disp != last_disp);
3492 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3493 #endif
3496 /* Indicate the format of the call-site offsets. */
3497 #ifdef HAVE_AS_LEB128
3498 cs_format = DW_EH_PE_uleb128;
3499 #else
3500 cs_format = DW_EH_PE_udata4;
3501 #endif
3502 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3503 eh_data_format_name (cs_format));
3505 #ifdef HAVE_AS_LEB128
3506 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3507 current_function_funcdef_no);
3508 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3509 current_function_funcdef_no);
3510 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3511 "Call-site table length");
3512 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3513 if (USING_SJLJ_EXCEPTIONS)
3514 sjlj_output_call_site_table ();
3515 else
3516 dw2_output_call_site_table ();
3517 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3518 #else
3519 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3520 if (USING_SJLJ_EXCEPTIONS)
3521 sjlj_output_call_site_table ();
3522 else
3523 dw2_output_call_site_table ();
3524 #endif
3526 /* ??? Decode and interpret the data for flag_debug_asm. */
3527 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3528 for (i = 0; i < n; ++i)
3529 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3530 (i ? NULL : "Action record table"));
3532 if (have_tt_data)
3533 assemble_align (tt_format_size * BITS_PER_UNIT);
3535 i = VEC_length (tree, cfun->eh->ttype_data);
3536 while (i-- > 0)
3538 tree type = VEC_index (tree, cfun->eh->ttype_data, i);
3539 rtx value;
3541 if (type == NULL_TREE)
3542 value = const0_rtx;
3543 else
3545 struct cgraph_varpool_node *node;
3547 type = lookup_type_for_runtime (type);
3548 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3550 /* Let cgraph know that the rtti decl is used. Not all of the
3551 paths below go through assemble_integer, which would take
3552 care of this for us. */
3553 STRIP_NOPS (type);
3554 if (TREE_CODE (type) == ADDR_EXPR)
3556 type = TREE_OPERAND (type, 0);
3557 if (TREE_CODE (type) == VAR_DECL)
3559 node = cgraph_varpool_node (type);
3560 if (node)
3561 cgraph_varpool_mark_needed_node (node);
3564 else
3565 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3568 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3569 assemble_integer (value, tt_format_size,
3570 tt_format_size * BITS_PER_UNIT, 1);
3571 else
3572 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3575 #ifdef HAVE_AS_LEB128
3576 if (have_tt_data)
3577 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3578 #endif
3580 /* ??? Decode and interpret the data for flag_debug_asm. */
3581 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3582 for (i = 0; i < n; ++i)
3583 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3584 (i ? NULL : "Exception specification table"));
3586 current_function_section (current_function_decl);
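/* Putting the pieces above together, the emitted LSDA has this shape:

       @LPStart format          (1 byte; DW_EH_PE_omit here)
       @TType format            (1 byte)
       @TType base offset       (uleb128; only with type data)
       call-site format         (1 byte)
       call-site table length   (uleb128)
       call-site table
       action record table
       (alignment) type table, indexed backward from the @TType label
       exception specification table

   which is exactly the order of the output calls above.  */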
3589 void
3590 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3592 fun->eh->throw_stmt_table = table;
3595 htab_t
3596 get_eh_throw_stmt_table (struct function *fun)
3598 return fun->eh->throw_stmt_table;
3601 /* Dump EH information to OUT. */
3602 void
3603 dump_eh_tree (FILE *out, struct function *fun)
3605 struct eh_region *i;
3606 int depth = 0;
3607 static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
3608 "allowed_exceptions", "must_not_throw",
3609 "throw"};
3611 i = fun->eh->region_tree;
3612 if (! i)
3613 return;
3615 fprintf (out, "Eh tree:\n");
3616 while (1)
3618 fprintf (out, " %*s %i %s", depth * 2, "",
3619 i->region_number, type_name [(int)i->type]);
3620 if (i->tree_label)
3622 fprintf (out, " tree_label:");
3623 print_generic_expr (out, i->tree_label, 0);
3625 fprintf (out, "\n");
3626 /* If there are sub-regions, process them. */
3627 if (i->inner)
3628 i = i->inner, depth++;
3629 /* If there are peers, process them. */
3630 else if (i->next_peer)
3631 i = i->next_peer;
3632 /* Otherwise, step back up the tree to the next peer. */
3633 else
3635 do {
3636 i = i->outer;
3637 depth--;
3638 if (i == NULL)
3639 return;
3640 } while (i->next_peer == NULL);
3641 i = i->next_peer;
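/* The output looks roughly like this, indentation tracking depth:

       Eh tree:
         1 try
           2 catch
         3 cleanup

   with " tree_label:<expr>" appended to regions that have a label.  */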
3646 /* Verify some basic invariants on EH datastructures. Could be extended to
3647 catch more. */
3648 void
3649 verify_eh_tree (struct function *fun)
3651 struct eh_region *i, *outer = NULL;
3652 bool err = false;
3653 int nvisited = 0;
3654 int count = 0;
3655 int j;
3656 int depth = 0;
3658 i = fun->eh->region_tree;
3659 if (! i)
3660 return;
3661 for (j = fun->eh->last_region_number; j > 0; --j)
3662 if (fun->eh->region_array[j])
3664 count++;
3665 if (fun->eh->region_array[j]->region_number != j)
3667 error ("region_array is corrupted for region %i", i->region_number);
3668 err = true;
3672 while (1)
3674 if (fun->eh->region_array[i->region_number] != i)
3676 error ("region_array is corrupted for region %i", i->region_number);
3677 err = true;
3679 if (i->outer != outer)
3681 error ("outer block of region %i is wrong", i->region_number);
3682 err = true;
3684 if (i->may_contain_throw && outer && !outer->may_contain_throw)
3686 error ("region %i may contain throw and is contained in region that may not",
3687 i->region_number);
3688 err = true;
3690 if (depth < 0)
3692 error ("negative nesting depth of region %i", i->region_number);
3693 err = true;
3695 nvisited ++;
3696 /* If there are sub-regions, process them. */
3697 if (i->inner)
3698 outer = i, i = i->inner, depth++;
3699 /* If there are peers, process them. */
3700 else if (i->next_peer)
3701 i = i->next_peer;
3702 /* Otherwise, step back up the tree to the next peer. */
3703 else
3705 do {
3706 i = i->outer;
3707 depth--;
3708 if (i == NULL)
3710 if (depth != -1)
3712 error ("Tree list ends on depth %i", depth + 1);
3713 err = true;
3715 if (count != nvisited)
3717 error ("array does not match the region tree");
3718 err = true;
3720 if (err)
3722 dump_eh_tree (stderr, fun);
3723 internal_error ("verify_eh_tree failed");
3725 return;
3727 outer = i->outer;
3728 } while (i->next_peer == NULL);
3729 i = i->next_peer;
3733 #include "gt-except.h"