/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
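
/* For reference, the runtime structure these offsets describe looks
   roughly like the following (a sketch of the definition in
   unwind-sjlj.c; the exact field types vary with the configuration):

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;
       int call_site;
       _Unwind_Word data[4];
       _Unwind_Personality_Fn personality;
       void *lsda;
       jmp_buf jbuf;
     };

   init_eh below builds a matching RECORD_TYPE and caches the field
   offsets in the variables above.  */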
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
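
/* As an illustration (a sketch, not normative), a C++ fragment such as

     try { f (); } catch (A &) { ... } catch (B &) { ... }

   is represented by one ERT_TRY region whose u.try.catch list holds
   two ERT_CATCH regions, one each for A and B, created as peers of the
   TRY (see gen_eh_region_catch below); a local object with a
   destructor inside the try block would additionally introduce a
   nested ERT_CLEANUP region.  */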
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
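      /* For example (a sketch, not normative): on a target where
	 BITS_PER_WORD and POINTER_SIZE are both 64, the line above
	 computes 5 * 64 / 64 - 1 == 4, so __jbuf below becomes a
	 five-element array of pointers (index type 0..4).  */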
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}
struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}
struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = cfun->eh->region_array[region_nr];

  gcc_assert (!reg->resume);
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
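
/* For illustration only: a recursive walk equivalent to the iterative
   traversal in collect_eh_region_array above.  This sketch is not used
   by the compiler proper; it just makes the outer/inner/next_peer
   links easier to see.  */

static void ATTRIBUTE_UNUSED
walk_eh_region_tree_sketch (struct eh_region *r,
			    void (*visit) (struct eh_region *))
{
  for (; r ; r = r->next_peer)
    {
      /* Visit the region itself, then its immediately contained
	 regions, depth-first.  */
      visit (r);
      if (r->inner)
	walk_eh_region_tree_sketch (r->inner, visit);
    }
}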
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* A TRY region is reachable if any of its CATCH regions
		   is reachable.  */
		struct eh_region *c;
		for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->tree_label)
	region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (region
	  && region->region_number == i
	  && region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  *n = *o;

  n->region_number = o->region_number + cfun->eh->last_region_number;
  gcc_assert (!o->aka);

  return n;
}
static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array,
		       struct eh_region *prev_try)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      if (o->u.try.catch)
	n->u.try.catch = n_array[o->u.try.catch->region_number];
      if (o->u.try.last_catch)
	n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    case ERT_CLEANUP:
      if (o->u.cleanup.prev_try)
	n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number];
      else
	n->u.cleanup.prev_try = prev_try;
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
/* Duplicate the EH regions of IFUN into the current function, root the
   tree in OUTER_REGION, and remap labels using the MAP callback.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
		      void *data, int outer_region)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur, *prev_try;
  int i;

  if (ifun_last_region_number == 0 || !ifun->eh->region_tree)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try = cfun->eh->region_array[outer_region];
	 prev_try && prev_try->type != ERT_TRY;
	 prev_try = prev_try->outer)
      ;

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur);
      if (cur->tree_label)
	{
	  tree newlabel = map (cur->tree_label, data);
	  n_array[i]->tree_label = newlabel;
	}
      else
	n_array[i]->tree_label = NULL;
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array, prev_try);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  gcc_assert (root->outer == NULL);
  if (outer_region > 0)
    {
      struct eh_region *cur = cfun->eh->region_array[outer_region];
      struct eh_region *p = cur->inner;

      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;
      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;

  collect_eh_region_array ();

  return i;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}
static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};
/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    VARRAY_PUSH_TREE (cfun->eh->ehspec_data, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	VARRAY_PUSH_TREE (cfun->eh->ehspec_data, NULL_TREE);
      else
	VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
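
/* A worked example (a sketch, not normative): for a C++ declaration
   "void f () throw (A, B)", with A and B receiving ttype filters 1 and 2,
   the non-ARM-EABI path above appends the uleb128 bytes 1, 2, 0 to
   ehspec_data, and the returned filter is the negated (1-based) index
   of the first of those bytes.  Values below 128 encode as a single
   uleb128 byte; larger values use 7 bits per byte with the high bit as
   a continuation flag, e.g. 624485 encodes as 0xE5 0x8E 0x26.  */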
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
/* Emit SEQ into the basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0,
				   region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK)
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}
static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
	 libcall is inside the last basic block or after it.  In the
	 latter case we need to emit the sequence on the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
	{
	  if (insn == cfun->eh->sjlj_exit_after)
	    {
	      if (LABEL_P (insn))
		insn = NEXT_INSN (insn);
	      emit_insn_after (seq, insn);
	      return;
	    }
	  if (insn == BB_END (e->src))
	    break;
	}
      insert_insn_on_edge (seq, e);
    }
}
static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = cfun->eh->region_array[first_reachable]->post_landing_pad;

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = xcalloc (cfun->eh->last_region_number + 1,
		     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (eh)
	rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}
static hashval_t
ehl_hash (const void *pentry)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}
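
/* The constant above is Knuth's multiplicative hashing in action:
   CODE_LABEL_NUMBERs are small, closely spaced integers, and
   multiplying by 2^32 times (sqrt(5) - 1) / 2 scatters them across
   the full hashval_t range so even adjacent labels hash far apart.  */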
static int
ehl_eq (const void *pentry, const void *pdata)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
	{
	  cfun->eh->region_array[i] = outer;
	}
    }

  if (outer)
    {
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      gcc_assert (try->type == ERT_TRY);

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}
2147 /* LABEL heads a basic block that is about to be deleted. If this
2148 label corresponds to an exception region, we may be able to
2149 delete the region. */
2151 void
2152 maybe_remove_eh_handler (rtx label)
2154 struct ehl_map_entry **slot, tmp;
2155 struct eh_region *region;
2157 /* ??? After generating landing pads, it's not so simple to determine
2158 if the region data is completely unused. One must examine the
2159 landing pad and the post landing pad, and whether an inner try block
2160 is referencing the catch handlers directly. */
2161 if (cfun->eh->built_landing_pads)
2162 return;
2164 tmp.label = label;
2165 slot = (struct ehl_map_entry **)
2166 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2167 if (! slot)
2168 return;
2169 region = (*slot)->region;
2170 if (! region)
2171 return;
2173 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2174 because there is no path to the fallback call to terminate.
2175 But the region continues to affect call-site data until there
2176 are no more contained calls, which we don't see here. */
2177 if (region->type == ERT_MUST_NOT_THROW)
2179 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2180 region->label = NULL_RTX;
2182 else
2183 remove_eh_handler (region);
2186 /* Invokes CALLBACK for every exception handler label. Only used by old
2187 loop hackery; should not be used by new code. */
2189 void
2190 for_each_eh_label (void (*callback) (rtx))
2192 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2193 (void *) &callback);
2196 static int
2197 for_each_eh_label_1 (void **pentry, void *data)
2199 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2200 void (*callback) (rtx) = *(void (**) (rtx)) data;
2202 (*callback) (entry->label);
2203 return 1;
2206 /* Invoke CALLBACK for every exception region in the current function. */
2208 void
2209 for_each_eh_region (void (*callback) (struct eh_region *))
2211 int i, n = cfun->eh->last_region_number;
2212 for (i = 1; i <= n; ++i)
2214 struct eh_region *region = cfun->eh->region_array[i];
2215 if (region)
2216 (*callback) (region);
2220 /* This section describes CFG exception edges for flow. */
2222 /* For communicating between calls to reachable_next_level. */
2223 struct reachable_info
2225 tree types_caught;
2226 tree types_allowed;
2227 void (*callback) (struct eh_region *, void *);
2228 void *callback_data;
2229 bool saw_any_handlers;
2232 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2233 base class of TYPE, is in HANDLED. */
2235 static int
2236 check_handled (tree handled, tree type)
2238 tree t;
2240 /* We can check for exact matches without front-end help. */
2241 if (! lang_eh_type_covers)
2243 for (t = handled; t ; t = TREE_CHAIN (t))
2244 if (TREE_VALUE (t) == type)
2245 return 1;
2247 else
2249 for (t = handled; t ; t = TREE_CHAIN (t))
2250 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2251 return 1;
2254 return 0;
2257 /* A subroutine of reachable_next_level. If we are collecting a list
2258 of handlers, add one. After landing pad generation, reference
2259 it instead of the handlers themselves. Further, the handlers are
2260 all wired together, so by referencing one, we've got them all.
2261 Before landing pad generation we reference each handler individually.
2263 LP_REGION contains the landing pad; REGION is the handler. */
2265 static void
2266 add_reachable_handler (struct reachable_info *info,
2267 struct eh_region *lp_region, struct eh_region *region)
2269 if (! info)
2270 return;
2272 info->saw_any_handlers = true;
2274 if (cfun->eh->built_landing_pads)
2275 info->callback (lp_region, info->callback_data);
2276 else
2277 info->callback (region, info->callback_data);
2280 /* Process one level of exception regions for reachability.
2281 If TYPE_THROWN is non-null, then it is the *exact* type being
2282 propagated. If INFO is non-null, then collect handler labels
2283 and caught/allowed type information between invocations. */
2285 static enum reachable_code
2286 reachable_next_level (struct eh_region *region, tree type_thrown,
2287 struct reachable_info *info)
2289 switch (region->type)
2291 case ERT_CLEANUP:
2292 /* Before landing-pad generation, we model control flow
2293 directly to the individual handlers. In this way we can
2294 see that catch handler types may shadow one another. */
2295 add_reachable_handler (info, region, region);
2296 return RNL_MAYBE_CAUGHT;
2298 case ERT_TRY:
2300 struct eh_region *c;
2301 enum reachable_code ret = RNL_NOT_CAUGHT;
2303 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2305 /* A catch-all handler ends the search. */
2306 if (c->u.catch.type_list == NULL)
2308 add_reachable_handler (info, region, c);
2309 return RNL_CAUGHT;
2312 if (type_thrown)
2314 /* If we have at least one type match, end the search. */
2315 tree tp_node = c->u.catch.type_list;
2317 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2319 tree type = TREE_VALUE (tp_node);
2321 if (type == type_thrown
2322 || (lang_eh_type_covers
2323 && (*lang_eh_type_covers) (type, type_thrown)))
2325 add_reachable_handler (info, region, c);
2326 return RNL_CAUGHT;
2330 /* If we have definitive information of a match failure,
2331 the catch won't trigger. */
2332 if (lang_eh_type_covers)
2333 return RNL_NOT_CAUGHT;
2336 /* At this point, we either don't know what type is thrown or
2337 don't have front-end assistance to help deciding if it is
2338 covered by one of the types in the list for this region.
2340 We'd then like to add this region to the list of reachable
2341 handlers since it is indeed potentially reachable based on the
2342 information we have.
2344 Actually, this handler is for sure not reachable if all the
2345 types it matches have already been caught. That is, it is only
2346 potentially reachable if at least one of the types it catches
2347 has not been previously caught. */
2349 if (! info)
2350 ret = RNL_MAYBE_CAUGHT;
2351 else
2353 tree tp_node = c->u.catch.type_list;
2354 bool maybe_reachable = false;
2356 /* Compute the potential reachability of this handler and
2357 update the list of types caught at the same time. */
2358 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2360 tree type = TREE_VALUE (tp_node);
2362 if (! check_handled (info->types_caught, type))
2364 info->types_caught
2365 = tree_cons (NULL, type, info->types_caught);
2367 maybe_reachable = true;
2371 if (maybe_reachable)
2373 add_reachable_handler (info, region, c);
2375 /* ??? If the catch type is a base class of every allowed
2376 type, then we know we can stop the search. */
2377 ret = RNL_MAYBE_CAUGHT;
2382 return ret;
2385 case ERT_ALLOWED_EXCEPTIONS:
2386 /* An empty list of types definitely ends the search. */
2387 if (region->u.allowed.type_list == NULL_TREE)
2389 add_reachable_handler (info, region, region);
2390 return RNL_CAUGHT;
2393 /* Collect a list of lists of allowed types for use in detecting
2394 when a catch may be transformed into a catch-all. */
2395 if (info)
2396 info->types_allowed = tree_cons (NULL_TREE,
2397 region->u.allowed.type_list,
2398 info->types_allowed);
2400 /* If we have definitive information about the type hierarchy,
2401 then we can tell if the thrown type will pass through the
2402 filter. */
2403 if (type_thrown && lang_eh_type_covers)
2405 if (check_handled (region->u.allowed.type_list, type_thrown))
2406 return RNL_NOT_CAUGHT;
2407 else
2409 add_reachable_handler (info, region, region);
2410 return RNL_CAUGHT;
2414 add_reachable_handler (info, region, region);
2415 return RNL_MAYBE_CAUGHT;
2417 case ERT_CATCH:
2418 /* Catch regions are handled by their controlling try region. */
2419 return RNL_NOT_CAUGHT;
2421 case ERT_MUST_NOT_THROW:
2422 /* Here we end our search, since no exceptions may propagate.
2423 If we've touched down at some landing pad previously, then the
2424 explicit function call we generated may be used. Otherwise
2425 the call is made by the runtime.
2427 Before inlining, do not perform this optimization. We may
2428 inline a subroutine that contains handlers, and that will
2429 change the value of saw_any_handlers. */
2431 if ((info && info->saw_any_handlers) || !cfun->after_inlining)
2433 add_reachable_handler (info, region, region);
2434 return RNL_CAUGHT;
2436 else
2437 return RNL_BLOCKED;
2439 case ERT_THROW:
2440 case ERT_UNKNOWN:
2441 /* Shouldn't see these here. */
2442 gcc_unreachable ();
2443 break;
2444 default:
2445 gcc_unreachable ();
2449 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2451 void
2452 foreach_reachable_handler (int region_number, bool is_resx,
2453 void (*callback) (struct eh_region *, void *),
2454 void *callback_data)
2456 struct reachable_info info;
2457 struct eh_region *region;
2458 tree type_thrown;
2460 memset (&info, 0, sizeof (info));
2461 info.callback = callback;
2462 info.callback_data = callback_data;
2464 region = cfun->eh->region_array[region_number];
2466 type_thrown = NULL_TREE;
2467 if (is_resx)
2469 /* A RESX leaves a region instead of entering it. Thus the
2470 region itself may have been deleted out from under us. */
2471 if (region == NULL)
2472 return;
2473 region = region->outer;
2475 else if (region->type == ERT_THROW)
2477 type_thrown = region->u.throw.type;
2478 region = region->outer;
2481 while (region)
2483 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2484 break;
2485 /* If we have processed one cleanup, there is no point in
2486 processing any more of them. Each cleanup will have an edge
2487 to the next outer cleanup region, so the flow graph will be
2488 accurate. */
2489 if (region->type == ERT_CLEANUP)
2490 region = region->u.cleanup.prev_try;
2491 else
2492 region = region->outer;
2496 /* Retrieve a list of labels of exception handlers which can be
2497 reached by a given insn. */
2499 static void
2500 arh_to_landing_pad (struct eh_region *region, void *data)
2502 rtx *p_handlers = data;
2503 if (! *p_handlers)
2504 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2507 static void
2508 arh_to_label (struct eh_region *region, void *data)
2510 rtx *p_handlers = data;
2511 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2514 rtx
2515 reachable_handlers (rtx insn)
2517 bool is_resx = false;
2518 rtx handlers = NULL;
2519 int region_number;
2521 if (JUMP_P (insn)
2522 && GET_CODE (PATTERN (insn)) == RESX)
2524 region_number = XINT (PATTERN (insn), 0);
2525 is_resx = true;
2527 else
2529 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2530 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2531 return NULL;
2532 region_number = INTVAL (XEXP (note, 0));
2535 foreach_reachable_handler (region_number, is_resx,
2536 (cfun->eh->built_landing_pads
2537 ? arh_to_landing_pad
2538 : arh_to_label),
2539 &handlers);
2541 return handlers;
2544 /* Determine if the given INSN can throw an exception that is caught
2545 within the function. */
2547 bool
2548 can_throw_internal_1 (int region_number, bool is_resx)
2550 struct eh_region *region;
2551 tree type_thrown;
2553 region = cfun->eh->region_array[region_number];
2555 type_thrown = NULL_TREE;
2556 if (is_resx)
2557 region = region->outer;
2558 else if (region->type == ERT_THROW)
2560 type_thrown = region->u.throw.type;
2561 region = region->outer;
2564 /* If this exception is ignored by each and every containing region,
2565 then control passes straight out. The runtime may handle some
2566 regions, which also do not require processing internally. */
2567 for (; region; region = region->outer)
2569 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2570 if (how == RNL_BLOCKED)
2571 return false;
2572 if (how != RNL_NOT_CAUGHT)
2573 return true;
2576 return false;
2579 bool
2580 can_throw_internal (rtx insn)
2582 rtx note;
2584 if (! INSN_P (insn))
2585 return false;
2587 if (JUMP_P (insn)
2588 && GET_CODE (PATTERN (insn)) == RESX
2589 && XINT (PATTERN (insn), 0) > 0)
2590 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);
2592 if (NONJUMP_INSN_P (insn)
2593 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2594 insn = XVECEXP (PATTERN (insn), 0, 0);
2596 /* Every insn that might throw has an EH_REGION note. */
2597 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2598 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2599 return false;
2601 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
2604 /* Determine if the given INSN can throw an exception that is
2605 visible outside the function. */
2607 bool
2608 can_throw_external_1 (int region_number, bool is_resx)
2610 struct eh_region *region;
2611 tree type_thrown;
2613 region = cfun->eh->region_array[region_number];
2615 type_thrown = NULL_TREE;
2616 if (is_resx)
2617 region = region->outer;
2618 else if (region->type == ERT_THROW)
2620 type_thrown = region->u.throw.type;
2621 region = region->outer;
2624 /* If the exception is caught or blocked by any containing region,
2625 then it is not seen by any calling function. */
2626 for (; region ; region = region->outer)
2627 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2628 return false;
2630 return true;
2633 bool
2634 can_throw_external (rtx insn)
2636 rtx note;
2638 if (! INSN_P (insn))
2639 return false;
2641 if (JUMP_P (insn)
2642 && GET_CODE (PATTERN (insn)) == RESX
2643 && XINT (PATTERN (insn), 0) > 0)
2644 return can_throw_external_1 (XINT (PATTERN (insn), 0), true);
2646 if (NONJUMP_INSN_P (insn)
2647 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2648 insn = XVECEXP (PATTERN (insn), 0, 0);
2650 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2651 if (!note)
2653 /* Calls (and trapping insns) without notes are outside any
2654 exception handling region in this function. We have to
2655 assume it might throw. Given that the front end and middle
2656 ends mark known NOTHROW functions, this isn't so wildly
2657 inaccurate. */
2658 return (CALL_P (insn)
2659 || (flag_non_call_exceptions
2660 && may_trap_p (PATTERN (insn))));
2662 if (INTVAL (XEXP (note, 0)) <= 0)
2663 return false;
2665 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
2668 /* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
2670 void
2671 set_nothrow_function_flags (void)
2673 rtx insn;
2675 TREE_NOTHROW (current_function_decl) = 1;
2677 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2678 something that can throw an exception. We specifically exempt
2679 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2680 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2681 is optimistic. */
2683 cfun->all_throwers_are_sibcalls = 1;
2685 if (! flag_exceptions)
2686 return;
2688 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2689 if (can_throw_external (insn))
2691 TREE_NOTHROW (current_function_decl) = 0;
2693 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2695 cfun->all_throwers_are_sibcalls = 0;
2696 return;
2700 for (insn = current_function_epilogue_delay_list; insn;
2701 insn = XEXP (insn, 1))
2702 if (can_throw_external (insn))
2704 TREE_NOTHROW (current_function_decl) = 0;
2706 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2708 cfun->all_throwers_are_sibcalls = 0;
2709 return;
2714 struct tree_opt_pass pass_set_nothrow_function_flags =
2716 NULL, /* name */
2717 NULL, /* gate */
2718 set_nothrow_function_flags, /* execute */
2719 NULL, /* sub */
2720 NULL, /* next */
2721 0, /* static_pass_number */
2722 0, /* tv_id */
2723 0, /* properties_required */
2724 0, /* properties_provided */
2725 0, /* properties_destroyed */
2726 0, /* todo_flags_start */
2727 0, /* todo_flags_finish */
2728 0 /* letter */
2732 /* Various hooks for unwind library. */
2734 /* Do any necessary initialization to access arbitrary stack frames.
2735 On the SPARC, this means flushing the register windows. */
2737 void
2738 expand_builtin_unwind_init (void)
2740 /* Set this so all the registers get saved in our frame; we need to be
2741 able to copy the saved values for any registers from frames we unwind. */
2742 current_function_has_nonlocal_label = 1;
2744 #ifdef SETUP_FRAME_ADDRESSES
2745 SETUP_FRAME_ADDRESSES ();
2746 #endif
2749 rtx
2750 expand_builtin_eh_return_data_regno (tree arglist)
2752 tree which = TREE_VALUE (arglist);
2753 unsigned HOST_WIDE_INT iwhich;
2755 if (TREE_CODE (which) != INTEGER_CST)
2757 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2758 return constm1_rtx;
2761 iwhich = tree_low_cst (which, 1);
2762 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2763 if (iwhich == INVALID_REGNUM)
2764 return constm1_rtx;
2766 #ifdef DWARF_FRAME_REGNUM
2767 iwhich = DWARF_FRAME_REGNUM (iwhich);
2768 #else
2769 iwhich = DBX_REGISTER_NUMBER (iwhich);
2770 #endif
2772 return GEN_INT (iwhich);
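/* For reference, this expands the __builtin_eh_return_data_regno
   builtin; the personality routines in the runtime use, for example,
   __builtin_eh_return_data_regno (0) to name the DWARF column of the
   first exception-data register.  On a target without such registers
   the builtin folds to -1, matching the INVALID_REGNUM check above.  */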
2775 /* Given a value extracted from the return address register or stack slot,
2776 return the actual address encoded in that value. */
2778 rtx
2779 expand_builtin_extract_return_addr (tree addr_tree)
2781 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2783 if (GET_MODE (addr) != Pmode
2784 && GET_MODE (addr) != VOIDmode)
2786 #ifdef POINTERS_EXTEND_UNSIGNED
2787 addr = convert_memory_address (Pmode, addr);
2788 #else
2789 addr = convert_to_mode (Pmode, addr, 0);
2790 #endif
2793 /* First mask out any unwanted bits. */
2794 #ifdef MASK_RETURN_ADDR
2795 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2796 #endif
2798 /* Then adjust to find the real return address. */
2799 #if defined (RETURN_ADDR_OFFSET)
2800 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2801 #endif
2803 return addr;
2806 /* Given an actual address in addr_tree, do any necessary encoding
2807 and return the value to be stored in the return address register or
2808 stack slot so the epilogue will return to that address. */
2810 rtx
2811 expand_builtin_frob_return_addr (tree addr_tree)
2813 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2815 addr = convert_memory_address (Pmode, addr);
2817 #ifdef RETURN_ADDR_OFFSET
2818 addr = force_reg (Pmode, addr);
2819 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2820 #endif
2822 return addr;
2825 /* Set up the epilogue with the magic bits we'll need to return to the
2826 exception handler. */
2828 void
2829 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2830 tree handler_tree)
2832 rtx tmp;
2834 #ifdef EH_RETURN_STACKADJ_RTX
2835 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2836 tmp = convert_memory_address (Pmode, tmp);
2837 if (!cfun->eh->ehr_stackadj)
2838 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
2839 else if (tmp != cfun->eh->ehr_stackadj)
2840 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
2841 #endif
2843 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2844 tmp = convert_memory_address (Pmode, tmp);
2845 if (!cfun->eh->ehr_handler)
2846 cfun->eh->ehr_handler = copy_to_reg (tmp);
2847 else if (tmp != cfun->eh->ehr_handler)
2848 emit_move_insn (cfun->eh->ehr_handler, tmp);
2850 if (!cfun->eh->ehr_label)
2851 cfun->eh->ehr_label = gen_label_rtx ();
2852 emit_jump (cfun->eh->ehr_label);
2855 void
2856 expand_eh_return (void)
2858 rtx around_label;
2860 if (! cfun->eh->ehr_label)
2861 return;
2863 current_function_calls_eh_return = 1;
2865 #ifdef EH_RETURN_STACKADJ_RTX
2866 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2867 #endif
2869 around_label = gen_label_rtx ();
2870 emit_jump (around_label);
2872 emit_label (cfun->eh->ehr_label);
2873 clobber_return_register ();
2875 #ifdef EH_RETURN_STACKADJ_RTX
2876 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
2877 #endif
2879 #ifdef HAVE_eh_return
2880 if (HAVE_eh_return)
2881 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
2882 else
2883 #endif
2885 #ifdef EH_RETURN_HANDLER_RTX
2886 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
2887 #else
2888 error ("__builtin_eh_return not supported on this target");
2889 #endif
2892 emit_label (around_label);
2895 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2896 POINTERS_EXTEND_UNSIGNED and return it. */
2898 rtx
2899 expand_builtin_extend_pointer (tree addr_tree)
2901 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2902 int extend;
2904 #ifdef POINTERS_EXTEND_UNSIGNED
2905 extend = POINTERS_EXTEND_UNSIGNED;
2906 #else
2907 /* The previous EH code did an unsigned extend by default, so we do this also
2908 for consistency. */
2909 extend = 1;
2910 #endif
2912 return convert_modes (word_mode, ptr_mode, addr, extend);
2915 /* In the following functions, we represent entries in the action table
2916 as 1-based indices. Special cases are:
2918 0: null action record, non-null landing pad; implies cleanups
2919 -1: null action record, null landing pad; implies no action
2920 -2: no call-site entry; implies must_not_throw
2921 -3: we have yet to process outer regions
2923 Further, no special cases apply to the "next" field of the record.
2924 For next, 0 means end of list. */
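/* As a hypothetical illustration of the encoding above: a call inside
   a cleanup whose outer regions catch nothing gets action 0 (the
   landing pad runs cleanups only); a call outside any interesting
   region gets -1 (a call-site entry whose landing-pad and action
   fields are both zero, so the unwinder simply continues past the
   frame); a call inside a must_not_throw region gets -2 (no
   call-site entry at all, so an actual throw falls back to
   terminate); and -3 never survives into the table, serving only as
   a sentinel while chains are being collected.  */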
2926 struct action_record
2928 int offset;
2929 int filter;
2930 int next;
2933 static int
2934 action_record_eq (const void *pentry, const void *pdata)
2936 const struct action_record *entry = (const struct action_record *) pentry;
2937 const struct action_record *data = (const struct action_record *) pdata;
2938 return entry->filter == data->filter && entry->next == data->next;
2941 static hashval_t
2942 action_record_hash (const void *pentry)
2944 const struct action_record *entry = (const struct action_record *) pentry;
2945 return entry->next * 1009 + entry->filter;
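/* 1009 is prime, so distinct (next, filter) pairs rarely collide:
   e.g. (next 1, filter 2) hashes to 1011 while the transposed pair
   (next 2, filter 1) hashes to 2019.  */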
2948 static int
2949 add_action_record (htab_t ar_hash, int filter, int next)
2951 struct action_record **slot, *new, tmp;
2953 tmp.filter = filter;
2954 tmp.next = next;
2955 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
2957 if ((new = *slot) == NULL)
2959 new = xmalloc (sizeof (*new));
2960 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2961 new->filter = filter;
2962 new->next = next;
2963 *slot = new;
2965 /* The filter value goes in untouched. The link to the next
2966 record is a "self-relative" byte offset, or zero to indicate
2967 that there is no next record. So convert the absolute 1-based
2968 indices we've been carrying around into a displacement. */
2970 push_sleb128 (&cfun->eh->action_record_data, filter);
2971 if (next)
2972 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2973 push_sleb128 (&cfun->eh->action_record_data, next);
2976 return new->offset;
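/* A worked example of the displacement math above, with hypothetical
   records: the first record added (1-based index 1) has filter 1 and
   next 0, occupying bytes 1 and 2 of the varray.  A second record
   with filter 2 chaining to the first starts at index 3; after its
   filter byte is pushed the varray holds 3 bytes, so next becomes
   1 - (3 + 1) = -3, encoded as the single sleb128 byte 0x7d.  The
   unwinder recovers the target by adding -3 to the next field's own
   1-based position, 4, yielding index 1.  */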
2979 static int
2980 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
2982 struct eh_region *c;
2983 int next;
2985 /* If we've reached the top of the region chain, then we have
2986 no actions, and require no landing pad. */
2987 if (region == NULL)
2988 return -1;
2990 switch (region->type)
2992 case ERT_CLEANUP:
2993 /* A cleanup adds a zero filter to the beginning of the chain, but
2994 there are special cases to look out for. If there are *only*
2995 cleanups along a path, then it compresses to a zero action.
2996 Further, if there are multiple cleanups along a path, we only
2997 need to represent one of them, as that is enough to trigger
2998 entry to the landing pad at runtime. */
2999 next = collect_one_action_chain (ar_hash, region->outer);
3000 if (next <= 0)
3001 return 0;
3002 for (c = region->outer; c ; c = c->outer)
3003 if (c->type == ERT_CLEANUP)
3004 return next;
3005 return add_action_record (ar_hash, 0, next);
3007 case ERT_TRY:
3008 /* Process the associated catch regions in reverse order.
3009 If there's a catch-all handler, then we don't need to
3010 search outer regions. Use a magic -3 value to record
3011 that we haven't done the outer search. */
3012 next = -3;
3013 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3015 if (c->u.catch.type_list == NULL)
3017 /* Retrieve the filter from the head of the filter list
3018 where we have stored it (see assign_filter_values). */
3019 int filter
3020 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3022 next = add_action_record (ar_hash, filter, 0);
3024 else
3026 /* Once the outer search is done, trigger an action record for
3027 each filter we have. */
3028 tree flt_node;
3030 if (next == -3)
3032 next = collect_one_action_chain (ar_hash, region->outer);
3034 /* If there is no next action, terminate the chain. */
3035 if (next == -1)
3036 next = 0;
3037 /* If all outer actions are cleanups or must_not_throw,
3038 we'll have no action record for it, since we want to encode
3039 those states in the call-site record directly.
3040 Add a cleanup action to the chain to catch these. */
3041 else if (next <= 0)
3042 next = add_action_record (ar_hash, 0, 0);
3045 flt_node = c->u.catch.filter_list;
3046 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3048 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3049 next = add_action_record (ar_hash, filter, next);
3053 return next;
3055 case ERT_ALLOWED_EXCEPTIONS:
3056 /* An exception specification adds its filter to the
3057 beginning of the chain. */
3058 next = collect_one_action_chain (ar_hash, region->outer);
3060 /* If there is no next action, terminate the chain. */
3061 if (next == -1)
3062 next = 0;
3063 /* If all outer actions are cleanups or must_not_throw,
3064 we'll have no action record for it, since we want to encode
3065 those states in the call-site record directly.
3066 Add a cleanup action to the chain to catch these. */
3067 else if (next <= 0)
3068 next = add_action_record (ar_hash, 0, 0);
3070 return add_action_record (ar_hash, region->u.allowed.filter, next);
3072 case ERT_MUST_NOT_THROW:
3073 /* A must-not-throw region with no inner handlers or cleanups
3074 requires no call-site entry. Note that this differs from
3075 the no handler or cleanup case in that we do require an lsda
3076 to be generated. Return a magic -2 value to record this. */
3077 return -2;
3079 case ERT_CATCH:
3080 case ERT_THROW:
3081 /* CATCH regions are handled in TRY above. THROW regions are
3082 for optimization information only and produce no output. */
3083 return collect_one_action_chain (ar_hash, region->outer);
3085 default:
3086 gcc_unreachable ();
3090 static int
3091 add_call_site (rtx landing_pad, int action)
3093 struct call_site_record *data = cfun->eh->call_site_data;
3094 int used = cfun->eh->call_site_data_used;
3095 int size = cfun->eh->call_site_data_size;
3097 if (used >= size)
3099 size = (size ? size * 2 : 64);
3100 data = ggc_realloc (data, sizeof (*data) * size);
3101 cfun->eh->call_site_data = data;
3102 cfun->eh->call_site_data_size = size;
3105 data[used].landing_pad = landing_pad;
3106 data[used].action = action;
3108 cfun->eh->call_site_data_used = used + 1;
3110 return used + call_site_base;
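/* The table doubles from an initial 64 entries, so building it is
   amortized linear in the number of call sites.  The returned index
   is biased by call_site_base, keeping call-site numbers unique
   across all the functions already emitted in this translation
   unit.  */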
3113 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3114 The new note numbers will not refer to region numbers, but
3115 instead to call site entries. */
3117 void
3118 convert_to_eh_region_ranges (void)
3120 rtx insn, iter, note;
3121 htab_t ar_hash;
3122 int last_action = -3;
3123 rtx last_action_insn = NULL_RTX;
3124 rtx last_landing_pad = NULL_RTX;
3125 rtx first_no_action_insn = NULL_RTX;
3126 int call_site = 0;
3128 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3129 return;
3131 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3133 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3135 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3136 if (INSN_P (iter))
3138 struct eh_region *region;
3139 int this_action;
3140 rtx this_landing_pad;
3142 insn = iter;
3143 if (NONJUMP_INSN_P (insn)
3144 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3145 insn = XVECEXP (PATTERN (insn), 0, 0);
3147 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3148 if (!note)
3150 if (! (CALL_P (insn)
3151 || (flag_non_call_exceptions
3152 && may_trap_p (PATTERN (insn)))))
3153 continue;
3154 this_action = -1;
3155 region = NULL;
3157 else
3159 if (INTVAL (XEXP (note, 0)) <= 0)
3160 continue;
3161 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3162 this_action = collect_one_action_chain (ar_hash, region);
3165 /* Existence of catch handlers, or must-not-throw regions
3166 implies that an lsda is needed (even if empty). */
3167 if (this_action != -1)
3168 cfun->uses_eh_lsda = 1;
3170 /* Delay creation of region notes for no-action regions
3171 until we're sure that an lsda will be required. */
3172 else if (last_action == -3)
3174 first_no_action_insn = iter;
3175 last_action = -1;
3178 /* Cleanups and handlers may share action chains but not
3179 landing pads. Collect the landing pad for this region. */
3180 if (this_action >= 0)
3182 struct eh_region *o;
3183 for (o = region; ! o->landing_pad ; o = o->outer)
3184 continue;
3185 this_landing_pad = o->landing_pad;
3187 else
3188 this_landing_pad = NULL_RTX;
3190 /* Differing actions or landing pads implies a change in call-site
3191 info, which implies some EH_REGION note should be emitted. */
3192 if (last_action != this_action
3193 || last_landing_pad != this_landing_pad)
3195 /* If we'd not seen a previous action (-3) or the previous
3196 action was must-not-throw (-2), then we do not need an
3197 end note. */
3198 if (last_action >= -1)
3200 /* If we delayed the creation of the begin, do it now. */
3201 if (first_no_action_insn)
3203 call_site = add_call_site (NULL_RTX, 0);
3204 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3205 first_no_action_insn);
3206 NOTE_EH_HANDLER (note) = call_site;
3207 first_no_action_insn = NULL_RTX;
3210 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3211 last_action_insn);
3212 NOTE_EH_HANDLER (note) = call_site;
3215 /* If the new action is must-not-throw, then no region notes
3216 are created. */
3217 if (this_action >= -1)
3219 call_site = add_call_site (this_landing_pad,
3220 this_action < 0 ? 0 : this_action);
3221 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3222 NOTE_EH_HANDLER (note) = call_site;
3225 last_action = this_action;
3226 last_landing_pad = this_landing_pad;
3228 last_action_insn = iter;
3231 if (last_action >= -1 && ! first_no_action_insn)
3233 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3234 NOTE_EH_HANDLER (note) = call_site;
3237 htab_delete (ar_hash);
3240 struct tree_opt_pass pass_convert_to_eh_region_ranges =
3242 "eh-ranges", /* name */
3243 NULL, /* gate */
3244 convert_to_eh_region_ranges, /* execute */
3245 NULL, /* sub */
3246 NULL, /* next */
3247 0, /* static_pass_number */
3248 0, /* tv_id */
3249 0, /* properties_required */
3250 0, /* properties_provided */
3251 0, /* properties_destroyed */
3252 0, /* todo_flags_start */
3253 TODO_dump_func, /* todo_flags_finish */
3254 0 /* letter */
3258 static void
3259 push_uleb128 (varray_type *data_area, unsigned int value)
3261 do
3263 unsigned char byte = value & 0x7f;
3264 value >>= 7;
3265 if (value)
3266 byte |= 0x80;
3267 VARRAY_PUSH_UCHAR (*data_area, byte);
3269 while (value);
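/* A worked example of the unsigned LEB128 encoding above: the value
   624485 (0x98765) splits into 7-bit groups 0x65, 0x0e, 0x26 from
   least significant up; all but the last byte get the 0x80
   continuation bit, giving the byte sequence 0xe5 0x8e 0x26.  Values
   below 128 encode as themselves in a single byte.  */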
3272 static void
3273 push_sleb128 (varray_type *data_area, int value)
3275 unsigned char byte;
3276 int more;
3278 do
3280 byte = value & 0x7f;
3281 value >>= 7;
3282 more = ! ((value == 0 && (byte & 0x40) == 0)
3283 || (value == -1 && (byte & 0x40) != 0));
3284 if (more)
3285 byte |= 0x80;
3286 VARRAY_PUSH_UCHAR (*data_area, byte);
3288 while (more);
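/* Signed LEB128 stops once the remaining value is all sign bits
   (0 or -1) and bit 0x40 of the last byte matches that sign: -2
   encodes as the single byte 0x7e, while -624485 becomes 0x9b 0xf1
   0x59.  Note the loop relies on ">>" sign-extending negative
   values, which C leaves implementation-defined but which holds in
   practice on the hosts GCC supports.  */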
3292 #ifndef HAVE_AS_LEB128
3293 static int
3294 dw2_size_of_call_site_table (void)
3296 int n = cfun->eh->call_site_data_used;
3297 int size = n * (4 + 4 + 4);
3298 int i;
3300 for (i = 0; i < n; ++i)
3302 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3303 size += size_of_uleb128 (cs->action);
3306 return size;
3309 static int
3310 sjlj_size_of_call_site_table (void)
3312 int n = cfun->eh->call_site_data_used;
3313 int size = 0;
3314 int i;
3316 for (i = 0; i < n; ++i)
3318 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3319 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3320 size += size_of_uleb128 (cs->action);
3323 return size;
3325 #endif
3327 static void
3328 dw2_output_call_site_table (void)
3330 int n = cfun->eh->call_site_data_used;
3331 int i;
3333 for (i = 0; i < n; ++i)
3335 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3336 char reg_start_lab[32];
3337 char reg_end_lab[32];
3338 char landing_pad_lab[32];
3340 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3341 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3343 if (cs->landing_pad)
3344 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3345 CODE_LABEL_NUMBER (cs->landing_pad));
3347 /* ??? Perhaps use insn length scaling if the assembler supports
3348 generic arithmetic. */
3349 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3350 data4 if the function is small enough. */
3351 #ifdef HAVE_AS_LEB128
3352 dw2_asm_output_delta_uleb128 (reg_start_lab,
3353 current_function_func_begin_label,
3354 "region %d start", i);
3355 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3356 "length");
3357 if (cs->landing_pad)
3358 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3359 current_function_func_begin_label,
3360 "landing pad");
3361 else
3362 dw2_asm_output_data_uleb128 (0, "landing pad");
3363 #else
3364 dw2_asm_output_delta (4, reg_start_lab,
3365 current_function_func_begin_label,
3366 "region %d start", i);
3367 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3368 if (cs->landing_pad)
3369 dw2_asm_output_delta (4, landing_pad_lab,
3370 current_function_func_begin_label,
3371 "landing pad");
3372 else
3373 dw2_asm_output_data (4, 0, "landing pad");
3374 #endif
3375 dw2_asm_output_data_uleb128 (cs->action, "action");
3378 call_site_base += n;
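/* Advancing call_site_base keeps the LEHB/LEHE label numbers, which
   must match the labels emitted alongside the insns via
   NOTE_EH_HANDLER, unique across the functions already output.  */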
3381 static void
3382 sjlj_output_call_site_table (void)
3384 int n = cfun->eh->call_site_data_used;
3385 int i;
3387 for (i = 0; i < n; ++i)
3389 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3391 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3392 "region %d landing pad", i);
3393 dw2_asm_output_data_uleb128 (cs->action, "action");
3396 call_site_base += n;
3399 /* Tell assembler to switch to the section for the exception handling
3400 table. */
3402 void
3403 default_exception_section (void)
3405 if (targetm.have_named_sections)
3407 int flags;
3409 if (EH_TABLES_CAN_BE_READ_ONLY)
3411 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3413 flags = (! flag_pic
3414 || ((tt_format & 0x70) != DW_EH_PE_absptr
3415 && (tt_format & 0x70) != DW_EH_PE_aligned))
3416 ? 0 : SECTION_WRITE;
3418 else
3419 flags = SECTION_WRITE;
3420 named_section_flags (".gcc_except_table", flags);
3422 else if (flag_pic)
3423 data_section ();
3424 else
3425 readonly_data_section ();
3429 /* Output a reference from an exception table to the type_info object TYPE.
3430 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3431 the value. */
3433 static void
3434 output_ttype (tree type, int tt_format, int tt_format_size)
3436 rtx value;
3438 if (type == NULL_TREE)
3439 value = const0_rtx;
3440 else
3442 struct cgraph_varpool_node *node;
3444 type = lookup_type_for_runtime (type);
3445 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3447 /* Let cgraph know that the rtti decl is used. Not all of the
3448 paths below go through assemble_integer, which would take
3449 care of this for us. */
3450 STRIP_NOPS (type);
3451 if (TREE_CODE (type) == ADDR_EXPR)
3453 type = TREE_OPERAND (type, 0);
3454 if (TREE_CODE (type) == VAR_DECL)
3456 node = cgraph_varpool_node (type);
3457 if (node)
3458 cgraph_varpool_mark_needed_node (node);
3461 else if (TREE_CODE (type) != INTEGER_CST)
3462 abort ();
3465 /* Allow the target to override the type table entry format. */
3466 if (targetm.asm_out.ttype (value))
3467 return;
3469 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3470 assemble_integer (value, tt_format_size,
3471 tt_format_size * BITS_PER_UNIT, 1);
3472 else
3473 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3476 void
3477 output_function_exception_table (void)
3479 int tt_format, cs_format, lp_format, i, n;
3480 #ifdef HAVE_AS_LEB128
3481 char ttype_label[32];
3482 char cs_after_size_label[32];
3483 char cs_end_label[32];
3484 #else
3485 int call_site_len;
3486 #endif
3487 int have_tt_data;
3488 int tt_format_size = 0;
3490 /* Not all functions need anything. */
3491 if (! cfun->uses_eh_lsda)
3492 return;
3494 #ifdef TARGET_UNWIND_INFO
3495 /* TODO: Move this into target file. */
3496 assemble_external_libcall (eh_personality_libfunc);
3497 fputs ("\t.personality\t", asm_out_file);
3498 output_addr_const (asm_out_file, eh_personality_libfunc);
3499 fputs ("\n\t.handlerdata\n", asm_out_file);
3500 /* Note that varasm still thinks we're in the function's code section.
3501 The ".endp" directive that will immediately follow will take us back. */
3502 #else
3503 targetm.asm_out.exception_section ();
3504 #endif
3506 have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
3507 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3509 /* Indicate the format of the @TType entries. */
3510 if (! have_tt_data)
3511 tt_format = DW_EH_PE_omit;
3512 else
3514 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3515 #ifdef HAVE_AS_LEB128
3516 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3517 current_function_funcdef_no);
3518 #endif
3519 tt_format_size = size_of_encoded_value (tt_format);
3521 assemble_align (tt_format_size * BITS_PER_UNIT);
3524 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3525 current_function_funcdef_no);
3527 /* The LSDA header. */
3529 /* Indicate the format of the landing pad start pointer. An omitted
3530 field implies @LPStart == @Start. */
3531 /* Currently we always put @LPStart == @Start. This field would
3532 be most useful in moving the landing pads completely out of
3533 line to another section, but it could also be used to minimize
3534 the size of uleb128 landing pad offsets. */
3535 lp_format = DW_EH_PE_omit;
3536 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3537 eh_data_format_name (lp_format));
3539 /* @LPStart pointer would go here. */
3541 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3542 eh_data_format_name (tt_format));
3544 #ifndef HAVE_AS_LEB128
3545 if (USING_SJLJ_EXCEPTIONS)
3546 call_site_len = sjlj_size_of_call_site_table ();
3547 else
3548 call_site_len = dw2_size_of_call_site_table ();
3549 #endif
3551 /* A pc-relative 4-byte displacement to the @TType data. */
3552 if (have_tt_data)
3554 #ifdef HAVE_AS_LEB128
3555 char ttype_after_disp_label[32];
3556 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3557 current_function_funcdef_no);
3558 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3559 "@TType base offset");
3560 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3561 #else
3562 /* Ugh. Alignment complicates things: the uleb128 size of the displacement feeds back into the padding, and hence into the displacement itself, so we iterate to a fixed point below. */
3563 unsigned int before_disp, after_disp, last_disp, disp;
3565 before_disp = 1 + 1;
3566 after_disp = (1 + size_of_uleb128 (call_site_len)
3567 + call_site_len
3568 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3569 + (VEC_length (tree, cfun->eh->ttype_data)
3570 * tt_format_size));
3572 disp = after_disp;
3573 do
3575 unsigned int disp_size, pad;
3577 last_disp = disp;
3578 disp_size = size_of_uleb128 (disp);
3579 pad = before_disp + disp_size + after_disp;
3580 if (pad % tt_format_size)
3581 pad = tt_format_size - (pad % tt_format_size);
3582 else
3583 pad = 0;
3584 disp = after_disp + pad;
3586 while (disp != last_disp);
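/* A worked pass through the loop above, with hypothetical sizes:
   take call_site_len = 120, 40 bytes of action records, three 4-byte
   @TType entries and tt_format_size = 4; before_disp = 2 counts the
   @LPStart and @TType format bytes.  Then after_disp = 1 + 1 + 120 +
   40 + 12 = 174.  First iteration: a 174-byte displacement needs 2
   uleb128 bytes, 2 + 2 + 174 = 178 leaves remainder 2 mod 4, so pad
   = 2 and disp = 176.  The second iteration reproduces disp = 176,
   and the fixed point is reached.  */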
3588 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3589 #endif
3592 /* Indicate the format of the call-site offsets. */
3593 #ifdef HAVE_AS_LEB128
3594 cs_format = DW_EH_PE_uleb128;
3595 #else
3596 cs_format = DW_EH_PE_udata4;
3597 #endif
3598 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3599 eh_data_format_name (cs_format));
3601 #ifdef HAVE_AS_LEB128
3602 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3603 current_function_funcdef_no);
3604 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3605 current_function_funcdef_no);
3606 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3607 "Call-site table length");
3608 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3609 if (USING_SJLJ_EXCEPTIONS)
3610 sjlj_output_call_site_table ();
3611 else
3612 dw2_output_call_site_table ();
3613 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3614 #else
3615 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3616 if (USING_SJLJ_EXCEPTIONS)
3617 sjlj_output_call_site_table ();
3618 else
3619 dw2_output_call_site_table ();
3620 #endif
3622 /* ??? Decode and interpret the data for flag_debug_asm. */
3623 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3624 for (i = 0; i < n; ++i)
3625 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3626 (i ? NULL : "Action record table"));
3628 if (have_tt_data)
3629 assemble_align (tt_format_size * BITS_PER_UNIT);
3631 i = VEC_length (tree, cfun->eh->ttype_data);
3632 while (i-- > 0)
3634 tree type = VEC_index (tree, cfun->eh->ttype_data, i);
3635 output_ttype (type, tt_format, tt_format_size);
3638 #ifdef HAVE_AS_LEB128
3639 if (have_tt_data)
3640 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3641 #endif
3643 /* ??? Decode and interpret the data for flag_debug_asm. */
3644 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3645 for (i = 0; i < n; ++i)
3647 if (targetm.arm_eabi_unwinder)
3649 tree type = VARRAY_TREE (cfun->eh->ehspec_data, i);
3650 output_ttype (type, tt_format, tt_format_size);
3652 else
3653 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3654 (i ? NULL : "Exception specification table"));
3657 current_function_section (current_function_decl);
3660 void
3661 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3663 fun->eh->throw_stmt_table = table;
3666 htab_t
3667 get_eh_throw_stmt_table (struct function *fun)
3669 return fun->eh->throw_stmt_table;
3672 /* Dump EH information to OUT. */
3673 void
3674 dump_eh_tree (FILE *out, struct function *fun)
3676 struct eh_region *i;
3677 int depth = 0;
3678 static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
3679 "allowed_exceptions", "must_not_throw",
3680 "throw"};
3682 i = fun->eh->region_tree;
3683 if (! i)
3684 return;
3686 fprintf (out, "Eh tree:\n");
3687 while (1)
3689 fprintf (out, " %*s %i %s", depth * 2, "",
3690 i->region_number, type_name [(int)i->type]);
3691 if (i->tree_label)
3693 fprintf (out, " tree_label:");
3694 print_generic_expr (out, i->tree_label, 0);
3696 fprintf (out, "\n");
3697 /* If there are sub-regions, process them. */
3698 if (i->inner)
3699 i = i->inner, depth++;
3700 /* If there are peers, process them. */
3701 else if (i->next_peer)
3702 i = i->next_peer;
3703 /* Otherwise, step back up the tree to the next peer. */
3704 else
3706 do {
3707 i = i->outer;
3708 depth--;
3709 if (i == NULL)
3710 return;
3711 } while (i->next_peer == NULL);
3712 i = i->next_peer;
3717 /* Verify some basic invariants on EH datastructures. Could be extended to
3718 catch more. */
3719 void
3720 verify_eh_tree (struct function *fun)
3722 struct eh_region *i, *outer = NULL;
3723 bool err = false;
3724 int nvisited = 0;
3725 int count = 0;
3726 int j;
3727 int depth = 0;
3729 i = fun->eh->region_tree;
3730 if (! i)
3731 return;
3732 for (j = fun->eh->last_region_number; j > 0; --j)
3733 if (fun->eh->region_array[j])
3735 count++;
3736 if (fun->eh->region_array[j]->region_number != j)
3738 error ("region_array is corrupted for region %i", i->region_number);
3739 err = true;
3743 while (1)
3745 if (fun->eh->region_array[i->region_number] != i)
3747 error ("region_array is corrupted for region %i", i->region_number);
3748 err = true;
3750 if (i->outer != outer)
3752 error ("outer block of region %i is wrong", i->region_number);
3753 err = true;
3755 if (i->may_contain_throw && outer && !outer->may_contain_throw)
3757 error ("region %i may contain throw and is contained in region that may not",
3758 i->region_number);
3759 err = true;
3761 if (depth < 0)
3763 error ("negative nesting depth of region %i", i->region_number);
3764 err = true;
3766 nvisited ++;
3767 /* If there are sub-regions, process them. */
3768 if (i->inner)
3769 outer = i, i = i->inner, depth++;
3770 /* If there are peers, process them. */
3771 else if (i->next_peer)
3772 i = i->next_peer;
3773 /* Otherwise, step back up the tree to the next peer. */
3774 else
3776 do {
3777 i = i->outer;
3778 depth--;
3779 if (i == NULL)
3781 if (depth != -1)
3783 error ("tree list ends on depth %i", depth + 1);
3784 err = true;
3786 if (count != nvisited)
3788 error ("array does not match the region tree");
3789 err = true;
3791 if (err)
3793 dump_eh_tree (stderr, fun);
3794 internal_error ("verify_eh_tree failed");
3796 return;
3798 outer = i->outer;
3799 } while (i->next_peer == NULL);
3800 i = i->next_peer;
3805 /* Initialize unwind_resume_libfunc. */
3807 void
3808 default_init_unwind_resume_libfunc (void)
3810 /* The default C++ routines aren't actually C++ specific, so use those. */
3811 unwind_resume_libfunc =
3812 init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
3813 : "_Unwind_Resume");
3817 static bool
3818 gate_handle_eh (void)
3820 return doing_eh (0);
3823 /* Complete generation of exception handling code. */
3824 static void
3825 rest_of_handle_eh (void)
3827 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
3828 finish_eh_generation ();
3829 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
3832 struct tree_opt_pass pass_rtl_eh =
3834 "eh", /* name */
3835 gate_handle_eh, /* gate */
3836 rest_of_handle_eh, /* execute */
3837 NULL, /* sub */
3838 NULL, /* next */
3839 0, /* static_pass_number */
3840 TV_JUMP, /* tv_id */
3841 0, /* properties_required */
3842 0, /* properties_provided */
3843 0, /* properties_destroyed */
3844 0, /* todo_flags_start */
3845 TODO_dump_func, /* todo_flags_finish */
3846 'h' /* letter */
3849 #include "gt-except.h"