/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

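/* For reference, the structure these offsets describe corresponds to
   roughly the following sketch, abridged from unwind-sjlj.c; the exact
   field types and the size of the setjmp buffer are target-dependent:

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;
       int call_site;
       _Unwind_Word data[4];
       _Unwind_Personality_Fn personality;
       void *lsda;
       void *jbuf[];      -- setjmp buffer; length varies by target
     };

   init_eh below builds the matching FIELD_DECLs and caches the byte
   offsets in the sjlj_fc_*_ofs variables above.  */
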
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};

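/* A small illustrative sketch of the links (not produced by this code):
   for a cleanup containing a try with two catches, the region tree is

     CLEANUP
       inner --> CATCH_2 --next_peer--> CATCH_1 --next_peer--> TRY

   where every child points back to CLEANUP via OUTER, and INNER always
   names the most recently created child.  Note that CATCH regions are
   deliberately created as peers of their TRY (see gen_eh_region_catch
   below); the try reaches its handlers through u.try.catch and the
   u.catch.next_catch chain, not through INNER.  */
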
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently opened region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);

/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
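      /* As a worked example of the arithmetic above: on a target where
         BITS_PER_WORD == POINTER_SIZE == 64, the expression yields
         5 * 64 / 64 - 1 == 4, i.e. an index type of 0..4 and thus room
         for the 5 pointers __builtin_setjmp expects.  (Illustrative
         values only; the real ones come from the target macros.)  */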
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment the runtime's jmp_buf requires.
         Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

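/* As a hypothetical example of the cached offsets: on a 32-bit target
   with no inter-field padding, __prev occupies bytes 0-3, so
   sjlj_fc_call_site_ofs would be 4, sjlj_fc_data_ofs 8 and
   sjlj_fc_personality_ofs 24 (after the four-word __data array).  The
   actual values always come from the layout computed above.  */
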
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}

struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list, to normalize further
     processing; then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}

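/* As an illustrative sketch of how a front end might use the routines
   above for a C++ "try { } catch (A) { } catch (...) { }" (hypothetical
   calls, not made from this file):

     struct eh_region *try_r = gen_eh_region_try (cur_region);
     gen_eh_region_catch (try_r, A_type);     -- handler for A
     gen_eh_region_catch (try_r, NULL_TREE);  -- catch-all

   A NULL type_or_list denotes a catch-all handler, and a single type is
   normalized into a one-element TREE_LIST as shown above.  */
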
struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = cfun->eh->region_array[region_nr];

  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
      fun->eh->filter = filter;
    }
  return filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
                             * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
        continue;

      if (r->resume)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
          uid_region_num[INSN_UID (r->resume)] = i;
        }
      if (r->label)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->label)]);
          uid_region_num[INSN_UID (r->label)] = i;
        }
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
        {
          bool kill_it = true;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && reachable[r->outer->region_number])
                kill_it = false;
              break;

            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but their existence continues to affect calls
                 within that region.  Never delete them here.  */
              kill_it = false;
              break;

            case ERT_TRY:
              {
                /* TRY regions are reachable if any of their CATCH regions
                   are reachable.  */
                struct eh_region *c;
                for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
                  if (reachable[c->region_number])
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            remove_eh_handler (r);
        }
    }

  free (reachable);
  free (uid_region_num);
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
         occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
                           ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
        continue;
      if (cfun->eh->built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;

      if (lab)
        add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
        continue;
      if (region->type != ERT_THROW)
        return true;
    }

  return false;
}

static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  *n = *o;

  n->region_number = o->region_number + cfun->eh->last_region_number;
  gcc_assert (!o->aka);

  return n;
}

static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array,
                       struct eh_region *prev_try)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      if (o->u.try.catch)
        n->u.try.catch = n_array[o->u.try.catch->region_number];
      if (o->u.try.last_catch)
        n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
        n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
        n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    case ERT_CLEANUP:
      if (o->u.cleanup.prev_try)
        n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number];
      else
        n->u.cleanup.prev_try = prev_try;
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

/* Duplicate the EH regions of IFUN into the current function, root the
   tree in OUTER_REGION, and remap labels using the MAP callback.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int outer_region)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur, *prev_try;
  int i;

  if (ifun_last_region_number == 0 || !ifun->eh->region_tree)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try = cfun->eh->region_array[outer_region];
         prev_try && prev_try->type != ERT_TRY;
         prev_try = prev_try->outer)
      ;

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
        continue;
      n_array[i] = duplicate_eh_region_1 (cur);
      if (cur->tree_label)
        {
          tree newlabel = map (cur->tree_label, data);
          n_array[i]->tree_label = newlabel;
        }
      else
        n_array[i]->tree_label = NULL;
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
        continue;
      duplicate_eh_region_2 (cur, n_array, prev_try);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  gcc_assert (root->outer == NULL);
  if (outer_region > 0)
    {
      struct eh_region *cur = cfun->eh->region_array[outer_region];
      struct eh_region *p = cur->inner;

      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cur->inner = root;
      for (i = 1; i <= ifun_last_region_number; ++i)
        if (n_array[i] && n_array[i]->outer == NULL)
          n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;

  collect_eh_region_array ();

  return i;
}

static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
         value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
        push_uleb128 (&cfun->eh->ehspec_data,
                      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}

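/* For reference, ULEB128 stores an unsigned value in little-endian
   base-128 groups, with the high bit of each byte marking "more bytes
   follow".  A worked example (illustration only): 624485 is 0x98765 =
   0b10011000011101100101, which splits into the 7-bit groups 0100110,
   0001110, 1100101 and is emitted low group first as the bytes 0xE5
   0x8E 0x26.  The filter values pushed above are small, so most
   ehspec_data entries occupy a single byte.  */
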
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.catch.filter_list = NULL_TREE;

          if (r->u.catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

/* Emit SEQ into the basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* ??? Collect the set of all non-overlapping catch handlers
               all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region *c;
            for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
              {
                if (c->u.catch.type_list == NULL)
                  emit_jump (c->label);
                else
                  {
                    /* We need one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.catch.type_list;
                    tree flt_node = c->u.catch.filter_list;

                    for (; tp_node; )
                      {
                        emit_cmp_and_jump_insns
                          (cfun->eh->filter,
                           GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
                           EQ, NULL_RTX,
                           targetm.eh_return_filter_mode (), 0, c->label);

                        tp_node = TREE_CHAIN (tp_node);
                        flt_node = TREE_CHAIN (flt_node);
                      }
                  }
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->u.try.catch->label);

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (cfun->eh->filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX,
                                   targetm.eh_return_filter_mode (), 0,
                                   region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          gcc_unreachable ();
        }
    }
}

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        {
          edge e;
          basic_block src, dest;

          emit_jump (outer->post_landing_pad);
          src = BLOCK_FOR_INSN (region->resume);
          dest = BLOCK_FOR_INSN (outer->post_landing_pad);
          while (EDGE_COUNT (src->succs) > 0)
            remove_edge (EDGE_SUCC (src, 0));
          e = make_edge (src, dest, 0);
          e->probability = REG_BR_PROB_BASE;
          e->count = src->count;
        }
      else
        {
          emit_library_call (unwind_resume_libfunc, LCT_THROW,
                             VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
        }

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);
    }
}

static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)
        continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
        if (HAVE_nonlocal_goto_receiver)
          emit_insn (gen_nonlocal_goto_receiver ());
        else
#endif
          { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
         won't have considered them clobbered from the call that
         threw.  Kill them now.  */
      for (j = 0; ; ++j)
        {
          unsigned r = EH_RETURN_DATA_REGNO (j);
          if (r == INVALID_REGNUM)
            break;
          if (! call_used_regs[r])
            {
              emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
              clobbers_hard_regs = true;
            }
        }

      if (clobbers_hard_regs)
        {
          /* @@@ This is a kludge.  Not all machine descriptions define a
             blockage insn, but we must not allow the code we just generated
             to be reordered by scheduling.  So emit an ASM_INPUT to act as
             blockage insn.  */
          emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
        }

      emit_move_insn (cfun->eh->exc_ptr,
                      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
                      gen_rtx_REG (targetm.eh_return_filter_mode (),
                                   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}

struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
        {
          type_thrown = region->u.throw.type;
          region = region->outer;
        }

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
        {
          rc = reachable_next_level (region, type_thrown, NULL);
          if (rc != RNL_NOT_CAUGHT)
            break;
        }
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
        {
          lp_info[region->region_number].directly_reachable = 1;
          found_one = true;
        }
    }

  return found_one;
}

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        struct eh_region *r = cfun->eh->region_array[i];
        r->landing_pad = dispatch_label;
        lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
        if (lp_info[i].action_index != -1)
          cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        int action = lp_info[i].action_index;

        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          index = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          index = -1;
        /* Otherwise, look it up in the table.  */
        else
          index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

        lp_info[i].call_site_index = index;
      }
}

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          if (CALL_P (insn)
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))))
            this_call_site = -1;
          else
            continue;
        }
      else
        {
          /* Calls that are known to not throw need not be marked.  */
          if (INTVAL (XEXP (note, 0)) <= 0)
            continue;

          region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
          this_call_site = lp_info[region->region_number].call_site_index;
        }

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                 TYPE_MODE (integer_type_node), 1,
                                 plus_constant (XEXP (fc, 0),
                                                sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                             TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
                               dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin)
        && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
            || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
      break;
  if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    {
      rtx last = BB_END (single_succ (ENTRY_BLOCK_PTR));
      for (; ; fn_begin = NEXT_INSN (fn_begin))
        if ((NOTE_P (fn_begin)
             && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
            || fn_begin == last)
          break;
      emit_insn_after (seq, fn_begin);
    }
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
         libcall is inside the last basic block or after it.  In the
         latter case we need to emit it on the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
        {
          if (insn == cfun->eh->sjlj_exit_after)
            {
              if (LABEL_P (insn))
                insn = NEXT_INSN (insn);
              emit_insn_after (seq, insn);
              return;
            }
          if (insn == BB_END (e->src))
            break;
        }
      insert_insn_on_edge (seq, e);
    }
}

static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
                        sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
        continue;

      if (! first_reachable)
        {
          first_reachable = i;
          continue;
        }

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
                               EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
                               cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = cfun->eh->region_array[first_reachable]->post_landing_pad;

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}

static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = xcalloc (cfun->eh->last_region_number + 1,
                     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->flags & EDGE_EH)
            {
              remove_edge (e);
              eh = true;
            }
          else
            ei_next (&ei);
        }
      if (eh)
        rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}

static hashval_t
ehl_hash (const void *pentry)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}

static int
ehl_eq (const void *pentry, const void *pdata)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}

/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}

2040 /* Splice REGION from the region tree etc. */
2042 static void
2043 remove_eh_handler (struct eh_region *region)
2045 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2046 rtx lab;
2048 /* For the benefit of efficiently handling REG_EH_REGION notes,
2049 replace this region in the region array with its containing
2050 region. Note that previous region deletions may result in
2051 multiple copies of this region in the array, so we have a
2052 list of alternate numbers by which we are known. */
2054 outer = region->outer;
2055 cfun->eh->region_array[region->region_number] = outer;
2056 if (region->aka)
2058 unsigned i;
2059 bitmap_iterator bi;
2061 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2063 cfun->eh->region_array[i] = outer;
2067 if (outer)
2069 if (!outer->aka)
2070 outer->aka = BITMAP_GGC_ALLOC ();
2071 if (region->aka)
2072 bitmap_ior_into (outer->aka, region->aka);
2073 bitmap_set_bit (outer->aka, region->region_number);
2076 if (cfun->eh->built_landing_pads)
2077 lab = region->landing_pad;
2078 else
2079 lab = region->label;
2080 if (lab)
2081 remove_exception_handler_label (lab);
2083 if (outer)
2084 pp_start = &outer->inner;
2085 else
2086 pp_start = &cfun->eh->region_tree;
2087 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2088 continue;
2089 *pp = region->next_peer;
2091 inner = region->inner;
2092 if (inner)
2094 for (p = inner; p->next_peer ; p = p->next_peer)
2095 p->outer = outer;
2096 p->outer = outer;
2098 p->next_peer = *pp_start;
2099 *pp_start = inner;
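/* For illustration: removing B from the tree A -> (B -> (C, D), E)
   re-parents C and D to A and splices them onto the front of A's
   child list, leaving A -> (C, D, E).  */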
2102 if (region->type == ERT_CATCH)
2104 struct eh_region *try, *next, *prev;
2106 for (try = region->next_peer;
2107 try->type == ERT_CATCH;
2108 try = try->next_peer)
2109 continue;
2110 gcc_assert (try->type == ERT_TRY);
2112 next = region->u.catch.next_catch;
2113 prev = region->u.catch.prev_catch;
2115 if (next)
2116 next->u.catch.prev_catch = prev;
2117 else
2118 try->u.try.last_catch = prev;
2119 if (prev)
2120 prev->u.catch.next_catch = next;
2121 else
2123 try->u.try.catch = next;
2124 if (! next)
2125 remove_eh_handler (try);
2130 /* LABEL heads a basic block that is about to be deleted. If this
2131 label corresponds to an exception region, we may be able to
2132 delete the region. */
2134 void
2135 maybe_remove_eh_handler (rtx label)
2137 struct ehl_map_entry **slot, tmp;
2138 struct eh_region *region;
2140 /* ??? After generating landing pads, it's not so simple to determine
2141 if the region data is completely unused. One must examine the
2142 landing pad and the post landing pad, and whether an inner try block
2143 is referencing the catch handlers directly. */
2144 if (cfun->eh->built_landing_pads)
2145 return;
2147 tmp.label = label;
2148 slot = (struct ehl_map_entry **)
2149 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2150 if (! slot)
2151 return;
2152 region = (*slot)->region;
2153 if (! region)
2154 return;
2156 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2157 because there is no path to the fallback call to terminate.
2158 But the region continues to affect call-site data until there
2159 are no more contained calls, which we don't see here. */
2160 if (region->type == ERT_MUST_NOT_THROW)
2162 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2163 region->label = NULL_RTX;
2165 else
2166 remove_eh_handler (region);
2169 /* Invokes CALLBACK for every exception handler label. Only used by old
2170 loop hackery; should not be used by new code. */
2172 void
2173 for_each_eh_label (void (*callback) (rtx))
2175 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2176 (void *) &callback);
2179 static int
2180 for_each_eh_label_1 (void **pentry, void *data)
2182 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2183 void (*callback) (rtx) = *(void (**) (rtx)) data;
2185 (*callback) (entry->label);
2186 return 1;
2189 /* Invoke CALLBACK for every exception region in the current function. */
2191 void
2192 for_each_eh_region (void (*callback) (struct eh_region *))
2194 int i, n = cfun->eh->last_region_number;
2195 for (i = 1; i <= n; ++i)
2197 struct eh_region *region = cfun->eh->region_array[i];
2198 if (region)
2199 (*callback) (region);
2203 /* This section describes CFG exception edges for flow. */
2205 /* For communicating between calls to reachable_next_level. */
2206 struct reachable_info
2208 tree types_caught;
2209 tree types_allowed;
2210 void (*callback) (struct eh_region *, void *);
2211 void *callback_data;
2212 bool saw_any_handlers;
2215 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2216 base class of TYPE, is in HANDLED. */
2218 static int
2219 check_handled (tree handled, tree type)
2221 tree t;
2223 /* We can check for exact matches without front-end help. */
2224 if (! lang_eh_type_covers)
2226 for (t = handled; t ; t = TREE_CHAIN (t))
2227 if (TREE_VALUE (t) == type)
2228 return 1;
2230 else
2232 for (t = handled; t ; t = TREE_CHAIN (t))
2233 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2234 return 1;
2237 return 0;
2240 /* A subroutine of reachable_next_level. If we are collecting a list
2241 of handlers, add one. After landing pad generation, reference
2242 it instead of the handlers themselves. Further, the handlers are
2243 all wired together, so by referencing one, we've got them all.
2244 Before landing pad generation we reference each handler individually.
2246 LP_REGION contains the landing pad; REGION is the handler. */
2248 static void
2249 add_reachable_handler (struct reachable_info *info,
2250 struct eh_region *lp_region, struct eh_region *region)
2252 if (! info)
2253 return;
2255 info->saw_any_handlers = true;
2257 if (cfun->eh->built_landing_pads)
2258 info->callback (lp_region, info->callback_data);
2259 else
2260 info->callback (region, info->callback_data);
2263 /* Process one level of exception regions for reachability.
2264 If TYPE_THROWN is non-null, then it is the *exact* type being
2265 propagated. If INFO is non-null, then collect handler labels
2266 and caught/allowed type information between invocations. */
2268 static enum reachable_code
2269 reachable_next_level (struct eh_region *region, tree type_thrown,
2270 struct reachable_info *info)
2272 switch (region->type)
2274 case ERT_CLEANUP:
2275 /* Before landing-pad generation, we model control flow
2276 directly to the individual handlers. In this way we can
2277 see that catch handler types may shadow one another. */
2278 add_reachable_handler (info, region, region);
2279 return RNL_MAYBE_CAUGHT;
2281 case ERT_TRY:
2283 struct eh_region *c;
2284 enum reachable_code ret = RNL_NOT_CAUGHT;
2286 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2288 /* A catch-all handler ends the search. */
2289 if (c->u.catch.type_list == NULL)
2291 add_reachable_handler (info, region, c);
2292 return RNL_CAUGHT;
2295 if (type_thrown)
2297 /* If we have at least one type match, end the search. */
2298 tree tp_node = c->u.catch.type_list;
2300 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2302 tree type = TREE_VALUE (tp_node);
2304 if (type == type_thrown
2305 || (lang_eh_type_covers
2306 && (*lang_eh_type_covers) (type, type_thrown)))
2308 add_reachable_handler (info, region, c);
2309 return RNL_CAUGHT;
2313 /* If we have definitive information of a match failure,
2314 the catch won't trigger. */
2315 if (lang_eh_type_covers)
2316 return RNL_NOT_CAUGHT;
2319 /* At this point, we either don't know what type is thrown or
2320 don't have front-end assistance to help decide whether it is
2321 covered by one of the types in the list for this region.
2323 We'd then like to add this region to the list of reachable
2324 handlers since it is indeed potentially reachable based on the
2325 information we have.
2327 Actually, this handler is for sure not reachable if all the
2328 types it matches have already been caught. That is, it is only
2329 potentially reachable if at least one of the types it catches
2330 has not been previously caught. */
2332 if (! info)
2333 ret = RNL_MAYBE_CAUGHT;
2334 else
2336 tree tp_node = c->u.catch.type_list;
2337 bool maybe_reachable = false;
2339 /* Compute the potential reachability of this handler and
2340 update the list of types caught at the same time. */
2341 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2343 tree type = TREE_VALUE (tp_node);
2345 if (! check_handled (info->types_caught, type))
2347 info->types_caught
2348 = tree_cons (NULL, type, info->types_caught);
2350 maybe_reachable = true;
2354 if (maybe_reachable)
2356 add_reachable_handler (info, region, c);
2358 /* ??? If the catch type is a base class of every allowed
2359 type, then we know we can stop the search. */
2360 ret = RNL_MAYBE_CAUGHT;
2365 return ret;
2368 case ERT_ALLOWED_EXCEPTIONS:
2369 /* An empty list of types definitely ends the search. */
2370 if (region->u.allowed.type_list == NULL_TREE)
2372 add_reachable_handler (info, region, region);
2373 return RNL_CAUGHT;
2376 /* Collect a list of lists of allowed types for use in detecting
2377 when a catch may be transformed into a catch-all. */
2378 if (info)
2379 info->types_allowed = tree_cons (NULL_TREE,
2380 region->u.allowed.type_list,
2381 info->types_allowed);
2383 /* If we have definitive information about the type hierarchy,
2384 then we can tell if the thrown type will pass through the
2385 filter. */
2386 if (type_thrown && lang_eh_type_covers)
2388 if (check_handled (region->u.allowed.type_list, type_thrown))
2389 return RNL_NOT_CAUGHT;
2390 else
2392 add_reachable_handler (info, region, region);
2393 return RNL_CAUGHT;
2397 add_reachable_handler (info, region, region);
2398 return RNL_MAYBE_CAUGHT;
2400 case ERT_CATCH:
2401 /* Catch regions are handled by their controlling try region. */
2402 return RNL_NOT_CAUGHT;
2404 case ERT_MUST_NOT_THROW:
2405 /* Here we end our search, since no exceptions may propagate.
2406 If we've previously touched down at some landing pad, then the
2407 explicit function call we generated may be used. Otherwise
2408 the call is made by the runtime.
2410 Before inlining, do not perform this optimization. We may
2411 inline a subroutine that contains handlers, and that will
2412 change the value of saw_any_handlers. */
2414 if ((info && info->saw_any_handlers) || !cfun->after_inlining)
2416 add_reachable_handler (info, region, region);
2417 return RNL_CAUGHT;
2419 else
2420 return RNL_BLOCKED;
2422 case ERT_THROW:
2423 case ERT_UNKNOWN:
2424 /* Shouldn't see these here. */
2425 gcc_unreachable ();
2426 break;
2427 default:
2428 gcc_unreachable ();
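/* For example, with `throw E' inside a try/catch (E) that is itself
   wrapped in a cleanup, the innermost-to-outermost walk classifies the
   ERT_CLEANUP as RNL_MAYBE_CAUGHT (recording its handler) and then the
   ERT_TRY as RNL_CAUGHT via the exact type match, ending the search.  */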
2432 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2434 void
2435 foreach_reachable_handler (int region_number, bool is_resx,
2436 void (*callback) (struct eh_region *, void *),
2437 void *callback_data)
2439 struct reachable_info info;
2440 struct eh_region *region;
2441 tree type_thrown;
2443 memset (&info, 0, sizeof (info));
2444 info.callback = callback;
2445 info.callback_data = callback_data;
2447 region = cfun->eh->region_array[region_number];
2449 type_thrown = NULL_TREE;
2450 if (is_resx)
2452 /* A RESX leaves a region instead of entering it. Thus the
2453 region itself may have been deleted out from under us. */
2454 if (region == NULL)
2455 return;
2456 region = region->outer;
2458 else if (region->type == ERT_THROW)
2460 type_thrown = region->u.throw.type;
2461 region = region->outer;
2464 while (region)
2466 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2467 break;
2468 /* If we have processed one cleanup, there is no point in
2469 processing any more of them. Each cleanup will have an edge
2470 to the next outer cleanup region, so the flow graph will be
2471 accurate. */
2472 if (region->type == ERT_CLEANUP)
2473 region = region->u.cleanup.prev_try;
2474 else
2475 region = region->outer;
2479 /* Retrieve a list of labels of exception handlers which can be
2480 reached by a given insn. */
2482 static void
2483 arh_to_landing_pad (struct eh_region *region, void *data)
2485 rtx *p_handlers = data;
2486 if (! *p_handlers)
2487 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2490 static void
2491 arh_to_label (struct eh_region *region, void *data)
2493 rtx *p_handlers = data;
2494 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2497 rtx
2498 reachable_handlers (rtx insn)
2500 bool is_resx = false;
2501 rtx handlers = NULL;
2502 int region_number;
2504 if (JUMP_P (insn)
2505 && GET_CODE (PATTERN (insn)) == RESX)
2507 region_number = XINT (PATTERN (insn), 0);
2508 is_resx = true;
2510 else
2512 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2513 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2514 return NULL;
2515 region_number = INTVAL (XEXP (note, 0));
2518 foreach_reachable_handler (region_number, is_resx,
2519 (cfun->eh->built_landing_pads
2520 ? arh_to_landing_pad
2521 : arh_to_label),
2522 &handlers);
2524 return handlers;
2527 /* Determine if the given INSN can throw an exception that is caught
2528 within the function. */
2530 bool
2531 can_throw_internal_1 (int region_number, bool is_resx)
2533 struct eh_region *region;
2534 tree type_thrown;
2536 region = cfun->eh->region_array[region_number];
2538 type_thrown = NULL_TREE;
2539 if (is_resx)
2540 region = region->outer;
2541 else if (region->type == ERT_THROW)
2543 type_thrown = region->u.throw.type;
2544 region = region->outer;
2547 /* If this exception is ignored by each and every containing region,
2548 then control passes straight out. The runtime may handle some
2549 regions, which also do not require processing internally. */
2550 for (; region; region = region->outer)
2552 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2553 if (how == RNL_BLOCKED)
2554 return false;
2555 if (how != RNL_NOT_CAUGHT)
2556 return true;
2559 return false;
2562 bool
2563 can_throw_internal (rtx insn)
2565 rtx note;
2567 if (! INSN_P (insn))
2568 return false;
2570 if (JUMP_P (insn)
2571 && GET_CODE (PATTERN (insn)) == RESX
2572 && XINT (PATTERN (insn), 0) > 0)
2573 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);
2575 if (NONJUMP_INSN_P (insn)
2576 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2577 insn = XVECEXP (PATTERN (insn), 0, 0);
2579 /* Every insn that might throw has an EH_REGION note. */
2580 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2581 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2582 return false;
2584 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
2587 /* Determine if the given INSN can throw an exception that is
2588 visible outside the function. */
2590 bool
2591 can_throw_external_1 (int region_number, bool is_resx)
2593 struct eh_region *region;
2594 tree type_thrown;
2596 region = cfun->eh->region_array[region_number];
2598 type_thrown = NULL_TREE;
2599 if (is_resx)
2600 region = region->outer;
2601 else if (region->type == ERT_THROW)
2603 type_thrown = region->u.throw.type;
2604 region = region->outer;
2607 /* If the exception is caught or blocked by any containing region,
2608 then it is not seen by any calling function. */
2609 for (; region ; region = region->outer)
2610 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2611 return false;
2613 return true;
2616 bool
2617 can_throw_external (rtx insn)
2619 rtx note;
2621 if (! INSN_P (insn))
2622 return false;
2624 if (JUMP_P (insn)
2625 && GET_CODE (PATTERN (insn)) == RESX
2626 && XINT (PATTERN (insn), 0) > 0)
2627 return can_throw_external_1 (XINT (PATTERN (insn), 0), true);
2629 if (NONJUMP_INSN_P (insn)
2630 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2631 insn = XVECEXP (PATTERN (insn), 0, 0);
2633 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2634 if (!note)
2636 /* Calls (and trapping insns) without notes are outside any
2637 exception handling region in this function. We have to
2638 assume it might throw. Given that the front end and middle
2639 end mark known NOTHROW functions, this isn't so wildly
2640 inaccurate. */
2641 return (CALL_P (insn)
2642 || (flag_non_call_exceptions
2643 && may_trap_p (PATTERN (insn))));
2645 if (INTVAL (XEXP (note, 0)) <= 0)
2646 return false;
2648 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
2651 /* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
2653 void
2654 set_nothrow_function_flags (void)
2656 rtx insn;
2658 TREE_NOTHROW (current_function_decl) = 1;
2660 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2661 something that can throw an exception. We specifically exempt
2662 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2663 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2664 is optimistic. */
2666 cfun->all_throwers_are_sibcalls = 1;
2668 if (! flag_exceptions)
2669 return;
2671 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2672 if (can_throw_external (insn))
2674 TREE_NOTHROW (current_function_decl) = 0;
2676 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2678 cfun->all_throwers_are_sibcalls = 0;
2679 return;
2683 for (insn = current_function_epilogue_delay_list; insn;
2684 insn = XEXP (insn, 1))
2685 if (can_throw_external (insn))
2687 TREE_NOTHROW (current_function_decl) = 0;
2689 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2691 cfun->all_throwers_are_sibcalls = 0;
2692 return;
2698 /* Various hooks for unwind library. */
2700 /* Do any necessary initialization to access arbitrary stack frames.
2701 On the SPARC, this means flushing the register windows. */
2703 void
2704 expand_builtin_unwind_init (void)
2706 /* Set this so all the registers get saved in our frame; we need to be
2707 able to copy the saved values for any registers from frames we unwind. */
2708 current_function_has_nonlocal_label = 1;
2710 #ifdef SETUP_FRAME_ADDRESSES
2711 SETUP_FRAME_ADDRESSES ();
2712 #endif
2715 rtx
2716 expand_builtin_eh_return_data_regno (tree arglist)
2718 tree which = TREE_VALUE (arglist);
2719 unsigned HOST_WIDE_INT iwhich;
2721 if (TREE_CODE (which) != INTEGER_CST)
2723 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2724 return constm1_rtx;
2727 iwhich = tree_low_cst (which, 1);
2728 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2729 if (iwhich == INVALID_REGNUM)
2730 return constm1_rtx;
2732 #ifdef DWARF_FRAME_REGNUM
2733 iwhich = DWARF_FRAME_REGNUM (iwhich);
2734 #else
2735 iwhich = DBX_REGISTER_NUMBER (iwhich);
2736 #endif
2738 return GEN_INT (iwhich);
2741 /* Given a value extracted from the return address register or stack slot,
2742 return the actual address encoded in that value. */
2744 rtx
2745 expand_builtin_extract_return_addr (tree addr_tree)
2747 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2749 if (GET_MODE (addr) != Pmode
2750 && GET_MODE (addr) != VOIDmode)
2752 #ifdef POINTERS_EXTEND_UNSIGNED
2753 addr = convert_memory_address (Pmode, addr);
2754 #else
2755 addr = convert_to_mode (Pmode, addr, 0);
2756 #endif
2759 /* First mask out any unwanted bits. */
2760 #ifdef MASK_RETURN_ADDR
2761 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2762 #endif
2764 /* Then adjust to find the real return address. */
2765 #if defined (RETURN_ADDR_OFFSET)
2766 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2767 #endif
2769 return addr;
2772 /* Given an actual address in addr_tree, do any necessary encoding
2773 and return the value to be stored in the return address register or
2774 stack slot so the epilogue will return to that address. */
2776 rtx
2777 expand_builtin_frob_return_addr (tree addr_tree)
2779 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2781 addr = convert_memory_address (Pmode, addr);
2783 #ifdef RETURN_ADDR_OFFSET
2784 addr = force_reg (Pmode, addr);
2785 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2786 #endif
2788 return addr;
2791 /* Set up the epilogue with the magic bits we'll need to return to the
2792 exception handler. */
2794 void
2795 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2796 tree handler_tree)
2798 rtx tmp;
2800 #ifdef EH_RETURN_STACKADJ_RTX
2801 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2802 tmp = convert_memory_address (Pmode, tmp);
2803 if (!cfun->eh->ehr_stackadj)
2804 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
2805 else if (tmp != cfun->eh->ehr_stackadj)
2806 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
2807 #endif
2809 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2810 tmp = convert_memory_address (Pmode, tmp);
2811 if (!cfun->eh->ehr_handler)
2812 cfun->eh->ehr_handler = copy_to_reg (tmp);
2813 else if (tmp != cfun->eh->ehr_handler)
2814 emit_move_insn (cfun->eh->ehr_handler, tmp);
2816 if (!cfun->eh->ehr_label)
2817 cfun->eh->ehr_label = gen_label_rtx ();
2818 emit_jump (cfun->eh->ehr_label);
2821 void
2822 expand_eh_return (void)
2824 rtx around_label;
2826 if (! cfun->eh->ehr_label)
2827 return;
2829 current_function_calls_eh_return = 1;
2831 #ifdef EH_RETURN_STACKADJ_RTX
2832 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2833 #endif
2835 around_label = gen_label_rtx ();
2836 emit_jump (around_label);
2838 emit_label (cfun->eh->ehr_label);
2839 clobber_return_register ();
2841 #ifdef EH_RETURN_STACKADJ_RTX
2842 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
2843 #endif
2845 #ifdef HAVE_eh_return
2846 if (HAVE_eh_return)
2847 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
2848 else
2849 #endif
2851 #ifdef EH_RETURN_HANDLER_RTX
2852 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
2853 #else
2854 error ("__builtin_eh_return not supported on this target");
2855 #endif
2858 emit_label (around_label);
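/* Schematically, the code emitted by expand_eh_return around the
   normal epilogue is:

       EH_RETURN_STACKADJ_RTX = 0         (if the target defines it)
       goto around
   ehr_label:
       clobber return register
       EH_RETURN_STACKADJ_RTX = ehr_stackadj
       eh_return (ehr_handler)            (or a move to EH_RETURN_HANDLER_RTX)
   around:
       ... normal epilogue ...  */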
2861 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2862 POINTERS_EXTEND_UNSIGNED and return it. */
2864 rtx
2865 expand_builtin_extend_pointer (tree addr_tree)
2867 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2868 int extend;
2870 #ifdef POINTERS_EXTEND_UNSIGNED
2871 extend = POINTERS_EXTEND_UNSIGNED;
2872 #else
2873 /* The previous EH code did an unsigned extend by default, so we do this also
2874 for consistency. */
2875 extend = 1;
2876 #endif
2878 return convert_modes (word_mode, ptr_mode, addr, extend);
2881 /* In the following functions, we represent entries in the action table
2882 as 1-based indices. Special cases are:
2884 0: null action record, non-null landing pad; implies cleanups
2885 -1: null action record, null landing pad; implies no action
2886 -2: no call-site entry; implies must_not_throw
2887 -3: we have yet to process outer regions
2889 Further, no special cases apply to the "next" field of the record.
2890 For next, 0 means end of list. */
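/* For example, a call covered only by cleanup regions collapses to
   action 0, a call outside any region gets -1 (its region note is only
   emitted if an lsda turns out to be needed anyway), and a call inside
   a must-not-throw region gets -2 (no call-site entry at all).  */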
2892 struct action_record
2894 int offset;
2895 int filter;
2896 int next;
2899 static int
2900 action_record_eq (const void *pentry, const void *pdata)
2902 const struct action_record *entry = (const struct action_record *) pentry;
2903 const struct action_record *data = (const struct action_record *) pdata;
2904 return entry->filter == data->filter && entry->next == data->next;
2907 static hashval_t
2908 action_record_hash (const void *pentry)
2910 const struct action_record *entry = (const struct action_record *) pentry;
2911 return entry->next * 1009 + entry->filter;
2914 static int
2915 add_action_record (htab_t ar_hash, int filter, int next)
2917 struct action_record **slot, *new, tmp;
2919 tmp.filter = filter;
2920 tmp.next = next;
2921 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
2923 if ((new = *slot) == NULL)
2925 new = xmalloc (sizeof (*new));
2926 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2927 new->filter = filter;
2928 new->next = next;
2929 *slot = new;
2931 /* The filter value goes in untouched. The link to the next
2932 record is a "self-relative" byte offset, or zero to indicate
2933 that there is no next record. So convert the absolute 1-based
2934 indices we've been carrying around into a displacement. */
2936 push_sleb128 (&cfun->eh->action_record_data, filter);
2937 if (next)
2938 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2939 push_sleb128 (&cfun->eh->action_record_data, next);
2942 return new->offset;
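/* Worked example (illustrative): the first record pushed -- say
   filter 1, next 0 -- occupies bytes 1 and 2 of action_record_data
   and has offset 1. A second record chaining to it -- filter 2,
   next 1 -- has offset 3; after its filter byte is pushed the array
   holds three bytes, so the stored link becomes 1 - (3 + 1) = -3,
   a displacement from the link byte back to the first record.  */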
2945 static int
2946 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
2948 struct eh_region *c;
2949 int next;
2951 /* If we've reached the top of the region chain, then we have
2952 no actions, and require no landing pad. */
2953 if (region == NULL)
2954 return -1;
2956 switch (region->type)
2958 case ERT_CLEANUP:
2959 /* A cleanup adds a zero filter to the beginning of the chain, but
2960 there are special cases to look out for. If there are *only*
2961 cleanups along a path, then it compresses to a zero action.
2962 Further, if there are multiple cleanups along a path, we only
2963 need to represent one of them, as that is enough to trigger
2964 entry to the landing pad at runtime. */
2965 next = collect_one_action_chain (ar_hash, region->outer);
2966 if (next <= 0)
2967 return 0;
2968 for (c = region->outer; c ; c = c->outer)
2969 if (c->type == ERT_CLEANUP)
2970 return next;
2971 return add_action_record (ar_hash, 0, next);
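/* E.g. for a cleanup nested directly within another cleanup, the loop
   above finds the outer ERT_CLEANUP and reuses NEXT unchanged, so any
   run of stacked cleanups costs a single zero-filter record.  */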
2973 case ERT_TRY:
2974 /* Process the associated catch regions in reverse order.
2975 If there's a catch-all handler, then we don't need to
2976 search outer regions. Use a magic -3 value to record
2977 that we haven't done the outer search. */
2978 next = -3;
2979 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
2981 if (c->u.catch.type_list == NULL)
2983 /* Retrieve the filter from the head of the filter list
2984 where we have stored it (see assign_filter_values). */
2985 int filter
2986 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
2988 next = add_action_record (ar_hash, filter, 0);
2990 else
2992 /* Once the outer search is done, trigger an action record for
2993 each filter we have. */
2994 tree flt_node;
2996 if (next == -3)
2998 next = collect_one_action_chain (ar_hash, region->outer);
3000 /* If there is no next action, terminate the chain. */
3001 if (next == -1)
3002 next = 0;
3003 /* If all outer actions are cleanups or must_not_throw,
3004 we'll have no action record for it, since we want
3005 to encode these states in the call-site record directly.
3006 Add a cleanup action to the chain to catch these. */
3007 else if (next <= 0)
3008 next = add_action_record (ar_hash, 0, 0);
3011 flt_node = c->u.catch.filter_list;
3012 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3014 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3015 next = add_action_record (ar_hash, filter, next);
3019 return next;
3021 case ERT_ALLOWED_EXCEPTIONS:
3022 /* An exception specification adds its filter to the
3023 beginning of the chain. */
3024 next = collect_one_action_chain (ar_hash, region->outer);
3026 /* If there is no next action, terminate the chain. */
3027 if (next == -1)
3028 next = 0;
3029 /* If all outer actions are cleanups or must_not_throw,
3030 we'll have no action record for it, since we want
3031 to encode these states in the call-site record directly.
3032 Add a cleanup action to the chain to catch these. */
3033 else if (next <= 0)
3034 next = add_action_record (ar_hash, 0, 0);
3036 return add_action_record (ar_hash, region->u.allowed.filter, next);
3038 case ERT_MUST_NOT_THROW:
3039 /* A must-not-throw region with no inner handlers or cleanups
3040 requires no call-site entry. Note that this differs from
3041 the no handler or cleanup case in that we do require an lsda
3042 to be generated. Return a magic -2 value to record this. */
3043 return -2;
3045 case ERT_CATCH:
3046 case ERT_THROW:
3047 /* CATCH regions are handled in TRY above. THROW regions are
3048 for optimization information only and produce no output. */
3049 return collect_one_action_chain (ar_hash, region->outer);
3051 default:
3052 gcc_unreachable ();
3056 static int
3057 add_call_site (rtx landing_pad, int action)
3059 struct call_site_record *data = cfun->eh->call_site_data;
3060 int used = cfun->eh->call_site_data_used;
3061 int size = cfun->eh->call_site_data_size;
3063 if (used >= size)
3065 size = (size ? size * 2 : 64);
3066 data = ggc_realloc (data, sizeof (*data) * size);
3067 cfun->eh->call_site_data = data;
3068 cfun->eh->call_site_data_size = size;
3071 data[used].landing_pad = landing_pad;
3072 data[used].action = action;
3074 cfun->eh->call_site_data_used = used + 1;
3076 return used + call_site_base;
3079 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3080 The new note numbers will not refer to region numbers, but
3081 instead to call site entries. */
3083 void
3084 convert_to_eh_region_ranges (void)
3086 rtx insn, iter, note;
3087 htab_t ar_hash;
3088 int last_action = -3;
3089 rtx last_action_insn = NULL_RTX;
3090 rtx last_landing_pad = NULL_RTX;
3091 rtx first_no_action_insn = NULL_RTX;
3092 int call_site = 0;
3094 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3095 return;
3097 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3099 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3101 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3102 if (INSN_P (iter))
3104 struct eh_region *region;
3105 int this_action;
3106 rtx this_landing_pad;
3108 insn = iter;
3109 if (NONJUMP_INSN_P (insn)
3110 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3111 insn = XVECEXP (PATTERN (insn), 0, 0);
3113 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3114 if (!note)
3116 if (! (CALL_P (insn)
3117 || (flag_non_call_exceptions
3118 && may_trap_p (PATTERN (insn)))))
3119 continue;
3120 this_action = -1;
3121 region = NULL;
3123 else
3125 if (INTVAL (XEXP (note, 0)) <= 0)
3126 continue;
3127 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3128 this_action = collect_one_action_chain (ar_hash, region);
3131 /* The existence of catch handlers or must-not-throw regions
3132 implies that an lsda is needed (even if empty). */
3133 if (this_action != -1)
3134 cfun->uses_eh_lsda = 1;
3136 /* Delay creation of region notes for no-action regions
3137 until we're sure that an lsda will be required. */
3138 else if (last_action == -3)
3140 first_no_action_insn = iter;
3141 last_action = -1;
3144 /* Cleanups and handlers may share action chains but not
3145 landing pads. Collect the landing pad for this region. */
3146 if (this_action >= 0)
3148 struct eh_region *o;
3149 for (o = region; ! o->landing_pad ; o = o->outer)
3150 continue;
3151 this_landing_pad = o->landing_pad;
3153 else
3154 this_landing_pad = NULL_RTX;
3156 /* Differing actions or landing pads implies a change in call-site
3157 info, which implies some EH_REGION note should be emitted. */
3158 if (last_action != this_action
3159 || last_landing_pad != this_landing_pad)
3161 /* If we'd not seen a previous action (-3) or the previous
3162 action was must-not-throw (-2), then we do not need an
3163 end note. */
3164 if (last_action >= -1)
3166 /* If we delayed the creation of the begin, do it now. */
3167 if (first_no_action_insn)
3169 call_site = add_call_site (NULL_RTX, 0);
3170 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3171 first_no_action_insn);
3172 NOTE_EH_HANDLER (note) = call_site;
3173 first_no_action_insn = NULL_RTX;
3176 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3177 last_action_insn);
3178 NOTE_EH_HANDLER (note) = call_site;
3181 /* If the new action is must-not-throw, then no region notes
3182 are created. */
3183 if (this_action >= -1)
3185 call_site = add_call_site (this_landing_pad,
3186 this_action < 0 ? 0 : this_action);
3187 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3188 NOTE_EH_HANDLER (note) = call_site;
3191 last_action = this_action;
3192 last_landing_pad = this_landing_pad;
3194 last_action_insn = iter;
3197 if (last_action >= -1 && ! first_no_action_insn)
3199 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3200 NOTE_EH_HANDLER (note) = call_site;
3203 htab_delete (ar_hash);
3207 static void
3208 push_uleb128 (varray_type *data_area, unsigned int value)
3209 {
3210 do
3211 {
3212 unsigned char byte = value & 0x7f;
3213 value >>= 7;
3214 if (value)
3215 byte |= 0x80;
3216 VARRAY_PUSH_UCHAR (*data_area, byte);
3217 }
3218 while (value);
3219 }
3221 static void
3222 push_sleb128 (varray_type *data_area, int value)
3223 {
3224 unsigned char byte;
3225 int more;
3227 do
3228 {
3229 byte = value & 0x7f;
3230 value >>= 7;
3231 more = ! ((value == 0 && (byte & 0x40) == 0)
3232 || (value == -1 && (byte & 0x40) != 0));
3233 if (more)
3234 byte |= 0x80;
3235 VARRAY_PUSH_UCHAR (*data_area, byte);
3236 }
3237 while (more);
3238 }
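/* Worked examples (illustrative): push_uleb128 encodes 624485
   (0x98765) as the bytes 0xe5 0x8e 0x26 -- low seven bits first,
   with the high bit marking continuation. push_sleb128 encodes -2
   as the single byte 0x7e: after one shift VALUE is -1 and bit 0x40
   of the byte already carries the sign, so MORE is false.  */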
3241 #ifndef HAVE_AS_LEB128
3242 static int
3243 dw2_size_of_call_site_table (void)
3245 int n = cfun->eh->call_site_data_used;
3246 int size = n * (4 + 4 + 4);
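/* The 4 + 4 + 4 above counts the three udata4 fields that
   dw2_output_call_site_table emits per entry -- region start, region
   length and landing pad; only the uleb128-encoded action index is
   variable-length.  */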
3247 int i;
3249 for (i = 0; i < n; ++i)
3251 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3252 size += size_of_uleb128 (cs->action);
3255 return size;
3258 static int
3259 sjlj_size_of_call_site_table (void)
3261 int n = cfun->eh->call_site_data_used;
3262 int size = 0;
3263 int i;
3265 for (i = 0; i < n; ++i)
3267 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3268 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3269 size += size_of_uleb128 (cs->action);
3272 return size;
3274 #endif
3276 static void
3277 dw2_output_call_site_table (void)
3279 int n = cfun->eh->call_site_data_used;
3280 int i;
3282 for (i = 0; i < n; ++i)
3284 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3285 char reg_start_lab[32];
3286 char reg_end_lab[32];
3287 char landing_pad_lab[32];
3289 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3290 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3292 if (cs->landing_pad)
3293 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3294 CODE_LABEL_NUMBER (cs->landing_pad));
3296 /* ??? Perhaps use insn length scaling if the assembler supports
3297 generic arithmetic. */
3298 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3299 data4 if the function is small enough. */
3300 #ifdef HAVE_AS_LEB128
3301 dw2_asm_output_delta_uleb128 (reg_start_lab,
3302 current_function_func_begin_label,
3303 "region %d start", i);
3304 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3305 "length");
3306 if (cs->landing_pad)
3307 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3308 current_function_func_begin_label,
3309 "landing pad");
3310 else
3311 dw2_asm_output_data_uleb128 (0, "landing pad");
3312 #else
3313 dw2_asm_output_delta (4, reg_start_lab,
3314 current_function_func_begin_label,
3315 "region %d start", i);
3316 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3317 if (cs->landing_pad)
3318 dw2_asm_output_delta (4, landing_pad_lab,
3319 current_function_func_begin_label,
3320 "landing pad");
3321 else
3322 dw2_asm_output_data (4, 0, "landing pad");
3323 #endif
3324 dw2_asm_output_data_uleb128 (cs->action, "action");
3327 call_site_base += n;
3330 static void
3331 sjlj_output_call_site_table (void)
3333 int n = cfun->eh->call_site_data_used;
3334 int i;
3336 for (i = 0; i < n; ++i)
3338 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3340 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3341 "region %d landing pad", i);
3342 dw2_asm_output_data_uleb128 (cs->action, "action");
3345 call_site_base += n;
3348 /* Tell assembler to switch to the section for the exception handling
3349 table. */
3351 void
3352 default_exception_section (void)
3354 if (targetm.have_named_sections)
3356 int flags;
3358 if (EH_TABLES_CAN_BE_READ_ONLY)
3360 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3362 flags = (! flag_pic
3363 || ((tt_format & 0x70) != DW_EH_PE_absptr
3364 && (tt_format & 0x70) != DW_EH_PE_aligned))
3365 ? 0 : SECTION_WRITE;
3367 else
3368 flags = SECTION_WRITE;
3369 named_section_flags (".gcc_except_table", flags);
3371 else if (flag_pic)
3372 data_section ();
3373 else
3374 readonly_data_section ();
3377 void
3378 output_function_exception_table (void)
3380 int tt_format, cs_format, lp_format, i, n;
3381 #ifdef HAVE_AS_LEB128
3382 char ttype_label[32];
3383 char cs_after_size_label[32];
3384 char cs_end_label[32];
3385 #else
3386 int call_site_len;
3387 #endif
3388 int have_tt_data;
3389 int tt_format_size = 0;
3391 /* Not all functions need anything. */
3392 if (! cfun->uses_eh_lsda)
3393 return;
3395 #ifdef TARGET_UNWIND_INFO
3396 /* TODO: Move this into target file. */
3397 assemble_external_libcall (eh_personality_libfunc);
3398 fputs ("\t.personality\t", asm_out_file);
3399 output_addr_const (asm_out_file, eh_personality_libfunc);
3400 fputs ("\n\t.handlerdata\n", asm_out_file);
3401 /* Note that varasm still thinks we're in the function's code section.
3402 The ".endp" directive that will immediately follow will take us back. */
3403 #else
3404 targetm.asm_out.exception_section ();
3405 #endif
3407 have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
3408 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3410 /* Indicate the format of the @TType entries. */
3411 if (! have_tt_data)
3412 tt_format = DW_EH_PE_omit;
3413 else
3415 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3416 #ifdef HAVE_AS_LEB128
3417 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3418 current_function_funcdef_no);
3419 #endif
3420 tt_format_size = size_of_encoded_value (tt_format);
3422 assemble_align (tt_format_size * BITS_PER_UNIT);
3425 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3426 current_function_funcdef_no);
3428 /* The LSDA header. */
3430 /* Indicate the format of the landing pad start pointer. An omitted
3431 field implies @LPStart == @Start. */
3432 /* Currently we always put @LPStart == @Start. This field would
3433 be most useful in moving the landing pads completely out of
3434 line to another section, but it could also be used to minimize
3435 the size of uleb128 landing pad offsets. */
3436 lp_format = DW_EH_PE_omit;
3437 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3438 eh_data_format_name (lp_format));
3440 /* @LPStart pointer would go here. */
3442 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3443 eh_data_format_name (tt_format));
3445 #ifndef HAVE_AS_LEB128
3446 if (USING_SJLJ_EXCEPTIONS)
3447 call_site_len = sjlj_size_of_call_site_table ();
3448 else
3449 call_site_len = dw2_size_of_call_site_table ();
3450 #endif
3452 /* A pc-relative 4-byte displacement to the @TType data. */
3453 if (have_tt_data)
3455 #ifdef HAVE_AS_LEB128
3456 char ttype_after_disp_label[32];
3457 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3458 current_function_funcdef_no);
3459 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3460 "@TType base offset");
3461 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3462 #else
3463 /* Ugh. Alignment complicates things. */
3464 unsigned int before_disp, after_disp, last_disp, disp;
3466 before_disp = 1 + 1;
3467 after_disp = (1 + size_of_uleb128 (call_site_len)
3468 + call_site_len
3469 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3470 + (VEC_length (tree, cfun->eh->ttype_data)
3471 * tt_format_size));
3473 disp = after_disp;
3476 unsigned int disp_size, pad;
3478 last_disp = disp;
3479 disp_size = size_of_uleb128 (disp);
3480 pad = before_disp + disp_size + after_disp;
3481 if (pad % tt_format_size)
3482 pad = tt_format_size - (pad % tt_format_size);
3483 else
3484 pad = 0;
3485 disp = after_disp + pad;
3487 while (disp != last_disp);
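/* The loop above iterates to a fixed point: the byte size of the
   uleb128 encoding of DISP determines the padding inserted before the
   @TType data, which in turn changes DISP. Since the encoded size
   grows monotonically with DISP, the value settles quickly.  */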
3489 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3490 #endif
3493 /* Indicate the format of the call-site offsets. */
3494 #ifdef HAVE_AS_LEB128
3495 cs_format = DW_EH_PE_uleb128;
3496 #else
3497 cs_format = DW_EH_PE_udata4;
3498 #endif
3499 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3500 eh_data_format_name (cs_format));
3502 #ifdef HAVE_AS_LEB128
3503 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3504 current_function_funcdef_no);
3505 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3506 current_function_funcdef_no);
3507 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3508 "Call-site table length");
3509 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3510 if (USING_SJLJ_EXCEPTIONS)
3511 sjlj_output_call_site_table ();
3512 else
3513 dw2_output_call_site_table ();
3514 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3515 #else
3516 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3517 if (USING_SJLJ_EXCEPTIONS)
3518 sjlj_output_call_site_table ();
3519 else
3520 dw2_output_call_site_table ();
3521 #endif
3523 /* ??? Decode and interpret the data for flag_debug_asm. */
3524 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3525 for (i = 0; i < n; ++i)
3526 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3527 (i ? NULL : "Action record table"));
3529 if (have_tt_data)
3530 assemble_align (tt_format_size * BITS_PER_UNIT);
3532 i = VEC_length (tree, cfun->eh->ttype_data);
3533 while (i-- > 0)
3535 tree type = VEC_index (tree, cfun->eh->ttype_data, i);
3536 rtx value;
3538 if (type == NULL_TREE)
3539 value = const0_rtx;
3540 else
3542 struct cgraph_varpool_node *node;
3544 type = lookup_type_for_runtime (type);
3545 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3547 /* Let cgraph know that the rtti decl is used. Not all of the
3548 paths below go through assemble_integer, which would take
3549 care of this for us. */
3550 STRIP_NOPS (type);
3551 if (TREE_CODE (type) == ADDR_EXPR)
3553 type = TREE_OPERAND (type, 0);
3554 if (TREE_CODE (type) == VAR_DECL)
3556 node = cgraph_varpool_node (type);
3557 if (node)
3558 cgraph_varpool_mark_needed_node (node);
3561 else
3562 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3565 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3566 assemble_integer (value, tt_format_size,
3567 tt_format_size * BITS_PER_UNIT, 1);
3568 else
3569 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3572 #ifdef HAVE_AS_LEB128
3573 if (have_tt_data)
3574 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3575 #endif
3577 /* ??? Decode and interpret the data for flag_debug_asm. */
3578 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3579 for (i = 0; i < n; ++i)
3580 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3581 (i ? NULL : "Exception specification table"));
3583 current_function_section (current_function_decl);
3586 void
3587 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3589 fun->eh->throw_stmt_table = table;
3592 htab_t
3593 get_eh_throw_stmt_table (struct function *fun)
3595 return fun->eh->throw_stmt_table;
3598 /* Dump EH information to OUT. */
3599 void
3600 dump_eh_tree (FILE *out, struct function *fun)
3602 struct eh_region *i;
3603 int depth = 0;
3604 static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
3605 "allowed_exceptions", "must_not_throw",
3606 "throw"};
3608 i = fun->eh->region_tree;
3609 if (! i)
3610 return;
3612 fprintf (out, "Eh tree:\n");
3613 while (1)
3615 fprintf (out, " %*s %i %s", depth * 2, "",
3616 i->region_number, type_name [(int)i->type]);
3617 if (i->tree_label)
3619 fprintf (out, " tree_label:");
3620 print_generic_expr (out, i->tree_label, 0);
3622 fprintf (out, "\n");
3623 /* If there are sub-regions, process them. */
3624 if (i->inner)
3625 i = i->inner, depth++;
3626 /* If there are peers, process them. */
3627 else if (i->next_peer)
3628 i = i->next_peer;
3629 /* Otherwise, step back up the tree to the next peer. */
3630 else
3632 do {
3633 i = i->outer;
3634 depth--;
3635 if (i == NULL)
3636 return;
3637 } while (i->next_peer == NULL);
3638 i = i->next_peer;
3643 /* Verify some basic invariants on EH data structures. Could be extended to
3644 catch more. */
3645 void
3646 verify_eh_tree (struct function *fun)
3648 struct eh_region *i, *outer = NULL;
3649 bool err = false;
3650 int nvisited = 0;
3651 int count = 0;
3652 int j;
3653 int depth = 0;
3655 i = fun->eh->region_tree;
3656 if (! i)
3657 return;
3658 for (j = fun->eh->last_region_number; j > 0; --j)
3659 if (fun->eh->region_array[j])
3661 count++;
3662 if (fun->eh->region_array[j]->region_number != j)
3664 error ("region_array is corrupted for region %i", j);
3665 err = true;
3669 while (1)
3671 if (fun->eh->region_array[i->region_number] != i)
3673 error ("region_array is corrupted for region %i", i->region_number);
3674 err = true;
3676 if (i->outer != outer)
3678 error ("outer block of region %i is wrong", i->region_number);
3679 err = true;
3681 if (i->may_contain_throw && outer && !outer->may_contain_throw)
3683 error ("region %i may contain throw and is contained in region that may not",
3684 i->region_number);
3685 err = true;
3687 if (depth < 0)
3689 error ("negative nesting depth of region %i", i->region_number);
3690 err = true;
3692 nvisited ++;
3693 /* If there are sub-regions, process them. */
3694 if (i->inner)
3695 outer = i, i = i->inner, depth++;
3696 /* If there are peers, process them. */
3697 else if (i->next_peer)
3698 i = i->next_peer;
3699 /* Otherwise, step back up the tree to the next peer. */
3700 else
3702 do {
3703 i = i->outer;
3704 depth--;
3705 if (i == NULL)
3707 if (depth != -1)
3709 error ("tree list ends on depth %i", depth + 1);
3710 err = true;
3712 if (count != nvisited)
3714 error ("array does not match the region tree");
3715 err = true;
3717 if (err)
3719 dump_eh_tree (stderr, fun);
3720 internal_error ("verify_eh_tree failed");
3722 return;
3724 outer = i->outer;
3725 } while (i->next_peer == NULL);
3726 i = i->next_peer;
3730 #include "gt-except.h"