2005-04-25 Paul Brook <paul@codesourcery.com>
[official-gcc.git] / gcc / except.c
blobb70a1f764c51ae93cfb82f304da190230d05ebcc
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
78 /* Provide defaults for stuff that may not be defined when using
79 sjlj exceptions. */
80 #ifndef EH_RETURN_DATA_REGNO
81 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
82 #endif
85 /* Protect cleanup actions with must-not-throw regions, with a call
86 to the given failure handler. */
87 tree (*lang_protect_cleanup_actions) (void);
89 /* Return true if type A catches type B. */
90 int (*lang_eh_type_covers) (tree a, tree b);
92 /* Map a type to a runtime object to match type. */
93 tree (*lang_eh_runtime_type) (tree);
95 /* A hash table of label to region number. */
97 struct ehl_map_entry GTY(())
99 rtx label;
100 struct eh_region *region;
103 static GTY(()) int call_site_base;
104 static GTY ((param_is (union tree_node)))
105 htab_t type_to_runtime_map;
107 /* Describe the SjLj_Function_Context structure. */
108 static GTY(()) tree sjlj_fc_type_node;
109 static int sjlj_fc_call_site_ofs;
110 static int sjlj_fc_data_ofs;
111 static int sjlj_fc_personality_ofs;
112 static int sjlj_fc_lsda_ofs;
113 static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
209 struct call_site_record GTY(())
211 rtx landing_pad;
212 int action;
215 /* Used to save exception status for each function. */
216 struct eh_status GTY(())
218 /* The tree of all regions for this function. */
219 struct eh_region *region_tree;
221 /* The same information as an indexable array. */
222 struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;
224 /* The most recently open region. */
225 struct eh_region *cur_region;
227 /* This is the region for which we are processing catch blocks. */
228 struct eh_region *try_region;
230 rtx filter;
231 rtx exc_ptr;
233 int built_landing_pads;
234 int last_region_number;
236 varray_type ttype_data;
237 varray_type ehspec_data;
238 varray_type action_record_data;
240 htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;
242 struct call_site_record * GTY ((length ("%h.call_site_data_used")))
243 call_site_data;
244 int call_site_data_used;
245 int call_site_data_size;
247 rtx ehr_stackadj;
248 rtx ehr_handler;
249 rtx ehr_label;
251 rtx sjlj_fc;
252 rtx sjlj_exit_after;
256 static int t2r_eq (const void *, const void *);
257 static hashval_t t2r_hash (const void *);
258 static void add_type_for_runtime (tree);
259 static tree lookup_type_for_runtime (tree);
261 static void remove_unreachable_regions (rtx);
263 static int ttypes_filter_eq (const void *, const void *);
264 static hashval_t ttypes_filter_hash (const void *);
265 static int ehspec_filter_eq (const void *, const void *);
266 static hashval_t ehspec_filter_hash (const void *);
267 static int add_ttypes_entry (htab_t, tree);
268 static int add_ehspec_entry (htab_t, htab_t, tree);
269 static void assign_filter_values (void);
270 static void build_post_landing_pads (void);
271 static void connect_post_landing_pads (void);
272 static void dw2_build_landing_pads (void);
274 struct sjlj_lp_info;
275 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
276 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
277 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
278 static void sjlj_emit_function_enter (rtx);
279 static void sjlj_emit_function_exit (void);
280 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
281 static void sjlj_build_landing_pads (void);
283 static hashval_t ehl_hash (const void *);
284 static int ehl_eq (const void *, const void *);
285 static void add_ehl_entry (rtx, struct eh_region *);
286 static void remove_exception_handler_label (rtx);
287 static void remove_eh_handler (struct eh_region *);
288 static int for_each_eh_label_1 (void **, void *);
290 /* The return value of reachable_next_level. */
291 enum reachable_code
293 /* The given exception is not processed by the given region. */
294 RNL_NOT_CAUGHT,
295 /* The given exception may need processing by the given region. */
296 RNL_MAYBE_CAUGHT,
297 /* The given exception is completely processed by the given region. */
298 RNL_CAUGHT,
299 /* The given exception is completely processed by the runtime. */
300 RNL_BLOCKED
303 struct reachable_info;
304 static enum reachable_code reachable_next_level (struct eh_region *, tree,
305 struct reachable_info *);
307 static int action_record_eq (const void *, const void *);
308 static hashval_t action_record_hash (const void *);
309 static int add_action_record (htab_t, int, int);
310 static int collect_one_action_chain (htab_t, struct eh_region *);
311 static int add_call_site (rtx, int);
313 static void push_uleb128 (varray_type *, unsigned int);
314 static void push_sleb128 (varray_type *, int);
315 #ifndef HAVE_AS_LEB128
316 static int dw2_size_of_call_site_table (void);
317 static int sjlj_size_of_call_site_table (void);
318 #endif
319 static void dw2_output_call_site_table (void);
320 static void sjlj_output_call_site_table (void);
323 /* Routine to see if exception handling is turned on.
324 DO_WARN is nonzero if we want to inform the user that exception
325 handling is turned off.
327 This is used to ensure that -fexceptions has been specified if the
328 compiler tries to use any exception-specific functions. */
331 doing_eh (int do_warn)
333 if (! flag_exceptions)
335 static int warned = 0;
336 if (! warned && do_warn)
338 error ("exception handling disabled, use -fexceptions to enable");
339 warned = 1;
341 return 0;
343 return 1;
347 void
348 init_eh (void)
350 if (! flag_exceptions)
351 return;
353 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
355 /* Create the SjLj_Function_Context structure. This should match
356 the definition in unwind-sjlj.c. */
357 if (USING_SJLJ_EXCEPTIONS)
359 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
361 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
363 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
364 build_pointer_type (sjlj_fc_type_node));
365 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
367 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
368 integer_type_node);
369 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
371 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
372 tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
373 tmp);
374 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
375 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
377 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
378 ptr_type_node);
379 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
381 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
382 ptr_type_node);
383 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
385 #ifdef DONT_USE_BUILTIN_SETJMP
386 #ifdef JMP_BUF_SIZE
387 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
388 #else
389 /* Should be large enough for most systems, if it is not,
390 JMP_BUF_SIZE should be defined with the proper value. It will
391 also tend to be larger than necessary for most systems, a more
392 optimal port will define JMP_BUF_SIZE. */
393 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
394 #endif
395 #else
396 /* builtin_setjmp takes a pointer to 5 words. */
397 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
398 #endif
399 tmp = build_index_type (tmp);
400 tmp = build_array_type (ptr_type_node, tmp);
401 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
402 #ifdef DONT_USE_BUILTIN_SETJMP
403 /* We don't know what the alignment requirements of the
404 runtime's jmp_buf has. Overestimate. */
405 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
406 DECL_USER_ALIGN (f_jbuf) = 1;
407 #endif
408 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
410 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
411 TREE_CHAIN (f_prev) = f_cs;
412 TREE_CHAIN (f_cs) = f_data;
413 TREE_CHAIN (f_data) = f_per;
414 TREE_CHAIN (f_per) = f_lsda;
415 TREE_CHAIN (f_lsda) = f_jbuf;
417 layout_type (sjlj_fc_type_node);
419 /* Cache the interesting field offsets so that we have
420 easy access from rtl. */
421 sjlj_fc_call_site_ofs
422 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
423 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
424 sjlj_fc_data_ofs
425 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
426 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
427 sjlj_fc_personality_ofs
428 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
429 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
430 sjlj_fc_lsda_ofs
431 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
432 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
433 sjlj_fc_jbuf_ofs
434 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
435 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
439 void
440 init_eh_for_function (void)
442 cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
445 /* Routines to generate the exception tree somewhat directly.
446 These are used from tree-eh.c when processing exception related
447 nodes during tree optimization. */
449 static struct eh_region *
450 gen_eh_region (enum eh_region_type type, struct eh_region *outer)
452 struct eh_region *new;
454 #ifdef ENABLE_CHECKING
455 gcc_assert (doing_eh (0));
456 #endif
458 /* Insert a new blank region as a leaf in the tree. */
459 new = ggc_alloc_cleared (sizeof (*new));
460 new->type = type;
461 new->outer = outer;
462 if (outer)
464 new->next_peer = outer->inner;
465 outer->inner = new;
467 else
469 new->next_peer = cfun->eh->region_tree;
470 cfun->eh->region_tree = new;
473 new->region_number = ++cfun->eh->last_region_number;
475 return new;
478 struct eh_region *
479 gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
481 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
482 cleanup->u.cleanup.prev_try = prev_try;
483 return cleanup;
486 struct eh_region *
487 gen_eh_region_try (struct eh_region *outer)
489 return gen_eh_region (ERT_TRY, outer);
492 struct eh_region *
493 gen_eh_region_catch (struct eh_region *t, tree type_or_list)
495 struct eh_region *c, *l;
496 tree type_list, type_node;
498 /* Ensure to always end up with a type list to normalize further
499 processing, then register each type against the runtime types map. */
500 type_list = type_or_list;
501 if (type_or_list)
503 if (TREE_CODE (type_or_list) != TREE_LIST)
504 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
506 type_node = type_list;
507 for (; type_node; type_node = TREE_CHAIN (type_node))
508 add_type_for_runtime (TREE_VALUE (type_node));
511 c = gen_eh_region (ERT_CATCH, t->outer);
512 c->u.catch.type_list = type_list;
513 l = t->u.try.last_catch;
514 c->u.catch.prev_catch = l;
515 if (l)
516 l->u.catch.next_catch = c;
517 else
518 t->u.try.catch = c;
519 t->u.try.last_catch = c;
521 return c;
524 struct eh_region *
525 gen_eh_region_allowed (struct eh_region *outer, tree allowed)
527 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
528 region->u.allowed.type_list = allowed;
530 for (; allowed ; allowed = TREE_CHAIN (allowed))
531 add_type_for_runtime (TREE_VALUE (allowed));
533 return region;
536 struct eh_region *
537 gen_eh_region_must_not_throw (struct eh_region *outer)
539 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
543 get_eh_region_number (struct eh_region *region)
545 return region->region_number;
548 bool
549 get_eh_region_may_contain_throw (struct eh_region *region)
551 return region->may_contain_throw;
554 tree
555 get_eh_region_tree_label (struct eh_region *region)
557 return region->tree_label;
560 void
561 set_eh_region_tree_label (struct eh_region *region, tree lab)
563 region->tree_label = lab;
566 void
567 expand_resx_expr (tree exp)
569 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
570 struct eh_region *reg = cfun->eh->region_array[region_nr];
572 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
573 emit_barrier ();
576 /* Note that the current EH region (if any) may contain a throw, or a
577 call to a function which itself may contain a throw. */
579 void
580 note_eh_region_may_contain_throw (struct eh_region *region)
582 while (region && !region->may_contain_throw)
584 region->may_contain_throw = 1;
585 region = region->outer;
589 void
590 note_current_region_may_contain_throw (void)
592 note_eh_region_may_contain_throw (cfun->eh->cur_region);
596 /* Return an rtl expression for a pointer to the exception object
597 within a handler. */
600 get_exception_pointer (struct function *fun)
602 rtx exc_ptr = fun->eh->exc_ptr;
603 if (fun == cfun && ! exc_ptr)
605 exc_ptr = gen_reg_rtx (ptr_mode);
606 fun->eh->exc_ptr = exc_ptr;
608 return exc_ptr;
611 /* Return an rtl expression for the exception dispatch filter
612 within a handler. */
615 get_exception_filter (struct function *fun)
617 rtx filter = fun->eh->filter;
618 if (fun == cfun && ! filter)
620 filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
621 fun->eh->filter = filter;
623 return filter;
626 /* This section is for the exception handling specific optimization pass. */
628 /* Random access the exception region tree. */
630 void
631 collect_eh_region_array (void)
633 struct eh_region **array, *i;
635 i = cfun->eh->region_tree;
636 if (! i)
637 return;
639 array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
640 * sizeof (*array));
641 cfun->eh->region_array = array;
643 while (1)
645 array[i->region_number] = i;
647 /* If there are sub-regions, process them. */
648 if (i->inner)
649 i = i->inner;
650 /* If there are peers, process them. */
651 else if (i->next_peer)
652 i = i->next_peer;
653 /* Otherwise, step back up the tree to the next peer. */
654 else
656 do {
657 i = i->outer;
658 if (i == NULL)
659 return;
660 } while (i->next_peer == NULL);
661 i = i->next_peer;
666 /* Remove all regions whose labels are not reachable from insns. */
668 static void
669 remove_unreachable_regions (rtx insns)
671 int i, *uid_region_num;
672 bool *reachable;
673 struct eh_region *r;
674 rtx insn;
676 uid_region_num = xcalloc (get_max_uid (), sizeof(int));
677 reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
679 for (i = cfun->eh->last_region_number; i > 0; --i)
681 r = cfun->eh->region_array[i];
682 if (!r || r->region_number != i)
683 continue;
685 if (r->resume)
687 gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
688 uid_region_num[INSN_UID (r->resume)] = i;
690 if (r->label)
692 gcc_assert (!uid_region_num[INSN_UID (r->label)]);
693 uid_region_num[INSN_UID (r->label)] = i;
697 for (insn = insns; insn; insn = NEXT_INSN (insn))
698 reachable[uid_region_num[INSN_UID (insn)]] = true;
700 for (i = cfun->eh->last_region_number; i > 0; --i)
702 r = cfun->eh->region_array[i];
703 if (r && r->region_number == i && !reachable[i])
705 bool kill_it = true;
706 switch (r->type)
708 case ERT_THROW:
709 /* Don't remove ERT_THROW regions if their outer region
710 is reachable. */
711 if (r->outer && reachable[r->outer->region_number])
712 kill_it = false;
713 break;
715 case ERT_MUST_NOT_THROW:
716 /* MUST_NOT_THROW regions are implementable solely in the
717 runtime, but their existence continues to affect calls
718 within that region. Never delete them here. */
719 kill_it = false;
720 break;
722 case ERT_TRY:
724 /* TRY regions are reachable if any of its CATCH regions
725 are reachable. */
726 struct eh_region *c;
727 for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
728 if (reachable[c->region_number])
730 kill_it = false;
731 break;
733 break;
736 default:
737 break;
740 if (kill_it)
741 remove_eh_handler (r);
745 free (reachable);
746 free (uid_region_num);
749 /* Set up EH labels for RTL. */
751 void
752 convert_from_eh_region_ranges (void)
754 rtx insns = get_insns ();
755 int i, n = cfun->eh->last_region_number;
757 /* Most of the work is already done at the tree level. All we need to
758 do is collect the rtl labels that correspond to the tree labels that
759 collect the rtl labels that correspond to the tree labels
760 we allocated earlier. */
761 for (i = 1; i <= n; ++i)
763 struct eh_region *region = cfun->eh->region_array[i];
764 if (region && region->tree_label)
765 region->label = DECL_RTL_IF_SET (region->tree_label);
768 remove_unreachable_regions (insns);
771 static void
772 add_ehl_entry (rtx label, struct eh_region *region)
774 struct ehl_map_entry **slot, *entry;
776 LABEL_PRESERVE_P (label) = 1;
778 entry = ggc_alloc (sizeof (*entry));
779 entry->label = label;
780 entry->region = region;
782 slot = (struct ehl_map_entry **)
783 htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);
785 /* Before landing pad creation, each exception handler has its own
786 label. After landing pad creation, the exception handlers may
787 share landing pads. This is ok, since maybe_remove_eh_handler
788 only requires the 1-1 mapping before landing pad creation. */
789 gcc_assert (!*slot || cfun->eh->built_landing_pads);
791 *slot = entry;
794 void
795 find_exception_handler_labels (void)
797 int i;
799 if (cfun->eh->exception_handler_label_map)
800 htab_empty (cfun->eh->exception_handler_label_map);
801 else
803 /* ??? The expansion factor here (3/2) must be greater than the htab
804 occupancy factor (4/3) to avoid unnecessary resizing. */
805 cfun->eh->exception_handler_label_map
806 = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
807 ehl_hash, ehl_eq, NULL);
810 if (cfun->eh->region_tree == NULL)
811 return;
813 for (i = cfun->eh->last_region_number; i > 0; --i)
815 struct eh_region *region = cfun->eh->region_array[i];
816 rtx lab;
818 if (! region || region->region_number != i)
819 continue;
820 if (cfun->eh->built_landing_pads)
821 lab = region->landing_pad;
822 else
823 lab = region->label;
825 if (lab)
826 add_ehl_entry (lab, region);
829 /* For sjlj exceptions, need the return label to remain live until
830 after landing pad generation. */
831 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
832 add_ehl_entry (return_label, NULL);
835 bool
836 current_function_has_exception_handlers (void)
838 int i;
840 for (i = cfun->eh->last_region_number; i > 0; --i)
842 struct eh_region *region = cfun->eh->region_array[i];
844 if (! region || region->region_number != i)
845 continue;
846 if (region->type != ERT_THROW)
847 return true;
850 return false;
853 static int
854 t2r_eq (const void *pentry, const void *pdata)
856 tree entry = (tree) pentry;
857 tree data = (tree) pdata;
859 return TREE_PURPOSE (entry) == data;
862 static hashval_t
863 t2r_hash (const void *pentry)
865 tree entry = (tree) pentry;
866 return TREE_HASH (TREE_PURPOSE (entry));
869 static void
870 add_type_for_runtime (tree type)
872 tree *slot;
874 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
875 TREE_HASH (type), INSERT);
876 if (*slot == NULL)
878 tree runtime = (*lang_eh_runtime_type) (type);
879 *slot = tree_cons (type, runtime, NULL_TREE);
883 static tree
884 lookup_type_for_runtime (tree type)
886 tree *slot;
888 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
889 TREE_HASH (type), NO_INSERT);
891 /* We should have always inserted the data earlier. */
892 return TREE_VALUE (*slot);
896 /* Represent an entry in @TTypes for either catch actions
897 or exception filter actions. */
898 struct ttypes_filter GTY(())
900 tree t;
901 int filter;
904 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
905 (a tree) for a @TTypes type node we are thinking about adding. */
907 static int
908 ttypes_filter_eq (const void *pentry, const void *pdata)
910 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
911 tree data = (tree) pdata;
913 return entry->t == data;
916 static hashval_t
917 ttypes_filter_hash (const void *pentry)
919 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
920 return TREE_HASH (entry->t);
923 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
924 exception specification list we are thinking about adding. */
925 /* ??? Currently we use the type lists in the order given. Someone
926 should put these in some canonical order. */
928 static int
929 ehspec_filter_eq (const void *pentry, const void *pdata)
931 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
932 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
934 return type_list_equal (entry->t, data->t);
937 /* Hash function for exception specification lists. */
939 static hashval_t
940 ehspec_filter_hash (const void *pentry)
942 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
943 hashval_t h = 0;
944 tree list;
946 for (list = entry->t; list ; list = TREE_CHAIN (list))
947 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
948 return h;
951 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
952 to speed up the search. Return the filter value to be used. */
954 static int
955 add_ttypes_entry (htab_t ttypes_hash, tree type)
957 struct ttypes_filter **slot, *n;
959 slot = (struct ttypes_filter **)
960 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
962 if ((n = *slot) == NULL)
964 /* Filter value is a 1 based table index. */
966 n = xmalloc (sizeof (*n));
967 n->t = type;
968 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
969 *slot = n;
971 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
974 return n->filter;
977 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
978 to speed up the search. Return the filter value to be used. */
980 static int
981 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
983 struct ttypes_filter **slot, *n;
984 struct ttypes_filter dummy;
986 dummy.t = list;
987 slot = (struct ttypes_filter **)
988 htab_find_slot (ehspec_hash, &dummy, INSERT);
990 if ((n = *slot) == NULL)
992 /* Filter value is a -1 based byte index into a uleb128 buffer. */
994 n = xmalloc (sizeof (*n));
995 n->t = list;
996 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
997 *slot = n;
999 /* Look up each type in the list and encode its filter
1000 value as a uleb128. Terminate the list with 0. */
1001 for (; list ; list = TREE_CHAIN (list))
1002 push_uleb128 (&cfun->eh->ehspec_data,
1003 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1004 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1007 return n->filter;
1010 /* Generate the action filter values to be used for CATCH and
1011 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1012 we use lots of landing pads, and so every type or list can share
1013 the same filter value, which saves table space. */
1015 static void
1016 assign_filter_values (void)
1018 int i;
1019 htab_t ttypes, ehspec;
1021 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1022 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1024 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1025 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1027 for (i = cfun->eh->last_region_number; i > 0; --i)
1029 struct eh_region *r = cfun->eh->region_array[i];
1031 /* Mind we don't process a region more than once. */
1032 if (!r || r->region_number != i)
1033 continue;
1035 switch (r->type)
1037 case ERT_CATCH:
1038 /* Whatever type_list is (NULL or true list), we build a list
1039 of filters for the region. */
1040 r->u.catch.filter_list = NULL_TREE;
1042 if (r->u.catch.type_list != NULL)
1044 /* Get a filter value for each of the types caught and store
1045 them in the region's dedicated list. */
1046 tree tp_node = r->u.catch.type_list;
1048 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1050 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1051 tree flt_node = build_int_cst (NULL_TREE, flt);
1053 r->u.catch.filter_list
1054 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1057 else
1059 /* Get a filter value for the NULL list also since it will need
1060 an action record anyway. */
1061 int flt = add_ttypes_entry (ttypes, NULL);
1062 tree flt_node = build_int_cst (NULL_TREE, flt);
1064 r->u.catch.filter_list
1065 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1068 break;
1070 case ERT_ALLOWED_EXCEPTIONS:
1071 r->u.allowed.filter
1072 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1073 break;
1075 default:
1076 break;
1080 htab_delete (ttypes);
1081 htab_delete (ehspec);
1084 /* Emit SEQ into basic block just before INSN (that is assumed to be
1085 first instruction of some existing BB and return the newly
1086 produced block. */
1087 static basic_block
1088 emit_to_new_bb_before (rtx seq, rtx insn)
1090 rtx last;
1091 basic_block bb;
1092 edge e;
1093 edge_iterator ei;
1095 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1096 call), we don't want it to go into newly created landing pad or other EH
1097 construct. */
1098 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1099 if (e->flags & EDGE_FALLTHRU)
1100 force_nonfallthru (e);
1101 else
1102 ei_next (&ei);
1103 last = emit_insn_before (seq, insn);
1104 if (BARRIER_P (last))
1105 last = PREV_INSN (last);
1106 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1107 update_bb_for_insn (bb);
1108 bb->flags |= BB_SUPERBLOCK;
1109 return bb;
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  For each EH region this emits the post-landing-pad
   label, the type-dispatch compare/jump chain (for ERT_TRY), and a RESX
   marker standing in for the eventual _Unwind_Resume call.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  /* A catch-all handler: unconditional jump.  */
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught. Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);
			/* The two lists advance in lockstep.  */
			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* Single filter comparison: either the exception is allowed
	     (jump to the region's label) or we fall into the RESX.  */
	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0,
				   region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  /* These transfer control straight to their handler label.  */
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  /* Rethrow into the enclosing handler: redirect all successor
	     edges of the RESX block to the outer post-landing pad.  */
	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  /* No enclosing handler: resume unwinding in the caller.  */
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
/* Build the DWARF2-style landing pads: for each live region emit the
   landing-pad label, receiver insns, clobbers for call-saved EH data
   registers, and moves of the exception pointer/filter out of the
   EH return data registers, then splice the sequence into the CFG
   ahead of the post-landing pad.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* Only these three region kinds receive landing pads.  */
      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      /* Copy the exception object pointer and the filter value out of
	 the hard registers the unwinder delivers them in.  */
      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
/* Per-region bookkeeping used while building setjmp/longjmp landing
   pads; one entry per EH region number.  */

struct sjlj_lp_info
{
  /* Nonzero if some insn can transfer control to this region's pad.  */
  int directly_reachable;
  /* Index into the action record table, or -1/-2 for the no-action
     and must-not-throw special cases.  */
  int action_index;
  /* Value stored in the function context to select this region at
     dispatch time.  */
  int dispatch_index;
  /* Call-site value recorded before each potentially-throwing insn.  */
  int call_site_index;
};
/* Walk every insn; for each one carrying a positive REG_EH_REGION note,
   find the innermost region that might handle the exception and mark it
   directly reachable in LP_INFO.  Returns true if any region was marked
   (i.e. landing pads are needed at all).  */

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      /* Non-positive region numbers mean the insn cannot throw into
	 this function.  */
      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  /* A throw region records the exact type; reachability starts
	     at its enclosing region.  */
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
/* For each directly-reachable region, build its action record chain,
   assign a dispatch index, and assign a call-site value.  DISPATCH_LABEL
   becomes the common landing pad for every such region.  */

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	/* Any real action chain requires language-specific data.  */
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  If dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
/* Before every insn that can throw, store that insn's call-site value
   into the SjLj function context, so the dispatcher knows which region
   was active.  Redundant consecutive stores of the same value within an
   extended basic block are suppressed.  */

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context: fill in the personality routine
   and LSDA fields, arrange the setjmp that catches unwinds (jumping to
   DISPATCH_LABEL on a nonzero return), and register the context with
   the runtime.  The whole sequence is placed at function entry.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      /* Point at this function's LSDA label.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    /* Tell the optimizers the common case is a zero (direct) return.  */
    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin)
	&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
	    || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
      break;
  if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    {
      /* The FUNCTION_BEG note lies inside the first real block; emit
	 right after it (or at the block end if it isn't found).  */
      rtx last = BB_END (single_succ (ENTRY_BLOCK_PTR));
      for (; ; fn_begin = NEXT_INSN (fn_begin))
	if ((NOTE_P (fn_begin)
	     && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	    || fn_begin == last)
	  break;
      emit_insn_after (seq, fn_begin);
    }
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  AFTER is
   simply recorded; sjlj_emit_function_exit consumes it later.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}
/* Emit the call that unregisters the SjLj function context, placing it
   on the fallthru path into the exit block (either inside the last
   block after the recorded exit point, or on the fallthru edge).  */

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  /* NOTE(review): this relies on FOR_EACH_EDGE leaving E null when no
     fallthru edge exists — verify against the edge-iterator macros.  */
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert libcall
	 is inside the last basic block or after it.  In the other case
	 we need to emit to edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
	{
	  if (insn == cfun->eh->sjlj_exit_after)
	    {
	      /* Don't insert between a label and its block's insns.  */
	      if (LABEL_P (insn))
		insn = NEXT_INSN (insn);
	      emit_insn_after (seq, insn);
	      return;
	    }
	  if (insn == BB_END (e->src))
	    break;
	}
      insert_insn_on_edge (seq, e);
    }
}
/* Emit the common SjLj dispatch block at DISPATCH_LABEL: reload the
   dispatch index, exception pointer and filter from the function
   context, then branch to the post-landing pad of whichever reachable
   region the index selects (the first reachable region is the
   fall-through default).  */

static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      /* The first reachable region needs no compare: it is reached by
	 falling through past all the other tests.  */
      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = cfun->eh->region_array[first_reachable]->post_landing_pad;

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
/* Top-level driver for setjmp/longjmp exception handling: if any region
   is reachable, allocate the function context on the stack and emit the
   call-site marking, function enter/exit code and dispatch table.  */

static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  /* One zero-initialized slot per region number (index 0 unused).  */
  lp_info = xcalloc (cfun->eh->last_region_number + 1,
		     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
/* Final pass of EH lowering: construct landing pads (SjLj or DWARF2),
   then rebuild the EH-related portions of the CFG.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  /* Discard every stale EH edge and recreate them from the new
     landing-pad structure.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (eh)
	rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}
1864 static hashval_t
1865 ehl_hash (const void *pentry)
1867 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
1869 /* 2^32 * ((sqrt(5) - 1) / 2) */
1870 const hashval_t scaled_golden_ratio = 0x9e3779b9;
1871 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
1874 static int
1875 ehl_eq (const void *pentry, const void *pdata)
1877 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
1878 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
1880 return entry->label == data->label;
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  The label must be
   present (asserted); it is simply cleared from the table.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  /* Look the label up via a stack dummy keyed on LABEL.  */
  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.: alias its number(s) to the
   containing region, drop its label from the label map, reparent its
   children, and — for catch regions — unlink it from its try's catch
   list, deleting the try itself when no catches remain.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
	{
	  cfun->eh->region_array[i] = outer;
	}
    }

  if (outer)
    {
      /* The outer region inherits all of our alternate numbers.  */
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  /* After landing-pad generation the map is keyed by landing pads,
     before by handler labels.  */
  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  /* Unlink REGION from its peer list.  */
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  /* Reparent REGION's children onto OUTER and splice them into the
     peer list in REGION's place.  */
  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      /* The controlling try region is the first non-catch peer.  */
      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      gcc_assert (try->type == ERT_TRY);

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  /* A try with no remaining catches is itself dead.  */
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      /* Keep the region but forget the dead label.  */
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  The callback pointer
   is passed by address through the htab traversal's opaque data slot.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
		 (void *) &callback);
}
2044 static int
2045 for_each_eh_label_1 (void **pentry, void *data)
2047 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2048 void (*callback) (rtx) = *(void (**) (rtx)) data;
2050 (*callback) (entry->label);
2051 return 1;
2054 /* Invoke CALLBACK for every exception region in the current function. */
2056 void
2057 for_each_eh_region (void (*callback) (struct eh_region *))
2059 int i, n = cfun->eh->last_region_number;
2060 for (i = 1; i <= n; ++i)
2062 struct eh_region *region = cfun->eh->region_array[i];
2063 if (region)
2064 (*callback) (region);
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  /* Chain of types already claimed by inner catch handlers.  */
  tree types_caught;
  /* Chain of type lists from ERT_ALLOWED_EXCEPTIONS regions seen.  */
  tree types_allowed;
  /* Invoked on each reachable handler (or its landing pad).  */
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
  /* True once any handler has been reported via CALLBACK.  */
  bool saw_any_handlers;
};
2080 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2081 base class of TYPE, is in HANDLED. */
2083 static int
2084 check_handled (tree handled, tree type)
2086 tree t;
2088 /* We can check for exact matches without front-end help. */
2089 if (! lang_eh_type_covers)
2091 for (t = handled; t ; t = TREE_CHAIN (t))
2092 if (TREE_VALUE (t) == type)
2093 return 1;
2095 else
2097 for (t = handled; t ; t = TREE_CHAIN (t))
2098 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2099 return 1;
2102 return 0;
2105 /* A subroutine of reachable_next_level. If we are collecting a list
2106 of handlers, add one. After landing pad generation, reference
2107 it instead of the handlers themselves. Further, the handlers are
2108 all wired together, so by referencing one, we've got them all.
2109 Before landing pad generation we reference each handler individually.
2111 LP_REGION contains the landing pad; REGION is the handler. */
2113 static void
2114 add_reachable_handler (struct reachable_info *info,
2115 struct eh_region *lp_region, struct eh_region *region)
2117 if (! info)
2118 return;
2120 info->saw_any_handlers = true;
2122 if (cfun->eh->built_landing_pads)
2123 info->callback (lp_region, info->callback_data);
2124 else
2125 info->callback (region, info->callback_data);
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
		      struct reachable_info *info)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previous, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->saw_any_handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      gcc_unreachable ();
      break;
    default:
      gcc_unreachable ();
    }
}
/* Invoke CALLBACK on each region reachable from REGION_NUMBER.  IS_RESX
   means the insn is a rethrow leaving (not entering) the region.  */

void
foreach_reachable_handler (int region_number, bool is_resx,
			   void (*callback) (struct eh_region *, void *),
			   void *callback_data)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;

  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      /* Throw regions carry the exact thrown type; search from their
	 enclosing region.  */
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
	break;
      /* If we have processed one cleanup, there is no point in
	 processing any more of them.  Each cleanup will have an edge
	 to the next outer cleanup region, so the flow graph will be
	 accurate.  */
      if (region->type == ERT_CLEANUP)
	region = region->u.cleanup.prev_try;
      else
	region = region->outer;
    }
}
2340 /* Retrieve a list of labels of exception handlers which can be
2341 reached by a given insn. */
2343 static void
2344 arh_to_landing_pad (struct eh_region *region, void *data)
2346 rtx *p_handlers = data;
2347 if (! *p_handlers)
2348 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2351 static void
2352 arh_to_label (struct eh_region *region, void *data)
2354 rtx *p_handlers = data;
2355 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
/* Return the list of exception handlers reachable from INSN: a list
   of landing pads once cfun->eh->built_landing_pads is set, otherwise
   a list of handler labels.  Returns NULL when INSN cannot throw to a
   handler in this function.  */
reachable_handlers (rtx insn)
  bool is_resx = false;
  rtx handlers = NULL;
  int region_number;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX)
      /* A RESX names its EH region directly in the pattern.  */
      region_number = XINT (PATTERN (insn), 0);
      is_resx = true;
  else
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      /* A non-positive region number means the insn cannot throw.  */
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        return NULL;
      region_number = INTVAL (XEXP (note, 0));

  foreach_reachable_handler (region_number, is_resx,
                             (cfun->eh->built_landing_pads
                              ? arh_to_landing_pad
                              : arh_to_label),
                             &handlers);

  return handlers;
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

/* Subroutine of can_throw_internal: true if an exception raised in EH
   region REGION_NUMBER can be caught by some containing region of
   this function.  */
bool
can_throw_internal_1 (int region_number)
  struct eh_region *region;
  tree type_thrown;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
      /* A THROW region records the exact type thrown; start the walk
         at its enclosing region.  */
      type_thrown = region->u.throw.type;
      region = region->outer;

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;

  return false;
/* Return true if INSN can throw an exception that is caught within
   this function.  Handles RESX jumps (region in the pattern) and
   ordinary insns (region in a REG_EH_REGION note).  */
bool
can_throw_internal (rtx insn)
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_internal_1 (XINT (PATTERN (insn), 0));

  /* For a delay-slot SEQUENCE, the note lives on the first insn.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

/* Subroutine of can_throw_external: true if an exception raised in EH
   region REGION_NUMBER can propagate out of this function, i.e. is
   neither caught nor blocked by any containing region.  */
bool
can_throw_external_1 (int region_number)
  struct eh_region *region;
  tree type_thrown;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
      type_thrown = region->u.throw.type;
      region = region->outer;

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
/* Return true if INSN can throw an exception visible outside this
   function.  Insns without a REG_EH_REGION note are conservatively
   assumed to throw when they are calls (or trapping insns under
   -fnon-call-exceptions).  */
bool
can_throw_external (rtx insn)
  rtx note;

  if (! INSN_P (insn))
    return false;

  /* For a delay-slot SEQUENCE, the note lives on the first insn.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));

  /* Non-positive region numbers mean "cannot throw".  */
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)));
/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls.

   Scans the insn stream (and the epilogue delay list) for insns that
   can throw externally; clears TREE_NOTHROW on the first such insn,
   and clears all_throwers_are_sibcalls on the first thrower that is
   not a sibling call.  */

void
set_nothrow_function_flags (void)
  rtx insn;

  TREE_NOTHROW (current_function_decl) = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
	TREE_NOTHROW (current_function_decl) = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	    cfun->all_throwers_are_sibcalls = 0;
	    return;

  /* NOTE(review): the elements of epilogue_delay_list appear to be
     list cells rather than the insns themselves, yet the cell is
     passed whole to can_throw_external (which returns false for
     non-INSN_P rtx) — verify whether XEXP (insn, 0) was intended.  */
  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
	TREE_NOTHROW (current_function_decl) = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	    cfun->all_throwers_are_sibcalls = 0;
	    return;
2550 /* Various hooks for unwind library. */
/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

  /* Give the target a chance to emit its own setup code (e.g. a
     register-window flush) when it defines the hook.  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
/* Expand __builtin_eh_return_data_regno: map the constant argument in
   ARGLIST through EH_RETURN_DATA_REGNO and the debug register
   numbering, returning the result as a CONST_INT; returns constm1_rtx
   on a non-constant or out-of-range argument.  */
expand_builtin_eh_return_data_regno (tree arglist)
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  /* The argument must be a compile-time constant.  */
  if (TREE_CODE (which) != INTEGER_CST)
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering the
     unwinder/debug info uses.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

expand_builtin_extract_return_addr (tree addr_tree)
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* Coerce the value into Pmode if it arrived in some other mode.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

expand_builtin_frob_return_addr (tree addr_tree)
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

  /* Undo the adjustment applied by expand_builtin_extract_return_addr.  */
#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.

   Evaluates STACKADJ_TREE (when the target defines
   EH_RETURN_STACKADJ_RTX) and HANDLER_TREE into pseudos remembered in
   cfun->eh, then jumps to the shared ehr_label; expand_eh_return emits
   the code at that label.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  /* Cache the stack adjustment in a pseudo; on repeat calls reuse the
     same pseudo so the epilogue reads a single location.  */
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  /* Likewise for the handler address.  */
  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  /* All __builtin_eh_return calls in a function funnel through one
     label just before the epilogue.  */
  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
/* Emit, just before the epilogue, the code reached from the ehr_label
   set up by expand_builtin_eh_return: load the stack adjustment and
   handler address saved in cfun->eh and return via the target's
   eh_return mechanism.  No-op if __builtin_eh_return was never used.  */
void
expand_eh_return (void)
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

  /* On the normal (non-EH) path the stack adjustment is zero.  */
#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* Normal control flow skips over the EH-return stub.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

  /* Prefer the target's dedicated eh_return pattern; fall back to
     storing the handler address in EH_RETURN_HANDLER_RTX.  */
#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif

  emit_label (around_label);
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

expand_builtin_extend_pointer (tree addr_tree)
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  /* Widen from ptr_mode to a full word with the chosen signedness.  */
  return convert_modes (word_mode, ptr_mode, addr, extend);
2733 /* In the following functions, we represent entries in the action table
2734 as 1-based indices. Special cases are:
2736 0: null action record, non-null landing pad; implies cleanups
2737 -1: null action record, null landing pad; implies no action
2738 -2: no call-site entry; implies must_not_throw
2739 -3: we have yet to process outer regions
2741 Further, no special cases apply to the "next" field of the record.
2742 For next, 0 means end of list. */
/* One entry in the hashed action table; see the index conventions in
   the comment above.  */
struct action_record
  int offset;	/* 1-based position of this record in action_record_data.  */
  int filter;	/* Type filter value; 0 encodes a cleanup.  */
  int next;	/* 1-based index of the next record, 0 for end of list.  */
2751 static int
2752 action_record_eq (const void *pentry, const void *pdata)
2754 const struct action_record *entry = (const struct action_record *) pentry;
2755 const struct action_record *data = (const struct action_record *) pdata;
2756 return entry->filter == data->filter && entry->next == data->next;
2759 static hashval_t
2760 action_record_hash (const void *pentry)
2762 const struct action_record *entry = (const struct action_record *) pentry;
2763 return entry->next * 1009 + entry->filter;
/* Intern the action (FILTER, NEXT) in AR_HASH, appending its encoded
   form to cfun->eh->action_record_data the first time it is seen, and
   return its 1-based offset in that array.  NEXT is a 1-based index
   per the conventions above (0 terminates the list).  */
static int
add_action_record (htab_t ar_hash, int filter, int next)
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  /* Not seen before: record it and emit the sleb128-encoded pair.  */
  if ((new = *slot) == NULL)
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);

  return new->offset;
/* Build the chain of action records for the EH regions containing
   REGION (outermost last), interning each record via add_action_record
   in AR_HASH.  Returns the 1-based index of the chain head, or one of
   the special values documented above (0 cleanup-only, -1 no action,
   -2 must_not_throw).  */
static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	  /* A NULL type list denotes a catch-all handler.  */
	  if (c->u.catch.type_list == NULL)
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	  else
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      /* Do the outer search lazily, at most once.  */
	      if (next == -3)
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);

      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
/* Append a call-site entry (LANDING_PAD, ACTION) to the function's
   call-site table, growing the GC-allocated array geometrically as
   needed, and return its index biased by call_site_base.  */
static int
add_call_site (rtx landing_pad, int action)
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  /* Grow by doubling; start at 64 entries.  */
  if (used >= size)
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.

   Walks the insn stream once, collapsing runs of insns with identical
   (action chain, landing pad) pairs into single call-site ranges, and
   brackets each range with EH_REGION_BEG/END notes.  Used only for
   DWARF2 unwinding; a no-op for SJLJ.  */

void
convert_to_eh_region_ranges (void)
  rtx insn, iter, note;
  htab_t ar_hash;
  /* Action state: -3 = nothing seen yet, -2 = must-not-throw,
     -1 = no action, >= 0 = action-record index.  */
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	/* For a delay-slot SEQUENCE, the note lives on the first insn.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	    if (! (CALL_P (insn)
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	else
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	    first_no_action_insn = iter;
	    last_action = -1;

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	last_action_insn = iter;

  /* Close the final open region, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;

  htab_delete (ar_hash);
3059 static void
3060 push_uleb128 (varray_type *data_area, unsigned int value)
3064 unsigned char byte = value & 0x7f;
3065 value >>= 7;
3066 if (value)
3067 byte |= 0x80;
3068 VARRAY_PUSH_UCHAR (*data_area, byte);
3070 while (value);
/* Append VALUE to *DATA_AREA encoded as signed LEB128: seven bits per
   byte, terminating once the remaining value is pure sign extension
   of the bits already emitted.  */
static void
push_sleb128 (varray_type *data_area, int value)
  unsigned char byte;
  int more;

      byte = value & 0x7f;
      /* NOTE(review): relies on arithmetic (sign-propagating) right
	 shift of a negative VALUE, which is implementation-defined in
	 C — assumed throughout GCC's own sources.  */
      value >>= 7;
      /* Done when the remaining value and bit 6 of the last byte agree
	 on the sign.  */
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
  while (more);
3093 #ifndef HAVE_AS_LEB128
/* Return the byte size of the DWARF2 call-site table: three udata4
   fields (start, length, landing pad) plus a uleb128 action per
   entry.  Must agree with dw2_output_call_site_table.  */
static int
dw2_size_of_call_site_table (void)
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);

  return size;
/* Return the byte size of the SJLJ call-site table: per entry, a
   uleb128 landing-pad dispatch value plus a uleb128 action.  Must
   agree with sjlj_output_call_site_table.  */
static int
sjlj_size_of_call_site_table (void)
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);

  return size;
3126 #endif
/* Emit the DWARF2 call-site table into the assembly stream: for each
   entry, the region start and length as deltas from the function
   start, the landing pad (0 if none), and the action index.  Bumps
   call_site_base so subsequent functions get fresh label numbers.  */
static void
dw2_output_call_site_table (void)
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
				    current_function_func_begin_label,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab,
				      current_function_func_begin_label,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
			    current_function_func_begin_label,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab,
			      current_function_func_begin_label,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");

  call_site_base += n;
/* Emit the SJLJ call-site table: per entry, the landing-pad dispatch
   value and action index, both uleb128.  Bumps call_site_base like
   dw2_output_call_site_table.  */
static void
sjlj_output_call_site_table (void)
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");

  call_site_base += n;
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
  if (targetm.have_named_sections)
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
	  int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

	  /* The table can be read-only unless PIC forces the @TType
	     entries to be absolute (relocated) pointers.  */
	  flags = (! flag_pic
		   || ((tt_format & 0x70) != DW_EH_PE_absptr
		       && (tt_format & 0x70) != DW_EH_PE_aligned))
	    ? 0 : SECTION_WRITE;
      else
	flags = SECTION_WRITE;
      named_section_flags (".gcc_except_table", flags);
  /* Without named sections, fall back on the generic data sections.  */
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
/* Emit the function's language-specific data area (LSDA): the header,
   call-site table, action record table, @TType (type) table, and
   exception-specification table, in the layout the unwinder's
   personality routine expects.  No-op unless convert_to_eh_region_ranges
   (or the SJLJ path) marked cfun->uses_eh_lsda.  */
void
output_function_exception_table (void)
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  assemble_external_libcall (eh_personality_libfunc);
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  targetm.asm_out.exception_section ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

  /* Without leb128 assembler support, table sizes must be computed by
     hand to fill in the length fields below.  */
#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      /* Iterate because the size of the uleb128 displacement itself
	 affects the padding needed to align the @TType data; stop at
	 the fixed point.  */
      disp = after_disp;
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* Emit the @TType entries in reverse, since filters index the table
     from its end.  */
  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	value = const0_rtx;
      else
	  struct cgraph_varpool_node *node;

	  type = lookup_type_for_runtime (type);
	  value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

	  /* Let cgraph know that the rtti decl is used.  Not all of the
	     paths below go through assemble_integer, which would take
	     care of this for us.  */
	  STRIP_NOPS (type);
	  if (TREE_CODE (type) == ADDR_EXPR)
	      type = TREE_OPERAND (type, 0);
	      if (TREE_CODE (type) == VAR_DECL)
		  node = cgraph_varpool_node (type);
		  if (node)
		    cgraph_varpool_mark_needed_node (node);
	  else
	    gcc_assert (TREE_CODE (type) == INTEGER_CST);

      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  /* Return to the function's code section.  */
  current_function_section (current_function_decl);
3438 #include "gt-except.h"