gcc/except.c
/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
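
/* For illustration only (not part of the original sources): a minimal
   C++ fragment that exercises this machinery, with hypothetical names
   Resource, may_throw, Error and handle:

       struct Resource { ~Resource (); };
       void f (void)
       {
         Resource r;                     -- implies a cleanup region
         try
           {
             may_throw ();               -- call site inside a try region
           }
         catch (const Error &e)          -- catch region with a type list
           {
             handle (e);
           }
       }

   A throw inside may_throw transfers control to the handler of the
   innermost enclosing region that matches the exception's type; the
   destructor of "r" runs as control unwinds out of f.  */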

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
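
/* For orientation, nested handlers such as

       try { try { A } catch (T1) { B } } catch (T2) { C }

   produce a region tree along these lines (a sketch; region numbers
   are arbitrary):

       ERT_TRY 1         ERT_CATCH 2  (type T2, handler C)
         ERT_TRY 3       ERT_CATCH 4  (type T1, handler B)

   where region 3 is inner to region 1, and each ERT_CATCH is a peer
   of its ERT_TRY linked through u.try.catch / u.catch.next_catch
   (see gen_eh_region_catch below).  */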

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);

/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
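
/* For reference, the record built above corresponds to a C structure
   along these lines (a sketch only; the authoritative definition is
   SjLj_Function_Context in unwind-sjlj.c):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;   -- __prev
         int call_site;                        -- __call_site
         word data[4];                         -- __data: exc_ptr, filter, ...
         void *personality;                    -- __personality
         void *lsda;                           -- __lsda
         void *jbuf[N];                        -- __jbuf, target-dependent N
       };

   The sjlj_fc_*_ofs values cached above are the byte offsets of these
   fields, so rtl can address them directly.  */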

void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}

struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
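
/* A sketch (illustrative only) of how the generators above compose
   for a source-level "try { ... } catch (A) { ... } catch (B) { ... }":

       struct eh_region *t = gen_eh_region_try (outer);
       gen_eh_region_catch (t, a_type);    -- becomes t->u.try.catch
       gen_eh_region_catch (t, b_type);    -- becomes t->u.try.last_catch

   Note that each catch region is created with t->outer as its parent,
   i.e. as a peer of the try, and is chained into the try's catch list
   in source order.  */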

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = cfun->eh->region_array[region_nr];

  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
      fun->eh->filter = filter;
    }
  return filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
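
/* Illustration (not from the sources): given the region tree

       1
       +- 2
       |  +- 3
       +- 4

   the loop above visits 1, 2, 3, backs up from 3 through 2, then
   visits 4: a preorder depth-first walk that leaves region_array[n]
   pointing at the region whose region_number is n.  */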

static void
resolve_one_fixup_region (struct eh_region *fixup)
{
  struct eh_region *cleanup, *real;
  int j, n;

  n = cfun->eh->last_region_number;
  cleanup = 0;

  for (j = 1; j <= n; ++j)
    {
      cleanup = cfun->eh->region_array[j];
      if (cleanup && cleanup->type == ERT_CLEANUP
	  && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	break;
    }
  gcc_assert (j <= n);

  real = cleanup->outer;
  if (real && real->type == ERT_FIXUP)
    {
      if (!real->u.fixup.resolved)
	resolve_one_fixup_region (real);

      real = real->u.fixup.real_region;
    }

  fixup->u.fixup.real_region = real;
  fixup->u.fixup.resolved = true;
}

static void
resolve_fixup_regions (void)
{
  int i, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];

      if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
	continue;

      resolve_one_fixup_region (fixup);
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* TRY regions are reachable if any of their CATCH regions
		   are reachable.  */
		struct eh_region *c;
		for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn))
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (CALL_P (insn)
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						REG_NOTES (insn));
	}
    }

  gcc_assert (sp == orig_sp);
}
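
/* By way of example (illustrative region numbers), an insn stream

       NOTE_INSN_EH_REGION_BEG 3
         call foo
       NOTE_INSN_EH_REGION_END 3
       call bar

   comes out with both notes removed, "call foo" carrying a
   REG_EH_REGION note of 3, and "call bar" unannotated, since by then
   cur has been popped back to 0.  */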

static void
collect_rtl_labels_from_trees (void)
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *reg = cfun->eh->region_array[i];
      if (reg && reg->tree_label)
	reg->label = DECL_RTL_IF_SET (reg->tree_label);
    }
}

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();

  if (cfun->eh->region_array)
    {
      /* If the region array already exists, assume we're coming from
	 optimize_function_tree.  In this case all we need to do is
	 collect the rtl labels that correspond to the tree labels
	 that we allocated earlier.  */
      collect_rtl_labels_from_trees ();
    }
  else
    {
      int *stack;

      collect_eh_region_array ();
      resolve_fixup_regions ();

      stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
      convert_from_eh_region_ranges_1 (&insns, stack, 0);
      free (stack);

      remove_fixup_regions ();
    }

  remove_unreachable_regions (insns);
}

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}

static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
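
/* Observe that when hashval_t is 32 bits wide, (h << 5) + (h >> 27)
   is a 5-bit left rotate, so the loop above computes the classic
   rotating hash h = rotl (h, 5) + hash (element): cheap to evaluate
   and sensitive to the order of the list.  */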

/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
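
/* Encoding sketch (illustrative filter values): a specification
   "throw (A, B)" whose types received ttypes filters 2 and 5 appends
   the bytes { 2, 5, 0 } to ehspec_data, and the specification itself
   gets filter -1 if it starts at offset 0 (-(offset + 1) in general).
   Filters above 127 would occupy more than one uleb128 byte each.  */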

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

/* Emit SEQ into the basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into the newly created landing pad or other
     EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}

static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
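
/* Each landing pad emitted above has roughly this shape (a sketch in
   pseudo-rtl):

       Llp:
         <exception or nonlocal-goto receiver, if the target has one>
         <clobbers of call-saved EH data registers, plus a blockage>
         exc_ptr = reg:EH_RETURN_DATA_REGNO (0)
         filter  = reg:EH_RETURN_DATA_REGNO (1)
         fall through to the region's post_landing_pad  */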

struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
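
/* Concretely (illustrative): after this pass a must-not-throw region
   carries call-site index 0, a region whose action chain collapsed to
   "no action" carries -1, and every other directly reachable region
   carries a positive index allocated by add_call_site and keyed by
   its dispatch index.  sjlj_mark_call_sites below stores this value
   into the function context before each affected call.  */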

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin)
	&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
	    || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
      break;
  if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
    insert_insn_on_edge (seq, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
  else
    {
      rtx last = BB_END (EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest);
      for (; ; fn_begin = NEXT_INSN (fn_begin))
	if ((NOTE_P (fn_begin)
	     && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	    || fn_begin == last)
	  break;
      emit_insn_after (seq, fn_begin);
    }
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
	 libcall is inside the last basic block or after it.  In the
	 latter case we need to emit the sequence on the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
	{
	  if (insn == cfun->eh->sjlj_exit_after)
	    {
	      if (LABEL_P (insn))
		insn = NEXT_INSN (insn);
	      emit_insn_after (seq, insn);
	      return;
	    }
	  if (insn == BB_END (e->src))
	    break;
	}
      insert_insn_on_edge (seq, e);
    }
}

static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = cfun->eh->region_array[first_reachable]->post_landing_pad;

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
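
/* The dispatch code emitted above amounts to (a sketch, with D<i> the
   dispatch indices assigned earlier):

       dispatch_label:
         <setjmp receiver, unless DONT_USE_BUILTIN_SETJMP>
         dispatch = fc.call_site
         exc_ptr  = fc.data[0]
         filter   = fc.data[1]
         if (dispatch == D2) goto post_landing_pad (region 2)
         if (dispatch == D3) goto post_landing_pad (region 3)
         ... fall through to the first reachable region's pad  */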

static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = xcalloc (cfun->eh->last_region_number + 1,
		     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (eh)
	rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}

static hashval_t
ehl_hash (const void *pentry)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}

static int
ehl_eq (const void *pentry, const void *pdata)
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
	{
	  cfun->eh->region_array[i] = outer;
	}
    }

  if (outer)
    {
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      gcc_assert (try->type == ERT_TRY);

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}
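/* For instance (a sketch): splicing out region R from

	outer -> R -> { i1, i2 }	(i1, i2 are R's inner regions)

   leaves

	outer -> { i1, i2 }		with i1->outer == i2->outer == outer

   while outer->aka accumulates R's region number (and R's own akas), so
   that stale REG_EH_REGION notes still resolve through region_array.  */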
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
		 (void *) &callback);
}

static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}
/* Invoke CALLBACK for every exception region in the current function.  */

void
for_each_eh_region (void (*callback) (struct eh_region *))
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region)
	(*callback) (region);
    }
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
  bool saw_any_handlers;
};
/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (tree handled, tree type)
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) == type)
	  return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
	  return 1;
    }

  return 0;
}
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (struct reachable_info *info,
		       struct eh_region *lp_region, struct eh_region *region)
{
  if (! info)
    return;

  info->saw_any_handlers = true;

  if (cfun->eh->built_landing_pads)
    info->callback (lp_region, info->callback_data);
  else
    info->callback (region, info->callback_data);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
		      struct reachable_info *info)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previously, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->saw_any_handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      gcc_unreachable ();
      break;
    default:
      gcc_unreachable ();
    }
}
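/* The reachable_code values used above form an ordering (see the enum
   declared earlier in this file): RNL_NOT_CAUGHT < RNL_MAYBE_CAUGHT
   < RNL_CAUGHT < RNL_BLOCKED.  Callers walk regions outward and stop
   once the result reaches RNL_CAUGHT, since the exception can then no
   longer escape.  */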
/* Invoke CALLBACK on each region reachable from REGION_NUMBER.  */

void
foreach_reachable_handler (int region_number, bool is_resx,
			   void (*callback) (struct eh_region *, void *),
			   void *callback_data)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;

  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
	break;
      /* If we have processed one cleanup, there is no point in
	 processing any more of them.  Each cleanup will have an edge
	 to the next outer cleanup region, so the flow graph will be
	 accurate.  */
      if (region->type == ERT_CLEANUP)
	region = region->u.cleanup.prev_try;
      else
	region = region->outer;
    }
}
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

static void
arh_to_landing_pad (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  if (! *p_handlers)
    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
}

static void
arh_to_label (struct eh_region *region, void *data)
{
  rtx *p_handlers = data;
  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
}

rtx
reachable_handlers (rtx insn)
{
  bool is_resx = false;
  rtx handlers = NULL;
  int region_number;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      region_number = XINT (PATTERN (insn), 0);
      is_resx = true;
    }
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  foreach_reachable_handler (region_number, is_resx,
			     (cfun->eh->built_landing_pads
			      ? arh_to_landing_pad
			      : arh_to_label),
			     &handlers);

  return handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal_1 (int region_number)
{
  struct eh_region *region;
  tree type_thrown;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
bool
can_throw_internal (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_internal_1 (XINT (PATTERN (insn), 0));

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external_1 (int region_number)
{
  struct eh_region *region;
  tree type_thrown;

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
bool
can_throw_external (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)));
}
/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls.  */

void
set_nothrow_function_flags (void)
{
  rtx insn;

  TREE_NOTHROW (current_function_decl) = 1;

  /* Assume cfun->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  cfun->all_throwers_are_sibcalls = 1;

  if (! flag_exceptions)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	TREE_NOTHROW (current_function_decl) = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    cfun->all_throwers_are_sibcalls = 0;
	    return;
	  }
      }

  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
	TREE_NOTHROW (current_function_decl) = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    cfun->all_throwers_are_sibcalls = 0;
	    return;
	  }
      }
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
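/* The code emitted above has the shape (a sketch):

	goto around;
     ehr_label:
	clobber return register
	EH_RETURN_STACKADJ_RTX = ehr_stackadj
	eh_return pattern, or EH_RETURN_HANDLER_RTX = ehr_handler
     around:

   so the normal return path jumps over the EH-return sequence and only
   __builtin_eh_return reaches ehr_label.  */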
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do
     this also for consistency.  */
  extend = 1;
#endif

  return convert_modes (word_mode, ptr_mode, addr, extend);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};
static int
action_record_eq (const void *pentry, const void *pdata)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (const void *pentry)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
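/* A worked example of the displacement arithmetic (a sketch; the byte
   values assume single-byte sleb128 encodings):

     Record A, filter 1, next 0:  offset_A = 1; bytes pushed: 01 00.
     Record B, filter 2, chained to A (next = offset_A = 1):
       offset_B = 3; after pushing the filter the array holds 3 bytes,
       so the stored link is 1 - (3 + 1) = -3, encoded as 7d.

   The stored value is thus a displacement from B's link byte (1-based
   position 4) back to A's filter byte: -3 + 4 = 1 = offset_A.  */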
static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}
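/* For instance (a sketch): a call in "try { ... } catch (T)" where the
   try sits inside a cleanup region.  The cleanup-only outer path
   compresses to the zero action, so the ERT_TRY case above replaces it
   with an explicit record (filter 0, next 0), and the resulting chain

     (filter_T) -> (0, end)

   tells the unwinder to match T first and otherwise run cleanups.  */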
static int
add_call_site (rtx landing_pad, int action)
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    if (! (CALL_P (insn)
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
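/* After this pass the insn stream contains ranges such as (a sketch):

     NOTE_INSN_EH_REGION_BEG <call-site N>
       ... insns sharing one action chain and landing pad ...
     NOTE_INSN_EH_REGION_END <call-site N>

   where N indexes the call-site table built by add_call_site, not the
   original region numbers.  */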
static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
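/* Worked encodings (a sketch; the signed case assumes the usual
   arithmetic right shift of negative ints):

     push_uleb128 (624485) emits e5 8e 26
       624485 = (0x26 << 14) | (0x0e << 7) | 0x65, emitted low group
       first, with the high bit set on all but the last byte.

     push_sleb128 (-3) emits the single byte 7d
       -3 & 0x7f = 0x7d; after the shift value == -1 and bit 0x40 of
       the byte is set, so the loop stops with no continuation bit.  */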
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
				    current_function_func_begin_label,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab,
				      current_function_func_begin_label,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
			    current_function_func_begin_label,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab,
			      current_function_func_begin_label,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
static void
sjlj_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
{
  if (targetm.have_named_sections)
    {
      int flags;
#ifdef HAVE_LD_RO_RW_SECTION_MIXING
      int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

      flags = (! flag_pic
	       || ((tt_format & 0x70) != DW_EH_PE_absptr
		   && (tt_format & 0x70) != DW_EH_PE_aligned))
	      ? 0 : SECTION_WRITE;
#else
      flags = SECTION_WRITE;
#endif
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}
void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  targetm.asm_out.exception_section ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }
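  /* The do-while in the !HAVE_AS_LEB128 branch above is a fixed-point
     iteration: the padding needed to align the @TType data depends on
     the size of the uleb128 encoding of DISP, which in turn depends on
     the padding.  Since size_of_uleb128 is monotone in its argument and
     bounded, DISP stops changing after at most a few rounds.  */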
  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	value = const0_rtx;
      else
	{
	  struct cgraph_varpool_node *node;

	  type = lookup_type_for_runtime (type);
	  value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

	  /* Let cgraph know that the rtti decl is used.  Not all of the
	     paths below go through assemble_integer, which would take
	     care of this for us.  */
	  STRIP_NOPS (type);
	  if (TREE_CODE (type) == ADDR_EXPR)
	    {
	      type = TREE_OPERAND (type, 0);
	      if (TREE_CODE (type) == VAR_DECL)
		{
		  node = cgraph_varpool_node (type);
		  if (node)
		    cgraph_varpool_mark_needed_node (node);
		}
	    }
	  else
	    gcc_assert (TREE_CODE (type) == INTEGER_CST);
	}

      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);
}

#include "gt-except.h"