/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
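/* A rough sketch of how a front end drives the expansion entry points
   below when lowering `try { BODY } catch (T) { HANDLER }`.  This is
   illustrative only, pieced together from the functions in this file,
   not a verbatim excerpt from any front end:

	expand_eh_region_start ();	open the try region
	  ... expand BODY ...
	expand_start_all_catch ();	close it, mark it ERT_TRY
	expand_start_catch (T);		open an ERT_CATCH region
	  ... expand HANDLER ...
	expand_end_catch ();		close the catch region
	expand_end_all_catch ();	emit the continue label

   Cleanup, allowed-exception, must-not-throw, and throw regions are
   instead closed with the corresponding expand_eh_region_end_*
   routine.  */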
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static struct eh_region *expand_eh_region_end (void);

static rtx get_exception_filter (struct function *);

static void collect_eh_region_array (void);
static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
						struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled (tree, tree);
static void add_reachable_handler (struct reachable_info *,
				   struct eh_region *, struct eh_region *);
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function (void)
{
  cfun->eh = (struct eh_status *)
    ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start (void)
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block in
	 it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
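/* The insn stream produced above has roughly this shape (a sketch, not
   a verbatim RTL dump):

	  jump around_label
	region->label:
	  ... save exc_ptr/filter, expand HANDLER, restore ...
	  (resx N)		placeholder; see build_post_landing_pads
	  barrier
	around_label:

   so the cleanup body is reachable only via the exception path until
   the landing pads are wired up.  */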
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with several exception types,
   which is useful e.g. for Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Make sure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
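/* Taken together, the four routines above arrange the notes and labels
   for a try block roughly like this (a sketch for a single catch
   clause, assuming no other regions are opened in between):

	NOTE_INSN_EH_REGION_BEG M	from expand_eh_region_start
	  ... try body ...
	NOTE_INSN_EH_REGION_END M	from expand_start_all_catch
	  jump continue_label
	NOTE_INSN_EH_REGION_BEG M+1	from expand_start_catch
	c->label:
	  ... handler body ...
	NOTE_INSN_EH_REGION_END M+1	from expand_end_catch
	  jump continue_label
	continue_label:			from expand_end_all_catch  */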
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (void)
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}
/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_fixup_regions (void)
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup && cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}
void
convert_from_eh_region_ranges (void)
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = (struct ehl_map_entry *) ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
{
  struct eh_region *n
    = (struct eh_region *) ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}
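/* The combining step above is in effect a 5-bit left rotation of H
   (assuming the common 32-bit hashval_t; using + rather than | lets
   carries mix bits a little further), folding in each type's hash in
   turn, so list order affects the hash, consistent with the ordering
   caveat on ehspec_filter_eq above.  */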
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
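/* For reference, ULEB128 encodes an unsigned value 7 bits at a time,
   low-order group first, setting the high bit on every byte except the
   last; e.g. 624485 encodes as 0xE5 0x8E 0x26.  A sketch of what
   push_uleb128 (declared above, defined later in this file) must do:

	do {
	  unsigned char byte = value & 0x7f;
	  value >>= 7;
	  if (value)
	    byte |= 0x80;
	  VARRAY_PUSH_UCHAR (*data_area, byte);
	} while (value);
  */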
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
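/* Note that because tree_cons prepends, the filter_list built above
   ends up in reverse order relative to type_list; e.g. types (A, B)
   yield filters (flt_B, flt_A).  This is harmless: consumers such as
   build_post_landing_pads compare only against the filter values and
   walk the two lists in parallel merely to count entries.  */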
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insn_before (seq, region->post_landing_pad);
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
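/* So, for example, a function whose reachable regions have action
   indices { -2, -1, 5 } ends up with call-site indices
   { 0, -1, <index from add_call_site> } respectively.  The reserved
   values 0 and -1 cannot collide with real table entries, assuming
   add_call_site hands out indices starting at call_site_base (1).  */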
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
2029 /* Construct the SjLj_Function_Context. */
2031 static void
2032 sjlj_emit_function_enter (rtx dispatch_label)
2034 rtx fn_begin, fc, mem, seq;
2036 fc = cfun->eh->sjlj_fc;
2038 start_sequence ();
2040 /* We're storing this libcall's address into memory instead of
2041 calling it directly. Thus, we must call assemble_external_libcall
2042 here, as we can not depend on emit_library_call to do it for us. */
2043 assemble_external_libcall (eh_personality_libfunc);
2044 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2045 emit_move_insn (mem, eh_personality_libfunc);
2047 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2048 if (cfun->uses_eh_lsda)
2050 char buf[20];
2051 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2052 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2054 else
2055 emit_move_insn (mem, const0_rtx);
2057 #ifdef DONT_USE_BUILTIN_SETJMP
2059 rtx x, note;
2060 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2061 TYPE_MODE (integer_type_node), 1,
2062 plus_constant (XEXP (fc, 0),
2063 sjlj_fc_jbuf_ofs), Pmode);
2065 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
2066 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2068 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2069 TYPE_MODE (integer_type_node), 0, dispatch_label);
2071 #else
2072 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2073 dispatch_label);
2074 #endif
2076 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2077 1, XEXP (fc, 0), Pmode);
2079 seq = get_insns ();
2080 end_sequence ();
2082 /* ??? Instead of doing this at the beginning of the function,
2083 do this in a block that is at loop level 0 and dominates all
2084 can_throw_internal instructions. */
2086 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2087 if (GET_CODE (fn_begin) == NOTE
2088 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2089 break;
2090 emit_insn_after (seq, fn_begin);
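/* Rough sketch (added) of the runtime object initialized above, on the
   assumption that it mirrors the libgcc unwind-sjlj function context;
   the field names below are invented, and the authoritative layout is
   whatever sjlj_fc_type_node and the sjlj_fc_*_ofs offsets describe.  */

struct sjlj_fc_sketch
{
  struct sjlj_fc_sketch *prev; /* chained by the register/unregister calls */
  int call_site;               /* updated before each throwing insn */
  unsigned long data[4];       /* exc_ptr and filter are read back from here */
  void *personality;           /* eh_personality_libfunc is stored here */
  void *lsda;                  /* the LLSDA label, or null */
  void *jbuf[1];               /* setjmp buffer; real size is target-defined */
};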
2093 /* Call back from expand_function_end to know where we should put
2094 the call to unwind_sjlj_unregister_libfunc if needed. */
2096 void
2097 sjlj_emit_function_exit_after (rtx after)
2099 cfun->eh->sjlj_exit_after = after;
2102 static void
2103 sjlj_emit_function_exit (void)
2105 rtx seq;
2107 start_sequence ();
2109 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2110 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2112 seq = get_insns ();
2113 end_sequence ();
2115 /* ??? Really this can be done in any block at loop level 0 that
2116 post-dominates all can_throw_internal instructions. This is
2117 the last possible moment. */
2119 emit_insn_after (seq, cfun->eh->sjlj_exit_after);
2122 static void
2123 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2125 int i, first_reachable;
2126 rtx mem, dispatch, seq, fc;
2128 fc = cfun->eh->sjlj_fc;
2130 start_sequence ();
2132 emit_label (dispatch_label);
2134 #ifndef DONT_USE_BUILTIN_SETJMP
2135 expand_builtin_setjmp_receiver (dispatch_label);
2136 #endif
2138 /* Load up dispatch index, exc_ptr and filter values from the
2139 function context. */
2140 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2141 sjlj_fc_call_site_ofs);
2142 dispatch = copy_to_reg (mem);
2144 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2145 if (word_mode != Pmode)
2147 #ifdef POINTERS_EXTEND_UNSIGNED
2148 mem = convert_memory_address (Pmode, mem);
2149 #else
2150 mem = convert_to_mode (Pmode, mem, 0);
2151 #endif
2153 emit_move_insn (cfun->eh->exc_ptr, mem);
2155 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2156 emit_move_insn (cfun->eh->filter, mem);
2158 /* Jump to one of the directly reachable regions. */
2159 /* ??? This really ought to be using a switch statement. */
2161 first_reachable = 0;
2162 for (i = cfun->eh->last_region_number; i > 0; --i)
2164 if (! lp_info[i].directly_reachable)
2165 continue;
2167 if (! first_reachable)
2169 first_reachable = i;
2170 continue;
2173 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2174 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2175 cfun->eh->region_array[i]->post_landing_pad);
2178 seq = get_insns ();
2179 end_sequence ();
2181 emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
2182 ->post_landing_pad));
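/* Illustrative sketch (added): the compare-and-jump chain emitted above
   behaves like the C switch the ??? comment wishes for.  The index and
   region numbers here are invented; the first reachable region acts as
   the fall-through default.  */

static int
sjlj_dispatch_sketch (int dispatch_index)
{
  switch (dispatch_index)
    {
    case 1: return 2;  /* post landing pad of the region with index 1 */
    case 2: return 3;  /* post landing pad of the region with index 2 */
    default: return 1; /* fall through to the first reachable region */
    }
}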
2185 static void
2186 sjlj_build_landing_pads (void)
2188 struct sjlj_lp_info *lp_info;
2190 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2191 sizeof (struct sjlj_lp_info));
2193 if (sjlj_find_directly_reachable_regions (lp_info))
2195 rtx dispatch_label = gen_label_rtx ();
2197 cfun->eh->sjlj_fc
2198 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2199 int_size_in_bytes (sjlj_fc_type_node),
2200 TYPE_ALIGN (sjlj_fc_type_node));
2202 sjlj_assign_call_site_values (dispatch_label, lp_info);
2203 sjlj_mark_call_sites (lp_info);
2205 sjlj_emit_function_enter (dispatch_label);
2206 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2207 sjlj_emit_function_exit ();
2210 free (lp_info);
2213 void
2214 finish_eh_generation (void)
2216 /* Nothing to do if no regions created. */
2217 if (cfun->eh->region_tree == NULL)
2218 return;
2220 /* The object here is to provide find_basic_blocks with detailed
2221 information (via reachable_handlers) on how exception control
2222 flows within the function. In this first pass, we can include
2223 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2224 regions, and hope that it will be useful in deleting unreachable
2225 handlers. Subsequently, we will generate landing pads which will
2226 connect many of the handlers, and then type information will not
2227 be effective. Still, this is a win over previous implementations. */
2229 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2231 /* These registers are used by the landing pads. Make sure they
2232 have been generated. */
2233 get_exception_pointer (cfun);
2234 get_exception_filter (cfun);
2236 /* Construct the landing pads. */
2238 assign_filter_values ();
2239 build_post_landing_pads ();
2240 connect_post_landing_pads ();
2241 if (USING_SJLJ_EXCEPTIONS)
2242 sjlj_build_landing_pads ();
2243 else
2244 dw2_build_landing_pads ();
2246 cfun->eh->built_landing_pads = 1;
2248 /* We've totally changed the CFG. Start over. */
2249 find_exception_handler_labels ();
2250 rebuild_jump_labels (get_insns ());
2251 find_basic_blocks (get_insns (), max_reg_num (), 0);
2252 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2255 static hashval_t
2256 ehl_hash (const void *pentry)
2258 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2260 /* 2^32 * ((sqrt(5) - 1) / 2) */
2261 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2262 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
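/* Standalone illustration (added) of the multiplicative hash above:
   scaling by 0x9e3779b9, i.e. 2^32 times the golden-ratio fraction,
   spreads consecutive label numbers evenly across the hash space.  */

static unsigned int
golden_ratio_hash_sketch (unsigned int key)
{
  /* Nearby keys (1, 2, 3, ...) map to widely separated hash values.  */
  return key * 0x9e3779b9u;
}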
2265 static int
2266 ehl_eq (const void *pentry, const void *pdata)
2268 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2269 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2271 return entry->label == data->label;
2274 /* This section handles removing dead code for flow. */
2276 /* Remove LABEL from exception_handler_label_map. */
2278 static void
2279 remove_exception_handler_label (rtx label)
2281 struct ehl_map_entry **slot, tmp;
2283 /* If exception_handler_label_map was not built yet,
2284 there is nothing to do. */
2285 if (cfun->eh->exception_handler_label_map == NULL)
2286 return;
2288 tmp.label = label;
2289 slot = (struct ehl_map_entry **)
2290 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2291 if (! slot)
2292 abort ();
2294 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2297 /* Splice REGION from the region tree etc. */
2299 static void
2300 remove_eh_handler (struct eh_region *region)
2302 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2303 rtx lab;
2305 /* For the benefit of efficiently handling REG_EH_REGION notes,
2306 replace this region in the region array with its containing
2307 region. Note that previous region deletions may result in
2308 multiple copies of this region in the array, so we have a
2309 list of alternate numbers by which we are known. */
2311 outer = region->outer;
2312 cfun->eh->region_array[region->region_number] = outer;
2313 if (region->aka)
2315 int i;
2316 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2317 { cfun->eh->region_array[i] = outer; });
2320 if (outer)
2322 if (!outer->aka)
2323 outer->aka = BITMAP_GGC_ALLOC ();
2324 if (region->aka)
2325 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2326 bitmap_set_bit (outer->aka, region->region_number);
2329 if (cfun->eh->built_landing_pads)
2330 lab = region->landing_pad;
2331 else
2332 lab = region->label;
2333 if (lab)
2334 remove_exception_handler_label (lab);
2336 if (outer)
2337 pp_start = &outer->inner;
2338 else
2339 pp_start = &cfun->eh->region_tree;
2340 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2341 continue;
2342 *pp = region->next_peer;
2344 inner = region->inner;
2345 if (inner)
2347 for (p = inner; p->next_peer ; p = p->next_peer)
2348 p->outer = outer;
2349 p->outer = outer;
2351 p->next_peer = *pp_start;
2352 *pp_start = inner;
2355 if (region->type == ERT_CATCH)
2357 struct eh_region *try, *next, *prev;
2359 for (try = region->next_peer;
2360 try->type == ERT_CATCH;
2361 try = try->next_peer)
2362 continue;
2363 if (try->type != ERT_TRY)
2364 abort ();
2366 next = region->u.catch.next_catch;
2367 prev = region->u.catch.prev_catch;
2369 if (next)
2370 next->u.catch.prev_catch = prev;
2371 else
2372 try->u.try.last_catch = prev;
2373 if (prev)
2374 prev->u.catch.next_catch = next;
2375 else
2377 try->u.try.catch = next;
2378 if (! next)
2379 remove_eh_handler (try);
2384 /* LABEL heads a basic block that is about to be deleted. If this
2385 label corresponds to an exception region, we may be able to
2386 delete the region. */
2388 void
2389 maybe_remove_eh_handler (rtx label)
2391 struct ehl_map_entry **slot, tmp;
2392 struct eh_region *region;
2394 /* ??? After generating landing pads, it's not so simple to determine
2395 if the region data is completely unused. One must examine the
2396 landing pad and the post landing pad, and whether an inner try block
2397 is referencing the catch handlers directly. */
2398 if (cfun->eh->built_landing_pads)
2399 return;
2401 tmp.label = label;
2402 slot = (struct ehl_map_entry **)
2403 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2404 if (! slot)
2405 return;
2406 region = (*slot)->region;
2407 if (! region)
2408 return;
2410 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2411 because there is no path to the fallback call to terminate.
2412 But the region continues to affect call-site data until there
2413 are no more contained calls, which we don't see here. */
2414 if (region->type == ERT_MUST_NOT_THROW)
2416 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2417 region->label = NULL_RTX;
2419 else
2420 remove_eh_handler (region);
2423 /* Invokes CALLBACK for every exception handler label. Only used by old
2424 loop hackery; should not be used by new code. */
2426 void
2427 for_each_eh_label (void (*callback) (rtx))
2429 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2430 (void *)callback);
2433 static int
2434 for_each_eh_label_1 (void **pentry, void *data)
2436 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2437 void (*callback) (rtx) = (void (*) (rtx)) data;
2439 (*callback) (entry->label);
2440 return 1;
2443 /* This section describes CFG exception edges for flow. */
2445 /* For communicating between calls to reachable_next_level. */
2446 struct reachable_info GTY(())
2448 tree types_caught;
2449 tree types_allowed;
2450 rtx handlers;
2453 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2454 base class of TYPE, is in HANDLED. */
2456 static int
2457 check_handled (tree handled, tree type)
2459 tree t;
2461 /* We can check for exact matches without front-end help. */
2462 if (! lang_eh_type_covers)
2464 for (t = handled; t ; t = TREE_CHAIN (t))
2465 if (TREE_VALUE (t) == type)
2466 return 1;
2468 else
2470 for (t = handled; t ; t = TREE_CHAIN (t))
2471 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2472 return 1;
2475 return 0;
2478 /* A subroutine of reachable_next_level. If we are collecting a list
2479 of handlers, add one. After landing pad generation, reference
2480 it instead of the handlers themselves. Further, the handlers are
2481 all wired together, so by referencing one, we've got them all.
2482 Before landing pad generation we reference each handler individually.
2484 LP_REGION contains the landing pad; REGION is the handler. */
2486 static void
2487 add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region, struct eh_region *region)
2489 if (! info)
2490 return;
2492 if (cfun->eh->built_landing_pads)
2494 if (! info->handlers)
2495 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2497 else
2498 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2501 /* Process one level of exception regions for reachability.
2502 If TYPE_THROWN is non-null, then it is the *exact* type being
2503 propagated. If INFO is non-null, then collect handler labels
2504 and caught/allowed type information between invocations. */
2506 static enum reachable_code
2507 reachable_next_level (struct eh_region *region, tree type_thrown,
2508 struct reachable_info *info)
2510 switch (region->type)
2512 case ERT_CLEANUP:
2513 /* Before landing-pad generation, we model control flow
2514 directly to the individual handlers. In this way we can
2515 see that catch handler types may shadow one another. */
2516 add_reachable_handler (info, region, region);
2517 return RNL_MAYBE_CAUGHT;
2519 case ERT_TRY:
2521 struct eh_region *c;
2522 enum reachable_code ret = RNL_NOT_CAUGHT;
2524 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2526 /* A catch-all handler ends the search. */
2527 if (c->u.catch.type_list == NULL)
2529 add_reachable_handler (info, region, c);
2530 return RNL_CAUGHT;
2533 if (type_thrown)
2535 /* If we have at least one type match, end the search. */
2536 tree tp_node = c->u.catch.type_list;
2538 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2540 tree type = TREE_VALUE (tp_node);
2542 if (type == type_thrown
2543 || (lang_eh_type_covers
2544 && (*lang_eh_type_covers) (type, type_thrown)))
2546 add_reachable_handler (info, region, c);
2547 return RNL_CAUGHT;
2551 /* If we have definitive information about a match failure,
2552 the catch won't trigger. */
2553 if (lang_eh_type_covers)
2554 return RNL_NOT_CAUGHT;
2557 /* At this point, we either don't know what type is thrown or
2558 don't have front-end assistance to help deciding if it is
2559 covered by one of the types in the list for this region.
2561 We'd then like to add this region to the list of reachable
2562 handlers since it is indeed potentially reachable based on the
2563 information we have.
2565 Actually, this handler is for sure not reachable if all the
2566 types it matches have already been caught. That is, it is only
2567 potentially reachable if at least one of the types it catches
2568 has not been previously caught. */
2570 if (! info)
2571 ret = RNL_MAYBE_CAUGHT;
2572 else
2574 tree tp_node = c->u.catch.type_list;
2575 bool maybe_reachable = false;
2577 /* Compute the potential reachability of this handler and
2578 update the list of types caught at the same time. */
2579 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2581 tree type = TREE_VALUE (tp_node);
2583 if (! check_handled (info->types_caught, type))
2585 info->types_caught
2586 = tree_cons (NULL, type, info->types_caught);
2588 maybe_reachable = true;
2592 if (maybe_reachable)
2594 add_reachable_handler (info, region, c);
2596 /* ??? If the catch type is a base class of every allowed
2597 type, then we know we can stop the search. */
2598 ret = RNL_MAYBE_CAUGHT;
2603 return ret;
2606 case ERT_ALLOWED_EXCEPTIONS:
2607 /* An empty list of types definitely ends the search. */
2608 if (region->u.allowed.type_list == NULL_TREE)
2610 add_reachable_handler (info, region, region);
2611 return RNL_CAUGHT;
2614 /* Collect a list of lists of allowed types for use in detecting
2615 when a catch may be transformed into a catch-all. */
2616 if (info)
2617 info->types_allowed = tree_cons (NULL_TREE,
2618 region->u.allowed.type_list,
2619 info->types_allowed);
2621 /* If we have definitive information about the type hierarchy,
2622 then we can tell if the thrown type will pass through the
2623 filter. */
2624 if (type_thrown && lang_eh_type_covers)
2626 if (check_handled (region->u.allowed.type_list, type_thrown))
2627 return RNL_NOT_CAUGHT;
2628 else
2630 add_reachable_handler (info, region, region);
2631 return RNL_CAUGHT;
2635 add_reachable_handler (info, region, region);
2636 return RNL_MAYBE_CAUGHT;
2638 case ERT_CATCH:
2639 /* Catch regions are handled by their controlling try region. */
2640 return RNL_NOT_CAUGHT;
2642 case ERT_MUST_NOT_THROW:
2643 /* Here we end our search, since no exceptions may propagate.
2644 If we've previously touched down at some landing pad, then the
2645 explicit function call we generated may be used. Otherwise
2646 the call is made by the runtime. */
2647 if (info && info->handlers)
2649 add_reachable_handler (info, region, region);
2650 return RNL_CAUGHT;
2652 else
2653 return RNL_BLOCKED;
2655 case ERT_THROW:
2656 case ERT_FIXUP:
2657 case ERT_UNKNOWN:
2658 /* Shouldn't see these here. */
2659 break;
2662 abort ();
2665 /* Retrieve a list of labels of exception handlers which can be
2666 reached by a given insn. */
2668 rtx
2669 reachable_handlers (rtx insn)
2671 struct reachable_info info;
2672 struct eh_region *region;
2673 tree type_thrown;
2674 int region_number;
2676 if (GET_CODE (insn) == JUMP_INSN
2677 && GET_CODE (PATTERN (insn)) == RESX)
2678 region_number = XINT (PATTERN (insn), 0);
2679 else
2681 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2682 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2683 return NULL;
2684 region_number = INTVAL (XEXP (note, 0));
2687 memset (&info, 0, sizeof (info));
2689 region = cfun->eh->region_array[region_number];
2691 type_thrown = NULL_TREE;
2692 if (GET_CODE (insn) == JUMP_INSN
2693 && GET_CODE (PATTERN (insn)) == RESX)
2695 /* A RESX leaves a region instead of entering it. Thus the
2696 region itself may have been deleted out from under us. */
2697 if (region == NULL)
2698 return NULL;
2699 region = region->outer;
2701 else if (region->type == ERT_THROW)
2703 type_thrown = region->u.throw.type;
2704 region = region->outer;
2707 while (region)
2709 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2710 break;
2711 /* If we have processed one cleanup, there is no point in
2712 processing any more of them. Each cleanup will have an edge
2713 to the next outer cleanup region, so the flow graph will be
2714 accurate. */
2715 if (region->type == ERT_CLEANUP)
2716 region = region->u.cleanup.prev_try;
2717 else
2718 region = region->outer;
2721 return info.handlers;
2724 /* Determine if the given INSN can throw an exception that is caught
2725 within the function. */
2727 bool
2728 can_throw_internal (rtx insn)
2730 struct eh_region *region;
2731 tree type_thrown;
2732 rtx note;
2734 if (! INSN_P (insn))
2735 return false;
2737 if (GET_CODE (insn) == INSN
2738 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2739 insn = XVECEXP (PATTERN (insn), 0, 0);
2741 if (GET_CODE (insn) == CALL_INSN
2742 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2744 int i;
2745 for (i = 0; i < 3; ++i)
2747 rtx sub = XEXP (PATTERN (insn), i);
2748 for (; sub ; sub = NEXT_INSN (sub))
2749 if (can_throw_internal (sub))
2750 return true;
2752 return false;
2755 /* Every insn that might throw has an EH_REGION note. */
2756 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2757 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2758 return false;
2760 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2762 type_thrown = NULL_TREE;
2763 if (region->type == ERT_THROW)
2765 type_thrown = region->u.throw.type;
2766 region = region->outer;
2769 /* If this exception is ignored by each and every containing region,
2770 then control passes straight out. The runtime may handle some
2771 regions, which also do not require processing internally. */
2772 for (; region; region = region->outer)
2774 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2775 if (how == RNL_BLOCKED)
2776 return false;
2777 if (how != RNL_NOT_CAUGHT)
2778 return true;
2781 return false;
2784 /* Determine if the given INSN can throw an exception that is
2785 visible outside the function. */
2787 bool
2788 can_throw_external (rtx insn)
2790 struct eh_region *region;
2791 tree type_thrown;
2792 rtx note;
2794 if (! INSN_P (insn))
2795 return false;
2797 if (GET_CODE (insn) == INSN
2798 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2799 insn = XVECEXP (PATTERN (insn), 0, 0);
2801 if (GET_CODE (insn) == CALL_INSN
2802 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2804 int i;
2805 for (i = 0; i < 3; ++i)
2807 rtx sub = XEXP (PATTERN (insn), i);
2808 for (; sub ; sub = NEXT_INSN (sub))
2809 if (can_throw_external (sub))
2810 return true;
2812 return false;
2815 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2816 if (!note)
2818 /* Calls (and trapping insns) without notes are outside any
2819 exception handling region in this function. We have to
2820 assume it might throw. Given that the front end and middle
2821 ends mark known NOTHROW functions, this isn't so wildly
2822 inaccurate. */
2823 return (GET_CODE (insn) == CALL_INSN
2824 || (flag_non_call_exceptions
2825 && may_trap_p (PATTERN (insn))));
2827 if (INTVAL (XEXP (note, 0)) <= 0)
2828 return false;
2830 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2832 type_thrown = NULL_TREE;
2833 if (region->type == ERT_THROW)
2835 type_thrown = region->u.throw.type;
2836 region = region->outer;
2839 /* If the exception is caught or blocked by any containing region,
2840 then it is not seen by any calling function. */
2841 for (; region ; region = region->outer)
2842 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2843 return false;
2845 return true;
2848 /* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
2850 void
2851 set_nothrow_function_flags (void)
2853 rtx insn;
2855 current_function_nothrow = 1;
2857 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2858 something that can throw an exception. We specifically exempt
2859 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2860 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2861 is optimistic. */
2863 cfun->all_throwers_are_sibcalls = 1;
2865 if (! flag_exceptions)
2866 return;
2868 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2869 if (can_throw_external (insn))
2871 current_function_nothrow = 0;
2873 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2875 cfun->all_throwers_are_sibcalls = 0;
2876 return;
2880 for (insn = current_function_epilogue_delay_list; insn;
2881 insn = XEXP (insn, 1))
2882 if (can_throw_external (insn))
2884 current_function_nothrow = 0;
2886 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2888 cfun->all_throwers_are_sibcalls = 0;
2889 return;
2895 /* Various hooks for unwind library. */
2897 /* Do any necessary initialization to access arbitrary stack frames.
2898 On the SPARC, this means flushing the register windows. */
2900 void
2901 expand_builtin_unwind_init (void)
2903 /* Set this so all the registers get saved in our frame; we need to be
2904 able to copy the saved values for any registers from frames we unwind. */
2905 current_function_has_nonlocal_label = 1;
2907 #ifdef SETUP_FRAME_ADDRESSES
2908 SETUP_FRAME_ADDRESSES ();
2909 #endif
2912 rtx
2913 expand_builtin_eh_return_data_regno (tree arglist)
2915 tree which = TREE_VALUE (arglist);
2916 unsigned HOST_WIDE_INT iwhich;
2918 if (TREE_CODE (which) != INTEGER_CST)
2920 error ("argument of `__builtin_eh_return_regno' must be constant");
2921 return constm1_rtx;
2924 iwhich = tree_low_cst (which, 1);
2925 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2926 if (iwhich == INVALID_REGNUM)
2927 return constm1_rtx;
2929 #ifdef DWARF_FRAME_REGNUM
2930 iwhich = DWARF_FRAME_REGNUM (iwhich);
2931 #else
2932 iwhich = DBX_REGISTER_NUMBER (iwhich);
2933 #endif
2935 return GEN_INT (iwhich);
2938 /* Given a value extracted from the return address register or stack slot,
2939 return the actual address encoded in that value. */
2941 rtx
2942 expand_builtin_extract_return_addr (tree addr_tree)
2944 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2946 if (GET_MODE (addr) != Pmode
2947 && GET_MODE (addr) != VOIDmode)
2949 #ifdef POINTERS_EXTEND_UNSIGNED
2950 addr = convert_memory_address (Pmode, addr);
2951 #else
2952 addr = convert_to_mode (Pmode, addr, 0);
2953 #endif
2956 /* First mask out any unwanted bits. */
2957 #ifdef MASK_RETURN_ADDR
2958 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2959 #endif
2961 /* Then adjust to find the real return address. */
2962 #if defined (RETURN_ADDR_OFFSET)
2963 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2964 #endif
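/* Worked example (added): on a hypothetical target defining
   RETURN_ADDR_OFFSET as 8, a saved value of 0x1000 decodes to the real
   return address 0x1008 here, and expand_builtin_frob_return_addr
   below applies the inverse bias.  */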
2966 return addr;
2969 /* Given an actual address in addr_tree, do any necessary encoding
2970 and return the value to be stored in the return address register or
2971 stack slot so the epilogue will return to that address. */
2973 rtx
2974 expand_builtin_frob_return_addr (tree addr_tree)
2976 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2978 #ifdef POINTERS_EXTEND_UNSIGNED
2979 if (GET_MODE (addr) != Pmode)
2980 addr = convert_memory_address (Pmode, addr);
2981 #endif
2983 #ifdef RETURN_ADDR_OFFSET
2984 addr = force_reg (Pmode, addr);
2985 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2986 #endif
2988 return addr;
2991 /* Set up the epilogue with the magic bits we'll need to return to the
2992 exception handler. */
2994 void
2995 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2996 tree handler_tree)
2998 rtx tmp;
3000 #ifdef EH_RETURN_STACKADJ_RTX
3001 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3002 #ifdef POINTERS_EXTEND_UNSIGNED
3003 if (GET_MODE (tmp) != Pmode)
3004 tmp = convert_memory_address (Pmode, tmp);
3005 #endif
3006 if (!cfun->eh->ehr_stackadj)
3007 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
3008 else if (tmp != cfun->eh->ehr_stackadj)
3009 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
3010 #endif
3012 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3013 #ifdef POINTERS_EXTEND_UNSIGNED
3014 if (GET_MODE (tmp) != Pmode)
3015 tmp = convert_memory_address (Pmode, tmp);
3016 #endif
3017 if (!cfun->eh->ehr_handler)
3018 cfun->eh->ehr_handler = copy_to_reg (tmp);
3019 else if (tmp != cfun->eh->ehr_handler)
3020 emit_move_insn (cfun->eh->ehr_handler, tmp);
3022 if (!cfun->eh->ehr_label)
3023 cfun->eh->ehr_label = gen_label_rtx ();
3024 emit_jump (cfun->eh->ehr_label);
3027 void
3028 expand_eh_return (void)
3030 rtx around_label;
3032 if (! cfun->eh->ehr_label)
3033 return;
3035 current_function_calls_eh_return = 1;
3037 #ifdef EH_RETURN_STACKADJ_RTX
3038 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3039 #endif
3041 around_label = gen_label_rtx ();
3042 emit_jump (around_label);
3044 emit_label (cfun->eh->ehr_label);
3045 clobber_return_register ();
3047 #ifdef EH_RETURN_STACKADJ_RTX
3048 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3049 #endif
3051 #ifdef HAVE_eh_return
3052 if (HAVE_eh_return)
3053 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3054 else
3055 #endif
3057 #ifdef EH_RETURN_HANDLER_RTX
3058 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3059 #else
3060 error ("__builtin_eh_return not supported on this target");
3061 #endif
3064 emit_label (around_label);
3067 /* In the following functions, we represent entries in the action table
3068 as 1-based indices. Special cases are:
3070 0: null action record, non-null landing pad; implies cleanups
3071 -1: null action record, null landing pad; implies no action
3072 -2: no call-site entry; implies must_not_throw
3073 -3: we have yet to process outer regions
3075 Further, no special cases apply to the "next" field of the record.
3076 For next, 0 means end of list. */
3078 struct action_record
3080 int offset;
3081 int filter;
3082 int next;
3085 static int
3086 action_record_eq (const void *pentry, const void *pdata)
3088 const struct action_record *entry = (const struct action_record *) pentry;
3089 const struct action_record *data = (const struct action_record *) pdata;
3090 return entry->filter == data->filter && entry->next == data->next;
3093 static hashval_t
3094 action_record_hash (const void *pentry)
3096 const struct action_record *entry = (const struct action_record *) pentry;
3097 return entry->next * 1009 + entry->filter;
3100 static int
3101 add_action_record (htab_t ar_hash, int filter, int next)
3103 struct action_record **slot, *new, tmp;
3105 tmp.filter = filter;
3106 tmp.next = next;
3107 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3109 if ((new = *slot) == NULL)
3111 new = (struct action_record *) xmalloc (sizeof (*new));
3112 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3113 new->filter = filter;
3114 new->next = next;
3115 *slot = new;
3117 /* The filter value goes in untouched. The link to the next
3118 record is a "self-relative" byte offset, or zero to indicate
3119 that there is no next record. So convert the absolute 1-based
3120 indices we've been carrying around into a displacement. */
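/* Worked example (added): if this record begins at 1-based offset 5
   and its filter encodes in one byte, the "next" field lands at
   offset 6; a link to a record at absolute offset 3 is therefore
   stored as the displacement 3 - 6 = -3.  */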
3122 push_sleb128 (&cfun->eh->action_record_data, filter);
3123 if (next)
3124 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3125 push_sleb128 (&cfun->eh->action_record_data, next);
3128 return new->offset;
3131 static int
3132 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3134 struct eh_region *c;
3135 int next;
3137 /* If we've reached the top of the region chain, then we have
3138 no actions, and require no landing pad. */
3139 if (region == NULL)
3140 return -1;
3142 switch (region->type)
3144 case ERT_CLEANUP:
3145 /* A cleanup adds a zero filter to the beginning of the chain, but
3146 there are special cases to look out for. If there are *only*
3147 cleanups along a path, then it compresses to a zero action.
3148 Further, if there are multiple cleanups along a path, we only
3149 need to represent one of them, as that is enough to trigger
3150 entry to the landing pad at runtime. */
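/* For illustration (added): a cleanup with nothing outer returns 0
   (landing pad, but no action record); a cleanup underneath a catch
   contributes at most one zero-filter record to the chain, no matter
   how many cleanups are stacked along the path.  */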
3151 next = collect_one_action_chain (ar_hash, region->outer);
3152 if (next <= 0)
3153 return 0;
3154 for (c = region->outer; c ; c = c->outer)
3155 if (c->type == ERT_CLEANUP)
3156 return next;
3157 return add_action_record (ar_hash, 0, next);
3159 case ERT_TRY:
3160 /* Process the associated catch regions in reverse order.
3161 If there's a catch-all handler, then we don't need to
3162 search outer regions. Use a magic -3 value to record
3163 that we haven't done the outer search. */
3164 next = -3;
3165 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3167 if (c->u.catch.type_list == NULL)
3169 /* Retrieve the filter from the head of the filter list
3170 where we have stored it (see assign_filter_values). */
3171 int filter
3172 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3174 next = add_action_record (ar_hash, filter, 0);
3176 else
3178 /* Once the outer search is done, trigger an action record for
3179 each filter we have. */
3180 tree flt_node;
3182 if (next == -3)
3184 next = collect_one_action_chain (ar_hash, region->outer);
3186 /* If there is no next action, terminate the chain. */
3187 if (next == -1)
3188 next = 0;
3189 /* If all outer actions are cleanups or must_not_throw,
3190 we'll have no action record for it, since we had wanted
3191 to encode these states in the call-site record directly.
3192 Add a cleanup action to the chain to catch these. */
3193 else if (next <= 0)
3194 next = add_action_record (ar_hash, 0, 0);
3197 flt_node = c->u.catch.filter_list;
3198 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3200 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3201 next = add_action_record (ar_hash, filter, next);
3205 return next;
3207 case ERT_ALLOWED_EXCEPTIONS:
3208 /* An exception specification adds its filter to the
3209 beginning of the chain. */
3210 next = collect_one_action_chain (ar_hash, region->outer);
3211 return add_action_record (ar_hash, region->u.allowed.filter,
3212 next < 0 ? 0 : next);
3214 case ERT_MUST_NOT_THROW:
3215 /* A must-not-throw region with no inner handlers or cleanups
3216 requires no call-site entry. Note that this differs from
3217 the no handler or cleanup case in that we do require an lsda
3218 to be generated. Return a magic -2 value to record this. */
3219 return -2;
3221 case ERT_CATCH:
3222 case ERT_THROW:
3223 /* CATCH regions are handled in TRY above. THROW regions are
3224 for optimization information only and produce no output. */
3225 return collect_one_action_chain (ar_hash, region->outer);
3227 default:
3228 abort ();
3232 static int
3233 add_call_site (rtx landing_pad, int action)
3235 struct call_site_record *data = cfun->eh->call_site_data;
3236 int used = cfun->eh->call_site_data_used;
3237 int size = cfun->eh->call_site_data_size;
3239 if (used >= size)
3241 size = (size ? size * 2 : 64);
3242 data = (struct call_site_record *)
3243 ggc_realloc (data, sizeof (*data) * size);
3244 cfun->eh->call_site_data = data;
3245 cfun->eh->call_site_data_size = size;
3248 data[used].landing_pad = landing_pad;
3249 data[used].action = action;
3251 cfun->eh->call_site_data_used = used + 1;
3253 return used + call_site_base;
3256 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3257 The new note numbers will not refer to region numbers, but
3258 instead to call site entries. */
3260 void
3261 convert_to_eh_region_ranges (void)
3263 rtx insn, iter, note;
3264 htab_t ar_hash;
3265 int last_action = -3;
3266 rtx last_action_insn = NULL_RTX;
3267 rtx last_landing_pad = NULL_RTX;
3268 rtx first_no_action_insn = NULL_RTX;
3269 int call_site = 0;
3271 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3272 return;
3274 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3276 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3278 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3279 if (INSN_P (iter))
3281 struct eh_region *region;
3282 int this_action;
3283 rtx this_landing_pad;
3285 insn = iter;
3286 if (GET_CODE (insn) == INSN
3287 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3288 insn = XVECEXP (PATTERN (insn), 0, 0);
3290 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3291 if (!note)
3293 if (! (GET_CODE (insn) == CALL_INSN
3294 || (flag_non_call_exceptions
3295 && may_trap_p (PATTERN (insn)))))
3296 continue;
3297 this_action = -1;
3298 region = NULL;
3300 else
3302 if (INTVAL (XEXP (note, 0)) <= 0)
3303 continue;
3304 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3305 this_action = collect_one_action_chain (ar_hash, region);
3308 /* Existence of catch handlers or must-not-throw regions
3309 implies that an lsda is needed (even if empty). */
3310 if (this_action != -1)
3311 cfun->uses_eh_lsda = 1;
3313 /* Delay creation of region notes for no-action regions
3314 until we're sure that an lsda will be required. */
3315 else if (last_action == -3)
3317 first_no_action_insn = iter;
3318 last_action = -1;
3321 /* Cleanups and handlers may share action chains but not
3322 landing pads. Collect the landing pad for this region. */
3323 if (this_action >= 0)
3325 struct eh_region *o;
3326 for (o = region; ! o->landing_pad ; o = o->outer)
3327 continue;
3328 this_landing_pad = o->landing_pad;
3330 else
3331 this_landing_pad = NULL_RTX;
3333 /* Differing actions or landing pads implies a change in call-site
3334 info, which implies some EH_REGION note should be emitted. */
3335 if (last_action != this_action
3336 || last_landing_pad != this_landing_pad)
3338 /* If we'd not seen a previous action (-3) or the previous
3339 action was must-not-throw (-2), then we do not need an
3340 end note. */
3341 if (last_action >= -1)
3343 /* If we delayed the creation of the begin, do it now. */
3344 if (first_no_action_insn)
3346 call_site = add_call_site (NULL_RTX, 0);
3347 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3348 first_no_action_insn);
3349 NOTE_EH_HANDLER (note) = call_site;
3350 first_no_action_insn = NULL_RTX;
3353 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3354 last_action_insn);
3355 NOTE_EH_HANDLER (note) = call_site;
3358 /* If the new action is must-not-throw, then no region notes
3359 are created. */
3360 if (this_action >= -1)
3362 call_site = add_call_site (this_landing_pad,
3363 this_action < 0 ? 0 : this_action);
3364 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3365 NOTE_EH_HANDLER (note) = call_site;
3368 last_action = this_action;
3369 last_landing_pad = this_landing_pad;
3371 last_action_insn = iter;
3374 if (last_action >= -1 && ! first_no_action_insn)
3376 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3377 NOTE_EH_HANDLER (note) = call_site;
3380 htab_delete (ar_hash);
3384 static void
3385 push_uleb128 (varray_type *data_area, unsigned int value)
3387 do
3388 {
3389 unsigned char byte = value & 0x7f;
3390 value >>= 7;
3391 if (value)
3392 byte |= 0x80;
3393 VARRAY_PUSH_UCHAR (*data_area, byte);
3394 }
3395 while (value);
3398 static void
3399 push_sleb128 (varray_type *data_area, int value)
3401 unsigned char byte;
3402 int more;
3404 do
3405 {
3406 byte = value & 0x7f;
3407 value >>= 7;
3408 more = ! ((value == 0 && (byte & 0x40) == 0)
3409 || (value == -1 && (byte & 0x40) != 0));
3410 if (more)
3411 byte |= 0x80;
3412 VARRAY_PUSH_UCHAR (*data_area, byte);
3413 }
3414 while (more);
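/* Standalone sketch (added; not part of the original source) of the
   uleb128 encoding above, writing into a plain buffer so it can be
   exercised in isolation.  Encoding 624485 produces the bytes
   0xe5 0x8e 0x26, the classic example from the DWARF specification.  */

static int
uleb128_encode_sketch (unsigned char *buf, unsigned int value)
{
  int n = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;  /* high bit set: more bytes follow */
      buf[n++] = byte;
    }
  while (value);
  return n;            /* number of bytes written */
}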
3418 #ifndef HAVE_AS_LEB128
3419 static int
3420 dw2_size_of_call_site_table (void)
3422 int n = cfun->eh->call_site_data_used;
3423 int size = n * (4 + 4 + 4);
3424 int i;
3426 for (i = 0; i < n; ++i)
3428 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3429 size += size_of_uleb128 (cs->action);
3432 return size;
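/* Worked example (added): two call-site entries with action values 0
   and 130 occupy 2 * (4 + 4 + 4) fixed bytes plus 1 + 2 bytes for the
   uleb128-encoded actions, 27 bytes in total.  */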
3435 static int
3436 sjlj_size_of_call_site_table (void)
3438 int n = cfun->eh->call_site_data_used;
3439 int size = 0;
3440 int i;
3442 for (i = 0; i < n; ++i)
3444 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3445 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3446 size += size_of_uleb128 (cs->action);
3449 return size;
3451 #endif
3453 static void
3454 dw2_output_call_site_table (void)
3456 const char *const function_start_lab
3457 = IDENTIFIER_POINTER (current_function_func_begin_label);
3458 int n = cfun->eh->call_site_data_used;
3459 int i;
3461 for (i = 0; i < n; ++i)
3463 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3464 char reg_start_lab[32];
3465 char reg_end_lab[32];
3466 char landing_pad_lab[32];
3468 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3469 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3471 if (cs->landing_pad)
3472 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3473 CODE_LABEL_NUMBER (cs->landing_pad));
3475 /* ??? Perhaps use insn length scaling if the assembler supports
3476 generic arithmetic. */
3477 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3478 data4 if the function is small enough. */
3479 #ifdef HAVE_AS_LEB128
3480 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3481 "region %d start", i);
3482 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3483 "length");
3484 if (cs->landing_pad)
3485 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3486 "landing pad");
3487 else
3488 dw2_asm_output_data_uleb128 (0, "landing pad");
3489 #else
3490 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3491 "region %d start", i);
3492 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3493 if (cs->landing_pad)
3494 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3495 "landing pad");
3496 else
3497 dw2_asm_output_data (4, 0, "landing pad");
3498 #endif
3499 dw2_asm_output_data_uleb128 (cs->action, "action");
3502 call_site_base += n;
3505 static void
3506 sjlj_output_call_site_table (void)
3508 int n = cfun->eh->call_site_data_used;
3509 int i;
3511 for (i = 0; i < n; ++i)
3513 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3515 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3516 "region %d landing pad", i);
3517 dw2_asm_output_data_uleb128 (cs->action, "action");
3520 call_site_base += n;
3523 /* Tell assembler to switch to the section for the exception handling
3524 table. */
3526 void
3527 default_exception_section (void)
3529 if (targetm.have_named_sections)
3531 int flags;
3532 #ifdef HAVE_LD_RO_RW_SECTION_MIXING
3533 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3535 flags = (! flag_pic
3536 || ((tt_format & 0x70) != DW_EH_PE_absptr
3537 && (tt_format & 0x70) != DW_EH_PE_aligned))
3538 ? 0 : SECTION_WRITE;
3539 #else
3540 flags = SECTION_WRITE;
3541 #endif
3542 named_section_flags (".gcc_except_table", flags);
3544 else if (flag_pic)
3545 data_section ();
3546 else
3547 readonly_data_section ();
3550 void
3551 output_function_exception_table (void)
3553 int tt_format, cs_format, lp_format, i, n;
3554 #ifdef HAVE_AS_LEB128
3555 char ttype_label[32];
3556 char cs_after_size_label[32];
3557 char cs_end_label[32];
3558 #else
3559 int call_site_len;
3560 #endif
3561 int have_tt_data;
3562 int tt_format_size = 0;
3564 /* Not all functions need anything. */
3565 if (! cfun->uses_eh_lsda)
3566 return;
3568 #ifdef IA64_UNWIND_INFO
3569 fputs ("\t.personality\t", asm_out_file);
3570 output_addr_const (asm_out_file, eh_personality_libfunc);
3571 fputs ("\n\t.handlerdata\n", asm_out_file);
3572 /* Note that varasm still thinks we're in the function's code section.
3573 The ".endp" directive that will immediately follow will take us back. */
3574 #else
3575 (*targetm.asm_out.exception_section) ();
3576 #endif
3578 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3579 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3581 /* Indicate the format of the @TType entries. */
3582 if (! have_tt_data)
3583 tt_format = DW_EH_PE_omit;
3584 else
3586 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3587 #ifdef HAVE_AS_LEB128
3588 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3589 current_function_funcdef_no);
3590 #endif
3591 tt_format_size = size_of_encoded_value (tt_format);
3593 assemble_align (tt_format_size * BITS_PER_UNIT);
3596 (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
3597 current_function_funcdef_no);
3599 /* The LSDA header. */
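/* Overview (added): the records emitted below are, in order, the
   @LPStart format byte (with the optional @LPStart pointer omitted),
   the @TType format byte and optional @TType base offset, the
   call-site format byte and call-site table length, then the
   call-site table, the action record table, the @TType data, and
   finally the exception specification table.  */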
3601 /* Indicate the format of the landing pad start pointer. An omitted
3602 field implies @LPStart == @Start. */
3603 /* Currently we always put @LPStart == @Start. This field would
3604 be most useful in moving the landing pads completely out of
3605 line to another section, but it could also be used to minimize
3606 the size of uleb128 landing pad offsets. */
3607 lp_format = DW_EH_PE_omit;
3608 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3609 eh_data_format_name (lp_format));
3611 /* @LPStart pointer would go here. */
3613 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3614 eh_data_format_name (tt_format));
3616 #ifndef HAVE_AS_LEB128
3617 if (USING_SJLJ_EXCEPTIONS)
3618 call_site_len = sjlj_size_of_call_site_table ();
3619 else
3620 call_site_len = dw2_size_of_call_site_table ();
3621 #endif
3623 /* A self-relative uleb128 displacement to the @TType base. */
3624 if (have_tt_data)
3626 #ifdef HAVE_AS_LEB128
3627 char ttype_after_disp_label[32];
3628 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3629 current_function_funcdef_no);
3630 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3631 "@TType base offset");
3632 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3633 #else
3634 /* Ugh. Alignment complicates things. */
3635 unsigned int before_disp, after_disp, last_disp, disp;
3637 before_disp = 1 + 1;
3638 after_disp = (1 + size_of_uleb128 (call_site_len)
3639 + call_site_len
3640 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3641 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3642 * tt_format_size));
3644 disp = after_disp;
3645 do
3646 {
3647 unsigned int disp_size, pad;
3649 last_disp = disp;
3650 disp_size = size_of_uleb128 (disp);
3651 pad = before_disp + disp_size + after_disp;
3652 if (pad % tt_format_size)
3653 pad = tt_format_size - (pad % tt_format_size);
3654 else
3655 pad = 0;
3656 disp = after_disp + pad;
3657 }
3658 while (disp != last_disp);
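/* Worked example (added): with tt_format_size 4 and after_disp 126,
   the first pass assumes a one-byte disp and computes disp = 129;
   that value needs two uleb128 bytes, so the next pass settles on
   disp = 128, which is stable and ends the loop.  */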
3660 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3661 #endif
3664 /* Indicate the format of the call-site offsets. */
3665 #ifdef HAVE_AS_LEB128
3666 cs_format = DW_EH_PE_uleb128;
3667 #else
3668 cs_format = DW_EH_PE_udata4;
3669 #endif
3670 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3671 eh_data_format_name (cs_format));
3673 #ifdef HAVE_AS_LEB128
3674 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3675 current_function_funcdef_no);
3676 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3677 current_function_funcdef_no);
3678 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3679 "Call-site table length");
3680 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3681 if (USING_SJLJ_EXCEPTIONS)
3682 sjlj_output_call_site_table ();
3683 else
3684 dw2_output_call_site_table ();
3685 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3686 #else
3687 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3688 if (USING_SJLJ_EXCEPTIONS)
3689 sjlj_output_call_site_table ();
3690 else
3691 dw2_output_call_site_table ();
3692 #endif
3694 /* ??? Decode and interpret the data for flag_debug_asm. */
3695 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3696 for (i = 0; i < n; ++i)
3697 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3698 (i ? NULL : "Action record table"));
3700 if (have_tt_data)
3701 assemble_align (tt_format_size * BITS_PER_UNIT);
3703 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3704 while (i-- > 0)
3706 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3707 rtx value;
3709 if (type == NULL_TREE)
3710 type = integer_zero_node;
3711 else
3712 type = lookup_type_for_runtime (type);
3714 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3715 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3716 assemble_integer (value, tt_format_size,
3717 tt_format_size * BITS_PER_UNIT, 1);
3718 else
3719 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3722 #ifdef HAVE_AS_LEB128
3723 if (have_tt_data)
3724 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3725 #endif
3727 /* ??? Decode and interpret the data for flag_debug_asm. */
3728 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3729 for (i = 0; i < n; ++i)
3730 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3731 (i ? NULL : "Exception specification table"));
3733 function_section (current_function_decl);
3736 #include "gt-except.h"