gcc/except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
77 /* Provide defaults for stuff that may not be defined when using
78 sjlj exceptions. */
79 #ifndef EH_RETURN_STACKADJ_RTX
80 #define EH_RETURN_STACKADJ_RTX 0
81 #endif
82 #ifndef EH_RETURN_HANDLER_RTX
83 #define EH_RETURN_HANDLER_RTX 0
84 #endif
85 #ifndef EH_RETURN_DATA_REGNO
86 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
87 #endif
90 /* Nonzero means enable synchronous exceptions for non-call instructions. */
91 int flag_non_call_exceptions;
93 /* Protect cleanup actions with must-not-throw regions, with a call
94 to the given failure handler. */
95 tree (*lang_protect_cleanup_actions) PARAMS ((void));
97 /* Return true if type A catches type B. */
98 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
100 /* Map a type to a runtime object to match type. */
101 tree (*lang_eh_runtime_type) PARAMS ((tree));
103 /* A hash table of label to region number. */
105 struct ehl_map_entry GTY(())
107 rtx label;
108 struct eh_region *region;
111 static int call_site_base;
112 static GTY ((param_is (union tree_node)))
113 htab_t type_to_runtime_map;
115 /* Describe the SjLj_Function_Context structure. */
116 static GTY(()) tree sjlj_fc_type_node;
117 static int sjlj_fc_call_site_ofs;
118 static int sjlj_fc_data_ofs;
119 static int sjlj_fc_personality_ofs;
120 static int sjlj_fc_lsda_ofs;
121 static int sjlj_fc_jbuf_ofs;
123 /* Describes one exception region. */
124 struct eh_region GTY(())
126 /* The immediately surrounding region. */
127 struct eh_region *outer;
129 /* The list of immediately contained regions. */
130 struct eh_region *inner;
131 struct eh_region *next_peer;
133 /* An identifier for this region. */
134 int region_number;
136 /* When a region is deleted, its parents inherit the REG_EH_REGION
137 numbers already assigned. */
138 bitmap aka;
140 /* Each region does exactly one thing. */
141 enum eh_region_type
143 ERT_UNKNOWN = 0,
144 ERT_CLEANUP,
145 ERT_TRY,
146 ERT_CATCH,
147 ERT_ALLOWED_EXCEPTIONS,
148 ERT_MUST_NOT_THROW,
149 ERT_THROW,
150 ERT_FIXUP
151 } type;
153 /* Holds the action to perform based on the preceding type. */
154 union eh_region_u {
155 /* A list of catch blocks, a surrounding try block,
156 and the label for continuing after a catch. */
157 struct eh_region_u_try {
158 struct eh_region *catch;
159 struct eh_region *last_catch;
160 struct eh_region *prev_try;
161 rtx continue_label;
162 } GTY ((tag ("ERT_TRY"))) try;
164 /* The list through the catch handlers, the list of type objects
165 matched, and the list of associated filters. */
166 struct eh_region_u_catch {
167 struct eh_region *next_catch;
168 struct eh_region *prev_catch;
169 tree type_list;
170 tree filter_list;
171 } GTY ((tag ("ERT_CATCH"))) catch;
173 /* A tree_list of allowed types. */
174 struct eh_region_u_allowed {
175 tree type_list;
176 int filter;
177 } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
179 /* The type given by a call to "throw foo();", or discovered
180 for a throw. */
181 struct eh_region_u_throw {
182 tree type;
183 } GTY ((tag ("ERT_THROW"))) throw;
185 /* Retain the cleanup expression even after expansion so that
186 we can match up fixup regions. */
187 struct eh_region_u_cleanup {
188 tree exp;
189 struct eh_region *prev_try;
190 } GTY ((tag ("ERT_CLEANUP"))) cleanup;
192 /* The real region (by expression and by pointer) that fixup code
193 should live in. */
194 struct eh_region_u_fixup {
195 tree cleanup_exp;
196 struct eh_region *real_region;
197 } GTY ((tag ("ERT_FIXUP"))) fixup;
198 } GTY ((desc ("%0.type"))) u;
200 /* Entry point for this region's handler before landing pads are built. */
201 rtx label;
203 /* Entry point for this region's handler from the runtime eh library. */
204 rtx landing_pad;
206 /* Entry point for this region's handler from an inner region. */
207 rtx post_landing_pad;
209 /* The RESX insn for handing off control to the next outermost handler,
210 if appropriate. */
211 rtx resume;
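/* Editorial illustration (not part of the original source): for a C++
   fragment like

       try { T t; f (); } catch (A &a) { g (); } catch (...) { h (); }

   expansion builds, roughly, an ERT_TRY region for the try body, one
   ERT_CATCH region per handler chained through u.catch.next_catch, and
   an ERT_CLEANUP region for running T's destructor, all linked through
   the outer/inner/next_peer pointers above.  The exact shape depends on
   the front end.  */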
214 struct call_site_record GTY(())
216 rtx landing_pad;
217 int action;
220 /* Used to save exception status for each function. */
221 struct eh_status GTY(())
223 /* The tree of all regions for this function. */
224 struct eh_region *region_tree;
226 /* The same information as an indexable array. */
227 struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;
229 /* The most recently open region. */
230 struct eh_region *cur_region;
232 /* This is the region for which we are processing catch blocks. */
233 struct eh_region *try_region;
235 rtx filter;
236 rtx exc_ptr;
238 int built_landing_pads;
239 int last_region_number;
241 varray_type ttype_data;
242 varray_type ehspec_data;
243 varray_type action_record_data;
245 htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;
247 struct call_site_record * GTY ((length ("%h.call_site_data_used")))
248 call_site_data;
249 int call_site_data_used;
250 int call_site_data_size;
252 rtx ehr_stackadj;
253 rtx ehr_handler;
254 rtx ehr_label;
256 rtx sjlj_fc;
257 rtx sjlj_exit_after;
261 static int t2r_eq PARAMS ((const PTR,
262 const PTR));
263 static hashval_t t2r_hash PARAMS ((const PTR));
264 static void add_type_for_runtime PARAMS ((tree));
265 static tree lookup_type_for_runtime PARAMS ((tree));
267 static struct eh_region *expand_eh_region_end PARAMS ((void));
269 static rtx get_exception_filter PARAMS ((struct function *));
271 static void collect_eh_region_array PARAMS ((void));
272 static void resolve_fixup_regions PARAMS ((void));
273 static void remove_fixup_regions PARAMS ((void));
274 static void remove_unreachable_regions PARAMS ((rtx));
275 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
277 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
278 struct inline_remap *));
279 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
280 struct eh_region **));
281 static int ttypes_filter_eq PARAMS ((const PTR,
282 const PTR));
283 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
284 static int ehspec_filter_eq PARAMS ((const PTR,
285 const PTR));
286 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
287 static int add_ttypes_entry PARAMS ((htab_t, tree));
288 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
289 tree));
290 static void assign_filter_values PARAMS ((void));
291 static void build_post_landing_pads PARAMS ((void));
292 static void connect_post_landing_pads PARAMS ((void));
293 static void dw2_build_landing_pads PARAMS ((void));
295 struct sjlj_lp_info;
296 static bool sjlj_find_directly_reachable_regions
297 PARAMS ((struct sjlj_lp_info *));
298 static void sjlj_assign_call_site_values
299 PARAMS ((rtx, struct sjlj_lp_info *));
300 static void sjlj_mark_call_sites
301 PARAMS ((struct sjlj_lp_info *));
302 static void sjlj_emit_function_enter PARAMS ((rtx));
303 static void sjlj_emit_function_exit PARAMS ((void));
304 static void sjlj_emit_dispatch_table
305 PARAMS ((rtx, struct sjlj_lp_info *));
306 static void sjlj_build_landing_pads PARAMS ((void));
308 static hashval_t ehl_hash PARAMS ((const PTR));
309 static int ehl_eq PARAMS ((const PTR,
310 const PTR));
311 static void add_ehl_entry PARAMS ((rtx,
312 struct eh_region *));
313 static void remove_exception_handler_label PARAMS ((rtx));
314 static void remove_eh_handler PARAMS ((struct eh_region *));
315 static int for_each_eh_label_1 PARAMS ((PTR *, PTR));
317 struct reachable_info;
319 /* The return value of reachable_next_level. */
320 enum reachable_code
322 /* The given exception is not processed by the given region. */
323 RNL_NOT_CAUGHT,
324 /* The given exception may need processing by the given region. */
325 RNL_MAYBE_CAUGHT,
326 /* The given exception is completely processed by the given region. */
327 RNL_CAUGHT,
328 /* The given exception is completely processed by the runtime. */
329 RNL_BLOCKED
332 static int check_handled PARAMS ((tree, tree));
333 static void add_reachable_handler
334 PARAMS ((struct reachable_info *, struct eh_region *,
335 struct eh_region *));
336 static enum reachable_code reachable_next_level
337 PARAMS ((struct eh_region *, tree, struct reachable_info *));
339 static int action_record_eq PARAMS ((const PTR,
340 const PTR));
341 static hashval_t action_record_hash PARAMS ((const PTR));
342 static int add_action_record PARAMS ((htab_t, int, int));
343 static int collect_one_action_chain PARAMS ((htab_t,
344 struct eh_region *));
345 static int add_call_site PARAMS ((rtx, int));
347 static void push_uleb128 PARAMS ((varray_type *,
348 unsigned int));
349 static void push_sleb128 PARAMS ((varray_type *, int));
350 #ifndef HAVE_AS_LEB128
351 static int dw2_size_of_call_site_table PARAMS ((void));
352 static int sjlj_size_of_call_site_table PARAMS ((void));
353 #endif
354 static void dw2_output_call_site_table PARAMS ((void));
355 static void sjlj_output_call_site_table PARAMS ((void));
358 /* Routine to see if exception handling is turned on.
359 DO_WARN is nonzero if we want to inform the user that exception
360 handling is turned off.
362 This is used to ensure that -fexceptions has been specified if the
363 compiler tries to use any exception-specific functions. */
366 doing_eh (do_warn)
367 int do_warn;
369 if (! flag_exceptions)
371 static int warned = 0;
372 if (! warned && do_warn)
374 error ("exception handling disabled, use -fexceptions to enable");
375 warned = 1;
377 return 0;
379 return 1;
383 void
384 init_eh ()
386 if (! flag_exceptions)
387 return;
389 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
391 /* Create the SjLj_Function_Context structure. This should match
392 the definition in unwind-sjlj.c. */
393 if (USING_SJLJ_EXCEPTIONS)
395 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
397 sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);
399 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
400 build_pointer_type (sjlj_fc_type_node));
401 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
403 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
404 integer_type_node);
405 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
407 tmp = build_index_type (build_int_2 (4 - 1, 0));
408 tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
409 tmp);
410 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
411 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
413 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
414 ptr_type_node);
415 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
417 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
418 ptr_type_node);
419 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
421 #ifdef DONT_USE_BUILTIN_SETJMP
422 #ifdef JMP_BUF_SIZE
423 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
424 #else
425 /* Should be large enough for most systems; if it is not,
426 JMP_BUF_SIZE should be defined with the proper value. It will
427 also tend to be larger than necessary for most systems; a more
428 optimal port will define JMP_BUF_SIZE. */
429 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
430 #endif
431 #else
432 /* This is 2 for builtin_setjmp, plus whatever the target requires
433 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
434 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
435 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
436 #endif
437 tmp = build_index_type (tmp);
438 tmp = build_array_type (ptr_type_node, tmp);
439 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
440 #ifdef DONT_USE_BUILTIN_SETJMP
441 /* We don't know what the alignment requirements of the
442 runtime's jmp_buf are. Overestimate. */
443 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
444 DECL_USER_ALIGN (f_jbuf) = 1;
445 #endif
446 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
448 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
449 TREE_CHAIN (f_prev) = f_cs;
450 TREE_CHAIN (f_cs) = f_data;
451 TREE_CHAIN (f_data) = f_per;
452 TREE_CHAIN (f_per) = f_lsda;
453 TREE_CHAIN (f_lsda) = f_jbuf;
455 layout_type (sjlj_fc_type_node);
457 /* Cache the interesting field offsets so that we have
458 easy access from rtl. */
459 sjlj_fc_call_site_ofs
460 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
461 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
462 sjlj_fc_data_ofs
463 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
464 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
465 sjlj_fc_personality_ofs
466 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
467 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
468 sjlj_fc_lsda_ofs
469 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
470 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
471 sjlj_fc_jbuf_ofs
472 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
473 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
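/* Editorial aside (hedged): the runtime-side structure whose layout the
   fields above mirror is defined in unwind-sjlj.c.  The sketch below is
   an approximation for illustration; field types there may differ, only
   the field order and offsets matter here.

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;     // __prev
         int call_site;                          // __call_site
         _Unwind_Word data[4];                   // __data
         _Unwind_Personality_Fn personality;     // __personality
         void *lsda;                             // __lsda
         // __jbuf (the jmp_buf) follows; its size is target-dependent.
       };  */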
477 void
478 init_eh_for_function ()
480 cfun->eh = (struct eh_status *)
481 ggc_alloc_cleared (sizeof (struct eh_status));
484 /* Start an exception handling region. All instructions emitted
485 after this point are considered to be part of the region until
486 expand_eh_region_end is invoked. */
488 void
489 expand_eh_region_start ()
491 struct eh_region *new_region;
492 struct eh_region *cur_region;
493 rtx note;
495 if (! doing_eh (0))
496 return;
498 /* Insert a new blank region as a leaf in the tree. */
499 new_region = (struct eh_region *) ggc_alloc_cleared (sizeof (*new_region));
500 cur_region = cfun->eh->cur_region;
501 new_region->outer = cur_region;
502 if (cur_region)
504 new_region->next_peer = cur_region->inner;
505 cur_region->inner = new_region;
507 else
509 new_region->next_peer = cfun->eh->region_tree;
510 cfun->eh->region_tree = new_region;
512 cfun->eh->cur_region = new_region;
514 /* Create a note marking the start of this region. */
515 new_region->region_number = ++cfun->eh->last_region_number;
516 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
517 NOTE_EH_HANDLER (note) = new_region->region_number;
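/* Editorial sketch (hedged): a front end brackets a protected construct
   with one of these start/end pairs, e.g. for a cleanup:

       expand_eh_region_start ();
       ... expand the statements to be protected ...
       expand_eh_region_end_cleanup (cleanup_expr);

   where cleanup_expr is a hypothetical tree for the cleanup to run when
   an exception escapes the protected statements.  The other
   expand_eh_region_end_* entry points below follow the same pattern for
   their respective region kinds.  */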
520 /* Common code to end a region. Returns the region just ended. */
522 static struct eh_region *
523 expand_eh_region_end ()
525 struct eh_region *cur_region = cfun->eh->cur_region;
526 rtx note;
528 /* Create a note marking the end of this region. */
529 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
530 NOTE_EH_HANDLER (note) = cur_region->region_number;
532 /* Pop. */
533 cfun->eh->cur_region = cur_region->outer;
535 return cur_region;
538 /* End an exception handling region for a cleanup. HANDLER is an
539 expression to expand for the cleanup. */
541 void
542 expand_eh_region_end_cleanup (handler)
543 tree handler;
545 struct eh_region *region;
546 tree protect_cleanup_actions;
547 rtx around_label;
548 rtx data_save[2];
550 if (! doing_eh (0))
551 return;
553 region = expand_eh_region_end ();
554 region->type = ERT_CLEANUP;
555 region->label = gen_label_rtx ();
556 region->u.cleanup.exp = handler;
557 region->u.cleanup.prev_try = cfun->eh->try_region;
559 around_label = gen_label_rtx ();
560 emit_jump (around_label);
562 emit_label (region->label);
564 /* Give the language a chance to specify an action to be taken if an
565 exception is thrown that would propagate out of the HANDLER. */
566 protect_cleanup_actions
567 = (lang_protect_cleanup_actions
568 ? (*lang_protect_cleanup_actions) ()
569 : NULL_TREE);
571 if (protect_cleanup_actions)
572 expand_eh_region_start ();
574 /* In case this cleanup involves an inline destructor with a try block in
575 it, we need to save the EH return data registers around it. */
576 data_save[0] = gen_reg_rtx (ptr_mode);
577 emit_move_insn (data_save[0], get_exception_pointer (cfun));
578 data_save[1] = gen_reg_rtx (word_mode);
579 emit_move_insn (data_save[1], get_exception_filter (cfun));
581 expand_expr (handler, const0_rtx, VOIDmode, 0);
583 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
584 emit_move_insn (cfun->eh->filter, data_save[1]);
586 if (protect_cleanup_actions)
587 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
589 /* We need any stack adjustment complete before the around_label. */
590 do_pending_stack_adjust ();
592 /* We delay the generation of the _Unwind_Resume until we generate
593 landing pads. We emit a marker here so as to get good control
594 flow data in the meantime. */
595 region->resume
596 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
597 emit_barrier ();
599 emit_label (around_label);
602 /* End an exception handling region for a try block, and prepare
603 for subsequent calls to expand_start_catch. */
605 void
606 expand_start_all_catch ()
608 struct eh_region *region;
610 if (! doing_eh (1))
611 return;
613 region = expand_eh_region_end ();
614 region->type = ERT_TRY;
615 region->u.try.prev_try = cfun->eh->try_region;
616 region->u.try.continue_label = gen_label_rtx ();
618 cfun->eh->try_region = region;
620 emit_jump (region->u.try.continue_label);
623 /* Begin a catch clause. TYPE is the type caught, a list of such types, or
624 null if this is a catch-all clause. Providing a type list makes it possible to
625 associate the catch region with potentially several exception types, which
626 is useful e.g. for Ada. */
628 void
629 expand_start_catch (type_or_list)
630 tree type_or_list;
632 struct eh_region *t, *c, *l;
633 tree type_list;
635 if (! doing_eh (0))
636 return;
638 type_list = type_or_list;
640 if (type_or_list)
642 /* Ensure to always end up with a type list to normalize further
643 processing, then register each type against the runtime types
644 map. */
645 tree type_node;
647 if (TREE_CODE (type_or_list) != TREE_LIST)
648 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
650 type_node = type_list;
651 for (; type_node; type_node = TREE_CHAIN (type_node))
652 add_type_for_runtime (TREE_VALUE (type_node));
655 expand_eh_region_start ();
657 t = cfun->eh->try_region;
658 c = cfun->eh->cur_region;
659 c->type = ERT_CATCH;
660 c->u.catch.type_list = type_list;
661 c->label = gen_label_rtx ();
663 l = t->u.try.last_catch;
664 c->u.catch.prev_catch = l;
665 if (l)
666 l->u.catch.next_catch = c;
667 else
668 t->u.try.catch = c;
669 t->u.try.last_catch = c;
671 emit_label (c->label);
674 /* End a catch clause. Control will resume after the try/catch block. */
676 void
677 expand_end_catch ()
679 struct eh_region *try_region;
681 if (! doing_eh (0))
682 return;
684 expand_eh_region_end ();
685 try_region = cfun->eh->try_region;
687 emit_jump (try_region->u.try.continue_label);
690 /* End a sequence of catch handlers for a try block. */
692 void
693 expand_end_all_catch ()
695 struct eh_region *try_region;
697 if (! doing_eh (0))
698 return;
700 try_region = cfun->eh->try_region;
701 cfun->eh->try_region = try_region->u.try.prev_try;
703 emit_label (try_region->u.try.continue_label);
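/* Editorial sketch (hedged): putting the entry points above together,
   expanding "try { body } catch (T1) { h1 } catch (...) { h2 }" uses
   roughly this call sequence from the front end:

       expand_eh_region_start ();
       ... expand body ...
       expand_start_all_catch ();
       expand_start_catch (T1_type);   ... expand h1 ...  expand_end_catch ();
       expand_start_catch (NULL_TREE); ... expand h2 ...  expand_end_catch ();
       expand_end_all_catch ();

   T1_type stands for whatever type tree the front end supplies.
   Control reaches the continue_label emitted by expand_end_all_catch
   when either the body or one of the handlers finishes normally.  */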
706 /* End an exception region for an exception type filter. ALLOWED is a
707 TREE_LIST of types to be matched by the runtime. FAILURE is an
708 expression to invoke if a mismatch occurs.
710 ??? We could use these semantics for calls to rethrow, too; if we can
711 see the surrounding catch clause, we know that the exception we're
712 rethrowing satisfies the "filter" of the catch type. */
714 void
715 expand_eh_region_end_allowed (allowed, failure)
716 tree allowed, failure;
718 struct eh_region *region;
719 rtx around_label;
721 if (! doing_eh (0))
722 return;
724 region = expand_eh_region_end ();
725 region->type = ERT_ALLOWED_EXCEPTIONS;
726 region->u.allowed.type_list = allowed;
727 region->label = gen_label_rtx ();
729 for (; allowed ; allowed = TREE_CHAIN (allowed))
730 add_type_for_runtime (TREE_VALUE (allowed));
732 /* We must emit the call to FAILURE here, so that if this function
733 throws a different exception, it will be processed by the
734 correct region. */
736 around_label = gen_label_rtx ();
737 emit_jump (around_label);
739 emit_label (region->label);
740 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
741 /* We must adjust the stack before we reach the AROUND_LABEL because
742 the call to FAILURE does not occur on all paths to the
743 AROUND_LABEL. */
744 do_pending_stack_adjust ();
746 emit_label (around_label);
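/* Editorial example (hedged): for a C++ declaration such as
   "void f () throw (A, B)" the front end can wrap the expanded body in
   one of these regions, with ALLOWED being the TREE_LIST of A and B and
   FAILURE being a call such as __cxa_call_unexpected (see the comment
   below); an exception matching neither A nor B then reaches
   region->label and runs the failure expression.  */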
749 /* End an exception region for a must-not-throw filter. FAILURE is an
750 expression to invoke if an uncaught exception propagates this far.
752 This is conceptually identical to expand_eh_region_end_allowed with
753 an empty allowed list (if you passed "std::terminate" instead of
754 "__cxa_call_unexpected"), but they are represented differently in
755 the C++ LSDA. */
757 void
758 expand_eh_region_end_must_not_throw (failure)
759 tree failure;
761 struct eh_region *region;
762 rtx around_label;
764 if (! doing_eh (0))
765 return;
767 region = expand_eh_region_end ();
768 region->type = ERT_MUST_NOT_THROW;
769 region->label = gen_label_rtx ();
771 /* We must emit the call to FAILURE here, so that if this function
772 throws a different exception, it will be processed by the
773 correct region. */
775 around_label = gen_label_rtx ();
776 emit_jump (around_label);
778 emit_label (region->label);
779 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
781 emit_label (around_label);
784 /* End an exception region for a throw. No handling goes on here,
785 but it's the easiest way for the front-end to indicate what type
786 is being thrown. */
788 void
789 expand_eh_region_end_throw (type)
790 tree type;
792 struct eh_region *region;
794 if (! doing_eh (0))
795 return;
797 region = expand_eh_region_end ();
798 region->type = ERT_THROW;
799 region->u.throw.type = type;
802 /* End a fixup region. Within this region the cleanups for the immediately
803 enclosing region are _not_ run. This is used for goto cleanup to avoid
804 destroying an object twice.
806 This would be an extraordinarily simple prospect, were it not for the
807 fact that we don't actually know what the immediately enclosing region
808 is. This surprising fact is because expand_cleanups is currently
809 generating a sequence that it will insert somewhere else. We collect
810 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
812 void
813 expand_eh_region_end_fixup (handler)
814 tree handler;
816 struct eh_region *fixup;
818 if (! doing_eh (0))
819 return;
821 fixup = expand_eh_region_end ();
822 fixup->type = ERT_FIXUP;
823 fixup->u.fixup.cleanup_exp = handler;
826 /* Return an rtl expression for a pointer to the exception object
827 within a handler. */
830 get_exception_pointer (fun)
831 struct function *fun;
833 rtx exc_ptr = fun->eh->exc_ptr;
834 if (fun == cfun && ! exc_ptr)
836 exc_ptr = gen_reg_rtx (ptr_mode);
837 fun->eh->exc_ptr = exc_ptr;
839 return exc_ptr;
842 /* Return an rtl expression for the exception dispatch filter
843 within a handler. */
845 static rtx
846 get_exception_filter (fun)
847 struct function *fun;
849 rtx filter = fun->eh->filter;
850 if (fun == cfun && ! filter)
852 filter = gen_reg_rtx (word_mode);
853 fun->eh->filter = filter;
855 return filter;
858 /* This section is for the exception handling specific optimization pass. */
860 /* Collect the exception regions into an array for random access. It's just as simple to
861 collect the regions this way as in expand_eh_region_start, but
862 without having to realloc memory. */
864 static void
865 collect_eh_region_array ()
867 struct eh_region **array, *i;
869 i = cfun->eh->region_tree;
870 if (! i)
871 return;
873 array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
874 * sizeof (*array));
875 cfun->eh->region_array = array;
877 while (1)
879 array[i->region_number] = i;
881 /* If there are sub-regions, process them. */
882 if (i->inner)
883 i = i->inner;
884 /* If there are peers, process them. */
885 else if (i->next_peer)
886 i = i->next_peer;
887 /* Otherwise, step back up the tree to the next peer. */
888 else
890 do {
891 i = i->outer;
892 if (i == NULL)
893 return;
894 } while (i->next_peer == NULL);
895 i = i->next_peer;
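/* Editorial note: the loop above is a non-recursive pre-order walk.
   For a tree linked as

       R1->inner == R2, R2->next_peer == R4, R2->inner == R3

   the regions are recorded in the order R1, R2, R3, R4: descend through
   inner first, then follow next_peer, and climb back out through outer
   when both are exhausted.  */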
900 static void
901 resolve_fixup_regions ()
903 int i, j, n = cfun->eh->last_region_number;
905 for (i = 1; i <= n; ++i)
907 struct eh_region *fixup = cfun->eh->region_array[i];
908 struct eh_region *cleanup = 0;
910 if (! fixup || fixup->type != ERT_FIXUP)
911 continue;
913 for (j = 1; j <= n; ++j)
915 cleanup = cfun->eh->region_array[j];
916 if (cleanup->type == ERT_CLEANUP
917 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
918 break;
920 if (j > n)
921 abort ();
923 fixup->u.fixup.real_region = cleanup->outer;
927 /* Now that we've discovered what region actually encloses a fixup,
928 we can shuffle pointers and remove them from the tree. */
930 static void
931 remove_fixup_regions ()
933 int i;
934 rtx insn, note;
935 struct eh_region *fixup;
937 /* Walk the insn chain and adjust the REG_EH_REGION numbers
938 for instructions referencing fixup regions. This is only
939 strictly necessary for fixup regions with no parent, but
940 doesn't hurt to do it for all regions. */
941 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
942 if (INSN_P (insn)
943 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
944 && INTVAL (XEXP (note, 0)) > 0
945 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
946 && fixup->type == ERT_FIXUP)
948 if (fixup->u.fixup.real_region)
949 XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
950 else
951 remove_note (insn, note);
954 /* Remove the fixup regions from the tree. */
955 for (i = cfun->eh->last_region_number; i > 0; --i)
957 fixup = cfun->eh->region_array[i];
958 if (! fixup)
959 continue;
961 /* Allow GC to maybe free some memory. */
962 if (fixup->type == ERT_CLEANUP)
963 fixup->u.cleanup.exp = NULL_TREE;
965 if (fixup->type != ERT_FIXUP)
966 continue;
968 if (fixup->inner)
970 struct eh_region *parent, *p, **pp;
972 parent = fixup->u.fixup.real_region;
974 /* Fix up the children's parent pointers; find the end of
975 the list. */
976 for (p = fixup->inner; ; p = p->next_peer)
978 p->outer = parent;
979 if (! p->next_peer)
980 break;
983 /* In the tree of cleanups, only outer-inner ordering matters.
984 So link the children back in anywhere at the correct level. */
985 if (parent)
986 pp = &parent->inner;
987 else
988 pp = &cfun->eh->region_tree;
989 p->next_peer = *pp;
990 *pp = fixup->inner;
991 fixup->inner = NULL;
994 remove_eh_handler (fixup);
998 /* Remove all regions whose labels are not reachable from insns. */
1000 static void
1001 remove_unreachable_regions (insns)
1002 rtx insns;
1004 int i, *uid_region_num;
1005 bool *reachable;
1006 struct eh_region *r;
1007 rtx insn;
1009 uid_region_num = xcalloc (get_max_uid (), sizeof(int));
1010 reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
1012 for (i = cfun->eh->last_region_number; i > 0; --i)
1014 r = cfun->eh->region_array[i];
1015 if (!r || r->region_number != i)
1016 continue;
1018 if (r->resume)
1020 if (uid_region_num[INSN_UID (r->resume)])
1021 abort ();
1022 uid_region_num[INSN_UID (r->resume)] = i;
1024 if (r->label)
1026 if (uid_region_num[INSN_UID (r->label)])
1027 abort ();
1028 uid_region_num[INSN_UID (r->label)] = i;
1030 if (r->type == ERT_TRY && r->u.try.continue_label)
1032 if (uid_region_num[INSN_UID (r->u.try.continue_label)])
1033 abort ();
1034 uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
1038 for (insn = insns; insn; insn = NEXT_INSN (insn))
1039 reachable[uid_region_num[INSN_UID (insn)]] = true;
1041 for (i = cfun->eh->last_region_number; i > 0; --i)
1043 r = cfun->eh->region_array[i];
1044 if (r && r->region_number == i && !reachable[i])
1046 /* Don't remove ERT_THROW regions if their outer region
1047 is reachable. */
1048 if (r->type == ERT_THROW
1049 && r->outer
1050 && reachable[r->outer->region_number])
1051 continue;
1053 remove_eh_handler (r);
1057 free (reachable);
1058 free (uid_region_num);
1061 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1062 can_throw instruction in the region. */
1064 static void
1065 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1066 rtx *pinsns;
1067 int *orig_sp;
1068 int cur;
1070 int *sp = orig_sp;
1071 rtx insn, next;
1073 for (insn = *pinsns; insn ; insn = next)
1075 next = NEXT_INSN (insn);
1076 if (GET_CODE (insn) == NOTE)
1078 int kind = NOTE_LINE_NUMBER (insn);
1079 if (kind == NOTE_INSN_EH_REGION_BEG
1080 || kind == NOTE_INSN_EH_REGION_END)
1082 if (kind == NOTE_INSN_EH_REGION_BEG)
1084 struct eh_region *r;
1086 *sp++ = cur;
1087 cur = NOTE_EH_HANDLER (insn);
1089 r = cfun->eh->region_array[cur];
1090 if (r->type == ERT_FIXUP)
1092 r = r->u.fixup.real_region;
1093 cur = r ? r->region_number : 0;
1095 else if (r->type == ERT_CATCH)
1097 r = r->outer;
1098 cur = r ? r->region_number : 0;
1101 else
1102 cur = *--sp;
1104 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1105 requires extra care to adjust sequence start. */
1106 if (insn == *pinsns)
1107 *pinsns = next;
1108 remove_insn (insn);
1109 continue;
1112 else if (INSN_P (insn))
1114 if (cur > 0
1115 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1116 /* Calls can always potentially throw exceptions, unless
1117 they have a REG_EH_REGION note with a value of 0 or less,
1118 which should be the only possible kind so far. */
1119 && (GET_CODE (insn) == CALL_INSN
1120 /* If we wanted exceptions for non-call insns, then
1121 any may_trap_p instruction could throw. */
1122 || (flag_non_call_exceptions
1123 && GET_CODE (PATTERN (insn)) != CLOBBER
1124 && GET_CODE (PATTERN (insn)) != USE
1125 && may_trap_p (PATTERN (insn)))))
1127 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1128 REG_NOTES (insn));
1131 if (GET_CODE (insn) == CALL_INSN
1132 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1134 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1135 sp, cur);
1136 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1137 sp, cur);
1138 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1139 sp, cur);
1144 if (sp != orig_sp)
1145 abort ();
1148 void
1149 convert_from_eh_region_ranges ()
1151 int *stack;
1152 rtx insns;
1154 collect_eh_region_array ();
1155 resolve_fixup_regions ();
1157 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1158 insns = get_insns ();
1159 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1160 free (stack);
1162 remove_fixup_regions ();
1163 remove_unreachable_regions (insns);
1166 static void
1167 add_ehl_entry (label, region)
1168 rtx label;
1169 struct eh_region *region;
1171 struct ehl_map_entry **slot, *entry;
1173 LABEL_PRESERVE_P (label) = 1;
1175 entry = (struct ehl_map_entry *) ggc_alloc (sizeof (*entry));
1176 entry->label = label;
1177 entry->region = region;
1179 slot = (struct ehl_map_entry **)
1180 htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);
1182 /* Before landing pad creation, each exception handler has its own
1183 label. After landing pad creation, the exception handlers may
1184 share landing pads. This is ok, since maybe_remove_eh_handler
1185 only requires the 1-1 mapping before landing pad creation. */
1186 if (*slot && !cfun->eh->built_landing_pads)
1187 abort ();
1189 *slot = entry;
1192 void
1193 find_exception_handler_labels ()
1195 int i;
1197 if (cfun->eh->exception_handler_label_map)
1198 htab_empty (cfun->eh->exception_handler_label_map);
1199 else
1201 /* ??? The expansion factor here (3/2) must be greater than the htab
1202 occupancy factor (4/3) to avoid unnecessary resizing. */
1203 cfun->eh->exception_handler_label_map
1204 = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
1205 ehl_hash, ehl_eq, NULL);
1208 if (cfun->eh->region_tree == NULL)
1209 return;
1211 for (i = cfun->eh->last_region_number; i > 0; --i)
1213 struct eh_region *region = cfun->eh->region_array[i];
1214 rtx lab;
1216 if (! region || region->region_number != i)
1217 continue;
1218 if (cfun->eh->built_landing_pads)
1219 lab = region->landing_pad;
1220 else
1221 lab = region->label;
1223 if (lab)
1224 add_ehl_entry (lab, region);
1227 /* For sjlj exceptions, need the return label to remain live until
1228 after landing pad generation. */
1229 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1230 add_ehl_entry (return_label, NULL);
1233 bool
1234 current_function_has_exception_handlers ()
1236 int i;
1238 for (i = cfun->eh->last_region_number; i > 0; --i)
1240 struct eh_region *region = cfun->eh->region_array[i];
1242 if (! region || region->region_number != i)
1243 continue;
1244 if (region->type != ERT_THROW)
1245 return true;
1248 return false;
1251 static struct eh_region *
1252 duplicate_eh_region_1 (o, map)
1253 struct eh_region *o;
1254 struct inline_remap *map;
1256 struct eh_region *n
1257 = (struct eh_region *) ggc_alloc_cleared (sizeof (struct eh_region));
1259 n->region_number = o->region_number + cfun->eh->last_region_number;
1260 n->type = o->type;
1262 switch (n->type)
1264 case ERT_CLEANUP:
1265 case ERT_MUST_NOT_THROW:
1266 break;
1268 case ERT_TRY:
1269 if (o->u.try.continue_label)
1270 n->u.try.continue_label
1271 = get_label_from_map (map,
1272 CODE_LABEL_NUMBER (o->u.try.continue_label));
1273 break;
1275 case ERT_CATCH:
1276 n->u.catch.type_list = o->u.catch.type_list;
1277 break;
1279 case ERT_ALLOWED_EXCEPTIONS:
1280 n->u.allowed.type_list = o->u.allowed.type_list;
1281 break;
1283 case ERT_THROW:
1284 n->u.throw.type = o->u.throw.type;
1286 default:
1287 abort ();
1290 if (o->label)
1291 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1292 if (o->resume)
1294 n->resume = map->insn_map[INSN_UID (o->resume)];
1295 if (n->resume == NULL)
1296 abort ();
1299 return n;
1302 static void
1303 duplicate_eh_region_2 (o, n_array)
1304 struct eh_region *o;
1305 struct eh_region **n_array;
1307 struct eh_region *n = n_array[o->region_number];
1309 switch (n->type)
1311 case ERT_TRY:
1312 n->u.try.catch = n_array[o->u.try.catch->region_number];
1313 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1314 break;
1316 case ERT_CATCH:
1317 if (o->u.catch.next_catch)
1318 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1319 if (o->u.catch.prev_catch)
1320 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1321 break;
1323 default:
1324 break;
1327 if (o->outer)
1328 n->outer = n_array[o->outer->region_number];
1329 if (o->inner)
1330 n->inner = n_array[o->inner->region_number];
1331 if (o->next_peer)
1332 n->next_peer = n_array[o->next_peer->region_number];
1336 duplicate_eh_regions (ifun, map)
1337 struct function *ifun;
1338 struct inline_remap *map;
1340 int ifun_last_region_number = ifun->eh->last_region_number;
1341 struct eh_region **n_array, *root, *cur;
1342 int i;
1344 if (ifun_last_region_number == 0)
1345 return 0;
1347 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1349 for (i = 1; i <= ifun_last_region_number; ++i)
1351 cur = ifun->eh->region_array[i];
1352 if (!cur || cur->region_number != i)
1353 continue;
1354 n_array[i] = duplicate_eh_region_1 (cur, map);
1356 for (i = 1; i <= ifun_last_region_number; ++i)
1358 cur = ifun->eh->region_array[i];
1359 if (!cur || cur->region_number != i)
1360 continue;
1361 duplicate_eh_region_2 (cur, n_array);
1364 root = n_array[ifun->eh->region_tree->region_number];
1365 cur = cfun->eh->cur_region;
1366 if (cur)
1368 struct eh_region *p = cur->inner;
1369 if (p)
1371 while (p->next_peer)
1372 p = p->next_peer;
1373 p->next_peer = root;
1375 else
1376 cur->inner = root;
1378 for (i = 1; i <= ifun_last_region_number; ++i)
1379 if (n_array[i] && n_array[i]->outer == NULL)
1380 n_array[i]->outer = cur;
1382 else
1384 struct eh_region *p = cfun->eh->region_tree;
1385 if (p)
1387 while (p->next_peer)
1388 p = p->next_peer;
1389 p->next_peer = root;
1391 else
1392 cfun->eh->region_tree = root;
1395 free (n_array);
1397 i = cfun->eh->last_region_number;
1398 cfun->eh->last_region_number = i + ifun_last_region_number;
1399 return i;
1403 static int
1404 t2r_eq (pentry, pdata)
1405 const PTR pentry;
1406 const PTR pdata;
1408 tree entry = (tree) pentry;
1409 tree data = (tree) pdata;
1411 return TREE_PURPOSE (entry) == data;
1414 static hashval_t
1415 t2r_hash (pentry)
1416 const PTR pentry;
1418 tree entry = (tree) pentry;
1419 return TYPE_HASH (TREE_PURPOSE (entry));
1422 static void
1423 add_type_for_runtime (type)
1424 tree type;
1426 tree *slot;
1428 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1429 TYPE_HASH (type), INSERT);
1430 if (*slot == NULL)
1432 tree runtime = (*lang_eh_runtime_type) (type);
1433 *slot = tree_cons (type, runtime, NULL_TREE);
1437 static tree
1438 lookup_type_for_runtime (type)
1439 tree type;
1441 tree *slot;
1443 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1444 TYPE_HASH (type), NO_INSERT);
1446 /* We should have always inserted the data earlier. */
1447 return TREE_VALUE (*slot);
1451 /* Represent an entry in @TTypes for either catch actions
1452 or exception filter actions. */
1453 struct ttypes_filter GTY(())
1455 tree t;
1456 int filter;
1459 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1460 (a tree) for a @TTypes type node we are thinking about adding. */
1462 static int
1463 ttypes_filter_eq (pentry, pdata)
1464 const PTR pentry;
1465 const PTR pdata;
1467 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1468 tree data = (tree) pdata;
1470 return entry->t == data;
1473 static hashval_t
1474 ttypes_filter_hash (pentry)
1475 const PTR pentry;
1477 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1478 return TYPE_HASH (entry->t);
1481 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1482 exception specification list we are thinking about adding. */
1483 /* ??? Currently we use the type lists in the order given. Someone
1484 should put these in some canonical order. */
1486 static int
1487 ehspec_filter_eq (pentry, pdata)
1488 const PTR pentry;
1489 const PTR pdata;
1491 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1492 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1494 return type_list_equal (entry->t, data->t);
1497 /* Hash function for exception specification lists. */
1499 static hashval_t
1500 ehspec_filter_hash (pentry)
1501 const PTR pentry;
1503 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1504 hashval_t h = 0;
1505 tree list;
1507 for (list = entry->t; list ; list = TREE_CHAIN (list))
1508 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1509 return h;
1512 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1513 up the search. Return the filter value to be used. */
1515 static int
1516 add_ttypes_entry (ttypes_hash, type)
1517 htab_t ttypes_hash;
1518 tree type;
1520 struct ttypes_filter **slot, *n;
1522 slot = (struct ttypes_filter **)
1523 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1525 if ((n = *slot) == NULL)
1527 /* Filter value is a 1 based table index. */
1529 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1530 n->t = type;
1531 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1532 *slot = n;
1534 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1537 return n->filter;
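/* Editorial worked example: the first call with some type A pushes A
   onto ttype_data and returns filter 1; a second call with a distinct
   type B returns 2; a later call with A again finds the existing hash
   entry and returns 1 without growing the table.  */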
1540 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1541 to speed up the search. Return the filter value to be used. */
1543 static int
1544 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1545 htab_t ehspec_hash;
1546 htab_t ttypes_hash;
1547 tree list;
1549 struct ttypes_filter **slot, *n;
1550 struct ttypes_filter dummy;
1552 dummy.t = list;
1553 slot = (struct ttypes_filter **)
1554 htab_find_slot (ehspec_hash, &dummy, INSERT);
1556 if ((n = *slot) == NULL)
1558 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1560 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1561 n->t = list;
1562 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1563 *slot = n;
1565 /* Look up each type in the list and encode its filter
1566 value as a uleb128. Terminate the list with 0. */
1567 for (; list ; list = TREE_CHAIN (list))
1568 push_uleb128 (&cfun->eh->ehspec_data,
1569 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1570 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1573 return n->filter;
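/* Editorial aside (hedged sketch, not the original helper): push_uleb128
   stores a value in standard unsigned LEB128 form, 7 bits per byte,
   least significant group first, high bit set on all but the last byte:

       do
         {
           unsigned char byte = value & 0x7f;
           value >>= 7;
           if (value)
             byte |= 0x80;
           append byte to the varray;
         }
       while (value);

   So an exception specification whose types received filters 1 and 2 is
   recorded in ehspec_data as the bytes 0x01 0x02 0x00, the trailing zero
   being the terminator pushed above, and the region's filter is minus
   the 1-based offset of the first of those bytes.  */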
1576 /* Generate the action filter values to be used for CATCH and
1577 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1578 we use lots of landing pads, and so every type or list can share
1579 the same filter value, which saves table space. */
1581 static void
1582 assign_filter_values ()
1584 int i;
1585 htab_t ttypes, ehspec;
1587 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1588 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1590 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1591 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1593 for (i = cfun->eh->last_region_number; i > 0; --i)
1595 struct eh_region *r = cfun->eh->region_array[i];
1597 /* Mind we don't process a region more than once. */
1598 if (!r || r->region_number != i)
1599 continue;
1601 switch (r->type)
1603 case ERT_CATCH:
1604 /* Whatever type_list is (NULL or true list), we build a list
1605 of filters for the region. */
1606 r->u.catch.filter_list = NULL_TREE;
1608 if (r->u.catch.type_list != NULL)
1610 /* Get a filter value for each of the types caught and store
1611 them in the region's dedicated list. */
1612 tree tp_node = r->u.catch.type_list;
1614 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1616 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1617 tree flt_node = build_int_2 (flt, 0);
1619 r->u.catch.filter_list
1620 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1623 else
1625 /* Get a filter value for the NULL list also since it will need
1626 an action record anyway. */
1627 int flt = add_ttypes_entry (ttypes, NULL);
1628 tree flt_node = build_int_2 (flt, 0);
1630 r->u.catch.filter_list
1631 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1634 break;
1636 case ERT_ALLOWED_EXCEPTIONS:
1637 r->u.allowed.filter
1638 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1639 break;
1641 default:
1642 break;
1646 htab_delete (ttypes);
1647 htab_delete (ehspec);
1650 static void
1651 build_post_landing_pads ()
1653 int i;
1655 for (i = cfun->eh->last_region_number; i > 0; --i)
1657 struct eh_region *region = cfun->eh->region_array[i];
1658 rtx seq;
1660 /* Mind we don't process a region more than once. */
1661 if (!region || region->region_number != i)
1662 continue;
1664 switch (region->type)
1666 case ERT_TRY:
1667 /* ??? Collect the set of all non-overlapping catch handlers
1668 all the way up the chain until blocked by a cleanup. */
1669 /* ??? Outer try regions can share landing pads with inner
1670 try regions if the types are completely non-overlapping,
1671 and there are no intervening cleanups. */
1673 region->post_landing_pad = gen_label_rtx ();
1675 start_sequence ();
1677 emit_label (region->post_landing_pad);
1679 /* ??? It is mighty inconvenient to call back into the
1680 switch statement generation code in expand_end_case.
1681 Rapid prototyping sez a sequence of ifs. */
1683 struct eh_region *c;
1684 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1686 /* ??? _Unwind_ForcedUnwind wants no match here. */
1687 if (c->u.catch.type_list == NULL)
1688 emit_jump (c->label);
1689 else
1691 /* Need for one cmp/jump per type caught. Each type
1692 list entry has a matching entry in the filter list
1693 (see assign_filter_values). */
1694 tree tp_node = c->u.catch.type_list;
1695 tree flt_node = c->u.catch.filter_list;
1697 for (; tp_node; )
1699 emit_cmp_and_jump_insns
1700 (cfun->eh->filter,
1701 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1702 EQ, NULL_RTX, word_mode, 0, c->label);
1704 tp_node = TREE_CHAIN (tp_node);
1705 flt_node = TREE_CHAIN (flt_node);
1711 /* We delay the generation of the _Unwind_Resume until we generate
1712 landing pads. We emit a marker here so as to get good control
1713 flow data in the meantime. */
1714 region->resume
1715 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1716 emit_barrier ();
1718 seq = get_insns ();
1719 end_sequence ();
1721 emit_insn_before (seq, region->u.try.catch->label);
1722 break;
1724 case ERT_ALLOWED_EXCEPTIONS:
1725 region->post_landing_pad = gen_label_rtx ();
1727 start_sequence ();
1729 emit_label (region->post_landing_pad);
1731 emit_cmp_and_jump_insns (cfun->eh->filter,
1732 GEN_INT (region->u.allowed.filter),
1733 EQ, NULL_RTX, word_mode, 0, region->label);
1735 /* We delay the generation of the _Unwind_Resume until we generate
1736 landing pads. We emit a marker here so as to get good control
1737 flow data in the meantime. */
1738 region->resume
1739 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1740 emit_barrier ();
1742 seq = get_insns ();
1743 end_sequence ();
1745 emit_insn_before (seq, region->label);
1746 break;
1748 case ERT_CLEANUP:
1749 case ERT_MUST_NOT_THROW:
1750 region->post_landing_pad = region->label;
1751 break;
1753 case ERT_CATCH:
1754 case ERT_THROW:
1755 /* Nothing to do. */
1756 break;
1758 default:
1759 abort ();
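/* Editorial illustration (hedged): for an ERT_TRY region with two
   handlers whose filters came out as 1 and 2, the sequence built above
   amounts to

       post_landing_pad:
         if (filter == 1) goto label_of_first_catch;
         if (filter == 2) goto label_of_second_catch;
         (resx N)    ;; placeholder, rewritten by connect_post_landing_pads

   and a catch-all handler (NULL type_list) gets an unconditional jump
   instead of a comparison.  */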
1764 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1765 _Unwind_Resume otherwise. */
1767 static void
1768 connect_post_landing_pads ()
1770 int i;
1772 for (i = cfun->eh->last_region_number; i > 0; --i)
1774 struct eh_region *region = cfun->eh->region_array[i];
1775 struct eh_region *outer;
1776 rtx seq;
1778 /* Mind we don't process a region more than once. */
1779 if (!region || region->region_number != i)
1780 continue;
1782 /* If there is no RESX, or it has been deleted by flow, there's
1783 nothing to fix up. */
1784 if (! region->resume || INSN_DELETED_P (region->resume))
1785 continue;
1787 /* Search for another landing pad in this function. */
1788 for (outer = region->outer; outer ; outer = outer->outer)
1789 if (outer->post_landing_pad)
1790 break;
1792 start_sequence ();
1794 if (outer)
1795 emit_jump (outer->post_landing_pad);
1796 else
1797 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1798 VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
1800 seq = get_insns ();
1801 end_sequence ();
1802 emit_insn_before (seq, region->resume);
1803 delete_insn (region->resume);
1808 static void
1809 dw2_build_landing_pads ()
1811 int i;
1812 unsigned int j;
1814 for (i = cfun->eh->last_region_number; i > 0; --i)
1816 struct eh_region *region = cfun->eh->region_array[i];
1817 rtx seq;
1818 bool clobbers_hard_regs = false;
1820 /* Mind we don't process a region more than once. */
1821 if (!region || region->region_number != i)
1822 continue;
1824 if (region->type != ERT_CLEANUP
1825 && region->type != ERT_TRY
1826 && region->type != ERT_ALLOWED_EXCEPTIONS)
1827 continue;
1829 start_sequence ();
1831 region->landing_pad = gen_label_rtx ();
1832 emit_label (region->landing_pad);
1834 #ifdef HAVE_exception_receiver
1835 if (HAVE_exception_receiver)
1836 emit_insn (gen_exception_receiver ());
1837 else
1838 #endif
1839 #ifdef HAVE_nonlocal_goto_receiver
1840 if (HAVE_nonlocal_goto_receiver)
1841 emit_insn (gen_nonlocal_goto_receiver ());
1842 else
1843 #endif
1844 { /* Nothing */ }
1846 /* If the eh_return data registers are call-saved, then we
1847 won't have considered them clobbered from the call that
1848 threw. Kill them now. */
1849 for (j = 0; ; ++j)
1851 unsigned r = EH_RETURN_DATA_REGNO (j);
1852 if (r == INVALID_REGNUM)
1853 break;
1854 if (! call_used_regs[r])
1856 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1857 clobbers_hard_regs = true;
1861 if (clobbers_hard_regs)
1863 /* @@@ This is a kludge. Not all machine descriptions define a
1864 blockage insn, but we must not allow the code we just generated
1865 to be reordered by scheduling. So emit an ASM_INPUT to act as
1866 a blockage insn. */
1867 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
1870 emit_move_insn (cfun->eh->exc_ptr,
1871 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1872 emit_move_insn (cfun->eh->filter,
1873 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
1875 seq = get_insns ();
1876 end_sequence ();
1878 emit_insn_before (seq, region->post_landing_pad);
1883 struct sjlj_lp_info
1885 int directly_reachable;
1886 int action_index;
1887 int dispatch_index;
1888 int call_site_index;
1891 static bool
1892 sjlj_find_directly_reachable_regions (lp_info)
1893 struct sjlj_lp_info *lp_info;
1895 rtx insn;
1896 bool found_one = false;
1898 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1900 struct eh_region *region;
1901 enum reachable_code rc;
1902 tree type_thrown;
1903 rtx note;
1905 if (! INSN_P (insn))
1906 continue;
1908 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1909 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1910 continue;
1912 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1914 type_thrown = NULL_TREE;
1915 if (region->type == ERT_THROW)
1917 type_thrown = region->u.throw.type;
1918 region = region->outer;
1921 /* Find the first containing region that might handle the exception.
1922 That's the landing pad to which we will transfer control. */
1923 rc = RNL_NOT_CAUGHT;
1924 for (; region; region = region->outer)
1926 rc = reachable_next_level (region, type_thrown, 0);
1927 if (rc != RNL_NOT_CAUGHT)
1928 break;
1930 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1932 lp_info[region->region_number].directly_reachable = 1;
1933 found_one = true;
1937 return found_one;
1940 static void
1941 sjlj_assign_call_site_values (dispatch_label, lp_info)
1942 rtx dispatch_label;
1943 struct sjlj_lp_info *lp_info;
1945 htab_t ar_hash;
1946 int i, index;
1948 /* First task: build the action table. */
1950 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1951 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1953 for (i = cfun->eh->last_region_number; i > 0; --i)
1954 if (lp_info[i].directly_reachable)
1956 struct eh_region *r = cfun->eh->region_array[i];
1957 r->landing_pad = dispatch_label;
1958 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1959 if (lp_info[i].action_index != -1)
1960 cfun->uses_eh_lsda = 1;
1963 htab_delete (ar_hash);
1965 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1966 landing pad label for the region. For sjlj though, there is one
1967 common landing pad from which we dispatch to the post-landing pads.
1969 A region receives a dispatch index if it is directly reachable
1970 and requires in-function processing. Regions that share post-landing
1971 pads may share dispatch indices. */
1972 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1973 (see build_post_landing_pads) so we don't bother checking for it. */
1975 index = 0;
1976 for (i = cfun->eh->last_region_number; i > 0; --i)
1977 if (lp_info[i].directly_reachable)
1978 lp_info[i].dispatch_index = index++;
1980 /* Finally: assign call-site values. In dwarf2 terms, this would be
1981 the region number assigned by convert_to_eh_region_ranges, but
1982 handles no-action and must-not-throw differently. */
1984 call_site_base = 1;
1985 for (i = cfun->eh->last_region_number; i > 0; --i)
1986 if (lp_info[i].directly_reachable)
1988 int action = lp_info[i].action_index;
1990 /* Map must-not-throw to otherwise unused call-site index 0. */
1991 if (action == -2)
1992 index = 0;
1993 /* Map no-action to otherwise unused call-site index -1. */
1994 else if (action == -1)
1995 index = -1;
1996 /* Otherwise, look it up in the table. */
1997 else
1998 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2000 lp_info[i].call_site_index = index;
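/* Editorial example (hedged): with two directly reachable regions whose
   action indices came out as, say, 3 and -1, the loops above give the
   first a positive index from add_call_site and the second the reserved
   value -1 (no action); a must-not-throw region (action index -2) would
   receive the reserved index 0.  sjlj_mark_call_sites below then stores
   the applicable index into the function context before each call.  */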
2004 static void
2005 sjlj_mark_call_sites (lp_info)
2006 struct sjlj_lp_info *lp_info;
2008 int last_call_site = -2;
2009 rtx insn, mem;
2011 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2013 struct eh_region *region;
2014 int this_call_site;
2015 rtx note, before, p;
2017 /* Reset value tracking at extended basic block boundaries. */
2018 if (GET_CODE (insn) == CODE_LABEL)
2019 last_call_site = -2;
2021 if (! INSN_P (insn))
2022 continue;
2024 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2025 if (!note)
2027 /* Calls (and trapping insns) without notes are outside any
2028 exception handling region in this function. Mark them as
2029 no action. */
2030 if (GET_CODE (insn) == CALL_INSN
2031 || (flag_non_call_exceptions
2032 && may_trap_p (PATTERN (insn))))
2033 this_call_site = -1;
2034 else
2035 continue;
2037 else
2039 /* Calls that are known to not throw need not be marked. */
2040 if (INTVAL (XEXP (note, 0)) <= 0)
2041 continue;
2043 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2044 this_call_site = lp_info[region->region_number].call_site_index;
2047 if (this_call_site == last_call_site)
2048 continue;
2050 /* Don't separate a call from its argument loads. */
2051 before = insn;
2052 if (GET_CODE (insn) == CALL_INSN)
2053 before = find_first_parameter_load (insn, NULL_RTX);
2055 start_sequence ();
2056 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2057 sjlj_fc_call_site_ofs);
2058 emit_move_insn (mem, GEN_INT (this_call_site));
2059 p = get_insns ();
2060 end_sequence ();
2062 emit_insn_before (p, before);
2063 last_call_site = this_call_site;
2067 /* Construct the SjLj_Function_Context. */
2069 static void
2070 sjlj_emit_function_enter (dispatch_label)
2071 rtx dispatch_label;
2073 rtx fn_begin, fc, mem, seq;
2075 fc = cfun->eh->sjlj_fc;
2077 start_sequence ();
2079 /* We're storing this libcall's address into memory instead of
2080 calling it directly. Thus, we must call assemble_external_libcall
2081 here, as we cannot depend on emit_library_call to do it for us. */
2082 assemble_external_libcall (eh_personality_libfunc);
2083 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2084 emit_move_insn (mem, eh_personality_libfunc);
2086 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2087 if (cfun->uses_eh_lsda)
2089 char buf[20];
2090 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2091 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2093 else
2094 emit_move_insn (mem, const0_rtx);
2096 #ifdef DONT_USE_BUILTIN_SETJMP
2098 rtx x, note;
2099 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2100 TYPE_MODE (integer_type_node), 1,
2101 plus_constant (XEXP (fc, 0),
2102 sjlj_fc_jbuf_ofs), Pmode);
2104 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2105 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2107 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2108 TYPE_MODE (integer_type_node), 0, dispatch_label);
2110 #else
2111 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2112 dispatch_label);
2113 #endif
2115 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2116 1, XEXP (fc, 0), Pmode);
2118 seq = get_insns ();
2119 end_sequence ();
2121 /* ??? Instead of doing this at the beginning of the function,
2122 do this in a block that is at loop level 0 and dominates all
2123 can_throw_internal instructions. */
2125 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2126 if (GET_CODE (fn_begin) == NOTE
2127 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2128 break;
2129 emit_insn_after (seq, fn_begin);
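/* For illustration only: roughly what the entry sequence constructed above
   does, expressed as C.  The struct layout, the setjmp spelling and the
   sketch_* names are simplified or invented; the real code may use the
   builtin setjmp pattern instead of the setjmp libcall.  */
#if 0
#include <setjmp.h>

struct sketch_sjlj_fc
{
  void *personality;
  void *lsda;
  int call_site;
  void *data[4];
  jmp_buf jbuf;
};

extern void sketch_unwind_sjlj_register (struct sketch_sjlj_fc *);
extern void sketch_dispatch (struct sketch_sjlj_fc *);

static void
sketch_function_enter (struct sketch_sjlj_fc *fc,
                       void *personality, void *lsda)
{
  fc->personality = personality;
  fc->lsda = lsda;

  if (setjmp (fc->jbuf) != 0)
    {
      /* A callee raised an exception and the unwinder landed back here:
         go select a post-landing pad (the dispatch_label code).  */
      sketch_dispatch (fc);
      return;
    }

  sketch_unwind_sjlj_register (fc);
}
#endif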
2132 /* Call back from expand_function_end to know where we should put
2133 the call to unwind_sjlj_unregister_libfunc if needed. */
2135 void
2136 sjlj_emit_function_exit_after (after)
2137 rtx after;
2139 cfun->eh->sjlj_exit_after = after;
2142 static void
2143 sjlj_emit_function_exit ()
2145 rtx seq;
2147 start_sequence ();
2149 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2150 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2152 seq = get_insns ();
2153 end_sequence ();
2155 /* ??? Really this can be done in any block at loop level 0 that
2156 post-dominates all can_throw_internal instructions. This is
2157 the last possible moment. */
2159 emit_insn_after (seq, cfun->eh->sjlj_exit_after);
2162 static void
2163 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2164 rtx dispatch_label;
2165 struct sjlj_lp_info *lp_info;
2167 int i, first_reachable;
2168 rtx mem, dispatch, seq, fc;
2170 fc = cfun->eh->sjlj_fc;
2172 start_sequence ();
2174 emit_label (dispatch_label);
2176 #ifndef DONT_USE_BUILTIN_SETJMP
2177 expand_builtin_setjmp_receiver (dispatch_label);
2178 #endif
2180 /* Load up dispatch index, exc_ptr and filter values from the
2181 function context. */
2182 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2183 sjlj_fc_call_site_ofs);
2184 dispatch = copy_to_reg (mem);
2186 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2187 if (word_mode != Pmode)
2189 #ifdef POINTERS_EXTEND_UNSIGNED
2190 mem = convert_memory_address (Pmode, mem);
2191 #else
2192 mem = convert_to_mode (Pmode, mem, 0);
2193 #endif
2195 emit_move_insn (cfun->eh->exc_ptr, mem);
2197 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2198 emit_move_insn (cfun->eh->filter, mem);
2200 /* Jump to one of the directly reachable regions. */
2201 /* ??? This really ought to be using a switch statement. */
2203 first_reachable = 0;
2204 for (i = cfun->eh->last_region_number; i > 0; --i)
2206 if (! lp_info[i].directly_reachable)
2207 continue;
2209 if (! first_reachable)
2211 first_reachable = i;
2212 continue;
2215 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2216 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2217 cfun->eh->region_array[i]->post_landing_pad);
2220 seq = get_insns ();
2221 end_sequence ();
2223 emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
2224 ->post_landing_pad));
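/* For illustration only: the dispatch code built above, as C pseudo-source.
   Labels and field names are invented; "dispatch" is the call-site value
   stored by sjlj_mark_call_sites before the throwing call, and the first
   reachable region is reached by falling through, since the whole sequence
   is emitted immediately before that region's post-landing pad.  */
#if 0
struct sketch_sjlj_fc { int call_site; void *data[2]; };
extern struct sketch_sjlj_fc sketch_fc;
extern void *sketch_exc_ptr;
extern long sketch_filter;

static void
sketch_dispatch_table (void)
{
  int dispatch = sketch_fc.call_site;
  sketch_exc_ptr = sketch_fc.data[0];
  sketch_filter = (long) sketch_fc.data[1];

  if (dispatch == 1)
    goto post_landing_pad_1;
  if (dispatch == 2)
    goto post_landing_pad_2;

  /* Fall through: the first reachable region's post-landing pad
     immediately follows this comparison chain.  */
 post_landing_pad_0: ;
  return;

 post_landing_pad_1: ;
  return;

 post_landing_pad_2: ;
  return;
}
#endif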
2227 static void
2228 sjlj_build_landing_pads ()
2230 struct sjlj_lp_info *lp_info;
2232 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2233 sizeof (struct sjlj_lp_info));
2235 if (sjlj_find_directly_reachable_regions (lp_info))
2237 rtx dispatch_label = gen_label_rtx ();
2239 cfun->eh->sjlj_fc
2240 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2241 int_size_in_bytes (sjlj_fc_type_node),
2242 TYPE_ALIGN (sjlj_fc_type_node));
2244 sjlj_assign_call_site_values (dispatch_label, lp_info);
2245 sjlj_mark_call_sites (lp_info);
2247 sjlj_emit_function_enter (dispatch_label);
2248 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2249 sjlj_emit_function_exit ();
2252 free (lp_info);
2255 void
2256 finish_eh_generation ()
2258 /* Nothing to do if no regions created. */
2259 if (cfun->eh->region_tree == NULL)
2260 return;
2262 /* The object here is to provide find_basic_blocks with detailed
2263 information (via reachable_handlers) on how exception control
2264 flows within the function. In this first pass, we can include
2265 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2266 regions, and hope that it will be useful in deleting unreachable
2267 handlers. Subsequently, we will generate landing pads which will
2268 connect many of the handlers, and then type information will not
2269 be effective. Still, this is a win over previous implementations. */
2271 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2273 /* These registers are used by the landing pads. Make sure they
2274 have been generated. */
2275 get_exception_pointer (cfun);
2276 get_exception_filter (cfun);
2278 /* Construct the landing pads. */
2280 assign_filter_values ();
2281 build_post_landing_pads ();
2282 connect_post_landing_pads ();
2283 if (USING_SJLJ_EXCEPTIONS)
2284 sjlj_build_landing_pads ();
2285 else
2286 dw2_build_landing_pads ();
2288 cfun->eh->built_landing_pads = 1;
2290 /* We've totally changed the CFG. Start over. */
2291 find_exception_handler_labels ();
2292 rebuild_jump_labels (get_insns ());
2293 find_basic_blocks (get_insns (), max_reg_num (), 0);
2294 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2297 static hashval_t
2298 ehl_hash (pentry)
2299 const PTR pentry;
2301 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2303 /* 2^32 * ((sqrt(5) - 1) / 2) */
2304 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2305 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
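/* For illustration only: a standalone demonstration of the multiplicative
   hash above.  Consecutive label numbers are spread across the hash space,
   which is what matters once htab reduces the value modulo the table
   size.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const unsigned int scaled_golden_ratio = 0x9e3779b9;
  unsigned int label_no;

  for (label_no = 1; label_no <= 4; ++label_no)
    printf ("label %u -> hash 0x%08x\n",
            label_no, label_no * scaled_golden_ratio);
  return 0;
}
#endif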
2308 static int
2309 ehl_eq (pentry, pdata)
2310 const PTR pentry;
2311 const PTR pdata;
2313 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2314 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2316 return entry->label == data->label;
2319 /* This section handles removing dead code for flow. */
2321 /* Remove LABEL from exception_handler_label_map. */
2323 static void
2324 remove_exception_handler_label (label)
2325 rtx label;
2327 struct ehl_map_entry **slot, tmp;
2329 /* If exception_handler_label_map was not built yet,
2330 there is nothing to do. */
2331 if (cfun->eh->exception_handler_label_map == NULL)
2332 return;
2334 tmp.label = label;
2335 slot = (struct ehl_map_entry **)
2336 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2337 if (! slot)
2338 abort ();
2340 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2343 /* Splice REGION from the region tree etc. */
2345 static void
2346 remove_eh_handler (region)
2347 struct eh_region *region;
2349 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2350 rtx lab;
2352 /* For the benefit of efficiently handling REG_EH_REGION notes,
2353 replace this region in the region array with its containing
2354 region. Note that previous region deletions may result in
2355 multiple copies of this region in the array, so we have a
2356 list of alternate numbers by which we are known. */
2358 outer = region->outer;
2359 cfun->eh->region_array[region->region_number] = outer;
2360 if (region->aka)
2362 int i;
2363 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2364 { cfun->eh->region_array[i] = outer; });
2367 if (outer)
2369 if (!outer->aka)
2370 outer->aka = BITMAP_GGC_ALLOC ();
2371 if (region->aka)
2372 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2373 bitmap_set_bit (outer->aka, region->region_number);
2376 if (cfun->eh->built_landing_pads)
2377 lab = region->landing_pad;
2378 else
2379 lab = region->label;
2380 if (lab)
2381 remove_exception_handler_label (lab);
2383 if (outer)
2384 pp_start = &outer->inner;
2385 else
2386 pp_start = &cfun->eh->region_tree;
2387 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2388 continue;
2389 *pp = region->next_peer;
2391 inner = region->inner;
2392 if (inner)
2394 for (p = inner; p->next_peer ; p = p->next_peer)
2395 p->outer = outer;
2396 p->outer = outer;
2398 p->next_peer = *pp_start;
2399 *pp_start = inner;
2402 if (region->type == ERT_CATCH)
2404 struct eh_region *try, *next, *prev;
2406 for (try = region->next_peer;
2407 try->type == ERT_CATCH;
2408 try = try->next_peer)
2409 continue;
2410 if (try->type != ERT_TRY)
2411 abort ();
2413 next = region->u.catch.next_catch;
2414 prev = region->u.catch.prev_catch;
2416 if (next)
2417 next->u.catch.prev_catch = prev;
2418 else
2419 try->u.try.last_catch = prev;
2420 if (prev)
2421 prev->u.catch.next_catch = next;
2422 else
2424 try->u.try.catch = next;
2425 if (! next)
2426 remove_eh_handler (try);
2431 /* LABEL heads a basic block that is about to be deleted. If this
2432 label corresponds to an exception region, we may be able to
2433 delete the region. */
2435 void
2436 maybe_remove_eh_handler (label)
2437 rtx label;
2439 struct ehl_map_entry **slot, tmp;
2440 struct eh_region *region;
2442 /* ??? After generating landing pads, it's not so simple to determine
2443 if the region data is completely unused. One must examine the
2444 landing pad and the post landing pad, and whether an inner try block
2445 is referencing the catch handlers directly. */
2446 if (cfun->eh->built_landing_pads)
2447 return;
2449 tmp.label = label;
2450 slot = (struct ehl_map_entry **)
2451 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2452 if (! slot)
2453 return;
2454 region = (*slot)->region;
2455 if (! region)
2456 return;
2458 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2459 because there is no path to the fallback call to terminate.
2460 But the region continues to affect call-site data until there
2461 are no more contained calls, which we don't see here. */
2462 if (region->type == ERT_MUST_NOT_THROW)
2464 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2465 region->label = NULL_RTX;
2467 else
2468 remove_eh_handler (region);
2471 /* Invokes CALLBACK for every exception handler label. Only used by old
2472 loop hackery; should not be used by new code. */
2474 void
2475 for_each_eh_label (callback)
2476 void (*callback) PARAMS ((rtx));
2478 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2479 (void *)callback);
2482 static int
2483 for_each_eh_label_1 (pentry, data)
2484 PTR *pentry;
2485 PTR data;
2487 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2488 void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;
2490 (*callback) (entry->label);
2491 return 1;
2494 /* This section describes CFG exception edges for flow. */
2496 /* For communicating between calls to reachable_next_level. */
2497 struct reachable_info GTY(())
2499 tree types_caught;
2500 tree types_allowed;
2501 rtx handlers;
2504 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2505 base class of TYPE, is in HANDLED. */
2507 static int
2508 check_handled (handled, type)
2509 tree handled, type;
2511 tree t;
2513 /* We can check for exact matches without front-end help. */
2514 if (! lang_eh_type_covers)
2516 for (t = handled; t ; t = TREE_CHAIN (t))
2517 if (TREE_VALUE (t) == type)
2518 return 1;
2520 else
2522 for (t = handled; t ; t = TREE_CHAIN (t))
2523 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2524 return 1;
2527 return 0;
2530 /* A subroutine of reachable_next_level. If we are collecting a list
2531 of handlers, add one. After landing pad generation, reference
2532 the landing pad instead of the handlers themselves. Further, the handlers are
2533 all wired together, so by referencing one, we've got them all.
2534 Before landing pad generation we reference each handler individually.
2536 LP_REGION contains the landing pad; REGION is the handler. */
2538 static void
2539 add_reachable_handler (info, lp_region, region)
2540 struct reachable_info *info;
2541 struct eh_region *lp_region;
2542 struct eh_region *region;
2544 if (! info)
2545 return;
2547 if (cfun->eh->built_landing_pads)
2549 if (! info->handlers)
2550 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2552 else
2553 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2556 /* Process one level of exception regions for reachability.
2557 If TYPE_THROWN is non-null, then it is the *exact* type being
2558 propagated. If INFO is non-null, then collect handler labels
2559 and caught/allowed type information between invocations. */
2561 static enum reachable_code
2562 reachable_next_level (region, type_thrown, info)
2563 struct eh_region *region;
2564 tree type_thrown;
2565 struct reachable_info *info;
2567 switch (region->type)
2569 case ERT_CLEANUP:
2570 /* Before landing-pad generation, we model control flow
2571 directly to the individual handlers. In this way we can
2572 see that catch handler types may shadow one another. */
2573 add_reachable_handler (info, region, region);
2574 return RNL_MAYBE_CAUGHT;
2576 case ERT_TRY:
2578 struct eh_region *c;
2579 enum reachable_code ret = RNL_NOT_CAUGHT;
2581 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2583 /* A catch-all handler ends the search. */
2584 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2585 to be run as well. */
2586 if (c->u.catch.type_list == NULL)
2588 add_reachable_handler (info, region, c);
2589 return RNL_CAUGHT;
2592 if (type_thrown)
2594 /* If we have at least one type match, end the search. */
2595 tree tp_node = c->u.catch.type_list;
2597 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2599 tree type = TREE_VALUE (tp_node);
2601 if (type == type_thrown
2602 || (lang_eh_type_covers
2603 && (*lang_eh_type_covers) (type, type_thrown)))
2605 add_reachable_handler (info, region, c);
2606 return RNL_CAUGHT;
2610 /* If we have definitive information about a match failure,
2611 the catch won't trigger. */
2612 if (lang_eh_type_covers)
2613 return RNL_NOT_CAUGHT;
2616 /* At this point, we either don't know what type is thrown or
2617 don't have front-end assistance to help decide whether it is
2618 covered by one of the types in the list for this region.
2620 We'd then like to add this region to the list of reachable
2621 handlers since it is indeed potentially reachable based on the
2622 information we have.
2624 Actually, this handler is for sure not reachable if all the
2625 types it matches have already been caught. That is, it is only
2626 potentially reachable if at least one of the types it catches
2627 has not been previously caught. */
2629 if (! info)
2630 ret = RNL_MAYBE_CAUGHT;
2631 else
2633 tree tp_node = c->u.catch.type_list;
2634 bool maybe_reachable = false;
2636 /* Compute the potential reachability of this handler and
2637 update the list of types caught at the same time. */
2638 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2640 tree type = TREE_VALUE (tp_node);
2642 if (! check_handled (info->types_caught, type))
2644 info->types_caught
2645 = tree_cons (NULL, type, info->types_caught);
2647 maybe_reachable = true;
2651 if (maybe_reachable)
2653 add_reachable_handler (info, region, c);
2655 /* ??? If the catch type is a base class of every allowed
2656 type, then we know we can stop the search. */
2657 ret = RNL_MAYBE_CAUGHT;
2662 return ret;
2665 case ERT_ALLOWED_EXCEPTIONS:
2666 /* An empty list of types definitely ends the search. */
2667 if (region->u.allowed.type_list == NULL_TREE)
2669 add_reachable_handler (info, region, region);
2670 return RNL_CAUGHT;
2673 /* Collect a list of lists of allowed types for use in detecting
2674 when a catch may be transformed into a catch-all. */
2675 if (info)
2676 info->types_allowed = tree_cons (NULL_TREE,
2677 region->u.allowed.type_list,
2678 info->types_allowed);
2680 /* If we have definitive information about the type hierarchy,
2681 then we can tell if the thrown type will pass through the
2682 filter. */
2683 if (type_thrown && lang_eh_type_covers)
2685 if (check_handled (region->u.allowed.type_list, type_thrown))
2686 return RNL_NOT_CAUGHT;
2687 else
2689 add_reachable_handler (info, region, region);
2690 return RNL_CAUGHT;
2694 add_reachable_handler (info, region, region);
2695 return RNL_MAYBE_CAUGHT;
2697 case ERT_CATCH:
2698 /* Catch regions are handled by their controlling try region. */
2699 return RNL_NOT_CAUGHT;
2701 case ERT_MUST_NOT_THROW:
2702 /* Here we end our search, since no exceptions may propagate.
2703 If we've touched down at some landing pad previously, then the
2704 explicit function call we generated may be used. Otherwise
2705 the call is made by the runtime. */
2706 if (info && info->handlers)
2708 add_reachable_handler (info, region, region);
2709 return RNL_CAUGHT;
2711 else
2712 return RNL_BLOCKED;
2714 case ERT_THROW:
2715 case ERT_FIXUP:
2716 case ERT_UNKNOWN:
2717 /* Shouldn't see these here. */
2718 break;
2721 abort ();
2724 /* Retrieve a list of labels of exception handlers which can be
2725 reached by a given insn. */
2728 reachable_handlers (insn)
2729 rtx insn;
2731 struct reachable_info info;
2732 struct eh_region *region;
2733 tree type_thrown;
2734 int region_number;
2736 if (GET_CODE (insn) == JUMP_INSN
2737 && GET_CODE (PATTERN (insn)) == RESX)
2738 region_number = XINT (PATTERN (insn), 0);
2739 else
2741 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2742 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2743 return NULL;
2744 region_number = INTVAL (XEXP (note, 0));
2747 memset (&info, 0, sizeof (info));
2749 region = cfun->eh->region_array[region_number];
2751 type_thrown = NULL_TREE;
2752 if (GET_CODE (insn) == JUMP_INSN
2753 && GET_CODE (PATTERN (insn)) == RESX)
2755 /* A RESX leaves a region instead of entering it. Thus the
2756 region itself may have been deleted out from under us. */
2757 if (region == NULL)
2758 return NULL;
2759 region = region->outer;
2761 else if (region->type == ERT_THROW)
2763 type_thrown = region->u.throw.type;
2764 region = region->outer;
2767 while (region)
2769 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2770 break;
2771 /* If we have processed one cleanup, there is no point in
2772 processing any more of them. Each cleanup will have an edge
2773 to the next outer cleanup region, so the flow graph will be
2774 accurate. */
2775 if (region->type == ERT_CLEANUP)
2776 region = region->u.cleanup.prev_try;
2777 else
2778 region = region->outer;
2781 return info.handlers;
2784 /* Determine if the given INSN can throw an exception that is caught
2785 within the function. */
2787 bool
2788 can_throw_internal (insn)
2789 rtx insn;
2791 struct eh_region *region;
2792 tree type_thrown;
2793 rtx note;
2795 if (! INSN_P (insn))
2796 return false;
2798 if (GET_CODE (insn) == INSN
2799 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2800 insn = XVECEXP (PATTERN (insn), 0, 0);
2802 if (GET_CODE (insn) == CALL_INSN
2803 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2805 int i;
2806 for (i = 0; i < 3; ++i)
2808 rtx sub = XEXP (PATTERN (insn), i);
2809 for (; sub ; sub = NEXT_INSN (sub))
2810 if (can_throw_internal (sub))
2811 return true;
2813 return false;
2816 /* Every insn that might throw has an EH_REGION note. */
2817 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2818 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2819 return false;
2821 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2823 type_thrown = NULL_TREE;
2824 if (region->type == ERT_THROW)
2826 type_thrown = region->u.throw.type;
2827 region = region->outer;
2830 /* If this exception is ignored by each and every containing region,
2831 then control passes straight out. The runtime may handle some
2832 regions, which also do not require processing internally. */
2833 for (; region; region = region->outer)
2835 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2836 if (how == RNL_BLOCKED)
2837 return false;
2838 if (how != RNL_NOT_CAUGHT)
2839 return true;
2842 return false;
2845 /* Determine if the given INSN can throw an exception that is
2846 visible outside the function. */
2848 bool
2849 can_throw_external (insn)
2850 rtx insn;
2852 struct eh_region *region;
2853 tree type_thrown;
2854 rtx note;
2856 if (! INSN_P (insn))
2857 return false;
2859 if (GET_CODE (insn) == INSN
2860 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2861 insn = XVECEXP (PATTERN (insn), 0, 0);
2863 if (GET_CODE (insn) == CALL_INSN
2864 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2866 int i;
2867 for (i = 0; i < 3; ++i)
2869 rtx sub = XEXP (PATTERN (insn), i);
2870 for (; sub ; sub = NEXT_INSN (sub))
2871 if (can_throw_external (sub))
2872 return true;
2874 return false;
2877 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2878 if (!note)
2880 /* Calls (and trapping insns) without notes are outside any
2881 exception handling region in this function. We have to
2882 assume it might throw. Given that the front end and middle
2883 ends mark known NOTHROW functions, this isn't so wildly
2884 inaccurate. */
2885 return (GET_CODE (insn) == CALL_INSN
2886 || (flag_non_call_exceptions
2887 && may_trap_p (PATTERN (insn))));
2889 if (INTVAL (XEXP (note, 0)) <= 0)
2890 return false;
2892 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2894 type_thrown = NULL_TREE;
2895 if (region->type == ERT_THROW)
2897 type_thrown = region->u.throw.type;
2898 region = region->outer;
2901 /* If the exception is caught or blocked by any containing region,
2902 then it is not seen by any calling function. */
2903 for (; region ; region = region->outer)
2904 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2905 return false;
2907 return true;
2910 /* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
2912 void
2913 set_nothrow_function_flags ()
2915 rtx insn;
2917 current_function_nothrow = 1;
2919 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2920 something that can throw an exception. We specifically exempt
2921 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2922 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2923 is optimistic. */
2925 cfun->all_throwers_are_sibcalls = 1;
2927 if (! flag_exceptions)
2928 return;
2930 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2931 if (can_throw_external (insn))
2933 current_function_nothrow = 0;
2935 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2937 cfun->all_throwers_are_sibcalls = 0;
2938 return;
2942 for (insn = current_function_epilogue_delay_list; insn;
2943 insn = XEXP (insn, 1))
2944 if (can_throw_external (insn))
2946 current_function_nothrow = 0;
2948 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2950 cfun->all_throwers_are_sibcalls = 0;
2951 return;
2957 /* Various hooks for unwind library. */
2959 /* Do any necessary initialization to access arbitrary stack frames.
2960 On the SPARC, this means flushing the register windows. */
2962 void
2963 expand_builtin_unwind_init ()
2965 /* Set this so all the registers get saved in our frame; we need to be
2966 able to copy the saved values for any registers from frames we unwind. */
2967 current_function_has_nonlocal_label = 1;
2969 #ifdef SETUP_FRAME_ADDRESSES
2970 SETUP_FRAME_ADDRESSES ();
2971 #endif
2975 expand_builtin_eh_return_data_regno (arglist)
2976 tree arglist;
2978 tree which = TREE_VALUE (arglist);
2979 unsigned HOST_WIDE_INT iwhich;
2981 if (TREE_CODE (which) != INTEGER_CST)
2983 error ("argument of `__builtin_eh_return_regno' must be constant");
2984 return constm1_rtx;
2987 iwhich = tree_low_cst (which, 1);
2988 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2989 if (iwhich == INVALID_REGNUM)
2990 return constm1_rtx;
2992 #ifdef DWARF_FRAME_REGNUM
2993 iwhich = DWARF_FRAME_REGNUM (iwhich);
2994 #else
2995 iwhich = DBX_REGISTER_NUMBER (iwhich);
2996 #endif
2998 return GEN_INT (iwhich);
3001 /* Given a value extracted from the return address register or stack slot,
3002 return the actual address encoded in that value. */
3005 expand_builtin_extract_return_addr (addr_tree)
3006 tree addr_tree;
3008 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3010 if (GET_MODE (addr) != Pmode
3011 && GET_MODE (addr) != VOIDmode)
3013 #ifdef POINTERS_EXTEND_UNSIGNED
3014 addr = convert_memory_address (Pmode, addr);
3015 #else
3016 addr = convert_to_mode (Pmode, addr, 0);
3017 #endif
3020 /* First mask out any unwanted bits. */
3021 #ifdef MASK_RETURN_ADDR
3022 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3023 #endif
3025 /* Then adjust to find the real return address. */
3026 #if defined (RETURN_ADDR_OFFSET)
3027 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3028 #endif
3030 return addr;
3033 /* Given an actual address in addr_tree, do any necessary encoding
3034 and return the value to be stored in the return address register or
3035 stack slot so the epilogue will return to that address. */
3038 expand_builtin_frob_return_addr (addr_tree)
3039 tree addr_tree;
3041 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3043 #ifdef POINTERS_EXTEND_UNSIGNED
3044 if (GET_MODE (addr) != Pmode)
3045 addr = convert_memory_address (Pmode, addr);
3046 #endif
3048 #ifdef RETURN_ADDR_OFFSET
3049 addr = force_reg (Pmode, addr);
3050 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3051 #endif
3053 return addr;
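/* For illustration only: how the two helpers above relate, for a
   hypothetical target where the saved return address carries a low flag
   bit and points two bytes past the real return location.  The SKETCH_*
   macro values are invented; real targets define MASK_RETURN_ADDR and
   RETURN_ADDR_OFFSET as needed.  */
#if 0
#include <stdio.h>

#define SKETCH_MASK_RETURN_ADDR   (~(unsigned long) 1)  /* strip flag bit */
#define SKETCH_RETURN_ADDR_OFFSET (-2)                  /* stored = real + 2 */

static unsigned long
sketch_extract_return_addr (unsigned long stored)
{
  stored &= SKETCH_MASK_RETURN_ADDR;            /* mask unwanted bits */
  return stored + SKETCH_RETURN_ADDR_OFFSET;    /* adjust to real address */
}

static unsigned long
sketch_frob_return_addr (unsigned long real)
{
  return real - SKETCH_RETURN_ADDR_OFFSET;      /* the inverse adjustment */
}

int
main (void)
{
  unsigned long real = 0x1000;
  /* Prints 0x1000: frob and extract are inverses (the flag bit aside).  */
  printf ("%#lx\n",
          sketch_extract_return_addr (sketch_frob_return_addr (real)));
  return 0;
}
#endif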
3056 /* Set up the epilogue with the magic bits we'll need to return to the
3057 exception handler. */
3059 void
3060 expand_builtin_eh_return (stackadj_tree, handler_tree)
3061 tree stackadj_tree, handler_tree;
3063 rtx stackadj, handler;
3065 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3066 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3068 #ifdef POINTERS_EXTEND_UNSIGNED
3069 if (GET_MODE (stackadj) != Pmode)
3070 stackadj = convert_memory_address (Pmode, stackadj);
3072 if (GET_MODE (handler) != Pmode)
3073 handler = convert_memory_address (Pmode, handler);
3074 #endif
3076 if (! cfun->eh->ehr_label)
3078 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3079 cfun->eh->ehr_handler = copy_to_reg (handler);
3080 cfun->eh->ehr_label = gen_label_rtx ();
3082 else
3084 if (stackadj != cfun->eh->ehr_stackadj)
3085 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3086 if (handler != cfun->eh->ehr_handler)
3087 emit_move_insn (cfun->eh->ehr_handler, handler);
3090 emit_jump (cfun->eh->ehr_label);
3093 void
3094 expand_eh_return ()
3096 rtx sa, ra, around_label;
3098 if (! cfun->eh->ehr_label)
3099 return;
3101 sa = EH_RETURN_STACKADJ_RTX;
3102 if (! sa)
3104 error ("__builtin_eh_return not supported on this target");
3105 return;
3108 current_function_calls_eh_return = 1;
3110 around_label = gen_label_rtx ();
3111 emit_move_insn (sa, const0_rtx);
3112 emit_jump (around_label);
3114 emit_label (cfun->eh->ehr_label);
3115 clobber_return_register ();
3117 #ifdef HAVE_eh_return
3118 if (HAVE_eh_return)
3119 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3120 else
3121 #endif
3123 ra = EH_RETURN_HANDLER_RTX;
3124 if (! ra)
3126 error ("__builtin_eh_return not supported on this target");
3127 ra = gen_reg_rtx (Pmode);
3130 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3131 emit_move_insn (ra, cfun->eh->ehr_handler);
3134 emit_label (around_label);
3137 /* In the following functions, we represent entries in the action table
3138 as 1-based indices. Special cases are:
3140 0: null action record, non-null landing pad; implies cleanups
3141 -1: null action record, null landing pad; implies no action
3142 -2: no call-site entry; implies must_not_throw
3143 -3: we have yet to process outer regions
3145 Further, no special cases apply to the "next" field of the record.
3146 For next, 0 means end of list. */
3148 struct action_record
3150 int offset;
3151 int filter;
3152 int next;
3155 static int
3156 action_record_eq (pentry, pdata)
3157 const PTR pentry;
3158 const PTR pdata;
3160 const struct action_record *entry = (const struct action_record *) pentry;
3161 const struct action_record *data = (const struct action_record *) pdata;
3162 return entry->filter == data->filter && entry->next == data->next;
3165 static hashval_t
3166 action_record_hash (pentry)
3167 const PTR pentry;
3169 const struct action_record *entry = (const struct action_record *) pentry;
3170 return entry->next * 1009 + entry->filter;
3173 static int
3174 add_action_record (ar_hash, filter, next)
3175 htab_t ar_hash;
3176 int filter, next;
3178 struct action_record **slot, *new, tmp;
3180 tmp.filter = filter;
3181 tmp.next = next;
3182 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3184 if ((new = *slot) == NULL)
3186 new = (struct action_record *) xmalloc (sizeof (*new));
3187 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3188 new->filter = filter;
3189 new->next = next;
3190 *slot = new;
3192 /* The filter value goes in untouched. The link to the next
3193 record is a "self-relative" byte offset, or zero to indicate
3194 that there is no next record. So convert the absolute 1-based
3195 indices we've been carrying around into a displacement. */
3197 push_sleb128 (&cfun->eh->action_record_data, filter);
3198 if (next)
3199 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3200 push_sleb128 (&cfun->eh->action_record_data, next);
3203 return new->offset;
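/* For illustration only: a worked example of the offset bookkeeping above,
   using a plain int array in place of the varray and ignoring the
   (s)leb128 byte encoding itself (see push_sleb128 below); only the
   1-based offsets and the self-relative link are tracked.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int buf[8];
  int used = 0, offset_a, offset_b, next;

  /* Record A = (filter 1, no next): 1-based offset is used + 1 = 1.  */
  offset_a = used + 1;
  buf[used++] = 1;                  /* filter */
  buf[used++] = 0;                  /* next: end of list */

  /* Record B = (filter 2, next = A): 1-based offset is used + 1 = 3.  */
  offset_b = used + 1;
  buf[used++] = 2;                  /* filter */
  next = offset_a - (used + 1);     /* 1 - 4 = -3, the self-relative link */
  buf[used++] = next;

  /* The link lives at 1-based index 4, and 4 + (-3) = 1, which is record
     A's offset, so following the displacement finds A.  */
  printf ("A at %d, B at %d, link %d\n", offset_a, offset_b, next);
  return 0;
}
#endif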
3206 static int
3207 collect_one_action_chain (ar_hash, region)
3208 htab_t ar_hash;
3209 struct eh_region *region;
3211 struct eh_region *c;
3212 int next;
3214 /* If we've reached the top of the region chain, then we have
3215 no actions, and require no landing pad. */
3216 if (region == NULL)
3217 return -1;
3219 switch (region->type)
3221 case ERT_CLEANUP:
3222 /* A cleanup adds a zero filter to the beginning of the chain, but
3223 there are special cases to look out for. If there are *only*
3224 cleanups along a path, then it compresses to a zero action.
3225 Further, if there are multiple cleanups along a path, we only
3226 need to represent one of them, as that is enough to trigger
3227 entry to the landing pad at runtime. */
3228 next = collect_one_action_chain (ar_hash, region->outer);
3229 if (next <= 0)
3230 return 0;
3231 for (c = region->outer; c ; c = c->outer)
3232 if (c->type == ERT_CLEANUP)
3233 return next;
3234 return add_action_record (ar_hash, 0, next);
3236 case ERT_TRY:
3237 /* Process the associated catch regions in reverse order.
3238 If there's a catch-all handler, then we don't need to
3239 search outer regions. Use a magic -3 value to record
3240 that we haven't done the outer search. */
3241 next = -3;
3242 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3244 if (c->u.catch.type_list == NULL)
3246 /* Retrieve the filter from the head of the filter list
3247 where we have stored it (see assign_filter_values). */
3248 int filter
3249 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3251 next = add_action_record (ar_hash, filter, 0);
3253 else
3255 /* Once the outer search is done, trigger an action record for
3256 each filter we have. */
3257 tree flt_node;
3259 if (next == -3)
3261 next = collect_one_action_chain (ar_hash, region->outer);
3263 /* If there is no next action, terminate the chain. */
3264 if (next == -1)
3265 next = 0;
3266 /* If all outer actions are cleanups or must_not_throw,
3267 we'll have no action record for it, since we had wanted
3268 to encode these states in the call-site record directly.
3269 Add a cleanup action to the chain to catch these. */
3270 else if (next <= 0)
3271 next = add_action_record (ar_hash, 0, 0);
3274 flt_node = c->u.catch.filter_list;
3275 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3277 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3278 next = add_action_record (ar_hash, filter, next);
3282 return next;
3284 case ERT_ALLOWED_EXCEPTIONS:
3285 /* An exception specification adds its filter to the
3286 beginning of the chain. */
3287 next = collect_one_action_chain (ar_hash, region->outer);
3288 return add_action_record (ar_hash, region->u.allowed.filter,
3289 next < 0 ? 0 : next);
3291 case ERT_MUST_NOT_THROW:
3292 /* A must-not-throw region with no inner handlers or cleanups
3293 requires no call-site entry. Note that this differs from
3294 the no handler or cleanup case in that we do require an lsda
3295 to be generated. Return a magic -2 value to record this. */
3296 return -2;
3298 case ERT_CATCH:
3299 case ERT_THROW:
3300 /* CATCH regions are handled in TRY above. THROW regions are
3301 for optimization information only and produce no output. */
3302 return collect_one_action_chain (ar_hash, region->outer);
3304 default:
3305 abort ();
3309 static int
3310 add_call_site (landing_pad, action)
3311 rtx landing_pad;
3312 int action;
3314 struct call_site_record *data = cfun->eh->call_site_data;
3315 int used = cfun->eh->call_site_data_used;
3316 int size = cfun->eh->call_site_data_size;
3318 if (used >= size)
3320 size = (size ? size * 2 : 64);
3321 data = (struct call_site_record *)
3322 ggc_realloc (data, sizeof (*data) * size);
3323 cfun->eh->call_site_data = data;
3324 cfun->eh->call_site_data_size = size;
3327 data[used].landing_pad = landing_pad;
3328 data[used].action = action;
3330 cfun->eh->call_site_data_used = used + 1;
3332 return used + call_site_base;
3335 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3336 The new note numbers will not refer to region numbers, but
3337 instead to call site entries. */
3339 void
3340 convert_to_eh_region_ranges ()
3342 rtx insn, iter, note;
3343 htab_t ar_hash;
3344 int last_action = -3;
3345 rtx last_action_insn = NULL_RTX;
3346 rtx last_landing_pad = NULL_RTX;
3347 rtx first_no_action_insn = NULL_RTX;
3348 int call_site = 0;
3350 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3351 return;
3353 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3355 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3357 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3358 if (INSN_P (iter))
3360 struct eh_region *region;
3361 int this_action;
3362 rtx this_landing_pad;
3364 insn = iter;
3365 if (GET_CODE (insn) == INSN
3366 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3367 insn = XVECEXP (PATTERN (insn), 0, 0);
3369 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3370 if (!note)
3372 if (! (GET_CODE (insn) == CALL_INSN
3373 || (flag_non_call_exceptions
3374 && may_trap_p (PATTERN (insn)))))
3375 continue;
3376 this_action = -1;
3377 region = NULL;
3379 else
3381 if (INTVAL (XEXP (note, 0)) <= 0)
3382 continue;
3383 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3384 this_action = collect_one_action_chain (ar_hash, region);
3387 /* The existence of catch handlers or must-not-throw regions
3388 implies that an lsda is needed (even if empty). */
3389 if (this_action != -1)
3390 cfun->uses_eh_lsda = 1;
3392 /* Delay creation of region notes for no-action regions
3393 until we're sure that an lsda will be required. */
3394 else if (last_action == -3)
3396 first_no_action_insn = iter;
3397 last_action = -1;
3400 /* Cleanups and handlers may share action chains but not
3401 landing pads. Collect the landing pad for this region. */
3402 if (this_action >= 0)
3404 struct eh_region *o;
3405 for (o = region; ! o->landing_pad ; o = o->outer)
3406 continue;
3407 this_landing_pad = o->landing_pad;
3409 else
3410 this_landing_pad = NULL_RTX;
3412 /* Differing actions or landing pads imply a change in call-site
3413 info, which implies some EH_REGION note should be emitted. */
3414 if (last_action != this_action
3415 || last_landing_pad != this_landing_pad)
3417 /* If we'd not seen a previous action (-3) or the previous
3418 action was must-not-throw (-2), then we do not need an
3419 end note. */
3420 if (last_action >= -1)
3422 /* If we delayed the creation of the begin, do it now. */
3423 if (first_no_action_insn)
3425 call_site = add_call_site (NULL_RTX, 0);
3426 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3427 first_no_action_insn);
3428 NOTE_EH_HANDLER (note) = call_site;
3429 first_no_action_insn = NULL_RTX;
3432 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3433 last_action_insn);
3434 NOTE_EH_HANDLER (note) = call_site;
3437 /* If the new action is must-not-throw, then no region notes
3438 are created. */
3439 if (this_action >= -1)
3441 call_site = add_call_site (this_landing_pad,
3442 this_action < 0 ? 0 : this_action);
3443 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3444 NOTE_EH_HANDLER (note) = call_site;
3447 last_action = this_action;
3448 last_landing_pad = this_landing_pad;
3450 last_action_insn = iter;
3453 if (last_action >= -1 && ! first_no_action_insn)
3455 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3456 NOTE_EH_HANDLER (note) = call_site;
3459 htab_delete (ar_hash);
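/* For illustration only: the note-emission logic above reduced to its core,
   ignoring the must-not-throw and delayed no-action cases.  Consecutive
   "instructions" (here just an array of call-site values) that share a
   value are wrapped in one BEG/END pair.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int cs[] = { 1, 1, 2, 2, 2, 1 };              /* per-insn call sites */
  int n = sizeof cs / sizeof cs[0];
  int i, last = -3;                             /* -3: nothing seen yet */

  for (i = 0; i < n; ++i)
    if (cs[i] != last)
      {
        if (last != -3)
          printf ("NOTE_INSN_EH_REGION_END %d\n", last);
        printf ("NOTE_INSN_EH_REGION_BEG %d\n", cs[i]);
        last = cs[i];
      }
  if (last != -3)
    printf ("NOTE_INSN_EH_REGION_END %d\n", last);
  return 0;
}
#endif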
3463 static void
3464 push_uleb128 (data_area, value)
3465 varray_type *data_area;
3466 unsigned int value;
3470 unsigned char byte = value & 0x7f;
3471 value >>= 7;
3472 if (value)
3473 byte |= 0x80;
3474 VARRAY_PUSH_UCHAR (*data_area, byte);
3476 while (value);
3479 static void
3480 push_sleb128 (data_area, value)
3481 varray_type *data_area;
3482 int value;
3484 unsigned char byte;
3485 int more;
3489 byte = value & 0x7f;
3490 value >>= 7;
3491 more = ! ((value == 0 && (byte & 0x40) == 0)
3492 || (value == -1 && (byte & 0x40) != 0));
3493 if (more)
3494 byte |= 0x80;
3495 VARRAY_PUSH_UCHAR (*data_area, byte);
3497 while (more);
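/* For illustration only: standalone copies of the two encoders above with
   worked examples: 624485 encodes as e5 8e 26 (the unsigned example used
   in the DWARF specification) and -624485 as 9b f1 59.  Like the code
   above, the signed case assumes an arithmetic right shift of negative
   values.  */
#if 0
#include <stdio.h>

static void
sketch_uleb128 (unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;                   /* more bytes follow */
      printf ("%02x ", byte);
    }
  while (value);
  printf ("\n");
}

static void
sketch_sleb128 (int value)
{
  int more;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;                   /* more bytes follow */
      printf ("%02x ", byte);
    }
  while (more);
  printf ("\n");
}

int
main (void)
{
  sketch_uleb128 (624485);              /* e5 8e 26 */
  sketch_sleb128 (-624485);             /* 9b f1 59 */
  return 0;
}
#endif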
3501 #ifndef HAVE_AS_LEB128
3502 static int
3503 dw2_size_of_call_site_table ()
3505 int n = cfun->eh->call_site_data_used;
3506 int size = n * (4 + 4 + 4);
3507 int i;
3509 for (i = 0; i < n; ++i)
3511 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3512 size += size_of_uleb128 (cs->action);
3515 return size;
3518 static int
3519 sjlj_size_of_call_site_table ()
3521 int n = cfun->eh->call_site_data_used;
3522 int size = 0;
3523 int i;
3525 for (i = 0; i < n; ++i)
3527 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3528 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3529 size += size_of_uleb128 (cs->action);
3532 return size;
3534 #endif
3536 static void
3537 dw2_output_call_site_table ()
3539 const char *const function_start_lab
3540 = IDENTIFIER_POINTER (current_function_func_begin_label);
3541 int n = cfun->eh->call_site_data_used;
3542 int i;
3544 for (i = 0; i < n; ++i)
3546 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3547 char reg_start_lab[32];
3548 char reg_end_lab[32];
3549 char landing_pad_lab[32];
3551 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3552 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3554 if (cs->landing_pad)
3555 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3556 CODE_LABEL_NUMBER (cs->landing_pad));
3558 /* ??? Perhaps use insn length scaling if the assembler supports
3559 generic arithmetic. */
3560 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3561 data4 if the function is small enough. */
3562 #ifdef HAVE_AS_LEB128
3563 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3564 "region %d start", i);
3565 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3566 "length");
3567 if (cs->landing_pad)
3568 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3569 "landing pad");
3570 else
3571 dw2_asm_output_data_uleb128 (0, "landing pad");
3572 #else
3573 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3574 "region %d start", i);
3575 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3576 if (cs->landing_pad)
3577 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3578 "landing pad");
3579 else
3580 dw2_asm_output_data (4, 0, "landing pad");
3581 #endif
3582 dw2_asm_output_data_uleb128 (cs->action, "action");
3585 call_site_base += n;
3588 static void
3589 sjlj_output_call_site_table ()
3591 int n = cfun->eh->call_site_data_used;
3592 int i;
3594 for (i = 0; i < n; ++i)
3596 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3598 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3599 "region %d landing pad", i);
3600 dw2_asm_output_data_uleb128 (cs->action, "action");
3603 call_site_base += n;
3606 /* Tell assembler to switch to the section for the exception handling
3607 table. */
3609 void
3610 default_exception_section ()
3612 if (targetm.have_named_sections)
3614 int flags;
3615 #ifdef HAVE_LD_RO_RW_SECTION_MIXING
3616 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3618 flags = (! flag_pic
3619 || ((tt_format & 0x70) != DW_EH_PE_absptr
3620 && (tt_format & 0x70) != DW_EH_PE_aligned))
3621 ? 0 : SECTION_WRITE;
3622 #else
3623 flags = SECTION_WRITE;
3624 #endif
3625 named_section_flags (".gcc_except_table", flags);
3627 else if (flag_pic)
3628 data_section ();
3629 else
3630 readonly_data_section ();
3633 void
3634 output_function_exception_table ()
3636 int tt_format, cs_format, lp_format, i, n;
3637 #ifdef HAVE_AS_LEB128
3638 char ttype_label[32];
3639 char cs_after_size_label[32];
3640 char cs_end_label[32];
3641 #else
3642 int call_site_len;
3643 #endif
3644 int have_tt_data;
3645 int tt_format_size = 0;
3647 /* Not all functions need anything. */
3648 if (! cfun->uses_eh_lsda)
3649 return;
3651 #ifdef IA64_UNWIND_INFO
3652 fputs ("\t.personality\t", asm_out_file);
3653 output_addr_const (asm_out_file, eh_personality_libfunc);
3654 fputs ("\n\t.handlerdata\n", asm_out_file);
3655 /* Note that varasm still thinks we're in the function's code section.
3656 The ".endp" directive that will immediately follow will take us back. */
3657 #else
3658 (*targetm.asm_out.exception_section) ();
3659 #endif
3661 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3662 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3664 /* Indicate the format of the @TType entries. */
3665 if (! have_tt_data)
3666 tt_format = DW_EH_PE_omit;
3667 else
3669 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3670 #ifdef HAVE_AS_LEB128
3671 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3672 current_function_funcdef_no);
3673 #endif
3674 tt_format_size = size_of_encoded_value (tt_format);
3676 assemble_align (tt_format_size * BITS_PER_UNIT);
3679 (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
3680 current_function_funcdef_no);
3682 /* The LSDA header. */
3684 /* Indicate the format of the landing pad start pointer. An omitted
3685 field implies @LPStart == @Start. */
3686 /* Currently we always put @LPStart == @Start. This field would
3687 be most useful in moving the landing pads completely out of
3688 line to another section, but it could also be used to minimize
3689 the size of uleb128 landing pad offsets. */
3690 lp_format = DW_EH_PE_omit;
3691 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3692 eh_data_format_name (lp_format));
3694 /* @LPStart pointer would go here. */
3696 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3697 eh_data_format_name (tt_format));
3699 #ifndef HAVE_AS_LEB128
3700 if (USING_SJLJ_EXCEPTIONS)
3701 call_site_len = sjlj_size_of_call_site_table ();
3702 else
3703 call_site_len = dw2_size_of_call_site_table ();
3704 #endif
3706 /* A pc-relative 4-byte displacement to the @TType data. */
3707 if (have_tt_data)
3709 #ifdef HAVE_AS_LEB128
3710 char ttype_after_disp_label[32];
3711 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3712 current_function_funcdef_no);
3713 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3714 "@TType base offset");
3715 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3716 #else
3717 /* Ug. Alignment queers things. */
3718 unsigned int before_disp, after_disp, last_disp, disp;
3720 before_disp = 1 + 1;
3721 after_disp = (1 + size_of_uleb128 (call_site_len)
3722 + call_site_len
3723 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3724 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3725 * tt_format_size));
3727 disp = after_disp;
3730 unsigned int disp_size, pad;
3732 last_disp = disp;
3733 disp_size = size_of_uleb128 (disp);
3734 pad = before_disp + disp_size + after_disp;
3735 if (pad % tt_format_size)
3736 pad = tt_format_size - (pad % tt_format_size);
3737 else
3738 pad = 0;
3739 disp = after_disp + pad;
3741 while (disp != last_disp);
3743 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3744 #endif
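/* For illustration only: the fixed-point iteration above, as a standalone
   program with invented byte counts.  The uleb128 encoding of the
   displacement can itself change how much padding is needed to align the
   @TType data, so the value is recomputed until it stops moving.  */
#if 0
#include <stdio.h>

static unsigned int
sketch_size_of_uleb128 (unsigned int value)
{
  unsigned int size = 0;
  do { ++size; value >>= 7; } while (value);
  return size;
}

int
main (void)
{
  unsigned int before_disp = 1 + 1;     /* @LPStart + @TType format bytes */
  unsigned int after_disp = 130;        /* call-site, action, ttype bytes */
  unsigned int tt_format_size = 4;
  unsigned int disp = after_disp, last_disp, disp_size, pad;

  do
    {
      last_disp = disp;
      disp_size = sketch_size_of_uleb128 (disp);
      pad = before_disp + disp_size + after_disp;
      pad = (pad % tt_format_size) ? tt_format_size - pad % tt_format_size : 0;
      disp = after_disp + pad;
    }
  while (disp != last_disp);

  printf ("@TType base offset = %u\n", disp);   /* 132 for these counts */
  return 0;
}
#endif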
3747 /* Indicate the format of the call-site offsets. */
3748 #ifdef HAVE_AS_LEB128
3749 cs_format = DW_EH_PE_uleb128;
3750 #else
3751 cs_format = DW_EH_PE_udata4;
3752 #endif
3753 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3754 eh_data_format_name (cs_format));
3756 #ifdef HAVE_AS_LEB128
3757 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3758 current_function_funcdef_no);
3759 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3760 current_function_funcdef_no);
3761 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3762 "Call-site table length");
3763 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3764 if (USING_SJLJ_EXCEPTIONS)
3765 sjlj_output_call_site_table ();
3766 else
3767 dw2_output_call_site_table ();
3768 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3769 #else
3770 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3771 if (USING_SJLJ_EXCEPTIONS)
3772 sjlj_output_call_site_table ();
3773 else
3774 dw2_output_call_site_table ();
3775 #endif
3777 /* ??? Decode and interpret the data for flag_debug_asm. */
3778 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3779 for (i = 0; i < n; ++i)
3780 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3781 (i ? NULL : "Action record table"));
3783 if (have_tt_data)
3784 assemble_align (tt_format_size * BITS_PER_UNIT);
3786 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3787 while (i-- > 0)
3789 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3790 rtx value;
3792 if (type == NULL_TREE)
3793 type = integer_zero_node;
3794 else
3795 type = lookup_type_for_runtime (type);
3797 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3798 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3799 assemble_integer (value, tt_format_size,
3800 tt_format_size * BITS_PER_UNIT, 1);
3801 else
3802 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3805 #ifdef HAVE_AS_LEB128
3806 if (have_tt_data)
3807 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3808 #endif
3810 /* ??? Decode and interpret the data for flag_debug_asm. */
3811 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3812 for (i = 0; i < n; ++i)
3813 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3814 (i ? NULL : "Exception specification table"));
3816 function_section (current_function_decl);
3819 #include "gt-except.h"