/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
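/* Illustration only (not part of the original sources): a minimal C++
   sketch of the control transfer described above.  All function names
   here are hypothetical.

     void inner ()  { throw Error (); }
     void middle () { inner (); }            -- no handler; unwound
     void outer ()
     {
       try { middle (); }
       catch (const Error &e)                -- control lands here,
         { handle (e); }                     -- two frames up the stack
     }

   Throwing in inner transfers control past middle directly to the
   handler in outer; the region machinery below models exactly this
   kind of transfer at the RTL level.  */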
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
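/* For reference only: the fields laid out for sjlj_fc_type_node in
   init_eh below correspond roughly to the following sketch.  The
   authoritative definition lives in unwind-sjlj.c, and the jbuf size
   is target-dependent (see init_eh).

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;    -- __prev
       int call_site;                         -- __call_site
       word data[4];                          -- __data
       void *personality;                     -- __personality
       void *lsda;                            -- __lsda
       void *jbuf[N];                         -- __jbuf, N target-dependent
     };  */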
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
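/* Illustration (not from the original sources): the outer, inner and
   next_peer fields link regions into a tree.  For nested source regions

     R1 { R2 { } R3 { R4 { } } }

   the links look like

     R1.inner = R2      R2.next_peer = R3     R3.inner = R4
     R2.outer = R1      R3.outer = R1         R4.outer = R3

   i.e. `inner' points at the first child and `next_peer' chains the
   siblings, which is the shape collect_eh_region_array walks below.  */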
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};


static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static struct eh_region *expand_eh_region_end (void);

static rtx get_exception_filter (struct function *);

static void collect_eh_region_array (void);
static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
						struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled (tree, tree);
static void add_reachable_handler (struct reachable_info *,
				   struct eh_region *, struct eh_region *);
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
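/* Worked example (illustrative numbers only): a field whose
   DECL_FIELD_OFFSET is 8 bytes and whose DECL_FIELD_BIT_OFFSET is 32
   bits lives at byte offset 8 + 32 / BITS_PER_UNIT = 12 on a target
   with 8-bit units; that is the computation performed above for each
   cached sjlj_fc_*_ofs value.  */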
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start (void)
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block in
	 it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with several exception types,
   which is useful e.g. for Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Make sure we always end up with a type list, to normalize
	 further processing; then register each type against the
	 runtime types map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (void)
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Provide random access to the exception region tree.  It's just as
   simple to collect the regions this way as in expand_eh_region_start,
   but without having to realloc memory.  */

static void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_one_fixup_region (struct eh_region *fixup)
{
  struct eh_region *cleanup, *real;
  int j, n;

  n = cfun->eh->last_region_number;
  cleanup = 0;

  for (j = 1; j <= n; ++j)
    {
      cleanup = cfun->eh->region_array[j];
      if (cleanup && cleanup->type == ERT_CLEANUP
	  && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	break;
    }
  if (j > n)
    abort ();

  real = cleanup->outer;
  if (real && real->type == ERT_FIXUP)
    {
      if (!real->u.fixup.resolved)
	resolve_one_fixup_region (real);
      real = real->u.fixup.real_region;
    }

  fixup->u.fixup.real_region = real;
  fixup->u.fixup.resolved = true;
}

static void
resolve_fixup_regions (void)
{
  int i, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];

      if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
	continue;

      resolve_one_fixup_region (fixup);
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges (void)
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
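/* Illustration (not from the original sources): for an exception
   specification like the C++ "throw (A, B)", assuming A and B were
   assigned ttype filter values 1 and 2 by add_ttypes_entry, the bytes
   appended to ehspec_data above are

     0x01 0x02 0x00        -- uleb128(1), uleb128(2), terminator

   and the returned ehspec filter is minus the byte index at which the
   run starts, counting from -1 for index 0.  */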
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
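/* Illustration (not from the original sources): for C++ handlers
   "catch (A)" and "catch (B)" attached to the same try, the loop above
   gives each ERT_CATCH region a filter_list of INTEGER_CST nodes, e.g.
   (1) for A and (2) for B if add_ttypes_entry handed out 1 then 2; a
   catch-all gets the single filter assigned to the NULL type.  These
   values are what build_post_landing_pads below compares against
   cfun->eh->filter.  */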
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insn_before (seq, region->post_landing_pad);
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
2049 /* Construct the SjLj_Function_Context. */
2051 static void
2052 sjlj_emit_function_enter (rtx dispatch_label)
2054 rtx fn_begin, fc, mem, seq;
2056 fc = cfun->eh->sjlj_fc;
2058 start_sequence ();
2060 /* We're storing this libcall's address into memory instead of
2061 calling it directly. Thus, we must call assemble_external_libcall
2062 here, as we can not depend on emit_library_call to do it for us. */
2063 assemble_external_libcall (eh_personality_libfunc);
2064 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2065 emit_move_insn (mem, eh_personality_libfunc);
2067 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2068 if (cfun->uses_eh_lsda)
2070 char buf[20];
2071 rtx sym;
2073 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2074 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2075 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2076 emit_move_insn (mem, sym);
2078 else
2079 emit_move_insn (mem, const0_rtx);
2081 #ifdef DONT_USE_BUILTIN_SETJMP
2083 rtx x, note;
2084 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2085 TYPE_MODE (integer_type_node), 1,
2086 plus_constant (XEXP (fc, 0),
2087 sjlj_fc_jbuf_ofs), Pmode);
2089 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
2090 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2092 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2093 TYPE_MODE (integer_type_node), 0, dispatch_label);
2095 #else
2096 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2097 dispatch_label);
2098 #endif
2100 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2101 1, XEXP (fc, 0), Pmode);
2103 seq = get_insns ();
2104 end_sequence ();
2106 /* ??? Instead of doing this at the beginning of the function,
2107 do this in a block that is at loop level 0 and dominates all
2108 can_throw_internal instructions. */
2110 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2111 if (GET_CODE (fn_begin) == NOTE
2112 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2113 break;
2114 emit_insn_after (seq, fn_begin);
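/* Taken together, the sequence emitted above amounts to the sketch
   below; symbol names follow the libfuncs used, and the personality
   routine is language-dependent (e.g. __gxx_personality_sj0 for C++
   with sjlj exceptions):

	fc.personality = personality_routine;
	fc.lsda = &LLSDA_for_this_function;	(or 0 if unused)
	if (setjmp (fc.jbuf))			(builtin or libcall form)
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);

   The whole sequence is placed just after NOTE_INSN_FUNCTION_BEG.  */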
2117 /* Call back from expand_function_end to know where we should put
2118 the call to unwind_sjlj_unregister_libfunc if needed. */
2120 void
2121 sjlj_emit_function_exit_after (rtx after)
2123 cfun->eh->sjlj_exit_after = after;
2126 static void
2127 sjlj_emit_function_exit (void)
2129 rtx seq;
2131 start_sequence ();
2133 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2134 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2136 seq = get_insns ();
2137 end_sequence ();
2139 /* ??? Really this can be done in any block at loop level 0 that
2140 post-dominates all can_throw_internal instructions. This is
2141 the last possible moment. */
2143 emit_insn_after (seq, cfun->eh->sjlj_exit_after);
2146 static void
2147 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2149 int i, first_reachable;
2150 rtx mem, dispatch, seq, fc;
2152 fc = cfun->eh->sjlj_fc;
2154 start_sequence ();
2156 emit_label (dispatch_label);
2158 #ifndef DONT_USE_BUILTIN_SETJMP
2159 expand_builtin_setjmp_receiver (dispatch_label);
2160 #endif
2162 /* Load up dispatch index, exc_ptr and filter values from the
2163 function context. */
2164 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2165 sjlj_fc_call_site_ofs);
2166 dispatch = copy_to_reg (mem);
2168 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2169 if (word_mode != ptr_mode)
2171 #ifdef POINTERS_EXTEND_UNSIGNED
2172 mem = convert_memory_address (ptr_mode, mem);
2173 #else
2174 mem = convert_to_mode (ptr_mode, mem, 0);
2175 #endif
2177 emit_move_insn (cfun->eh->exc_ptr, mem);
2179 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2180 emit_move_insn (cfun->eh->filter, mem);
2182 /* Jump to one of the directly reachable regions. */
2183 /* ??? This really ought to be using a switch statement. */
2185 first_reachable = 0;
2186 for (i = cfun->eh->last_region_number; i > 0; --i)
2188 if (! lp_info[i].directly_reachable)
2189 continue;
2191 if (! first_reachable)
2193 first_reachable = i;
2194 continue;
2197 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2198 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2199 cfun->eh->region_array[i]->post_landing_pad);
2202 seq = get_insns ();
2203 end_sequence ();
2205 emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
2206 ->post_landing_pad));
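/* Schematically, the dispatch sequence assembled above is the
   compare-and-jump chain the ??? note laments (region names here
   are illustrative):

	dispatch_label:
	  dispatch = fc.call_site;
	  exc_ptr = fc.data[0];
	  filter = fc.data[1];
	  if (dispatch == 1) goto region_B_post_landing_pad;
	  if (dispatch == 2) goto region_C_post_landing_pad;
	  (fall through into the first reachable post-landing pad)
*/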
2209 static void
2210 sjlj_build_landing_pads (void)
2212 struct sjlj_lp_info *lp_info;
2214 lp_info = xcalloc (cfun->eh->last_region_number + 1,
2215 sizeof (struct sjlj_lp_info));
2217 if (sjlj_find_directly_reachable_regions (lp_info))
2219 rtx dispatch_label = gen_label_rtx ();
2221 cfun->eh->sjlj_fc
2222 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2223 int_size_in_bytes (sjlj_fc_type_node),
2224 TYPE_ALIGN (sjlj_fc_type_node));
2226 sjlj_assign_call_site_values (dispatch_label, lp_info);
2227 sjlj_mark_call_sites (lp_info);
2229 sjlj_emit_function_enter (dispatch_label);
2230 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2231 sjlj_emit_function_exit ();
2234 free (lp_info);
2237 void
2238 finish_eh_generation (void)
2240 /* Nothing to do if no regions created. */
2241 if (cfun->eh->region_tree == NULL)
2242 return;
2244 /* The object here is to provide find_basic_blocks with detailed
2245 information (via reachable_handlers) on how exception control
2246 flows within the function. In this first pass, we can include
2247 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2248 regions, and hope that it will be useful in deleting unreachable
2249 handlers. Subsequently, we will generate landing pads which will
2250 connect many of the handlers, and then type information will not
2251 be effective. Still, this is a win over previous implementations. */
2253 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2255 /* These registers are used by the landing pads. Make sure they
2256 have been generated. */
2257 get_exception_pointer (cfun);
2258 get_exception_filter (cfun);
2260 /* Construct the landing pads. */
2262 assign_filter_values ();
2263 build_post_landing_pads ();
2264 connect_post_landing_pads ();
2265 if (USING_SJLJ_EXCEPTIONS)
2266 sjlj_build_landing_pads ();
2267 else
2268 dw2_build_landing_pads ();
2270 cfun->eh->built_landing_pads = 1;
2272 /* We've totally changed the CFG. Start over. */
2273 find_exception_handler_labels ();
2274 rebuild_jump_labels (get_insns ());
2275 find_basic_blocks (get_insns (), max_reg_num (), 0);
2276 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2279 static hashval_t
2280 ehl_hash (const void *pentry)
2282 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2284 /* 2^32 * ((sqrt(5) - 1) / 2) */
2285 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2286 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
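/* The constant is floor (2^32 / phi), phi being the golden ratio;
   this is Knuth's multiplicative hashing, which scatters consecutive
   label numbers across the hash space.  For example, labels 1, 2
   and 3 hash to 0x9e3779b9, 0x3c6ef372 and 0xdaa66d2b modulo 2^32.  */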
2289 static int
2290 ehl_eq (const void *pentry, const void *pdata)
2292 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2293 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2295 return entry->label == data->label;
2298 /* This section handles removing dead code for flow. */
2300 /* Remove LABEL from exception_handler_label_map. */
2302 static void
2303 remove_exception_handler_label (rtx label)
2305 struct ehl_map_entry **slot, tmp;
2307 /* If exception_handler_label_map was not built yet,
2308 there is nothing to do. */
2309 if (cfun->eh->exception_handler_label_map == NULL)
2310 return;
2312 tmp.label = label;
2313 slot = (struct ehl_map_entry **)
2314 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2315 if (! slot)
2316 abort ();
2318 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2321 /* Splice REGION from the region tree etc. */
2323 static void
2324 remove_eh_handler (struct eh_region *region)
2326 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2327 rtx lab;
2329 /* For the benefit of efficiently handling REG_EH_REGION notes,
2330 replace this region in the region array with its containing
2331 region. Note that previous region deletions may result in
2332 multiple copies of this region in the array, so we have a
2333 list of alternate numbers by which we are known. */
2335 outer = region->outer;
2336 cfun->eh->region_array[region->region_number] = outer;
2337 if (region->aka)
2339 int i;
2340 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2341 { cfun->eh->region_array[i] = outer; });
2344 if (outer)
2346 if (!outer->aka)
2347 outer->aka = BITMAP_GGC_ALLOC ();
2348 if (region->aka)
2349 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2350 bitmap_set_bit (outer->aka, region->region_number);
2353 if (cfun->eh->built_landing_pads)
2354 lab = region->landing_pad;
2355 else
2356 lab = region->label;
2357 if (lab)
2358 remove_exception_handler_label (lab);
2360 if (outer)
2361 pp_start = &outer->inner;
2362 else
2363 pp_start = &cfun->eh->region_tree;
2364 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2365 continue;
2366 *pp = region->next_peer;
2368 inner = region->inner;
2369 if (inner)
2371 for (p = inner; p->next_peer ; p = p->next_peer)
2372 p->outer = outer;
2373 p->outer = outer;
2375 p->next_peer = *pp_start;
2376 *pp_start = inner;
2379 if (region->type == ERT_CATCH)
2381 struct eh_region *try, *next, *prev;
2383 for (try = region->next_peer;
2384 try->type == ERT_CATCH;
2385 try = try->next_peer)
2386 continue;
2387 if (try->type != ERT_TRY)
2388 abort ();
2390 next = region->u.catch.next_catch;
2391 prev = region->u.catch.prev_catch;
2393 if (next)
2394 next->u.catch.prev_catch = prev;
2395 else
2396 try->u.try.last_catch = prev;
2397 if (prev)
2398 prev->u.catch.next_catch = next;
2399 else
2401 try->u.try.catch = next;
2402 if (! next)
2403 remove_eh_handler (try);
2408 /* LABEL heads a basic block that is about to be deleted. If this
2409 label corresponds to an exception region, we may be able to
2410 delete the region. */
2412 void
2413 maybe_remove_eh_handler (rtx label)
2415 struct ehl_map_entry **slot, tmp;
2416 struct eh_region *region;
2418 /* ??? After generating landing pads, it's not so simple to determine
2419 if the region data is completely unused. One must examine the
2420 landing pad and the post landing pad, and whether an inner try block
2421 is referencing the catch handlers directly. */
2422 if (cfun->eh->built_landing_pads)
2423 return;
2425 tmp.label = label;
2426 slot = (struct ehl_map_entry **)
2427 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2428 if (! slot)
2429 return;
2430 region = (*slot)->region;
2431 if (! region)
2432 return;
2434 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2435 because there is no path to the fallback call to terminate.
2436 But the region continues to affect call-site data until there
2437 are no more contained calls, which we don't see here. */
2438 if (region->type == ERT_MUST_NOT_THROW)
2440 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2441 region->label = NULL_RTX;
2443 else
2444 remove_eh_handler (region);
2447 /* Invokes CALLBACK for every exception handler label. Only used by old
2448 loop hackery; should not be used by new code. */
2450 void
2451 for_each_eh_label (void (*callback) (rtx))
2453 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2454 (void *) &callback);
2457 static int
2458 for_each_eh_label_1 (void **pentry, void *data)
2460 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2461 void (*callback) (rtx) = *(void (**) (rtx)) data;
2463 (*callback) (entry->label);
2464 return 1;
2467 /* This section describes CFG exception edges for flow. */
2469 /* For communicating between calls to reachable_next_level. */
2470 struct reachable_info GTY(())
2472 tree types_caught;
2473 tree types_allowed;
2474 rtx handlers;
2477 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2478 base class of TYPE, is in HANDLED. */
2480 static int
2481 check_handled (tree handled, tree type)
2483 tree t;
2485 /* We can check for exact matches without front-end help. */
2486 if (! lang_eh_type_covers)
2488 for (t = handled; t ; t = TREE_CHAIN (t))
2489 if (TREE_VALUE (t) == type)
2490 return 1;
2492 else
2494 for (t = handled; t ; t = TREE_CHAIN (t))
2495 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2496 return 1;
2499 return 0;
2502 /* A subroutine of reachable_next_level. If we are collecting a list
2503 of handlers, add one. After landing pad generation, we reference
2504 the landing pad instead of the handlers themselves. Further, the handlers are
2505 all wired together, so by referencing one, we've got them all.
2506 Before landing pad generation we reference each handler individually.
2508 LP_REGION contains the landing pad; REGION is the handler. */
2510 static void
2511 add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region, struct eh_region *region)
2513 if (! info)
2514 return;
2516 if (cfun->eh->built_landing_pads)
2518 if (! info->handlers)
2519 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2521 else
2522 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2525 /* Process one level of exception regions for reachability.
2526 If TYPE_THROWN is non-null, then it is the *exact* type being
2527 propagated. If INFO is non-null, then collect handler labels
2528 and caught/allowed type information between invocations. */
2530 static enum reachable_code
2531 reachable_next_level (struct eh_region *region, tree type_thrown,
2532 struct reachable_info *info)
2534 switch (region->type)
2536 case ERT_CLEANUP:
2537 /* Before landing-pad generation, we model control flow
2538 directly to the individual handlers. In this way we can
2539 see that catch handler types may shadow one another. */
2540 add_reachable_handler (info, region, region);
2541 return RNL_MAYBE_CAUGHT;
2543 case ERT_TRY:
2545 struct eh_region *c;
2546 enum reachable_code ret = RNL_NOT_CAUGHT;
2548 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2550 /* A catch-all handler ends the search. */
2551 if (c->u.catch.type_list == NULL)
2553 add_reachable_handler (info, region, c);
2554 return RNL_CAUGHT;
2557 if (type_thrown)
2559 /* If we have at least one type match, end the search. */
2560 tree tp_node = c->u.catch.type_list;
2562 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2564 tree type = TREE_VALUE (tp_node);
2566 if (type == type_thrown
2567 || (lang_eh_type_covers
2568 && (*lang_eh_type_covers) (type, type_thrown)))
2570 add_reachable_handler (info, region, c);
2571 return RNL_CAUGHT;
2575 /* If we have definitive information of a match failure,
2576 the catch won't trigger. */
2577 if (lang_eh_type_covers)
2578 return RNL_NOT_CAUGHT;
2581 /* At this point, we either don't know what type is thrown or
2582 don't have front-end assistance to help deciding if it is
2583 covered by one of the types in the list for this region.
2585 We'd then like to add this region to the list of reachable
2586 handlers since it is indeed potentially reachable based on the
2587 information we have.
2589 Actually, this handler is for sure not reachable if all the
2590 types it matches have already been caught. That is, it is only
2591 potentially reachable if at least one of the types it catches
2592 has not been previously caught. */
2594 if (! info)
2595 ret = RNL_MAYBE_CAUGHT;
2596 else
2598 tree tp_node = c->u.catch.type_list;
2599 bool maybe_reachable = false;
2601 /* Compute the potential reachability of this handler and
2602 update the list of types caught at the same time. */
2603 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2605 tree type = TREE_VALUE (tp_node);
2607 if (! check_handled (info->types_caught, type))
2609 info->types_caught
2610 = tree_cons (NULL, type, info->types_caught);
2612 maybe_reachable = true;
2616 if (maybe_reachable)
2618 add_reachable_handler (info, region, c);
2620 /* ??? If the catch type is a base class of every allowed
2621 type, then we know we can stop the search. */
2622 ret = RNL_MAYBE_CAUGHT;
2627 return ret;
2630 case ERT_ALLOWED_EXCEPTIONS:
2631 /* An empty list of types definitely ends the search. */
2632 if (region->u.allowed.type_list == NULL_TREE)
2634 add_reachable_handler (info, region, region);
2635 return RNL_CAUGHT;
2638 /* Collect a list of lists of allowed types for use in detecting
2639 when a catch may be transformed into a catch-all. */
2640 if (info)
2641 info->types_allowed = tree_cons (NULL_TREE,
2642 region->u.allowed.type_list,
2643 info->types_allowed);
2645 /* If we have definitive information about the type hierarchy,
2646 then we can tell if the thrown type will pass through the
2647 filter. */
2648 if (type_thrown && lang_eh_type_covers)
2650 if (check_handled (region->u.allowed.type_list, type_thrown))
2651 return RNL_NOT_CAUGHT;
2652 else
2654 add_reachable_handler (info, region, region);
2655 return RNL_CAUGHT;
2659 add_reachable_handler (info, region, region);
2660 return RNL_MAYBE_CAUGHT;
2662 case ERT_CATCH:
2663 /* Catch regions are handled by their controlling try region. */
2664 return RNL_NOT_CAUGHT;
2666 case ERT_MUST_NOT_THROW:
2667 /* Here we end our search, since no exceptions may propagate.
2668 If we've touched down at some landing pad previously, then the
2669 explicit function call we generated may be used. Otherwise
2670 the call is made by the runtime. */
2671 if (info && info->handlers)
2673 add_reachable_handler (info, region, region);
2674 return RNL_CAUGHT;
2676 else
2677 return RNL_BLOCKED;
2679 case ERT_THROW:
2680 case ERT_FIXUP:
2681 case ERT_UNKNOWN:
2682 /* Shouldn't see these here. */
2683 break;
2686 abort ();
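/* A concrete illustration of the return codes: for C++ source like

	try { may_throw (); } catch (int) { ... }

   a call inside the try is tested against the ERT_TRY level first.
   With front-end help (lang_eh_type_covers) and a known thrown type
   of int, that level returns RNL_CAUGHT and the walk stops; an
   unrelated thrown type returns RNL_NOT_CAUGHT and the search
   continues outward.  */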
2689 /* Retrieve a list of labels of exception handlers which can be
2690 reached by a given insn. */
2692 rtx
2693 reachable_handlers (rtx insn)
2695 struct reachable_info info;
2696 struct eh_region *region;
2697 tree type_thrown;
2698 int region_number;
2700 if (GET_CODE (insn) == JUMP_INSN
2701 && GET_CODE (PATTERN (insn)) == RESX)
2702 region_number = XINT (PATTERN (insn), 0);
2703 else
2705 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2706 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2707 return NULL;
2708 region_number = INTVAL (XEXP (note, 0));
2711 memset (&info, 0, sizeof (info));
2713 region = cfun->eh->region_array[region_number];
2715 type_thrown = NULL_TREE;
2716 if (GET_CODE (insn) == JUMP_INSN
2717 && GET_CODE (PATTERN (insn)) == RESX)
2719 /* A RESX leaves a region instead of entering it. Thus the
2720 region itself may have been deleted out from under us. */
2721 if (region == NULL)
2722 return NULL;
2723 region = region->outer;
2725 else if (region->type == ERT_THROW)
2727 type_thrown = region->u.throw.type;
2728 region = region->outer;
2731 while (region)
2733 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2734 break;
2735 /* If we have processed one cleanup, there is no point in
2736 processing any more of them. Each cleanup will have an edge
2737 to the next outer cleanup region, so the flow graph will be
2738 accurate. */
2739 if (region->type == ERT_CLEANUP)
2740 region = region->u.cleanup.prev_try;
2741 else
2742 region = region->outer;
2745 return info.handlers;
2748 /* Determine if the given INSN can throw an exception that is caught
2749 within the function. */
2751 bool
2752 can_throw_internal (rtx insn)
2754 struct eh_region *region;
2755 tree type_thrown;
2756 rtx note;
2758 if (! INSN_P (insn))
2759 return false;
2761 if (GET_CODE (insn) == INSN
2762 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2763 insn = XVECEXP (PATTERN (insn), 0, 0);
2765 if (GET_CODE (insn) == CALL_INSN
2766 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2768 int i;
2769 for (i = 0; i < 3; ++i)
2771 rtx sub = XEXP (PATTERN (insn), i);
2772 for (; sub ; sub = NEXT_INSN (sub))
2773 if (can_throw_internal (sub))
2774 return true;
2776 return false;
2779 /* Every insn that might throw has an EH_REGION note. */
2780 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2781 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2782 return false;
2784 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2786 type_thrown = NULL_TREE;
2787 if (region->type == ERT_THROW)
2789 type_thrown = region->u.throw.type;
2790 region = region->outer;
2793 /* If this exception is ignored by each and every containing region,
2794 then control passes straight out. The runtime may handle some
2795 regions, which also do not require processing internally. */
2796 for (; region; region = region->outer)
2798 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2799 if (how == RNL_BLOCKED)
2800 return false;
2801 if (how != RNL_NOT_CAUGHT)
2802 return true;
2805 return false;
2808 /* Determine if the given INSN can throw an exception that is
2809 visible outside the function. */
2811 bool
2812 can_throw_external (rtx insn)
2814 struct eh_region *region;
2815 tree type_thrown;
2816 rtx note;
2818 if (! INSN_P (insn))
2819 return false;
2821 if (GET_CODE (insn) == INSN
2822 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2823 insn = XVECEXP (PATTERN (insn), 0, 0);
2825 if (GET_CODE (insn) == CALL_INSN
2826 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2828 int i;
2829 for (i = 0; i < 3; ++i)
2831 rtx sub = XEXP (PATTERN (insn), i);
2832 for (; sub ; sub = NEXT_INSN (sub))
2833 if (can_throw_external (sub))
2834 return true;
2836 return false;
2839 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2840 if (!note)
2842 /* Calls (and trapping insns) without notes are outside any
2843 exception handling region in this function. We have to
2844 assume they might throw. Given that the front end and middle
2845 ends mark known NOTHROW functions, this isn't so wildly
2846 inaccurate. */
2847 return (GET_CODE (insn) == CALL_INSN
2848 || (flag_non_call_exceptions
2849 && may_trap_p (PATTERN (insn))));
2851 if (INTVAL (XEXP (note, 0)) <= 0)
2852 return false;
2854 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2856 type_thrown = NULL_TREE;
2857 if (region->type == ERT_THROW)
2859 type_thrown = region->u.throw.type;
2860 region = region->outer;
2863 /* If the exception is caught or blocked by any containing region,
2864 then it is not seen by any calling function. */
2865 for (; region ; region = region->outer)
2866 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2867 return false;
2869 return true;
2872 /* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
2874 void
2875 set_nothrow_function_flags (void)
2877 rtx insn;
2879 current_function_nothrow = 1;
2881 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2882 something that can throw an exception. We specifically exempt
2883 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2884 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2885 is optimistic. */
2887 cfun->all_throwers_are_sibcalls = 1;
2889 if (! flag_exceptions)
2890 return;
2892 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2893 if (can_throw_external (insn))
2895 current_function_nothrow = 0;
2897 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2899 cfun->all_throwers_are_sibcalls = 0;
2900 return;
2904 for (insn = current_function_epilogue_delay_list; insn;
2905 insn = XEXP (insn, 1))
2906 if (can_throw_external (insn))
2908 current_function_nothrow = 0;
2910 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2912 cfun->all_throwers_are_sibcalls = 0;
2913 return;
2919 /* Various hooks for unwind library. */
2921 /* Do any necessary initialization to access arbitrary stack frames.
2922 On the SPARC, this means flushing the register windows. */
2924 void
2925 expand_builtin_unwind_init (void)
2927 /* Set this so all the registers get saved in our frame; we need to be
2928 able to copy the saved values for any registers from frames we unwind. */
2929 current_function_has_nonlocal_label = 1;
2931 #ifdef SETUP_FRAME_ADDRESSES
2932 SETUP_FRAME_ADDRESSES ();
2933 #endif
2936 rtx
2937 expand_builtin_eh_return_data_regno (tree arglist)
2939 tree which = TREE_VALUE (arglist);
2940 unsigned HOST_WIDE_INT iwhich;
2942 if (TREE_CODE (which) != INTEGER_CST)
2944 error ("argument of `__builtin_eh_return_regno' must be constant");
2945 return constm1_rtx;
2948 iwhich = tree_low_cst (which, 1);
2949 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2950 if (iwhich == INVALID_REGNUM)
2951 return constm1_rtx;
2953 #ifdef DWARF_FRAME_REGNUM
2954 iwhich = DWARF_FRAME_REGNUM (iwhich);
2955 #else
2956 iwhich = DBX_REGISTER_NUMBER (iwhich);
2957 #endif
2959 return GEN_INT (iwhich);
2962 /* Given a value extracted from the return address register or stack slot,
2963 return the actual address encoded in that value. */
2965 rtx
2966 expand_builtin_extract_return_addr (tree addr_tree)
2968 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2970 if (GET_MODE (addr) != Pmode
2971 && GET_MODE (addr) != VOIDmode)
2973 #ifdef POINTERS_EXTEND_UNSIGNED
2974 addr = convert_memory_address (Pmode, addr);
2975 #else
2976 addr = convert_to_mode (Pmode, addr, 0);
2977 #endif
2980 /* First mask out any unwanted bits. */
2981 #ifdef MASK_RETURN_ADDR
2982 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2983 #endif
2985 /* Then adjust to find the real return address. */
2986 #if defined (RETURN_ADDR_OFFSET)
2987 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2988 #endif
2990 return addr;
2993 /* Given an actual address in addr_tree, do any necessary encoding
2994 and return the value to be stored in the return address register or
2995 stack slot so the epilogue will return to that address. */
2997 rtx
2998 expand_builtin_frob_return_addr (tree addr_tree)
3000 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3002 addr = convert_memory_address (Pmode, addr);
3004 #ifdef RETURN_ADDR_OFFSET
3005 addr = force_reg (Pmode, addr);
3006 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3007 #endif
3009 return addr;
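/* This function and expand_builtin_extract_return_addr are intended
   to be inverses.  As a hypothetical instance, on a target defining
   RETURN_ADDR_OFFSET as 8 and no MASK_RETURN_ADDR, frobbing 0x1000
   stores 0xff8, and extracting 0xff8 yields 0x1000 again.  */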
3012 /* Set up the epilogue with the magic bits we'll need to return to the
3013 exception handler. */
3015 void
3016 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3017 tree handler_tree)
3019 rtx tmp;
3021 #ifdef EH_RETURN_STACKADJ_RTX
3022 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3023 tmp = convert_memory_address (Pmode, tmp);
3024 if (!cfun->eh->ehr_stackadj)
3025 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
3026 else if (tmp != cfun->eh->ehr_stackadj)
3027 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
3028 #endif
3030 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3031 tmp = convert_memory_address (Pmode, tmp);
3032 if (!cfun->eh->ehr_handler)
3033 cfun->eh->ehr_handler = copy_to_reg (tmp);
3034 else if (tmp != cfun->eh->ehr_handler)
3035 emit_move_insn (cfun->eh->ehr_handler, tmp);
3037 if (!cfun->eh->ehr_label)
3038 cfun->eh->ehr_label = gen_label_rtx ();
3039 emit_jump (cfun->eh->ehr_label);
3042 void
3043 expand_eh_return (void)
3045 rtx around_label;
3047 if (! cfun->eh->ehr_label)
3048 return;
3050 current_function_calls_eh_return = 1;
3052 #ifdef EH_RETURN_STACKADJ_RTX
3053 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3054 #endif
3056 around_label = gen_label_rtx ();
3057 emit_jump (around_label);
3059 emit_label (cfun->eh->ehr_label);
3060 clobber_return_register ();
3062 #ifdef EH_RETURN_STACKADJ_RTX
3063 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3064 #endif
3066 #ifdef HAVE_eh_return
3067 if (HAVE_eh_return)
3068 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3069 else
3070 #endif
3072 #ifdef EH_RETURN_HANDLER_RTX
3073 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3074 #else
3075 error ("__builtin_eh_return not supported on this target");
3076 #endif
3079 emit_label (around_label);
3082 /* In the following functions, we represent entries in the action table
3083 as 1-based indices. Special cases are:
3085 0: null action record, non-null landing pad; implies cleanups
3086 -1: null action record, null landing pad; implies no action
3087 -2: no call-site entry; implies must_not_throw
3088 -3: we have yet to process outer regions
3090 Further, no special cases apply to the "next" field of the record.
3091 For next, 0 means end of list. */
3093 struct action_record
3095 int offset;
3096 int filter;
3097 int next;
3100 static int
3101 action_record_eq (const void *pentry, const void *pdata)
3103 const struct action_record *entry = (const struct action_record *) pentry;
3104 const struct action_record *data = (const struct action_record *) pdata;
3105 return entry->filter == data->filter && entry->next == data->next;
3108 static hashval_t
3109 action_record_hash (const void *pentry)
3111 const struct action_record *entry = (const struct action_record *) pentry;
3112 return entry->next * 1009 + entry->filter;
3115 static int
3116 add_action_record (htab_t ar_hash, int filter, int next)
3118 struct action_record **slot, *new, tmp;
3120 tmp.filter = filter;
3121 tmp.next = next;
3122 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3124 if ((new = *slot) == NULL)
3126 new = xmalloc (sizeof (*new));
3127 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3128 new->filter = filter;
3129 new->next = next;
3130 *slot = new;
3132 /* The filter value goes in untouched. The link to the next
3133 record is a "self-relative" byte offset, or zero to indicate
3134 that there is no next record. So convert the absolute 1-based
3135 indices we've been carrying around into a displacement. */
3137 push_sleb128 (&cfun->eh->action_record_data, filter);
3138 if (next)
3139 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3140 push_sleb128 (&cfun->eh->action_record_data, next);
3143 return new->offset;
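/* A worked instance of the conversion above, starting from an empty
   varray: record A (filter 1, next 0) is assigned 1-based offset 1
   and encodes as the bytes 1, 0.  A second record B (filter 2,
   next 1) is assigned offset 3; after B's filter is pushed the
   active size is 3, so the link is stored as 1 - (3 + 1) = -3, a
   self-relative displacement leading back from B's next field to A.  */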
3146 static int
3147 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3149 struct eh_region *c;
3150 int next;
3152 /* If we've reached the top of the region chain, then we have
3153 no actions, and require no landing pad. */
3154 if (region == NULL)
3155 return -1;
3157 switch (region->type)
3159 case ERT_CLEANUP:
3160 /* A cleanup adds a zero filter to the beginning of the chain, but
3161 there are special cases to look out for. If there are *only*
3162 cleanups along a path, then it compresses to a zero action.
3163 Further, if there are multiple cleanups along a path, we only
3164 need to represent one of them, as that is enough to trigger
3165 entry to the landing pad at runtime. */
3166 next = collect_one_action_chain (ar_hash, region->outer);
3167 if (next <= 0)
3168 return 0;
3169 for (c = region->outer; c ; c = c->outer)
3170 if (c->type == ERT_CLEANUP)
3171 return next;
3172 return add_action_record (ar_hash, 0, next);
3174 case ERT_TRY:
3175 /* Process the associated catch regions in reverse order.
3176 If there's a catch-all handler, then we don't need to
3177 search outer regions. Use a magic -3 value to record
3178 that we haven't done the outer search. */
3179 next = -3;
3180 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3182 if (c->u.catch.type_list == NULL)
3184 /* Retrieve the filter from the head of the filter list
3185 where we have stored it (see assign_filter_values). */
3186 int filter
3187 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3189 next = add_action_record (ar_hash, filter, 0);
3191 else
3193 /* Once the outer search is done, trigger an action record for
3194 each filter we have. */
3195 tree flt_node;
3197 if (next == -3)
3199 next = collect_one_action_chain (ar_hash, region->outer);
3201 /* If there is no next action, terminate the chain. */
3202 if (next == -1)
3203 next = 0;
3204 /* If all outer actions are cleanups or must_not_throw,
3205 we'll have no action record for them, since those states
3206 are encoded in the call-site record directly.
3207 Add a cleanup action to the chain to catch these. */
3208 else if (next <= 0)
3209 next = add_action_record (ar_hash, 0, 0);
3212 flt_node = c->u.catch.filter_list;
3213 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3215 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3216 next = add_action_record (ar_hash, filter, next);
3220 return next;
3222 case ERT_ALLOWED_EXCEPTIONS:
3223 /* An exception specification adds its filter to the
3224 beginning of the chain. */
3225 next = collect_one_action_chain (ar_hash, region->outer);
3226 return add_action_record (ar_hash, region->u.allowed.filter,
3227 next < 0 ? 0 : next);
3229 case ERT_MUST_NOT_THROW:
3230 /* A must-not-throw region with no inner handlers or cleanups
3231 requires no call-site entry. Note that this differs from
3232 the no handler or cleanup case in that we do require an lsda
3233 to be generated. Return a magic -2 value to record this. */
3234 return -2;
3236 case ERT_CATCH:
3237 case ERT_THROW:
3238 /* CATCH regions are handled in TRY above. THROW regions are
3239 for optimization information only and produce no output. */
3240 return collect_one_action_chain (ar_hash, region->outer);
3242 default:
3243 abort ();
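/* An example of the chains built here: for

	try { ... } catch (A) { ... } catch (B) { ... }

   nested inside a cleanup, the outer search yields a cleanup record
   (filter 0); the catch loop then links a record for B's filter to
   it, and a record for A's filter to B's, returning A's offset.
   The runtime thus tests A before B, matching source order.  */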
3247 static int
3248 add_call_site (rtx landing_pad, int action)
3250 struct call_site_record *data = cfun->eh->call_site_data;
3251 int used = cfun->eh->call_site_data_used;
3252 int size = cfun->eh->call_site_data_size;
3254 if (used >= size)
3256 size = (size ? size * 2 : 64);
3257 data = ggc_realloc (data, sizeof (*data) * size);
3258 cfun->eh->call_site_data = data;
3259 cfun->eh->call_site_data_size = size;
3262 data[used].landing_pad = landing_pad;
3263 data[used].action = action;
3265 cfun->eh->call_site_data_used = used + 1;
3267 return used + call_site_base;
3270 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3271 The new note numbers will not refer to region numbers, but
3272 instead to call site entries. */
3274 void
3275 convert_to_eh_region_ranges (void)
3277 rtx insn, iter, note;
3278 htab_t ar_hash;
3279 int last_action = -3;
3280 rtx last_action_insn = NULL_RTX;
3281 rtx last_landing_pad = NULL_RTX;
3282 rtx first_no_action_insn = NULL_RTX;
3283 int call_site = 0;
3285 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3286 return;
3288 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3290 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3292 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3293 if (INSN_P (iter))
3295 struct eh_region *region;
3296 int this_action;
3297 rtx this_landing_pad;
3299 insn = iter;
3300 if (GET_CODE (insn) == INSN
3301 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3302 insn = XVECEXP (PATTERN (insn), 0, 0);
3304 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3305 if (!note)
3307 if (! (GET_CODE (insn) == CALL_INSN
3308 || (flag_non_call_exceptions
3309 && may_trap_p (PATTERN (insn)))))
3310 continue;
3311 this_action = -1;
3312 region = NULL;
3314 else
3316 if (INTVAL (XEXP (note, 0)) <= 0)
3317 continue;
3318 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3319 this_action = collect_one_action_chain (ar_hash, region);
3322 /* Existence of catch handlers, or must-not-throw regions
3323 implies that an lsda is needed (even if empty). */
3324 if (this_action != -1)
3325 cfun->uses_eh_lsda = 1;
3327 /* Delay creation of region notes for no-action regions
3328 until we're sure that an lsda will be required. */
3329 else if (last_action == -3)
3331 first_no_action_insn = iter;
3332 last_action = -1;
3335 /* Cleanups and handlers may share action chains but not
3336 landing pads. Collect the landing pad for this region. */
3337 if (this_action >= 0)
3339 struct eh_region *o;
3340 for (o = region; ! o->landing_pad ; o = o->outer)
3341 continue;
3342 this_landing_pad = o->landing_pad;
3344 else
3345 this_landing_pad = NULL_RTX;
3347 /* Differing actions or landing pads implies a change in call-site
3348 info, which implies some EH_REGION note should be emitted. */
3349 if (last_action != this_action
3350 || last_landing_pad != this_landing_pad)
3352 /* If we'd not seen a previous action (-3) or the previous
3353 action was must-not-throw (-2), then we do not need an
3354 end note. */
3355 if (last_action >= -1)
3357 /* If we delayed the creation of the begin, do it now. */
3358 if (first_no_action_insn)
3360 call_site = add_call_site (NULL_RTX, 0);
3361 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3362 first_no_action_insn);
3363 NOTE_EH_HANDLER (note) = call_site;
3364 first_no_action_insn = NULL_RTX;
3367 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3368 last_action_insn);
3369 NOTE_EH_HANDLER (note) = call_site;
3372 /* If the new action is must-not-throw, then no region notes
3373 are created. */
3374 if (this_action >= -1)
3376 call_site = add_call_site (this_landing_pad,
3377 this_action < 0 ? 0 : this_action);
3378 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3379 NOTE_EH_HANDLER (note) = call_site;
3382 last_action = this_action;
3383 last_landing_pad = this_landing_pad;
3385 last_action_insn = iter;
3388 if (last_action >= -1 && ! first_no_action_insn)
3390 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3391 NOTE_EH_HANDLER (note) = call_site;
3394 htab_delete (ar_hash);
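/* Schematically, this pass turns

	insn_1	(REG_EH_REGION 5)
	insn_2	(REG_EH_REGION 5)
	insn_3	(REG_EH_REGION 8)

   into

	NOTE_INSN_EH_REGION_BEG	 (call site i)
	insn_1
	insn_2
	NOTE_INSN_EH_REGION_END	 (call site i)
	NOTE_INSN_EH_REGION_BEG	 (call site j)
	insn_3
	NOTE_INSN_EH_REGION_END	 (call site j)

   provided regions 5 and 8 resolve to distinct action chains or
   landing pads; runs with identical call-site data share one pair
   of notes.  */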
3398 static void
3399 push_uleb128 (varray_type *data_area, unsigned int value)
3403 unsigned char byte = value & 0x7f;
3404 value >>= 7;
3405 if (value)
3406 byte |= 0x80;
3407 VARRAY_PUSH_UCHAR (*data_area, byte);
3409 while (value);
3412 static void
3413 push_sleb128 (varray_type *data_area, int value)
3415 unsigned char byte;
3416 int more;
3420 byte = value & 0x7f;
3421 value >>= 7;
3422 more = ! ((value == 0 && (byte & 0x40) == 0)
3423 || (value == -1 && (byte & 0x40) != 0));
3424 if (more)
3425 byte |= 0x80;
3426 VARRAY_PUSH_UCHAR (*data_area, byte);
3428 while (more);
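/* Worked encodings for the two routines above: push_uleb128 (624485)
   emits 0xe5 0x8e 0x26, low-order 7-bit groups first with the high
   bit of each byte flagging a continuation; push_sleb128 (-2) emits
   the single byte 0x7e, since after one group the remaining value
   is -1 and sign bit 0x40 is already set.  */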
3432 #ifndef HAVE_AS_LEB128
3433 static int
3434 dw2_size_of_call_site_table (void)
3436 int n = cfun->eh->call_site_data_used;
3437 int size = n * (4 + 4 + 4);
3438 int i;
3440 for (i = 0; i < n; ++i)
3442 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3443 size += size_of_uleb128 (cs->action);
3446 return size;
3449 static int
3450 sjlj_size_of_call_site_table (void)
3452 int n = cfun->eh->call_site_data_used;
3453 int size = 0;
3454 int i;
3456 for (i = 0; i < n; ++i)
3458 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3459 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3460 size += size_of_uleb128 (cs->action);
3463 return size;
3465 #endif
3467 static void
3468 dw2_output_call_site_table (void)
3470 const char *const function_start_lab
3471 = IDENTIFIER_POINTER (current_function_func_begin_label);
3472 int n = cfun->eh->call_site_data_used;
3473 int i;
3475 for (i = 0; i < n; ++i)
3477 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3478 char reg_start_lab[32];
3479 char reg_end_lab[32];
3480 char landing_pad_lab[32];
3482 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3483 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3485 if (cs->landing_pad)
3486 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3487 CODE_LABEL_NUMBER (cs->landing_pad));
3489 /* ??? Perhaps use insn length scaling if the assembler supports
3490 generic arithmetic. */
3491 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3492 data4 if the function is small enough. */
3493 #ifdef HAVE_AS_LEB128
3494 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3495 "region %d start", i);
3496 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3497 "length");
3498 if (cs->landing_pad)
3499 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3500 "landing pad");
3501 else
3502 dw2_asm_output_data_uleb128 (0, "landing pad");
3503 #else
3504 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3505 "region %d start", i);
3506 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3507 if (cs->landing_pad)
3508 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3509 "landing pad");
3510 else
3511 dw2_asm_output_data (4, 0, "landing pad");
3512 #endif
3513 dw2_asm_output_data_uleb128 (cs->action, "action");
3516 call_site_base += n;
3519 static void
3520 sjlj_output_call_site_table (void)
3522 int n = cfun->eh->call_site_data_used;
3523 int i;
3525 for (i = 0; i < n; ++i)
3527 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3529 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3530 "region %d landing pad", i);
3531 dw2_asm_output_data_uleb128 (cs->action, "action");
3534 call_site_base += n;
3537 /* Tell assembler to switch to the section for the exception handling
3538 table. */
3540 void
3541 default_exception_section (void)
3543 if (targetm.have_named_sections)
3545 int flags;
3546 #ifdef HAVE_LD_RO_RW_SECTION_MIXING
3547 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3549 flags = (! flag_pic
3550 || ((tt_format & 0x70) != DW_EH_PE_absptr
3551 && (tt_format & 0x70) != DW_EH_PE_aligned))
3552 ? 0 : SECTION_WRITE;
3553 #else
3554 flags = SECTION_WRITE;
3555 #endif
3556 named_section_flags (".gcc_except_table", flags);
3558 else if (flag_pic)
3559 data_section ();
3560 else
3561 readonly_data_section ();
3564 void
3565 output_function_exception_table (void)
3567 int tt_format, cs_format, lp_format, i, n;
3568 #ifdef HAVE_AS_LEB128
3569 char ttype_label[32];
3570 char cs_after_size_label[32];
3571 char cs_end_label[32];
3572 #else
3573 int call_site_len;
3574 #endif
3575 int have_tt_data;
3576 int tt_format_size = 0;
3578 /* Not all functions need anything. */
3579 if (! cfun->uses_eh_lsda)
3580 return;
3582 #ifdef IA64_UNWIND_INFO
3583 fputs ("\t.personality\t", asm_out_file);
3584 output_addr_const (asm_out_file, eh_personality_libfunc);
3585 fputs ("\n\t.handlerdata\n", asm_out_file);
3586 /* Note that varasm still thinks we're in the function's code section.
3587 The ".endp" directive that will immediately follow will take us back. */
3588 #else
3589 (*targetm.asm_out.exception_section) ();
3590 #endif
3592 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3593 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3595 /* Indicate the format of the @TType entries. */
3596 if (! have_tt_data)
3597 tt_format = DW_EH_PE_omit;
3598 else
3600 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3601 #ifdef HAVE_AS_LEB128
3602 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3603 current_function_funcdef_no);
3604 #endif
3605 tt_format_size = size_of_encoded_value (tt_format);
3607 assemble_align (tt_format_size * BITS_PER_UNIT);
3610 (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
3611 current_function_funcdef_no);
3613 /* The LSDA header. */
3615 /* Indicate the format of the landing pad start pointer. An omitted
3616 field implies @LPStart == @Start. */
3617 /* Currently we always put @LPStart == @Start. This field would
3618 be most useful in moving the landing pads completely out of
3619 line to another section, but it could also be used to minimize
3620 the size of uleb128 landing pad offsets. */
3621 lp_format = DW_EH_PE_omit;
3622 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3623 eh_data_format_name (lp_format));
3625 /* @LPStart pointer would go here. */
3627 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3628 eh_data_format_name (tt_format));
3630 #ifndef HAVE_AS_LEB128
3631 if (USING_SJLJ_EXCEPTIONS)
3632 call_site_len = sjlj_size_of_call_site_table ();
3633 else
3634 call_site_len = dw2_size_of_call_site_table ();
3635 #endif
3637 /* A pc-relative 4-byte displacement to the @TType data. */
3638 if (have_tt_data)
3640 #ifdef HAVE_AS_LEB128
3641 char ttype_after_disp_label[32];
3642 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3643 current_function_funcdef_no);
3644 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3645 "@TType base offset");
3646 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3647 #else
3648 /* Ugh. Alignment complicates things. */
3649 unsigned int before_disp, after_disp, last_disp, disp;
3651 before_disp = 1 + 1;
3652 after_disp = (1 + size_of_uleb128 (call_site_len)
3653 + call_site_len
3654 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3655 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3656 * tt_format_size));
3658 disp = after_disp;
3661 unsigned int disp_size, pad;
3663 last_disp = disp;
3664 disp_size = size_of_uleb128 (disp);
3665 pad = before_disp + disp_size + after_disp;
3666 if (pad % tt_format_size)
3667 pad = tt_format_size - (pad % tt_format_size);
3668 else
3669 pad = 0;
3670 disp = after_disp + pad;
3672 while (disp != last_disp);
3674 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3675 #endif
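/* A worked pass through the loop above, assuming before_disp == 2,
   after_disp == 130 and tt_format_size == 4: initially disp == 130,
   whose uleb128 size is 2; 2 + 2 + 130 == 134 leaves remainder 2
   modulo 4, so pad == 2 and disp becomes 132.  On the next
   iteration the uleb128 size is unchanged and the same pad recurs,
   so disp == last_disp and the loop settles on 132.  */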
3678 /* Indicate the format of the call-site offsets. */
3679 #ifdef HAVE_AS_LEB128
3680 cs_format = DW_EH_PE_uleb128;
3681 #else
3682 cs_format = DW_EH_PE_udata4;
3683 #endif
3684 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3685 eh_data_format_name (cs_format));
3687 #ifdef HAVE_AS_LEB128
3688 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3689 current_function_funcdef_no);
3690 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3691 current_function_funcdef_no);
3692 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3693 "Call-site table length");
3694 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3695 if (USING_SJLJ_EXCEPTIONS)
3696 sjlj_output_call_site_table ();
3697 else
3698 dw2_output_call_site_table ();
3699 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3700 #else
3701 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3702 if (USING_SJLJ_EXCEPTIONS)
3703 sjlj_output_call_site_table ();
3704 else
3705 dw2_output_call_site_table ();
3706 #endif
3708 /* ??? Decode and interpret the data for flag_debug_asm. */
3709 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3710 for (i = 0; i < n; ++i)
3711 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3712 (i ? NULL : "Action record table"));
3714 if (have_tt_data)
3715 assemble_align (tt_format_size * BITS_PER_UNIT);
3717 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3718 while (i-- > 0)
3720 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3721 rtx value;
3723 if (type == NULL_TREE)
3724 value = const0_rtx;
3725 else
3727 struct cgraph_varpool_node *node;
3729 type = lookup_type_for_runtime (type);
3730 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3732 /* Let cgraph know that the rtti decl is used. Not all of the
3733 paths below go through assemble_integer, which would take
3734 care of this for us. */
3735 if (TREE_CODE (type) == ADDR_EXPR)
3737 type = TREE_OPERAND (type, 0);
3738 node = cgraph_varpool_node (type);
3739 if (node)
3740 cgraph_varpool_mark_needed_node (node);
3742 else if (TREE_CODE (type) != INTEGER_CST)
3743 abort ();
3746 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3747 assemble_integer (value, tt_format_size,
3748 tt_format_size * BITS_PER_UNIT, 1);
3749 else
3750 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3753 #ifdef HAVE_AS_LEB128
3754 if (have_tt_data)
3755 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3756 #endif
3758 /* ??? Decode and interpret the data for flag_debug_asm. */
3759 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3760 for (i = 0; i < n; ++i)
3761 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3762 (i ? NULL : "Exception specification table"));
3764 function_section (current_function_decl);
3767 #include "gt-except.h"