/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
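/* As a concrete illustration (added; not part of the original
   comment): in the C++ statement

       Foo *p = new Foo;     // Foo::Foo () may throw

   the constructor has no return value in which to report failure, so
   throwing is the only way out; the region data built below is what
   lets control leave Foo::Foo () and reach a handler several frames
   up the stack.  */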
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
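/* For reference (an added sketch, not from the original sources): the
   offsets above describe a structure that should match the definition
   in unwind-sjlj.c, roughly

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;    -- __prev
         int call_site;                         -- __call_site
         unsigned long data[4];                 -- __data
         void *personality;                     -- __personality
         void *lsda;                            -- __lsda
         void *jbuf[];                          -- __jbuf, sized in init_eh
       };

   where the field types are approximations; see init_eh below for the
   exact layout being constructed.  */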
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
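/* An informal summary (added commentary) of how the region types map
   back to source constructs: ERT_TRY and ERT_CATCH come from
   try/catch statements; ERT_ALLOWED_EXCEPTIONS from "throw (T1, T2)"
   exception specifications; ERT_MUST_NOT_THROW from contexts that
   must call terminate rather than propagate; ERT_CLEANUP from
   destructors and similar cleanups; ERT_THROW marks the throw site
   itself; and ERT_FIXUP patches up goto-past-cleanup situations (see
   expand_eh_region_end_fixup below).  */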
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static struct eh_region *expand_eh_region_end (void);

static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
                                                struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);
/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *);
static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  if (! doing_eh (0))
    abort ();
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}
struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
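/* A hypothetical usage sketch (not code from any actual front end):
   lowering "try { body } catch (T) { handler }" with the routines
   above would look roughly like

       struct eh_region *t = gen_eh_region_try (cur);
       ... expand BODY within T ...
       struct eh_region *c = gen_eh_region_catch (t, T_type);
       ... expand HANDLER within C ...

   with gen_eh_region_catch linking C onto T's catch list and placing
   it as a peer of T under the same outer region.  */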
int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start (void)
{
  struct eh_region *new;
  rtx note;

  if (! doing_eh (0))
    return;

  new = gen_eh_region (ERT_UNKNOWN, cfun->eh->cur_region);
  cfun->eh->cur_region = new;

  /* Create a note marking the start of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new->region_number;
}
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* Expand HANDLER, which is the operand 1 of a TRY_CATCH_EXPR.  Catch
   blocks and C++ exception-specifications are handled specially.  */

void
expand_eh_handler (tree handler)
{
  tree inner = expr_first (handler);

  switch (TREE_CODE (inner))
    {
    case CATCH_EXPR:
      expand_start_all_catch ();
      expand_expr (handler, const0_rtx, VOIDmode, 0);
      expand_end_all_catch ();
      break;

    case EH_FILTER_EXPR:
      if (EH_FILTER_MUST_NOT_THROW (handler))
        expand_eh_region_end_must_not_throw (EH_FILTER_FAILURE (handler));
      else
        expand_eh_region_end_allowed (EH_FILTER_TYPES (handler),
                                      EH_FILTER_FAILURE (handler));
      break;

    default:
      expand_eh_region_end_cleanup (handler);
      break;
    }
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
         exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
        = (lang_protect_cleanup_actions
           ? (*lang_protect_cleanup_actions) ()
           : NULL_TREE);

      if (protect_cleanup_actions)
        expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block
         in it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
        expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
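/* The insn stream emitted above has roughly this shape (an added
   sketch, not from the original comments):

       jump AROUND_LABEL
     REGION->LABEL:           entered only via the landing pad
       ... HANDLER, with exc_ptr/filter saved around it ...
       RESX                   placeholder, replaced by a jump or a
       barrier                call to _Unwind_Resume later
     AROUND_LABEL:            the normal fall-through path  */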
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = cfun->eh->region_array[region_nr];

  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such
   types, (in the case of Java) an ADDR_EXPR which points to the
   runtime type to match, or null if this is a catch-all
   clause.  Providing a type list makes it possible to associate the
   catch region with several exception types, which is useful e.g. for
   Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *c;
  rtx note;

  if (! doing_eh (0))
    return;

  c = gen_eh_region_catch (cfun->eh->try_region, type_or_list);
  cfun->eh->cur_region = c;

  c->label = gen_label_rtx ();
  emit_label (c->label);

  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = c->region_number;
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  emit_jump (cfun->eh->try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Provide random access to the exception region tree.  It's just as
   simple to collect the regions this way as in expand_eh_region_start,
   but without having to realloc memory.  */

void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
                             * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
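/* For example (an added illustration): given a region tree where
   regions 2 and 4 are peers under region 1, and region 3 is inner to
   region 2, the walk above visits 1, then its inner region 2, then
   2's inner region 3; finding neither inner nor peer at 3 it steps
   back out to 2, moves to 2's peer 4, and finally returns when it
   steps out past the root.  */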
static void
resolve_one_fixup_region (struct eh_region *fixup)
{
  struct eh_region *cleanup, *real;
  int j, n;

  n = cfun->eh->last_region_number;
  cleanup = 0;

  for (j = 1; j <= n; ++j)
    {
      cleanup = cfun->eh->region_array[j];
      if (cleanup && cleanup->type == ERT_CLEANUP
          && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
        break;
    }
  if (j > n)
    abort ();

  real = cleanup->outer;
  if (real && real->type == ERT_FIXUP)
    {
      if (!real->u.fixup.resolved)
        resolve_one_fixup_region (real);

      real = real->u.fixup.real_region;
    }

  fixup->u.fixup.real_region = real;
  fixup->u.fixup.resolved = true;
}
static void
resolve_fixup_regions (void)
{
  int i, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];

      if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
        continue;

      resolve_one_fixup_region (fixup);
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (note = find_reg_note (insn, REG_EH_REGION, NULL))
        && INTVAL (XEXP (note, 0)) > 0
        && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
        && fixup->type == ERT_FIXUP)
      {
        if (fixup->u.fixup.real_region)
          XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
        else
          remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
        continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
        fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
        continue;

      if (fixup->inner)
        {
          struct eh_region *parent, *p, **pp;

          parent = fixup->u.fixup.real_region;

          /* Fix up the children's parent pointers; find the end of
             the list.  */
          for (p = fixup->inner; ; p = p->next_peer)
            {
              p->outer = parent;
              if (! p->next_peer)
                break;
            }

          /* In the tree of cleanups, only outer-inner ordering matters.
             So link the children back in anywhere at the correct level.  */
          if (parent)
            pp = &parent->inner;
          else
            pp = &cfun->eh->region_tree;
          p->next_peer = *pp;
          *pp = fixup->inner;
          fixup->inner = NULL;
        }

      remove_eh_handler (fixup);
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
        continue;

      if (r->resume)
        {
          if (uid_region_num[INSN_UID (r->resume)])
            abort ();
          uid_region_num[INSN_UID (r->resume)] = i;
        }
      if (r->label)
        {
          if (uid_region_num[INSN_UID (r->label)])
            abort ();
          uid_region_num[INSN_UID (r->label)] = i;
        }
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      reachable[uid_region_num[INSN_UID (insn)]] = true;

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        for (i = 0; i < 3; i++)
          {
            rtx sub = XEXP (PATTERN (insn), i);
            for (; sub ; sub = NEXT_INSN (sub))
              reachable[uid_region_num[INSN_UID (sub)]] = true;
          }
    }

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
        {
          bool kill_it = true;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && reachable[r->outer->region_number])
                kill_it = false;
              break;

            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but their existence continues to affect calls
                 within that region.  Never delete them here.  */
              kill_it = false;
              break;

            case ERT_TRY:
              {
                /* TRY regions are reachable if any of their CATCH regions
                   are reachable.  */
                struct eh_region *c;
                for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
                  if (reachable[c->region_number])
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            remove_eh_handler (r);
        }
    }

  free (reachable);
  free (uid_region_num);
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        {
          int kind = NOTE_LINE_NUMBER (insn);
          if (kind == NOTE_INSN_EH_REGION_BEG
              || kind == NOTE_INSN_EH_REGION_END)
            {
              if (kind == NOTE_INSN_EH_REGION_BEG)
                {
                  struct eh_region *r;

                  *sp++ = cur;
                  cur = NOTE_EH_HANDLER (insn);

                  r = cfun->eh->region_array[cur];
                  if (r->type == ERT_FIXUP)
                    {
                      r = r->u.fixup.real_region;
                      cur = r ? r->region_number : 0;
                    }
                  else if (r->type == ERT_CATCH)
                    {
                      r = r->outer;
                      cur = r ? r->region_number : 0;
                    }
                }
              else
                cur = *--sp;

              /* Removing the first insn of a CALL_PLACEHOLDER sequence
                 requires extra care to adjust sequence start.  */
              if (insn == *pinsns)
                *pinsns = next;
              remove_insn (insn);
              continue;
            }
        }
      else if (INSN_P (insn))
        {
          if (cur > 0
              && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* Calls can always potentially throw exceptions, unless
                 they have a REG_EH_REGION note with a value of 0 or less.
                 Which should be the only possible kind so far.  */
              && (GET_CODE (insn) == CALL_INSN
                  /* If we wanted exceptions for non-call insns, then
                     any may_trap_p instruction could throw.  */
                  || (flag_non_call_exceptions
                      && GET_CODE (PATTERN (insn)) != CLOBBER
                      && GET_CODE (PATTERN (insn)) != USE
                      && may_trap_p (PATTERN (insn)))))
            {
              REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
                                                  REG_NOTES (insn));
            }

          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
                                               sp, cur);
              convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
                                               sp, cur);
              convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
                                               sp, cur);
            }
        }
    }

  if (sp != orig_sp)
    abort ();
}
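/* Because NOTE_INSN_EH_REGION_BEG/END notes nest properly, a plain
   stack suffices above.  For instance (an added illustration), given

       BEG 1 ... BEG 2 ... insn A ... END 2 ... insn B ... END 1

   insn A receives a REG_EH_REGION note of 2 and insn B one of 1,
   with CATCH and FIXUP begin-notes redirected to the outer or real
   region as the code above arranges.  */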
static void
collect_rtl_labels_from_trees (void)
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *reg = cfun->eh->region_array[i];
      if (reg && reg->tree_label)
        reg->label = DECL_RTL_IF_SET (reg->tree_label);
    }
}
void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();

  if (cfun->eh->region_array)
    {
      /* If the region array already exists, assume we're coming from
         optimize_function_tree.  In this case all we need to do is
         collect the rtl labels that correspond to the tree labels
         that we allocated earlier.  */
      collect_rtl_labels_from_trees ();
    }
  else
    {
      int *stack;

      collect_eh_region_array ();
      resolve_fixup_regions ();

      stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
      convert_from_eh_region_ranges_1 (&insns, stack, 0);
      free (stack);

      remove_fixup_regions ();
    }

  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
         occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
                           ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
        continue;
      if (cfun->eh->built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;

      if (lab)
        add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
        continue;
      if (region->type != ERT_THROW)
        return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
        n->u.try.continue_label
          = get_label_from_map (map,
                                CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
        abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
        n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
        n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
        continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
        continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
        if (n_array[i] && n_array[i]->outer == NULL)
          n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
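/* For example (added commentary): if the current function already has
   5 regions and IFUN has 3, the duplicates are numbered 6 through 8,
   the copied tree is spliced in under cur_region (or at the top
   level), and the return value 5 is the offset the caller must add to
   region numbers copied over from IFUN's insns.  */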
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};
/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}
/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
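/* A note on the combining step above (added commentary): when
   hashval_t is 32 bits wide, (h << 5) + (h >> 27) is essentially h
   rotated left by 5 bits, so successive elements land in different
   bit positions before being added in.  The result depends on list
   order, which is why the ??? comment above asks for a canonical
   ordering.  */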
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
         value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
        push_uleb128 (&cfun->eh->ehspec_data,
                      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
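/* A worked example of the encoding above (added illustration): a
   specification whose two types received ttypes filter values 2 and
   130 appends the bytes

       0x02  0x82 0x01  0x00

   to ehspec_data, since uleb128 emits 7 bits per byte, low-order
   first, setting the high bit on every byte but the last (so 130
   becomes 0x82 0x01), and the list is 0-terminated.  */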
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.catch.filter_list = NULL_TREE;

          if (r->u.catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_2 (flt, 0);

                  r->u.catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_2 (flt, 0);

              r->u.catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
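/* To illustrate the sharing mentioned above (added commentary): if
   two distinct catch handlers catch the same type T, add_ttypes_entry
   returns the same 1-based filter value for both, so T occupies one
   @TTypes slot and both regions' filter lists refer to that single
   value.  */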
/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;

  /* If there happens to be a fallthru edge (possibly created by the
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (e = BLOCK_FOR_INSN (insn)->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
  last = emit_insn_before (seq, insn);
  if (GET_CODE (last) == BARRIER)
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* ??? Collect the set of all non-overlapping catch handlers
             all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region *c;
            for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
              {
                if (c->u.catch.type_list == NULL)
                  emit_jump (c->label);
                else
                  {
                    /* We need one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.catch.type_list;
                    tree flt_node = c->u.catch.filter_list;

                    for (; tp_node; )
                      {
                        emit_cmp_and_jump_insns
                          (cfun->eh->filter,
                           GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
                           EQ, NULL_RTX, word_mode, 0, c->label);

                        tp_node = TREE_CHAIN (tp_node);
                        flt_node = TREE_CHAIN (flt_node);
                      }
                  }
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->u.try.catch->label);

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (cfun->eh->filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX, word_mode, 0, region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          abort ();
        }
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        {
          edge e;
          basic_block src, dest;

          emit_jump (outer->post_landing_pad);
          src = BLOCK_FOR_INSN (region->resume);
          dest = BLOCK_FOR_INSN (outer->post_landing_pad);
          while (src->succ)
            remove_edge (src->succ);
          e = make_edge (src, dest, 0);
          e->probability = REG_BR_PROB_BASE;
          e->count = src->count;
        }
      else
        {
          emit_library_call (unwind_resume_libfunc, LCT_THROW,
                             VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
        }

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      if (GET_CODE (barrier) != BARRIER)
        abort ();
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);
    }
}
2029 static void
2030 dw2_build_landing_pads (void)
2032 int i;
2033 unsigned int j;
2035 for (i = cfun->eh->last_region_number; i > 0; --i)
2037 struct eh_region *region = cfun->eh->region_array[i];
2038 rtx seq;
2039 basic_block bb;
2040 bool clobbers_hard_regs = false;
2041 edge e;
2043 /* Mind we don't process a region more than once. */
2044 if (!region || region->region_number != i)
2045 continue;
2047 if (region->type != ERT_CLEANUP
2048 && region->type != ERT_TRY
2049 && region->type != ERT_ALLOWED_EXCEPTIONS)
2050 continue;
2052 start_sequence ();
2054 region->landing_pad = gen_label_rtx ();
2055 emit_label (region->landing_pad);
2057 #ifdef HAVE_exception_receiver
2058 if (HAVE_exception_receiver)
2059 emit_insn (gen_exception_receiver ());
2060 else
2061 #endif
2062 #ifdef HAVE_nonlocal_goto_receiver
2063 if (HAVE_nonlocal_goto_receiver)
2064 emit_insn (gen_nonlocal_goto_receiver ());
2065 else
2066 #endif
2067 { /* Nothing */ }
2069 /* If the eh_return data registers are call-saved, then we
2070 won't have considered them clobbered from the call that
2071 threw. Kill them now. */
2072 for (j = 0; ; ++j)
2074 unsigned r = EH_RETURN_DATA_REGNO (j);
2075 if (r == INVALID_REGNUM)
2076 break;
2077 if (! call_used_regs[r])
2079 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
2080 clobbers_hard_regs = true;
2084 if (clobbers_hard_regs)
2086 /* @@@ This is a kludge. Not all machine descriptions define a
2087 blockage insn, but we must not allow the code we just generated
2088 to be reordered by scheduling. So emit an ASM_INPUT to act as
2089 blockage insn. */
2090 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2093 emit_move_insn (cfun->eh->exc_ptr,
2094 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
2095 emit_move_insn (cfun->eh->filter,
2096 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
2098 seq = get_insns ();
2099 end_sequence ();
2101 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
2102 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2103 e->count = bb->count;
2104 e->probability = REG_BR_PROB_BASE;
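/* Per-region bookkeeping used while building the sjlj dispatch code
   (a descriptive summary of how the fields below are used by the
   following functions).  DIRECTLY_REACHABLE is set when some insn may
   transfer control to the region's landing pad.  ACTION_INDEX is the
   action chain computed by collect_one_action_chain.  DISPATCH_INDEX
   is the value tested at the common dispatch label, and
   CALL_SITE_INDEX is the value stored into the function context
   before each potentially-throwing insn.  */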
2109 struct sjlj_lp_info
2111 int directly_reachable;
2112 int action_index;
2113 int dispatch_index;
2114 int call_site_index;
2117 static bool
2118 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
2120 rtx insn;
2121 bool found_one = false;
2123 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2125 struct eh_region *region;
2126 enum reachable_code rc;
2127 tree type_thrown;
2128 rtx note;
2130 if (! INSN_P (insn))
2131 continue;
2133 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2134 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2135 continue;
2137 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2139 type_thrown = NULL_TREE;
2140 if (region->type == ERT_THROW)
2142 type_thrown = region->u.throw.type;
2143 region = region->outer;
2146 /* Find the first containing region that might handle the exception.
2147 That's the landing pad to which we will transfer control. */
2148 rc = RNL_NOT_CAUGHT;
2149 for (; region; region = region->outer)
2151 rc = reachable_next_level (region, type_thrown, NULL);
2152 if (rc != RNL_NOT_CAUGHT)
2153 break;
2155 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2157 lp_info[region->region_number].directly_reachable = 1;
2158 found_one = true;
2162 return found_one;
2165 static void
2166 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2168 htab_t ar_hash;
2169 int i, index;
2171 /* First task: build the action table. */
2173 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2174 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2176 for (i = cfun->eh->last_region_number; i > 0; --i)
2177 if (lp_info[i].directly_reachable)
2179 struct eh_region *r = cfun->eh->region_array[i];
2180 r->landing_pad = dispatch_label;
2181 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2182 if (lp_info[i].action_index != -1)
2183 cfun->uses_eh_lsda = 1;
2186 htab_delete (ar_hash);
2188 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2189 landing pad label for the region. For sjlj though, there is one
2190 common landing pad from which we dispatch to the post-landing pads.
2192 A region receives a dispatch index if it is directly reachable
2193 and requires in-function processing. Regions that share post-landing
2194 pads may share dispatch indices. */
2195 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2196 (see build_post_landing_pads) so we don't bother checking for it. */
2198 index = 0;
2199 for (i = cfun->eh->last_region_number; i > 0; --i)
2200 if (lp_info[i].directly_reachable)
2201 lp_info[i].dispatch_index = index++;
2203 /* Finally: assign call-site values. In dwarf2 terms, this would be
2204 the region number assigned by convert_to_eh_region_ranges, but
2205 handles no-action and must-not-throw differently. */
2207 call_site_base = 1;
2208 for (i = cfun->eh->last_region_number; i > 0; --i)
2209 if (lp_info[i].directly_reachable)
2211 int action = lp_info[i].action_index;
2213 /* Map must-not-throw to otherwise unused call-site index 0. */
2214 if (action == -2)
2215 index = 0;
2216 /* Map no-action to otherwise unused call-site index -1. */
2217 else if (action == -1)
2218 index = -1;
2219 /* Otherwise, look it up in the table. */
2220 else
2221 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2223 lp_info[i].call_site_index = index;
2227 static void
2228 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2230 int last_call_site = -2;
2231 rtx insn, mem;
2233 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2235 struct eh_region *region;
2236 int this_call_site;
2237 rtx note, before, p;
2239 /* Reset value tracking at extended basic block boundaries. */
2240 if (GET_CODE (insn) == CODE_LABEL)
2241 last_call_site = -2;
2243 if (! INSN_P (insn))
2244 continue;
2246 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2247 if (!note)
2249 /* Calls (and trapping insns) without notes are outside any
2250 exception handling region in this function. Mark them as
2251 no action. */
2252 if (GET_CODE (insn) == CALL_INSN
2253 || (flag_non_call_exceptions
2254 && may_trap_p (PATTERN (insn))))
2255 this_call_site = -1;
2256 else
2257 continue;
2259 else
2261 /* Calls that are known to not throw need not be marked. */
2262 if (INTVAL (XEXP (note, 0)) <= 0)
2263 continue;
2265 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2266 this_call_site = lp_info[region->region_number].call_site_index;
2269 if (this_call_site == last_call_site)
2270 continue;
2272 /* Don't separate a call from its argument loads. */
2273 before = insn;
2274 if (GET_CODE (insn) == CALL_INSN)
2275 before = find_first_parameter_load (insn, NULL_RTX);
2277 start_sequence ();
2278 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2279 sjlj_fc_call_site_ofs);
2280 emit_move_insn (mem, GEN_INT (this_call_site));
2281 p = get_insns ();
2282 end_sequence ();
2284 emit_insn_before (p, before);
2285 last_call_site = this_call_site;
2289 /* Construct the SjLj_Function_Context. */
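/* For reference, the runtime's view of the context filled in below is
   roughly the following (see unwind-sjlj.c in libgcc; the exact jbuf
   member depends on DONT_USE_BUILTIN_SETJMP):

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;     -- registration chain
       int call_site;                          -- sjlj_fc_call_site_ofs
       _Unwind_Word data[4];                   -- sjlj_fc_data_ofs
       _Unwind_Personality_Fn personality;     -- sjlj_fc_personality_ofs
       void *lsda;                             -- sjlj_fc_lsda_ofs
       void *jbuf[];                           -- sjlj_fc_jbuf_ofs
     };  */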
2291 static void
2292 sjlj_emit_function_enter (rtx dispatch_label)
2294 rtx fn_begin, fc, mem, seq;
2296 fc = cfun->eh->sjlj_fc;
2298 start_sequence ();
2300 /* We're storing this libcall's address into memory instead of
2301 calling it directly. Thus, we must call assemble_external_libcall
2302 here, as we cannot depend on emit_library_call to do it for us. */
2303 assemble_external_libcall (eh_personality_libfunc);
2304 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2305 emit_move_insn (mem, eh_personality_libfunc);
2307 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2308 if (cfun->uses_eh_lsda)
2310 char buf[20];
2311 rtx sym;
2313 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2314 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2315 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2316 emit_move_insn (mem, sym);
2318 else
2319 emit_move_insn (mem, const0_rtx);
2321 #ifdef DONT_USE_BUILTIN_SETJMP
2323 rtx x, note;
2324 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2325 TYPE_MODE (integer_type_node), 1,
2326 plus_constant (XEXP (fc, 0),
2327 sjlj_fc_jbuf_ofs), Pmode);
2329 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
2330 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2332 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2333 TYPE_MODE (integer_type_node), 0, dispatch_label);
2335 #else
2336 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2337 dispatch_label);
2338 #endif
2340 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2341 1, XEXP (fc, 0), Pmode);
2343 seq = get_insns ();
2344 end_sequence ();
2346 /* ??? Instead of doing this at the beginning of the function,
2347 do this in a block that is at loop level 0 and dominates all
2348 can_throw_internal instructions. */
2350 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2351 if (GET_CODE (fn_begin) == NOTE
2352 && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
2353 || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
2354 break;
2355 if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2356 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
2357 else
2359 rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
2360 for (; ; fn_begin = NEXT_INSN (fn_begin))
2361 if ((GET_CODE (fn_begin) == NOTE
2362 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2363 || fn_begin == last)
2364 break;
2365 emit_insn_after (seq, fn_begin);
2369 /* Call back from expand_function_end to know where we should put
2370 the call to unwind_sjlj_unregister_libfunc if needed. */
2372 void
2373 sjlj_emit_function_exit_after (rtx after)
2375 cfun->eh->sjlj_exit_after = after;
2378 static void
2379 sjlj_emit_function_exit (void)
2381 rtx seq;
2382 edge e;
2384 start_sequence ();
2386 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2387 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2389 seq = get_insns ();
2390 end_sequence ();
2392 /* ??? Really this can be done in any block at loop level 0 that
2393 post-dominates all can_throw_internal instructions. This is
2394 the last possible moment. */
2396 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
2397 if (e->flags & EDGE_FALLTHRU)
2398 break;
2399 if (e)
2401 rtx insn;
2403 /* Figure out whether the place where we are supposed to insert the
2404 libcall is inside the last basic block or after it. In the latter
2405 case we need to emit the sequence on the edge. */
2406 if (e->src->next_bb != EXIT_BLOCK_PTR)
2407 abort ();
2408 for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
2409 if (insn == cfun->eh->sjlj_exit_after)
2410 break;
2411 if (insn)
2412 insert_insn_on_edge (seq, e);
2413 else
2415 insn = cfun->eh->sjlj_exit_after;
2416 if (GET_CODE (insn) == CODE_LABEL)
2417 insn = NEXT_INSN (insn);
2418 emit_insn_after (seq, insn);
2423 static void
2424 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2426 int i, first_reachable;
2427 rtx mem, dispatch, seq, fc;
2428 rtx before;
2429 basic_block bb;
2430 edge e;
2432 fc = cfun->eh->sjlj_fc;
2434 start_sequence ();
2436 emit_label (dispatch_label);
2438 #ifndef DONT_USE_BUILTIN_SETJMP
2439 expand_builtin_setjmp_receiver (dispatch_label);
2440 #endif
2442 /* Load up dispatch index, exc_ptr and filter values from the
2443 function context. */
2444 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2445 sjlj_fc_call_site_ofs);
2446 dispatch = copy_to_reg (mem);
2448 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2449 if (word_mode != ptr_mode)
2451 #ifdef POINTERS_EXTEND_UNSIGNED
2452 mem = convert_memory_address (ptr_mode, mem);
2453 #else
2454 mem = convert_to_mode (ptr_mode, mem, 0);
2455 #endif
2457 emit_move_insn (cfun->eh->exc_ptr, mem);
2459 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2460 emit_move_insn (cfun->eh->filter, mem);
2462 /* Jump to one of the directly reachable regions. */
2463 /* ??? This really ought to be using a switch statement. */
2465 first_reachable = 0;
2466 for (i = cfun->eh->last_region_number; i > 0; --i)
2468 if (! lp_info[i].directly_reachable)
2469 continue;
2471 if (! first_reachable)
2473 first_reachable = i;
2474 continue;
2477 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2478 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2479 cfun->eh->region_array[i]->post_landing_pad);
2482 seq = get_insns ();
2483 end_sequence ();
2485 before = cfun->eh->region_array[first_reachable]->post_landing_pad;
2487 bb = emit_to_new_bb_before (seq, before);
2488 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2489 e->count = bb->count;
2490 e->probability = REG_BR_PROB_BASE;
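/* In C terms, the dispatch code built above behaves roughly like the
   following sketch (illustrative names, not what is emitted literally):

     dispatch:
       n = fc.call_site;
       exc_ptr = fc.data[0];
       filter = fc.data[1];
       if (n == dispatch_index_of_region_A) goto post_landing_pad_A;
       if (n == dispatch_index_of_region_B) goto post_landing_pad_B;
       ...
       -- fall through to the first reachable region's post landing pad
*/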
2493 static void
2494 sjlj_build_landing_pads (void)
2496 struct sjlj_lp_info *lp_info;
2498 lp_info = xcalloc (cfun->eh->last_region_number + 1,
2499 sizeof (struct sjlj_lp_info));
2501 if (sjlj_find_directly_reachable_regions (lp_info))
2503 rtx dispatch_label = gen_label_rtx ();
2505 cfun->eh->sjlj_fc
2506 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2507 int_size_in_bytes (sjlj_fc_type_node),
2508 TYPE_ALIGN (sjlj_fc_type_node));
2510 sjlj_assign_call_site_values (dispatch_label, lp_info);
2511 sjlj_mark_call_sites (lp_info);
2513 sjlj_emit_function_enter (dispatch_label);
2514 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2515 sjlj_emit_function_exit ();
2518 free (lp_info);
2521 void
2522 finish_eh_generation (void)
2524 basic_block bb;
2526 /* Nothing to do if no regions created. */
2527 if (cfun->eh->region_tree == NULL)
2528 return;
2530 /* The object here is to provide find_basic_blocks with detailed
2531 information (via reachable_handlers) on how exception control
2532 flows within the function. In this first pass, we can include
2533 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2534 regions, and hope that it will be useful in deleting unreachable
2535 handlers. Subsequently, we will generate landing pads which will
2536 connect many of the handlers, and then type information will not
2537 be effective. Still, this is a win over previous implementations. */
2539 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2541 /* These registers are used by the landing pads. Make sure they
2542 have been generated. */
2543 get_exception_pointer (cfun);
2544 get_exception_filter (cfun);
2546 /* Construct the landing pads. */
2548 assign_filter_values ();
2549 build_post_landing_pads ();
2550 connect_post_landing_pads ();
2551 if (USING_SJLJ_EXCEPTIONS)
2552 sjlj_build_landing_pads ();
2553 else
2554 dw2_build_landing_pads ();
2556 cfun->eh->built_landing_pads = 1;
2558 /* We've totally changed the CFG. Start over. */
2559 find_exception_handler_labels ();
2560 break_superblocks ();
2561 if (USING_SJLJ_EXCEPTIONS)
2562 commit_edge_insertions ();
2563 FOR_EACH_BB (bb)
2565 edge e, next;
2566 bool eh = false;
2567 for (e = bb->succ; e; e = next)
2569 next = e->succ_next;
2570 if (e->flags & EDGE_EH)
2572 remove_edge (e);
2573 eh = true;
2576 if (eh)
2577 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2579 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2582 static hashval_t
2583 ehl_hash (const void *pentry)
2585 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2587 /* 2^32 * ((sqrt(5) - 1) / 2) */
2588 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2589 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
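/* Multiplying by 2^32/phi spreads the small, mostly consecutive
   CODE_LABEL_NUMBER values across the full hash range (Knuth's
   multiplicative hashing).  For instance, label numbers 1, 2 and 3
   hash to 0x9e3779b9, 0x3c6ef372 and 0xdaa66d2b respectively, so
   neighboring labels land in distant buckets.  */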
2592 static int
2593 ehl_eq (const void *pentry, const void *pdata)
2595 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2596 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2598 return entry->label == data->label;
2601 /* This section handles removing dead code for flow. */
2603 /* Remove LABEL from exception_handler_label_map. */
2605 static void
2606 remove_exception_handler_label (rtx label)
2608 struct ehl_map_entry **slot, tmp;
2610 /* If exception_handler_label_map was not built yet,
2611 there is nothing to do. */
2612 if (cfun->eh->exception_handler_label_map == NULL)
2613 return;
2615 tmp.label = label;
2616 slot = (struct ehl_map_entry **)
2617 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2618 if (! slot)
2619 abort ();
2621 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2624 /* Splice REGION from the region tree etc. */
2626 static void
2627 remove_eh_handler (struct eh_region *region)
2629 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2630 rtx lab;
2632 /* For the benefit of efficiently handling REG_EH_REGION notes,
2633 replace this region in the region array with its containing
2634 region. Note that previous region deletions may result in
2635 multiple copies of this region in the array, so we have a
2636 list of alternate numbers by which we are known. */
2638 outer = region->outer;
2639 cfun->eh->region_array[region->region_number] = outer;
2640 if (region->aka)
2642 int i;
2643 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2644 { cfun->eh->region_array[i] = outer; });
2647 if (outer)
2649 if (!outer->aka)
2650 outer->aka = BITMAP_GGC_ALLOC ();
2651 if (region->aka)
2652 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2653 bitmap_set_bit (outer->aka, region->region_number);
2656 if (cfun->eh->built_landing_pads)
2657 lab = region->landing_pad;
2658 else
2659 lab = region->label;
2660 if (lab)
2661 remove_exception_handler_label (lab);
2663 if (outer)
2664 pp_start = &outer->inner;
2665 else
2666 pp_start = &cfun->eh->region_tree;
2667 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2668 continue;
2669 *pp = region->next_peer;
2671 inner = region->inner;
2672 if (inner)
2674 for (p = inner; p->next_peer ; p = p->next_peer)
2675 p->outer = outer;
2676 p->outer = outer;
2678 p->next_peer = *pp_start;
2679 *pp_start = inner;
2682 if (region->type == ERT_CATCH)
2684 struct eh_region *try, *next, *prev;
2686 for (try = region->next_peer;
2687 try->type == ERT_CATCH;
2688 try = try->next_peer)
2689 continue;
2690 if (try->type != ERT_TRY)
2691 abort ();
2693 next = region->u.catch.next_catch;
2694 prev = region->u.catch.prev_catch;
2696 if (next)
2697 next->u.catch.prev_catch = prev;
2698 else
2699 try->u.try.last_catch = prev;
2700 if (prev)
2701 prev->u.catch.next_catch = next;
2702 else
2704 try->u.try.catch = next;
2705 if (! next)
2706 remove_eh_handler (try);
2711 /* LABEL heads a basic block that is about to be deleted. If this
2712 label corresponds to an exception region, we may be able to
2713 delete the region. */
2715 void
2716 maybe_remove_eh_handler (rtx label)
2718 struct ehl_map_entry **slot, tmp;
2719 struct eh_region *region;
2721 /* ??? After generating landing pads, it's not so simple to determine
2722 if the region data is completely unused. One must examine the
2723 landing pad and the post landing pad, and whether an inner try block
2724 is referencing the catch handlers directly. */
2725 if (cfun->eh->built_landing_pads)
2726 return;
2728 tmp.label = label;
2729 slot = (struct ehl_map_entry **)
2730 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2731 if (! slot)
2732 return;
2733 region = (*slot)->region;
2734 if (! region)
2735 return;
2737 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2738 because there is no path to the fallback call to terminate.
2739 But the region continues to affect call-site data until there
2740 are no more contained calls, which we don't see here. */
2741 if (region->type == ERT_MUST_NOT_THROW)
2743 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2744 region->label = NULL_RTX;
2746 else
2747 remove_eh_handler (region);
2750 /* Invokes CALLBACK for every exception handler label. Only used by old
2751 loop hackery; should not be used by new code. */
2753 void
2754 for_each_eh_label (void (*callback) (rtx))
2756 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2757 (void *) &callback);
2760 static int
2761 for_each_eh_label_1 (void **pentry, void *data)
2763 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2764 void (*callback) (rtx) = *(void (**) (rtx)) data;
2766 (*callback) (entry->label);
2767 return 1;
2770 /* This section describes CFG exception edges for flow. */
2772 /* For communicating between calls to reachable_next_level. */
2773 struct reachable_info
2775 tree types_caught;
2776 tree types_allowed;
2777 void (*callback) (struct eh_region *, void *);
2778 void *callback_data;
2779 bool saw_any_handlers;
2782 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2783 base class of TYPE, is in HANDLED. */
2785 static int
2786 check_handled (tree handled, tree type)
2788 tree t;
2790 /* We can check for exact matches without front-end help. */
2791 if (! lang_eh_type_covers)
2793 for (t = handled; t ; t = TREE_CHAIN (t))
2794 if (TREE_VALUE (t) == type)
2795 return 1;
2797 else
2799 for (t = handled; t ; t = TREE_CHAIN (t))
2800 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2801 return 1;
2804 return 0;
2807 /* A subroutine of reachable_next_level. If we are collecting a list
2808 of handlers, add one. After landing pad generation, reference
2809 it instead of the handlers themselves. Further, the handlers are
2810 all wired together, so by referencing one, we've got them all.
2811 Before landing pad generation we reference each handler individually.
2813 LP_REGION contains the landing pad; REGION is the handler. */
2815 static void
2816 add_reachable_handler (struct reachable_info *info,
2817 struct eh_region *lp_region, struct eh_region *region)
2819 if (! info)
2820 return;
2822 info->saw_any_handlers = true;
2824 if (cfun->eh->built_landing_pads)
2825 info->callback (lp_region, info->callback_data);
2826 else
2827 info->callback (region, info->callback_data);
2830 /* Process one level of exception regions for reachability.
2831 If TYPE_THROWN is non-null, then it is the *exact* type being
2832 propagated. If INFO is non-null, then collect handler labels
2833 and caught/allowed type information between invocations. */
2835 static enum reachable_code
2836 reachable_next_level (struct eh_region *region, tree type_thrown,
2837 struct reachable_info *info)
2839 switch (region->type)
2841 case ERT_CLEANUP:
2842 /* Before landing-pad generation, we model control flow
2843 directly to the individual handlers. In this way we can
2844 see that catch handler types may shadow one another. */
2845 add_reachable_handler (info, region, region);
2846 return RNL_MAYBE_CAUGHT;
2848 case ERT_TRY:
2850 struct eh_region *c;
2851 enum reachable_code ret = RNL_NOT_CAUGHT;
2853 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2855 /* A catch-all handler ends the search. */
2856 if (c->u.catch.type_list == NULL)
2858 add_reachable_handler (info, region, c);
2859 return RNL_CAUGHT;
2862 if (type_thrown)
2864 /* If we have at least one type match, end the search. */
2865 tree tp_node = c->u.catch.type_list;
2867 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2869 tree type = TREE_VALUE (tp_node);
2871 if (type == type_thrown
2872 || (lang_eh_type_covers
2873 && (*lang_eh_type_covers) (type, type_thrown)))
2875 add_reachable_handler (info, region, c);
2876 return RNL_CAUGHT;
2880 /* If we have definitive information of a match failure,
2881 the catch won't trigger. */
2882 if (lang_eh_type_covers)
2883 return RNL_NOT_CAUGHT;
2886 /* At this point, we either don't know what type is thrown or
2887 don't have front-end assistance to help deciding if it is
2888 covered by one of the types in the list for this region.
2890 We'd then like to add this region to the list of reachable
2891 handlers since it is indeed potentially reachable based on the
2892 information we have.
2894 Actually, this handler is for sure not reachable if all the
2895 types it matches have already been caught. That is, it is only
2896 potentially reachable if at least one of the types it catches
2897 has not been previously caught. */
2899 if (! info)
2900 ret = RNL_MAYBE_CAUGHT;
2901 else
2903 tree tp_node = c->u.catch.type_list;
2904 bool maybe_reachable = false;
2906 /* Compute the potential reachability of this handler and
2907 update the list of types caught at the same time. */
2908 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2910 tree type = TREE_VALUE (tp_node);
2912 if (! check_handled (info->types_caught, type))
2914 info->types_caught
2915 = tree_cons (NULL, type, info->types_caught);
2917 maybe_reachable = true;
2921 if (maybe_reachable)
2923 add_reachable_handler (info, region, c);
2925 /* ??? If the catch type is a base class of every allowed
2926 type, then we know we can stop the search. */
2927 ret = RNL_MAYBE_CAUGHT;
2932 return ret;
2935 case ERT_ALLOWED_EXCEPTIONS:
2936 /* An empty list of types definitely ends the search. */
2937 if (region->u.allowed.type_list == NULL_TREE)
2939 add_reachable_handler (info, region, region);
2940 return RNL_CAUGHT;
2943 /* Collect a list of lists of allowed types for use in detecting
2944 when a catch may be transformed into a catch-all. */
2945 if (info)
2946 info->types_allowed = tree_cons (NULL_TREE,
2947 region->u.allowed.type_list,
2948 info->types_allowed);
2950 /* If we have definitive information about the type hierarchy,
2951 then we can tell if the thrown type will pass through the
2952 filter. */
2953 if (type_thrown && lang_eh_type_covers)
2955 if (check_handled (region->u.allowed.type_list, type_thrown))
2956 return RNL_NOT_CAUGHT;
2957 else
2959 add_reachable_handler (info, region, region);
2960 return RNL_CAUGHT;
2964 add_reachable_handler (info, region, region);
2965 return RNL_MAYBE_CAUGHT;
2967 case ERT_CATCH:
2968 /* Catch regions are handled by their controlling try region. */
2969 return RNL_NOT_CAUGHT;
2971 case ERT_MUST_NOT_THROW:
2972 /* Here we end our search, since no exceptions may propagate.
2973 If we've touched down at some landing pad previously, then the
2974 explicit function call we generated may be used. Otherwise
2975 the call is made by the runtime. */
2976 if (info && info->saw_any_handlers)
2978 add_reachable_handler (info, region, region);
2979 return RNL_CAUGHT;
2981 else
2982 return RNL_BLOCKED;
2984 case ERT_THROW:
2985 case ERT_FIXUP:
2986 case ERT_UNKNOWN:
2987 /* Shouldn't see these here. */
2988 break;
2991 abort ();
2994 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2996 void
2997 foreach_reachable_handler (int region_number, bool is_resx,
2998 void (*callback) (struct eh_region *, void *),
2999 void *callback_data)
3001 struct reachable_info info;
3002 struct eh_region *region;
3003 tree type_thrown;
3005 memset (&info, 0, sizeof (info));
3006 info.callback = callback;
3007 info.callback_data = callback_data;
3009 region = cfun->eh->region_array[region_number];
3011 type_thrown = NULL_TREE;
3012 if (is_resx)
3014 /* A RESX leaves a region instead of entering it. Thus the
3015 region itself may have been deleted out from under us. */
3016 if (region == NULL)
3017 return;
3018 region = region->outer;
3020 else if (region->type == ERT_THROW)
3022 type_thrown = region->u.throw.type;
3023 region = region->outer;
3026 while (region)
3028 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
3029 break;
3030 /* If we have processed one cleanup, there is no point in
3031 processing any more of them. Each cleanup will have an edge
3032 to the next outer cleanup region, so the flow graph will be
3033 accurate. */
3034 if (region->type == ERT_CLEANUP)
3035 region = region->u.cleanup.prev_try;
3036 else
3037 region = region->outer;
3041 /* Retrieve a list of labels of exception handlers which can be
3042 reached by a given insn. */
3044 static void
3045 arh_to_landing_pad (struct eh_region *region, void *data)
3047 rtx *p_handlers = data;
3048 if (! *p_handlers)
3049 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3052 static void
3053 arh_to_label (struct eh_region *region, void *data)
3055 rtx *p_handlers = data;
3056 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3059 rtx
3060 reachable_handlers (rtx insn)
3062 bool is_resx = false;
3063 rtx handlers = NULL;
3064 int region_number;
3066 if (GET_CODE (insn) == JUMP_INSN
3067 && GET_CODE (PATTERN (insn)) == RESX)
3069 region_number = XINT (PATTERN (insn), 0);
3070 is_resx = true;
3072 else
3074 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3075 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3076 return NULL;
3077 region_number = INTVAL (XEXP (note, 0));
3080 foreach_reachable_handler (region_number, is_resx,
3081 (cfun->eh->built_landing_pads
3082 ? arh_to_landing_pad
3083 : arh_to_label),
3084 &handlers);
3086 return handlers;
3089 /* Determine if the given INSN can throw an exception that is caught
3090 within the function. */
3092 bool
3093 can_throw_internal_1 (int region_number)
3095 struct eh_region *region;
3096 tree type_thrown;
3098 region = cfun->eh->region_array[region_number];
3100 type_thrown = NULL_TREE;
3101 if (region->type == ERT_THROW)
3103 type_thrown = region->u.throw.type;
3104 region = region->outer;
3107 /* If this exception is ignored by each and every containing region,
3108 then control passes straight out. The runtime may handle some
3109 regions, which also do not require processing internally. */
3110 for (; region; region = region->outer)
3112 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
3113 if (how == RNL_BLOCKED)
3114 return false;
3115 if (how != RNL_NOT_CAUGHT)
3116 return true;
3119 return false;
3122 bool
3123 can_throw_internal (rtx insn)
3125 rtx note;
3127 if (! INSN_P (insn))
3128 return false;
3130 if (GET_CODE (insn) == INSN
3131 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3132 insn = XVECEXP (PATTERN (insn), 0, 0);
3134 if (GET_CODE (insn) == CALL_INSN
3135 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3137 int i;
3138 for (i = 0; i < 3; ++i)
3140 rtx sub = XEXP (PATTERN (insn), i);
3141 for (; sub ; sub = NEXT_INSN (sub))
3142 if (can_throw_internal (sub))
3143 return true;
3145 return false;
3148 /* Every insn that might throw has an EH_REGION note. */
3149 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3150 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3151 return false;
3153 return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
3156 /* Determine if the given INSN can throw an exception that is
3157 visible outside the function. */
3159 bool
3160 can_throw_external_1 (int region_number)
3162 struct eh_region *region;
3163 tree type_thrown;
3165 region = cfun->eh->region_array[region_number];
3167 type_thrown = NULL_TREE;
3168 if (region->type == ERT_THROW)
3170 type_thrown = region->u.throw.type;
3171 region = region->outer;
3174 /* If the exception is caught or blocked by any containing region,
3175 then it is not seen by any calling function. */
3176 for (; region ; region = region->outer)
3177 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
3178 return false;
3180 return true;
3183 bool
3184 can_throw_external (rtx insn)
3186 rtx note;
3188 if (! INSN_P (insn))
3189 return false;
3191 if (GET_CODE (insn) == INSN
3192 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3193 insn = XVECEXP (PATTERN (insn), 0, 0);
3195 if (GET_CODE (insn) == CALL_INSN
3196 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3198 int i;
3199 for (i = 0; i < 3; ++i)
3201 rtx sub = XEXP (PATTERN (insn), i);
3202 for (; sub ; sub = NEXT_INSN (sub))
3203 if (can_throw_external (sub))
3204 return true;
3206 return false;
3209 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3210 if (!note)
3212 /* Calls (and trapping insns) without notes are outside any
3213 exception handling region in this function. We have to
3214 assume it might throw. Given that the front end and middle
3215 ends mark known NOTHROW functions, this isn't so wildly
3216 inaccurate. */
3217 return (GET_CODE (insn) == CALL_INSN
3218 || (flag_non_call_exceptions
3219 && may_trap_p (PATTERN (insn))));
3221 if (INTVAL (XEXP (note, 0)) <= 0)
3222 return false;
3224 return can_throw_external_1 (INTVAL (XEXP (note, 0)));
3227 /* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
3229 void
3230 set_nothrow_function_flags (void)
3232 rtx insn;
3234 current_function_nothrow = 1;
3236 /* Assume cfun->all_throwers_are_sibcalls until we encounter
3237 something that can throw an exception. We specifically exempt
3238 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3239 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3240 is optimistic. */
3242 cfun->all_throwers_are_sibcalls = 1;
3244 if (! flag_exceptions)
3245 return;
3247 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3248 if (can_throw_external (insn))
3250 current_function_nothrow = 0;
3252 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
3254 cfun->all_throwers_are_sibcalls = 0;
3255 return;
3259 for (insn = current_function_epilogue_delay_list; insn;
3260 insn = XEXP (insn, 1))
3261 if (can_throw_external (insn))
3263 current_function_nothrow = 0;
3265 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
3267 cfun->all_throwers_are_sibcalls = 0;
3268 return;
3274 /* Various hooks for unwind library. */
3276 /* Do any necessary initialization to access arbitrary stack frames.
3277 On the SPARC, this means flushing the register windows. */
3279 void
3280 expand_builtin_unwind_init (void)
3282 /* Set this so all the registers get saved in our frame; we need to be
3283 able to copy the saved values for any registers from frames we unwind. */
3284 current_function_has_nonlocal_label = 1;
3286 #ifdef SETUP_FRAME_ADDRESSES
3287 SETUP_FRAME_ADDRESSES ();
3288 #endif
3291 rtx
3292 expand_builtin_eh_return_data_regno (tree arglist)
3294 tree which = TREE_VALUE (arglist);
3295 unsigned HOST_WIDE_INT iwhich;
3297 if (TREE_CODE (which) != INTEGER_CST)
3299 error ("argument of `__builtin_eh_return_regno' must be constant");
3300 return constm1_rtx;
3303 iwhich = tree_low_cst (which, 1);
3304 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3305 if (iwhich == INVALID_REGNUM)
3306 return constm1_rtx;
3308 #ifdef DWARF_FRAME_REGNUM
3309 iwhich = DWARF_FRAME_REGNUM (iwhich);
3310 #else
3311 iwhich = DBX_REGISTER_NUMBER (iwhich);
3312 #endif
3314 return GEN_INT (iwhich);
3317 /* Given a value extracted from the return address register or stack slot,
3318 return the actual address encoded in that value. */
3320 rtx
3321 expand_builtin_extract_return_addr (tree addr_tree)
3323 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3325 if (GET_MODE (addr) != Pmode
3326 && GET_MODE (addr) != VOIDmode)
3328 #ifdef POINTERS_EXTEND_UNSIGNED
3329 addr = convert_memory_address (Pmode, addr);
3330 #else
3331 addr = convert_to_mode (Pmode, addr, 0);
3332 #endif
3335 /* First mask out any unwanted bits. */
3336 #ifdef MASK_RETURN_ADDR
3337 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3338 #endif
3340 /* Then adjust to find the real return address. */
3341 #if defined (RETURN_ADDR_OFFSET)
3342 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3343 #endif
3345 return addr;
3348 /* Given an actual address in addr_tree, do any necessary encoding
3349 and return the value to be stored in the return address register or
3350 stack slot so the epilogue will return to that address. */
3352 rtx
3353 expand_builtin_frob_return_addr (tree addr_tree)
3355 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3357 addr = convert_memory_address (Pmode, addr);
3359 #ifdef RETURN_ADDR_OFFSET
3360 addr = force_reg (Pmode, addr);
3361 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3362 #endif
3364 return addr;
3367 /* Set up the epilogue with the magic bits we'll need to return to the
3368 exception handler. */
3370 void
3371 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3372 tree handler_tree)
3374 rtx tmp;
3376 #ifdef EH_RETURN_STACKADJ_RTX
3377 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3378 tmp = convert_memory_address (Pmode, tmp);
3379 if (!cfun->eh->ehr_stackadj)
3380 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
3381 else if (tmp != cfun->eh->ehr_stackadj)
3382 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
3383 #endif
3385 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3386 tmp = convert_memory_address (Pmode, tmp);
3387 if (!cfun->eh->ehr_handler)
3388 cfun->eh->ehr_handler = copy_to_reg (tmp);
3389 else if (tmp != cfun->eh->ehr_handler)
3390 emit_move_insn (cfun->eh->ehr_handler, tmp);
3392 if (!cfun->eh->ehr_label)
3393 cfun->eh->ehr_label = gen_label_rtx ();
3394 emit_jump (cfun->eh->ehr_label);
3397 void
3398 expand_eh_return (void)
3400 rtx around_label;
3402 if (! cfun->eh->ehr_label)
3403 return;
3405 current_function_calls_eh_return = 1;
3407 #ifdef EH_RETURN_STACKADJ_RTX
3408 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3409 #endif
3411 around_label = gen_label_rtx ();
3412 emit_jump (around_label);
3414 emit_label (cfun->eh->ehr_label);
3415 clobber_return_register ();
3417 #ifdef EH_RETURN_STACKADJ_RTX
3418 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3419 #endif
3421 #ifdef HAVE_eh_return
3422 if (HAVE_eh_return)
3423 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3424 else
3425 #endif
3427 #ifdef EH_RETURN_HANDLER_RTX
3428 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3429 #else
3430 error ("__builtin_eh_return not supported on this target");
3431 #endif
3434 emit_label (around_label);
3437 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3438 POINTERS_EXTEND_UNSIGNED and return it. */
3440 rtx
3441 expand_builtin_extend_pointer (tree addr_tree)
3443 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3444 int extend;
3446 #ifdef POINTERS_EXTEND_UNSIGNED
3447 extend = POINTERS_EXTEND_UNSIGNED;
3448 #else
3449 /* The previous EH code did an unsigned extend by default, so we do this also
3450 for consistency. */
3451 extend = 1;
3452 #endif
3454 return convert_modes (word_mode, ptr_mode, addr, extend);
3457 /* In the following functions, we represent entries in the action table
3458 as 1-based indices. Special cases are:
3460 0: null action record, non-null landing pad; implies cleanups
3461 -1: null action record, null landing pad; implies no action
3462 -2: no call-site entry; implies must_not_throw
3463 -3: we have yet to process outer regions
3465 Further, no special cases apply to the "next" field of the record.
3466 For next, 0 means end of list. */
3468 struct action_record
3470 int offset;
3471 int filter;
3472 int next;
3475 static int
3476 action_record_eq (const void *pentry, const void *pdata)
3478 const struct action_record *entry = (const struct action_record *) pentry;
3479 const struct action_record *data = (const struct action_record *) pdata;
3480 return entry->filter == data->filter && entry->next == data->next;
3483 static hashval_t
3484 action_record_hash (const void *pentry)
3486 const struct action_record *entry = (const struct action_record *) pentry;
3487 return entry->next * 1009 + entry->filter;
3490 static int
3491 add_action_record (htab_t ar_hash, int filter, int next)
3493 struct action_record **slot, *new, tmp;
3495 tmp.filter = filter;
3496 tmp.next = next;
3497 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3499 if ((new = *slot) == NULL)
3501 new = xmalloc (sizeof (*new));
3502 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3503 new->filter = filter;
3504 new->next = next;
3505 *slot = new;
3507 /* The filter value goes in untouched. The link to the next
3508 record is a "self-relative" byte offset, or zero to indicate
3509 that there is no next record. So convert the absolute 1-based
3510 indices we've been carrying around into a displacement. */
3512 push_sleb128 (&cfun->eh->action_record_data, filter);
3513 if (next)
3514 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3515 push_sleb128 (&cfun->eh->action_record_data, next);
3518 return new->offset;
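/* A worked example of the encoding above: adding a record with
   filter 3 and no successor to an empty table emits sleb128(3),
   sleb128(0) at 1-based offset 1.  Adding a second record with
   filter 5 chaining to the first then emits sleb128(5) followed by
   sleb128(1 - 4) = sleb128(-3), i.e. the single byte 0x7d, at
   offset 3: -3 is the self-relative displacement from the "next"
   byte (1-based position 4) back to the first record (position 1).  */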
3521 static int
3522 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3524 struct eh_region *c;
3525 int next;
3527 /* If we've reached the top of the region chain, then we have
3528 no actions, and require no landing pad. */
3529 if (region == NULL)
3530 return -1;
3532 switch (region->type)
3534 case ERT_CLEANUP:
3535 /* A cleanup adds a zero filter to the beginning of the chain, but
3536 there are special cases to look out for. If there are *only*
3537 cleanups along a path, then it compresses to a zero action.
3538 Further, if there are multiple cleanups along a path, we only
3539 need to represent one of them, as that is enough to trigger
3540 entry to the landing pad at runtime. */
3541 next = collect_one_action_chain (ar_hash, region->outer);
3542 if (next <= 0)
3543 return 0;
3544 for (c = region->outer; c ; c = c->outer)
3545 if (c->type == ERT_CLEANUP)
3546 return next;
3547 return add_action_record (ar_hash, 0, next);
3549 case ERT_TRY:
3550 /* Process the associated catch regions in reverse order.
3551 If there's a catch-all handler, then we don't need to
3552 search outer regions. Use a magic -3 value to record
3553 that we haven't done the outer search. */
3554 next = -3;
3555 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3557 if (c->u.catch.type_list == NULL)
3559 /* Retrieve the filter from the head of the filter list
3560 where we have stored it (see assign_filter_values). */
3561 int filter
3562 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3564 next = add_action_record (ar_hash, filter, 0);
3566 else
3568 /* Once the outer search is done, trigger an action record for
3569 each filter we have. */
3570 tree flt_node;
3572 if (next == -3)
3574 next = collect_one_action_chain (ar_hash, region->outer);
3576 /* If there is no next action, terminate the chain. */
3577 if (next == -1)
3578 next = 0;
3579 /* If all outer actions are cleanups or must_not_throw,
3580 we'll have no action record for it, since we had wanted
3581 to encode these states in the call-site record directly.
3582 Add a cleanup action to the chain to catch these. */
3583 else if (next <= 0)
3584 next = add_action_record (ar_hash, 0, 0);
3587 flt_node = c->u.catch.filter_list;
3588 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3590 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3591 next = add_action_record (ar_hash, filter, next);
3595 return next;
3597 case ERT_ALLOWED_EXCEPTIONS:
3598 /* An exception specification adds its filter to the
3599 beginning of the chain. */
3600 next = collect_one_action_chain (ar_hash, region->outer);
3602 /* If there is no next action, terminate the chain. */
3603 if (next == -1)
3604 next = 0;
3605 /* If all outer actions are cleanups or must_not_throw,
3606 we'll have no action record for it, since we had wanted
3607 to encode these states in the call-site record directly.
3608 Add a cleanup action to the chain to catch these. */
3609 else if (next <= 0)
3610 next = add_action_record (ar_hash, 0, 0);
3612 return add_action_record (ar_hash, region->u.allowed.filter, next);
3614 case ERT_MUST_NOT_THROW:
3615 /* A must-not-throw region with no inner handlers or cleanups
3616 requires no call-site entry. Note that this differs from
3617 the no handler or cleanup case in that we do require an lsda
3618 to be generated. Return a magic -2 value to record this. */
3619 return -2;
3621 case ERT_CATCH:
3622 case ERT_THROW:
3623 /* CATCH regions are handled in TRY above. THROW regions are
3624 for optimization information only and produce no output. */
3625 return collect_one_action_chain (ar_hash, region->outer);
3627 default:
3628 abort ();
3632 static int
3633 add_call_site (rtx landing_pad, int action)
3635 struct call_site_record *data = cfun->eh->call_site_data;
3636 int used = cfun->eh->call_site_data_used;
3637 int size = cfun->eh->call_site_data_size;
3639 if (used >= size)
3641 size = (size ? size * 2 : 64);
3642 data = ggc_realloc (data, sizeof (*data) * size);
3643 cfun->eh->call_site_data = data;
3644 cfun->eh->call_site_data_size = size;
3647 data[used].landing_pad = landing_pad;
3648 data[used].action = action;
3650 cfun->eh->call_site_data_used = used + 1;
3652 return used + call_site_base;
3655 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3656 The new note numbers will not refer to region numbers, but
3657 instead to call site entries. */
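/* For example, a maximal run of insns sharing one action chain and
   landing pad becomes a single EH_REGION_BEG/EH_REGION_END pair whose
   NOTE_EH_HANDLER is the call-site index; must-not-throw runs get no
   notes at all; and a leading run of no-action insns is materialized
   (as an entry with a null landing pad and zero action) only once an
   LSDA is known to be needed.  */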
3659 void
3660 convert_to_eh_region_ranges (void)
3662 rtx insn, iter, note;
3663 htab_t ar_hash;
3664 int last_action = -3;
3665 rtx last_action_insn = NULL_RTX;
3666 rtx last_landing_pad = NULL_RTX;
3667 rtx first_no_action_insn = NULL_RTX;
3668 int call_site = 0;
3670 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3671 return;
3673 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3675 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3677 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3678 if (INSN_P (iter))
3680 struct eh_region *region;
3681 int this_action;
3682 rtx this_landing_pad;
3684 insn = iter;
3685 if (GET_CODE (insn) == INSN
3686 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3687 insn = XVECEXP (PATTERN (insn), 0, 0);
3689 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3690 if (!note)
3692 if (! (GET_CODE (insn) == CALL_INSN
3693 || (flag_non_call_exceptions
3694 && may_trap_p (PATTERN (insn)))))
3695 continue;
3696 this_action = -1;
3697 region = NULL;
3699 else
3701 if (INTVAL (XEXP (note, 0)) <= 0)
3702 continue;
3703 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3704 this_action = collect_one_action_chain (ar_hash, region);
3707 /* Existence of catch handlers or must-not-throw regions
3708 implies that an lsda is needed (even if empty). */
3709 if (this_action != -1)
3710 cfun->uses_eh_lsda = 1;
3712 /* Delay creation of region notes for no-action regions
3713 until we're sure that an lsda will be required. */
3714 else if (last_action == -3)
3716 first_no_action_insn = iter;
3717 last_action = -1;
3720 /* Cleanups and handlers may share action chains but not
3721 landing pads. Collect the landing pad for this region. */
3722 if (this_action >= 0)
3724 struct eh_region *o;
3725 for (o = region; ! o->landing_pad ; o = o->outer)
3726 continue;
3727 this_landing_pad = o->landing_pad;
3729 else
3730 this_landing_pad = NULL_RTX;
3732 /* Differing actions or landing pads implies a change in call-site
3733 info, which implies some EH_REGION note should be emitted. */
3734 if (last_action != this_action
3735 || last_landing_pad != this_landing_pad)
3737 /* If we'd not seen a previous action (-3) or the previous
3738 action was must-not-throw (-2), then we do not need an
3739 end note. */
3740 if (last_action >= -1)
3742 /* If we delayed the creation of the begin, do it now. */
3743 if (first_no_action_insn)
3745 call_site = add_call_site (NULL_RTX, 0);
3746 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3747 first_no_action_insn);
3748 NOTE_EH_HANDLER (note) = call_site;
3749 first_no_action_insn = NULL_RTX;
3752 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3753 last_action_insn);
3754 NOTE_EH_HANDLER (note) = call_site;
3757 /* If the new action is must-not-throw, then no region notes
3758 are created. */
3759 if (this_action >= -1)
3761 call_site = add_call_site (this_landing_pad,
3762 this_action < 0 ? 0 : this_action);
3763 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3764 NOTE_EH_HANDLER (note) = call_site;
3767 last_action = this_action;
3768 last_landing_pad = this_landing_pad;
3770 last_action_insn = iter;
3773 if (last_action >= -1 && ! first_no_action_insn)
3775 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3776 NOTE_EH_HANDLER (note) = call_site;
3779 htab_delete (ar_hash);
3783 static void
3784 push_uleb128 (varray_type *data_area, unsigned int value)
3788 unsigned char byte = value & 0x7f;
3789 value >>= 7;
3790 if (value)
3791 byte |= 0x80;
3792 VARRAY_PUSH_UCHAR (*data_area, byte);
3794 while (value);
3797 static void
3798 push_sleb128 (varray_type *data_area, int value)
3800 unsigned char byte;
3801 int more;
3805 byte = value & 0x7f;
3806 value >>= 7;
3807 more = ! ((value == 0 && (byte & 0x40) == 0)
3808 || (value == -1 && (byte & 0x40) != 0));
3809 if (more)
3810 byte |= 0x80;
3811 VARRAY_PUSH_UCHAR (*data_area, byte);
3813 while (more);
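/* A minimal self-contained sketch of the same LEB128 encoding, writing
   into a plain byte buffer instead of a varray; the buffer interface
   and the function name are illustrative, not part of this file.  The
   value is emitted 7 bits at a time, least significant group first,
   with bit 0x80 marking continuation: 624485 encodes as the bytes
   0xe5 0x8e 0x26, and sleb128(-3) is the single byte 0x7d.  */

#if 0	/* Illustration only; not compiled.  */
static unsigned int
encode_uleb128 (unsigned char *buf, unsigned int value)
{
  unsigned int n = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;		/* More groups follow.  */
      buf[n++] = byte;
    }
  while (value);
  return n;			/* Number of bytes written.  */
}
#endif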
3817 #ifndef HAVE_AS_LEB128
3818 static int
3819 dw2_size_of_call_site_table (void)
3821 int n = cfun->eh->call_site_data_used;
3822 int size = n * (4 + 4 + 4);
3823 int i;
3825 for (i = 0; i < n; ++i)
3827 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3828 size += size_of_uleb128 (cs->action);
3831 return size;
3834 static int
3835 sjlj_size_of_call_site_table (void)
3837 int n = cfun->eh->call_site_data_used;
3838 int size = 0;
3839 int i;
3841 for (i = 0; i < n; ++i)
3843 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3844 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3845 size += size_of_uleb128 (cs->action);
3848 return size;
3850 #endif
3852 static void
3853 dw2_output_call_site_table (void)
3855 const char *const function_start_lab
3856 = IDENTIFIER_POINTER (current_function_func_begin_label);
3857 int n = cfun->eh->call_site_data_used;
3858 int i;
3860 for (i = 0; i < n; ++i)
3862 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3863 char reg_start_lab[32];
3864 char reg_end_lab[32];
3865 char landing_pad_lab[32];
3867 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3868 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3870 if (cs->landing_pad)
3871 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3872 CODE_LABEL_NUMBER (cs->landing_pad));
3874 /* ??? Perhaps use insn length scaling if the assembler supports
3875 generic arithmetic. */
3876 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3877 data4 if the function is small enough. */
3878 #ifdef HAVE_AS_LEB128
3879 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3880 "region %d start", i);
3881 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3882 "length");
3883 if (cs->landing_pad)
3884 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3885 "landing pad");
3886 else
3887 dw2_asm_output_data_uleb128 (0, "landing pad");
3888 #else
3889 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3890 "region %d start", i);
3891 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3892 if (cs->landing_pad)
3893 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3894 "landing pad");
3895 else
3896 dw2_asm_output_data (4, 0, "landing pad");
3897 #endif
3898 dw2_asm_output_data_uleb128 (cs->action, "action");
3901 call_site_base += n;
3904 static void
3905 sjlj_output_call_site_table (void)
3907 int n = cfun->eh->call_site_data_used;
3908 int i;
3910 for (i = 0; i < n; ++i)
3912 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3914 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3915 "region %d landing pad", i);
3916 dw2_asm_output_data_uleb128 (cs->action, "action");
3919 call_site_base += n;
3922 /* Tell assembler to switch to the section for the exception handling
3923 table. */
3925 void
3926 default_exception_section (void)
3928 if (targetm.have_named_sections)
3930 int flags;
3931 #ifdef HAVE_LD_RO_RW_SECTION_MIXING
3932 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3934 flags = (! flag_pic
3935 || ((tt_format & 0x70) != DW_EH_PE_absptr
3936 && (tt_format & 0x70) != DW_EH_PE_aligned))
3937 ? 0 : SECTION_WRITE;
3938 #else
3939 flags = SECTION_WRITE;
3940 #endif
3941 named_section_flags (".gcc_except_table", flags);
3943 else if (flag_pic)
3944 data_section ();
3945 else
3946 readonly_data_section ();
3949 void
3950 output_function_exception_table (void)
3952 int tt_format, cs_format, lp_format, i, n;
3953 #ifdef HAVE_AS_LEB128
3954 char ttype_label[32];
3955 char cs_after_size_label[32];
3956 char cs_end_label[32];
3957 #else
3958 int call_site_len;
3959 #endif
3960 int have_tt_data;
3961 int tt_format_size = 0;
3963 /* Not all functions need anything. */
3964 if (! cfun->uses_eh_lsda)
3965 return;
3967 #ifdef IA64_UNWIND_INFO
3968 fputs ("\t.personality\t", asm_out_file);
3969 output_addr_const (asm_out_file, eh_personality_libfunc);
3970 fputs ("\n\t.handlerdata\n", asm_out_file);
3971 /* Note that varasm still thinks we're in the function's code section.
3972 The ".endp" directive that will immediately follow will take us back. */
3973 #else
3974 targetm.asm_out.exception_section ();
3975 #endif
3977 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3978 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3980 /* Indicate the format of the @TType entries. */
3981 if (! have_tt_data)
3982 tt_format = DW_EH_PE_omit;
3983 else
3985 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3986 #ifdef HAVE_AS_LEB128
3987 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3988 current_function_funcdef_no);
3989 #endif
3990 tt_format_size = size_of_encoded_value (tt_format);
3992 assemble_align (tt_format_size * BITS_PER_UNIT);
3995 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3996 current_function_funcdef_no);
3998 /* The LSDA header. */
4000 /* Indicate the format of the landing pad start pointer. An omitted
4001 field implies @LPStart == @Start. */
4002 /* Currently we always put @LPStart == @Start. This field would
4003 be most useful in moving the landing pads completely out of
4004 line to another section, but it could also be used to minimize
4005 the size of uleb128 landing pad offsets. */
4006 lp_format = DW_EH_PE_omit;
4007 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
4008 eh_data_format_name (lp_format));
4010 /* @LPStart pointer would go here. */
4012 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
4013 eh_data_format_name (tt_format));
4015 #ifndef HAVE_AS_LEB128
4016 if (USING_SJLJ_EXCEPTIONS)
4017 call_site_len = sjlj_size_of_call_site_table ();
4018 else
4019 call_site_len = dw2_size_of_call_site_table ();
4020 #endif
4022 /* A pc-relative 4-byte displacement to the @TType data. */
4023 if (have_tt_data)
4025 #ifdef HAVE_AS_LEB128
4026 char ttype_after_disp_label[32];
4027 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
4028 current_function_funcdef_no);
4029 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
4030 "@TType base offset");
4031 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
4032 #else
4033 /* Ug. Alignment queers things. */
4034 unsigned int before_disp, after_disp, last_disp, disp;
4036 before_disp = 1 + 1;
4037 after_disp = (1 + size_of_uleb128 (call_site_len)
4038 + call_site_len
4039 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
4040 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
4041 * tt_format_size));
4043 disp = after_disp;
4046 unsigned int disp_size, pad;
4048 last_disp = disp;
4049 disp_size = size_of_uleb128 (disp);
4050 pad = before_disp + disp_size + after_disp;
4051 if (pad % tt_format_size)
4052 pad = tt_format_size - (pad % tt_format_size);
4053 else
4054 pad = 0;
4055 disp = after_disp + pad;
4057 while (disp != last_disp);
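/* The loop above iterates because the size of the uleb128-encoded
   displacement feeds back into the alignment padding it must cover.
   For instance (illustrative numbers), with tt_format_size = 4,
   before_disp = 2 and after_disp = 126: a first guess of 126 fits
   in one byte, giving 2 + 1 + 126 = 129 bytes and hence 3 bytes of
   padding, so disp becomes 129; 129 needs two bytes, giving 130 and
   2 bytes of padding, and disp settles at 128.  */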
4059 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
4060 #endif
4063 /* Indicate the format of the call-site offsets. */
4064 #ifdef HAVE_AS_LEB128
4065 cs_format = DW_EH_PE_uleb128;
4066 #else
4067 cs_format = DW_EH_PE_udata4;
4068 #endif
4069 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
4070 eh_data_format_name (cs_format));
4072 #ifdef HAVE_AS_LEB128
4073 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
4074 current_function_funcdef_no);
4075 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
4076 current_function_funcdef_no);
4077 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
4078 "Call-site table length");
4079 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
4080 if (USING_SJLJ_EXCEPTIONS)
4081 sjlj_output_call_site_table ();
4082 else
4083 dw2_output_call_site_table ();
4084 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
4085 #else
4086 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
4087 if (USING_SJLJ_EXCEPTIONS)
4088 sjlj_output_call_site_table ();
4089 else
4090 dw2_output_call_site_table ();
4091 #endif
4093 /* ??? Decode and interpret the data for flag_debug_asm. */
4094 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
4095 for (i = 0; i < n; ++i)
4096 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
4097 (i ? NULL : "Action record table"));
4099 if (have_tt_data)
4100 assemble_align (tt_format_size * BITS_PER_UNIT);
4102 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
4103 while (i-- > 0)
4105 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
4106 rtx value;
4108 if (type == NULL_TREE)
4109 value = const0_rtx;
4110 else
4112 struct cgraph_varpool_node *node;
4114 type = lookup_type_for_runtime (type);
4115 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4117 /* Let cgraph know that the rtti decl is used. Not all of the
4118 paths below go through assemble_integer, which would take
4119 care of this for us. */
4120 STRIP_NOPS (type);
4121 if (TREE_CODE (type) == ADDR_EXPR)
4123 type = TREE_OPERAND (type, 0);
4124 node = cgraph_varpool_node (type);
4125 if (node)
4126 cgraph_varpool_mark_needed_node (node);
4128 else if (TREE_CODE (type) != INTEGER_CST)
4129 abort ();
4132 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
4133 assemble_integer (value, tt_format_size,
4134 tt_format_size * BITS_PER_UNIT, 1);
4135 else
4136 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
4139 #ifdef HAVE_AS_LEB128
4140 if (have_tt_data)
4141 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
4142 #endif
4144 /* ??? Decode and interpret the data for flag_debug_asm. */
4145 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
4146 for (i = 0; i < n; ++i)
4147 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
4148 (i ? NULL : "Exception specification table"));
4150 function_section (current_function_decl);
4153 #include "gt-except.h"