/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */

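/* A minimal sketch, not taken from any particular front end: for a
   C++ fragment such as

	try { f (); }
	catch (E &e) { handle (e); }

   (f, E and handle being placeholders), the front end expands the try
   block bracketed by expand_eh_region_start and
   expand_start_all_catch, and each handler bracketed by
   expand_start_catch and expand_end_catch; see those functions
   below.  */
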
50 #include "config.h"
51 #include "system.h"
52 #include "rtl.h"
53 #include "tree.h"
54 #include "flags.h"
55 #include "function.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "insn-config.h"
59 #include "except.h"
60 #include "integrate.h"
61 #include "hard-reg-set.h"
62 #include "basic-block.h"
63 #include "output.h"
64 #include "dwarf2asm.h"
65 #include "dwarf2out.h"
66 #include "dwarf2.h"
67 #include "toplev.h"
68 #include "hashtab.h"
69 #include "intl.h"
70 #include "ggc.h"
71 #include "tm_p.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static unsigned int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_CLEANUP = 1,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the type object
       matched, and a pointer to the generated code.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type;
      int filter;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};

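/* A rough illustration, not from the original sources: for one try
   block with two handlers inside an enclosing cleanup, the region
   tree looks like

	ERT_CLEANUP
	  ERT_TRY
	  ERT_CATCH   (first handler)
	  ERT_CATCH   (second handler)

   The catch regions are peers of their try region in the tree; they
   are tied to it through u.try.catch/u.try.last_catch on one side and
   u.catch.next_catch/u.catch.prev_catch on the other.  */
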
/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};

static void mark_eh_region PARAMS ((struct eh_region *));

static int t2r_eq PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash PARAMS ((const PTR));
static int t2r_mark_1 PARAMS ((PTR *, PTR));
static void t2r_mark PARAMS ((PTR));
static void add_type_for_runtime PARAMS ((tree));
static tree lookup_type_for_runtime PARAMS ((tree));

static struct eh_region *expand_eh_region_end PARAMS ((void));

static rtx get_exception_filter PARAMS ((struct function *));

static void collect_eh_region_array PARAMS ((void));
static void resolve_fixup_regions PARAMS ((void));
static void remove_fixup_regions PARAMS ((void));
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
							struct inline_remap *));
static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
					   struct eh_region **));
static int ttypes_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash PARAMS ((const PTR));
static int ehspec_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash PARAMS ((const PTR));
static int add_ttypes_entry PARAMS ((htab_t, tree));
static int add_ehspec_entry PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values PARAMS ((void));
static void build_post_landing_pads PARAMS ((void));
static void connect_post_landing_pads PARAMS ((void));
static void dw2_build_landing_pads PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter PARAMS ((rtx));
static void sjlj_emit_function_exit PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads PARAMS ((void));

static void remove_exception_handler_label PARAMS ((rtx));
static void remove_eh_handler PARAMS ((struct eh_region *));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash PARAMS ((const PTR));
static int add_action_record PARAMS ((htab_t, int, int));
static int collect_one_action_chain PARAMS ((htab_t, struct eh_region *));
static int add_call_site PARAMS ((rtx, int));

static void push_uleb128 PARAMS ((varray_type *, unsigned int));
static void push_sleb128 PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table PARAMS ((void));
static int sjlj_size_of_call_site_table PARAMS ((void));
#endif
static void dw2_output_call_site_table PARAMS ((void));
static void sjlj_output_call_site_table PARAMS ((void));

/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}

void
init_eh ()
{
  ggc_add_rtx_root (&exception_handler_labels, 1);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

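/* For reference, a sketch of the C equivalent of the type laid out
   above.  The authoritative definition lives in unwind-sjlj.c; the
   exact __data element type and the __jbuf length are
   target-dependent, so word_type and JBUF_LEN below are placeholders:

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *__prev;
	  int __call_site;
	  word_type __data[4];
	  void *__personality;
	  void *__lsda;
	  void *__jbuf[JBUF_LEN];
	};
*/
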
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}

/* Mark EH for GC.  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}

void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}

void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	  else
	    {
	      do {
		next = r->outer;
		free (r);
		r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
}

/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}

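/* For illustration only: paired calls to expand_eh_region_start and
   expand_eh_region_end bracket the emitted insns with matching notes,
   e.g. for two nested regions:

	NOTE_INSN_EH_REGION_BEG 1
	  ...
	  NOTE_INSN_EH_REGION_BEG 2
	  ...
	  NOTE_INSN_EH_REGION_END 2
	  ...
	NOTE_INSN_EH_REGION_END 1

   convert_from_eh_region_ranges_1 below later turns these ranges into
   REG_EH_REGION notes on the individual insns and deletes the region
   notes.  */
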
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}

/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, or null if this is
   a catch-all clause.  */

void
expand_start_catch (type)
     tree type;
{
  struct eh_region *t, *c, *l;

  if (! doing_eh (0))
    return;

  if (type)
    add_type_for_runtime (type);
  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type = type;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}

/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}

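/* Taken together, and only as a sketch of the expected calling
   sequence: a front end expanding "try { body } catch (T) { handler }"
   would call

	expand_eh_region_start ();
	  ... expand body ...
	expand_start_all_catch ();
	expand_start_catch (T);
	  ... expand handler ...
	expand_end_catch ();
	expand_end_all_catch ();
*/
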
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This is because expand_cleanups is currently generating a sequence
   that it will insert somewhere else.  We collect the proper notion of
   "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (Pmode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}

/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}

/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit calls to
     begin_protect_partials for the outermost region.  So, we must
     explicitly do so here.  */
  if (!cfun->eh->protect_list)
    begin_protect_partials ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}

/* End all the pending exception regions on protect_list.  */

void
end_protect_partials ()
{
  tree t;

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit the call to
     begin_protect_partials for the outermost region.  So,
     PROTECT_LIST may be NULL.  */
  if (!cfun->eh->protect_list)
    return;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}

/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
}

void
find_exception_handler_labels ()
{
  rtx list = NULL_RTX;
  int i;

  free_EXPR_LIST_list (&exception_handler_labels);

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	list = alloc_EXPR_LIST (0, lab, list);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    list = alloc_EXPR_LIST (0, return_label, list);

  exception_handler_labels = list;
}

static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type = o->u.catch.type;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}

static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}

static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static int
t2r_mark_1 (slot, data)
     PTR *slot;
     PTR data ATTRIBUTE_UNUSED;
{
  tree contents = (tree) *slot;
  ggc_mark_tree (contents);
  return 1;
}

static void
t2r_mark (addr)
     PTR addr;
{
  htab_traverse (*(htab_t *) addr, t2r_mark_1, NULL);
}

static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE to cfun->eh->ttype_data, using TTYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TTYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}

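/* A worked example, for illustration only: given an exception
   specification "throw (A, B)" where add_ttypes_entry assigned A and
   B the filter values 1 and 2, and an empty ehspec_data buffer, the
   bytes { 1, 2, 0 } are appended (each value uleb128-encoded, the
   list 0-terminated), and the value returned for the region is -1,
   i.e. minus (starting byte offset + 1).  */
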
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* ??? _Unwind_ForcedUnwind wants no match here.  */
		if (c->u.catch.type == NULL)
		  emit_jump (c->label);
		else
		  emit_cmp_and_jump_insns (cfun->eh->filter,
					   GEN_INT (c->u.catch.filter),
					   EQ, NULL_RTX, word_mode,
					   0, 0, c->label);
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, 0,
				   region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}

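/* For illustration only: for a try block whose two catches were
   assigned filter values 1 and 2, the ERT_TRY case above emits the
   equivalent of

	post_landing_pad:
	  if (filter == 1) goto catch_1;
	  if (filter == 2) goto catch_2;
	  RESX;

   with the RESX marker later rewritten by connect_post_landing_pads
   below.  */
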
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      emit_insns_before (seq, region->resume);
      delete_insn (region->resume);
    }
}

static void
dw2_build_landing_pads ()
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insns_before (seq, region->post_landing_pad);
    }
}

struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (lp_info)
     struct sjlj_lp_info *lp_info;
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      for (; region; region = region->outer)
	if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
	  break;

      if (region)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}

static void
sjlj_assign_call_site_values (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable
	&& lp_info[i].action_index >= 0)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}

static void
sjlj_mark_call_sites (lp_info)
     struct sjlj_lp_info *lp_info;
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insns_before (p, before);
      last_call_site = this_call_site;
    }
}

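/* For illustration only: the net effect of the loop above is a store
   of the call-site index into the function context ahead of each
   throwing call, roughly

	fc.__call_site = 2;
	call foo ();

   so the dispatch code emitted by sjlj_emit_dispatch_table below can
   tell which call site unwound.  Here foo and the index 2 are
   placeholders.  */
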
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (dispatch_label)
     rtx dispatch_label;
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
      emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, 0,
			     dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (GET_CODE (fn_begin) == NOTE
	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
      break;
  emit_insns_after (seq, fn_begin);
}

2180 /* Call back from expand_function_end to know where we should put
2181 the call to unwind_sjlj_unregister_libfunc if needed. */
2183 void
2184 sjlj_emit_function_exit_after (after)
2185 rtx after;
2187 cfun->eh->sjlj_exit_after = after;
2190 static void
2191 sjlj_emit_function_exit ()
2193 rtx seq;
2195 start_sequence ();
2197 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2198 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2200 seq = get_insns ();
2201 end_sequence ();
2203 /* ??? Really this can be done in any block at loop level 0 that
2204 post-dominates all can_throw_internal instructions. This is
2205 the last possible moment. */
2207 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2210 static void
2211 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2212 rtx dispatch_label;
2213 struct sjlj_lp_info *lp_info;
2215 int i, first_reachable;
2216 rtx mem, dispatch, seq, fc;
2218 fc = cfun->eh->sjlj_fc;
2220 start_sequence ();
2222 emit_label (dispatch_label);
2224 #ifndef DONT_USE_BUILTIN_SETJMP
2225 expand_builtin_setjmp_receiver (dispatch_label);
2226 #endif
2228 /* Load up dispatch index, exc_ptr and filter values from the
2229 function context. */
2230 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2231 sjlj_fc_call_site_ofs);
2232 dispatch = copy_to_reg (mem);
2234 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2235 if (word_mode != Pmode)
2237 #ifdef POINTERS_EXTEND_UNSIGNED
2238 mem = convert_memory_address (Pmode, mem);
2239 #else
2240 mem = convert_to_mode (Pmode, mem, 0);
2241 #endif
2243 emit_move_insn (cfun->eh->exc_ptr, mem);
2245 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2246 emit_move_insn (cfun->eh->filter, mem);
2248 /* Jump to one of the directly reachable regions. */
2249 /* ??? This really ought to be using a switch statement. */
2251 first_reachable = 0;
2252 for (i = cfun->eh->last_region_number; i > 0; --i)
2254 if (! lp_info[i].directly_reachable
2255 || lp_info[i].action_index < 0)
2256 continue;
2258 if (! first_reachable)
2260 first_reachable = i;
2261 continue;
2264 emit_cmp_and_jump_insns (dispatch,
2265 GEN_INT (lp_info[i].dispatch_index), EQ,
2266 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2267 cfun->eh->region_array[i]->post_landing_pad);
2270 seq = get_insns ();
2271 end_sequence ();
2273 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2274 ->post_landing_pad));
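/* Conceptually, the sequence built above amounts to an if-else
   chain (a sketch, not literal output):

	if (dispatch == D_i) goto post_landing_pad_i;
	if (dispatch == D_j) goto post_landing_pad_j;
	...
	goto post_landing_pad_of_first_reachable;

   The final goto comes for free: the sequence is inserted
   immediately before the first reachable region's post landing pad,
   so control simply falls through to it.  */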
2277 static void
2278 sjlj_build_landing_pads ()
2280 struct sjlj_lp_info *lp_info;
2282 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2283 sizeof (struct sjlj_lp_info));
2285 if (sjlj_find_directly_reachable_regions (lp_info))
2287 rtx dispatch_label = gen_label_rtx ();
2289 cfun->eh->sjlj_fc
2290 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2291 int_size_in_bytes (sjlj_fc_type_node),
2292 TYPE_ALIGN (sjlj_fc_type_node));
2294 sjlj_assign_call_site_values (dispatch_label, lp_info);
2295 sjlj_mark_call_sites (lp_info);
2297 sjlj_emit_function_enter (dispatch_label);
2298 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2299 sjlj_emit_function_exit ();
2302 free (lp_info);
2305 void
2306 finish_eh_generation ()
2308 /* Nothing to do if no regions created. */
2309 if (cfun->eh->region_tree == NULL)
2310 return;
2312 /* The object here is to provide find_basic_blocks with detailed
2313 information (via reachable_handlers) on how exception control
2314 flows within the function. In this first pass, we can include
2315 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2316 regions, and hope that it will be useful in deleting unreachable
2317 handlers. Subsequently, we will generate landing pads which will
2318 connect many of the handlers, and then type information will not
2319 be effective. Still, this is a win over previous implementations. */
2321 rebuild_jump_labels (get_insns ());
2322 find_basic_blocks (get_insns (), max_reg_num (), 0);
2323 cleanup_cfg (CLEANUP_PRE_LOOP);
2325 /* These registers are used by the landing pads. Make sure they
2326 have been generated. */
2327 get_exception_pointer (cfun);
2328 get_exception_filter (cfun);
2330 /* Construct the landing pads. */
2332 assign_filter_values ();
2333 build_post_landing_pads ();
2334 connect_post_landing_pads ();
2335 if (USING_SJLJ_EXCEPTIONS)
2336 sjlj_build_landing_pads ();
2337 else
2338 dw2_build_landing_pads ();
2340 cfun->eh->built_landing_pads = 1;
2342 /* We've totally changed the CFG. Start over. */
2343 find_exception_handler_labels ();
2344 rebuild_jump_labels (get_insns ());
2345 find_basic_blocks (get_insns (), max_reg_num (), 0);
2346 cleanup_cfg (CLEANUP_PRE_LOOP);
2349 /* This section handles removing dead code for flow. */
2351 /* Remove LABEL from the exception_handler_labels list. */
2353 static void
2354 remove_exception_handler_label (label)
2355 rtx label;
2357 rtx *pl, l;
2359 for (pl = &exception_handler_labels, l = *pl;
2360 XEXP (l, 0) != label;
2361 pl = &XEXP (l, 1), l = *pl)
2362 continue;
2364 *pl = XEXP (l, 1);
2365 free_EXPR_LIST_node (l);
2368 /* Splice REGION from the region tree etc. */
2370 static void
2371 remove_eh_handler (region)
2372 struct eh_region *region;
2374 struct eh_region **pp, *p;
2375 rtx lab;
2376 int i;
2378 /* For the benefit of efficiently handling REG_EH_REGION notes,
2379 replace this region in the region array with its containing
2380 region. Note that previous region deletions may result in
2381 multiple copies of this region in the array, so we have to
2382 search the whole thing. */
2383 for (i = cfun->eh->last_region_number; i > 0; --i)
2384 if (cfun->eh->region_array[i] == region)
2385 cfun->eh->region_array[i] = region->outer;
2387 if (cfun->eh->built_landing_pads)
2388 lab = region->landing_pad;
2389 else
2390 lab = region->label;
2391 if (lab)
2392 remove_exception_handler_label (lab);
2394 if (region->outer)
2395 pp = &region->outer->inner;
2396 else
2397 pp = &cfun->eh->region_tree;
2398 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2399 continue;
2401 if (region->inner)
2403 for (p = region->inner; p->next_peer ; p = p->next_peer)
2404 p->outer = region->outer;
2405 p->next_peer = region->next_peer;
2406 p->outer = region->outer;
2407 *pp = region->inner;
2409 else
2410 *pp = region->next_peer;
2412 if (region->type == ERT_CATCH)
2414 struct eh_region *try, *next, *prev;
2416 for (try = region->next_peer;
2417 try->type == ERT_CATCH;
2418 try = try->next_peer)
2419 continue;
2420 if (try->type != ERT_TRY)
2421 abort ();
2423 next = region->u.catch.next_catch;
2424 prev = region->u.catch.prev_catch;
2426 if (next)
2427 next->u.catch.prev_catch = prev;
2428 else
2429 try->u.try.last_catch = prev;
2430 if (prev)
2431 prev->u.catch.next_catch = next;
2432 else
2434 try->u.try.catch = next;
2435 if (! next)
2436 remove_eh_handler (try);
2440 free (region);
2443 /* LABEL heads a basic block that is about to be deleted. If this
2444 label corresponds to an exception region, we may be able to
2445 delete the region. */
2447 void
2448 maybe_remove_eh_handler (label)
2449 rtx label;
2451 int i;
2453 /* ??? After generating landing pads, it's not so simple to determine
2454 if the region data is completely unused. One must examine the
2455 landing pad and the post landing pad, and whether an inner try block
2456 is referencing the catch handlers directly. */
2457 if (cfun->eh->built_landing_pads)
2458 return;
2460 for (i = cfun->eh->last_region_number; i > 0; --i)
2462 struct eh_region *region = cfun->eh->region_array[i];
2463 if (region && region->label == label)
2465 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2466 because there is no path to the fallback call to terminate.
2467 But the region continues to affect call-site data until there
2468 are no more contained calls, which we don't see here. */
2469 if (region->type == ERT_MUST_NOT_THROW)
2471 remove_exception_handler_label (region->label);
2472 region->label = NULL_RTX;
2474 else
2475 remove_eh_handler (region);
2476 break;
2482 /* This section describes CFG exception edges for flow. */
2484 /* For communicating between calls to reachable_next_level. */
2485 struct reachable_info
2487 tree types_caught;
2488 tree types_allowed;
2489 rtx handlers;
2492 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2493 base class of TYPE, is in HANDLED. */
2495 static int
2496 check_handled (handled, type)
2497 tree handled, type;
2499 tree t;
2501 /* We can check for exact matches without front-end help. */
2502 if (! lang_eh_type_covers)
2504 for (t = handled; t ; t = TREE_CHAIN (t))
2505 if (TREE_VALUE (t) == type)
2506 return 1;
2508 else
2510 for (t = handled; t ; t = TREE_CHAIN (t))
2511 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2512 return 1;
2515 return 0;
2518 /* A subroutine of reachable_next_level. If we are collecting a list
2519 of handlers, add one. After landing pad generation, reference
2520 the landing pad instead of the handlers themselves. Further, the handlers are
2521 all wired together, so by referencing one, we've got them all.
2522 Before landing pad generation we reference each handler individually.
2524 LP_REGION contains the landing pad; REGION is the handler. */
2526 static void
2527 add_reachable_handler (info, lp_region, region)
2528 struct reachable_info *info;
2529 struct eh_region *lp_region;
2530 struct eh_region *region;
2532 if (! info)
2533 return;
2535 if (cfun->eh->built_landing_pads)
2537 if (! info->handlers)
2538 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2540 else
2541 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2544 /* Process one level of exception regions for reachability.
2545 If TYPE_THROWN is non-null, then it is the *exact* type being
2546 propagated. If INFO is non-null, then collect handler labels
2547 and caught/allowed type information between invocations. */
2549 static enum reachable_code
2550 reachable_next_level (region, type_thrown, info)
2551 struct eh_region *region;
2552 tree type_thrown;
2553 struct reachable_info *info;
2555 switch (region->type)
2557 case ERT_CLEANUP:
2558 /* Before landing-pad generation, we model control flow
2559 directly to the individual handlers. In this way we can
2560 see that catch handler types may shadow one another. */
2561 add_reachable_handler (info, region, region);
2562 return RNL_MAYBE_CAUGHT;
2564 case ERT_TRY:
2566 struct eh_region *c;
2567 enum reachable_code ret = RNL_NOT_CAUGHT;
2569 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2571 /* A catch-all handler ends the search. */
2572 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2573 to be run as well. */
2574 if (c->u.catch.type == NULL)
2576 add_reachable_handler (info, region, c);
2577 return RNL_CAUGHT;
2580 if (type_thrown)
2582 /* If we have a type match, end the search. */
2583 if (c->u.catch.type == type_thrown
2584 || (lang_eh_type_covers
2585 && (*lang_eh_type_covers) (c->u.catch.type,
2586 type_thrown)))
2588 add_reachable_handler (info, region, c);
2589 return RNL_CAUGHT;
2592 /* If we have definitive information of a match failure,
2593 the catch won't trigger. */
2594 if (lang_eh_type_covers)
2595 return RNL_NOT_CAUGHT;
2598 if (! info)
2599 ret = RNL_MAYBE_CAUGHT;
2601 /* A type must not have been previously caught. */
2602 else if (! check_handled (info->types_caught, c->u.catch.type))
2604 add_reachable_handler (info, region, c);
2605 info->types_caught = tree_cons (NULL, c->u.catch.type,
2606 info->types_caught);
2608 /* ??? If the catch type is a base class of every allowed
2609 type, then we know we can stop the search. */
2610 ret = RNL_MAYBE_CAUGHT;
2614 return ret;
2617 case ERT_ALLOWED_EXCEPTIONS:
2618 /* An empty list of types definitely ends the search. */
2619 if (region->u.allowed.type_list == NULL_TREE)
2621 add_reachable_handler (info, region, region);
2622 return RNL_CAUGHT;
2625 /* Collect a list of lists of allowed types for use in detecting
2626 when a catch may be transformed into a catch-all. */
2627 if (info)
2628 info->types_allowed = tree_cons (NULL_TREE,
2629 region->u.allowed.type_list,
2630 info->types_allowed);
2632 /* If we have definitive information about the type hierarchy,
2633 then we can tell if the thrown type will pass through the
2634 filter. */
2635 if (type_thrown && lang_eh_type_covers)
2637 if (check_handled (region->u.allowed.type_list, type_thrown))
2638 return RNL_NOT_CAUGHT;
2639 else
2641 add_reachable_handler (info, region, region);
2642 return RNL_CAUGHT;
2646 add_reachable_handler (info, region, region);
2647 return RNL_MAYBE_CAUGHT;
2649 case ERT_CATCH:
2650 /* Catch regions are handled by their controlling try region. */
2651 return RNL_NOT_CAUGHT;
2653 case ERT_MUST_NOT_THROW:
2654 /* Here we end our search, since no exceptions may propagate.
2655 If we've touched down at some landing pad previously, then the
2656 explicit function call we generated may be used. Otherwise
2657 the call is made by the runtime. */
2658 if (info && info->handlers)
2660 add_reachable_handler (info, region, region);
2661 return RNL_CAUGHT;
2663 else
2664 return RNL_BLOCKED;
2666 case ERT_THROW:
2667 case ERT_FIXUP:
2668 /* Shouldn't see these here. */
2669 break;
2672 abort ();
2675 /* Retrieve a list of labels of exception handlers which can be
2676 reached by a given insn. */
2678 rtx
2679 reachable_handlers (insn)
2680 rtx insn;
2682 struct reachable_info info;
2683 struct eh_region *region;
2684 tree type_thrown;
2685 int region_number;
2687 if (GET_CODE (insn) == JUMP_INSN
2688 && GET_CODE (PATTERN (insn)) == RESX)
2689 region_number = XINT (PATTERN (insn), 0);
2690 else
2692 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2693 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2694 return NULL;
2695 region_number = INTVAL (XEXP (note, 0));
2698 memset (&info, 0, sizeof (info));
2700 region = cfun->eh->region_array[region_number];
2702 type_thrown = NULL_TREE;
2703 if (GET_CODE (insn) == JUMP_INSN
2704 && GET_CODE (PATTERN (insn)) == RESX)
2706 /* A RESX leaves a region instead of entering it. Thus the
2707 region itself may have been deleted out from under us. */
2708 if (region == NULL)
2709 return NULL;
2710 region = region->outer;
2712 else if (region->type == ERT_THROW)
2714 type_thrown = region->u.throw.type;
2715 region = region->outer;
2718 for (; region; region = region->outer)
2719 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2720 break;
2722 return info.handlers;
2725 /* Determine if the given INSN can throw an exception that is caught
2726 within the function. */
2728 bool
2729 can_throw_internal (insn)
2730 rtx insn;
2732 struct eh_region *region;
2733 tree type_thrown;
2734 rtx note;
2736 if (! INSN_P (insn))
2737 return false;
2739 if (GET_CODE (insn) == INSN
2740 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2741 insn = XVECEXP (PATTERN (insn), 0, 0);
2743 if (GET_CODE (insn) == CALL_INSN
2744 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2746 int i;
2747 for (i = 0; i < 3; ++i)
2749 rtx sub = XEXP (PATTERN (insn), i);
2750 for (; sub ; sub = NEXT_INSN (sub))
2751 if (can_throw_internal (sub))
2752 return true;
2754 return false;
2757 /* Every insn that might throw has an EH_REGION note. */
2758 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2759 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2760 return false;
2762 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2764 type_thrown = NULL_TREE;
2765 if (region->type == ERT_THROW)
2767 type_thrown = region->u.throw.type;
2768 region = region->outer;
2771 /* If this exception is ignored by each and every containing region,
2772 then control passes straight out. The runtime may handle some
2773 regions, which also do not require processing internally. */
2774 for (; region; region = region->outer)
2776 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2777 if (how == RNL_BLOCKED)
2778 return false;
2779 if (how != RNL_NOT_CAUGHT)
2780 return true;
2783 return false;
2786 /* Determine if the given INSN can throw an exception that is
2787 visible outside the function. */
2789 bool
2790 can_throw_external (insn)
2791 rtx insn;
2793 struct eh_region *region;
2794 tree type_thrown;
2795 rtx note;
2797 if (! INSN_P (insn))
2798 return false;
2800 if (GET_CODE (insn) == INSN
2801 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2802 insn = XVECEXP (PATTERN (insn), 0, 0);
2804 if (GET_CODE (insn) == CALL_INSN
2805 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2807 int i;
2808 for (i = 0; i < 3; ++i)
2810 rtx sub = XEXP (PATTERN (insn), i);
2811 for (; sub ; sub = NEXT_INSN (sub))
2812 if (can_throw_external (sub))
2813 return true;
2815 return false;
2818 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2819 if (!note)
2821 /* Calls (and trapping insns) without notes are outside any
2822 exception handling region in this function. We have to
2823 assume it might throw. Given that the front end and middle
2824 end mark known NOTHROW functions, this isn't so wildly
2825 inaccurate. */
2826 return (GET_CODE (insn) == CALL_INSN
2827 || (flag_non_call_exceptions
2828 && may_trap_p (PATTERN (insn))));
2830 if (INTVAL (XEXP (note, 0)) <= 0)
2831 return false;
2833 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2835 type_thrown = NULL_TREE;
2836 if (region->type == ERT_THROW)
2838 type_thrown = region->u.throw.type;
2839 region = region->outer;
2842 /* If the exception is caught or blocked by any containing region,
2843 then it is not seen by any calling function. */
2844 for (; region ; region = region->outer)
2845 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2846 return false;
2848 return true;
2851 /* True if nothing in this function can throw outside this function. */
2853 bool
2854 nothrow_function_p ()
2856 rtx insn;
2858 if (! flag_exceptions)
2859 return true;
2861 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2862 if (can_throw_external (insn))
2863 return false;
2864 for (insn = current_function_epilogue_delay_list; insn;
2865 insn = XEXP (insn, 1))
2866 if (can_throw_external (insn))
2867 return false;
2869 return true;
2873 /* Various hooks for unwind library. */
2875 /* Do any necessary initialization to access arbitrary stack frames.
2876 On the SPARC, this means flushing the register windows. */
2878 void
2879 expand_builtin_unwind_init ()
2881 /* Set this so all the registers get saved in our frame; we need to be
2882 able to copy the saved values for any registers from frames we unwind. */
2883 current_function_has_nonlocal_label = 1;
2885 #ifdef SETUP_FRAME_ADDRESSES
2886 SETUP_FRAME_ADDRESSES ();
2887 #endif
2890 rtx
2891 expand_builtin_eh_return_data_regno (arglist)
2892 tree arglist;
2894 tree which = TREE_VALUE (arglist);
2895 unsigned HOST_WIDE_INT iwhich;
2897 if (TREE_CODE (which) != INTEGER_CST)
2899 error ("argument of `__builtin_eh_return_data_regno' must be constant");
2900 return constm1_rtx;
2903 iwhich = tree_low_cst (which, 1);
2904 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2905 if (iwhich == INVALID_REGNUM)
2906 return constm1_rtx;
2908 #ifdef DWARF_FRAME_REGNUM
2909 iwhich = DWARF_FRAME_REGNUM (iwhich);
2910 #else
2911 iwhich = DBX_REGISTER_NUMBER (iwhich);
2912 #endif
2914 return GEN_INT (iwhich);
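/* Thus, for example, unwinder sources may write

	regno = __builtin_eh_return_data_regno (0);

   and have the DWARF column of the first EH data register folded to
   a compile-time constant, or -1 if the target has none.  */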
2917 /* Given a value extracted from the return address register or stack slot,
2918 return the actual address encoded in that value. */
2920 rtx
2921 expand_builtin_extract_return_addr (addr_tree)
2922 tree addr_tree;
2924 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2926 /* First mask out any unwanted bits. */
2927 #ifdef MASK_RETURN_ADDR
2928 expand_and (addr, MASK_RETURN_ADDR, addr);
2929 #endif
2931 /* Then adjust to find the real return address. */
2932 #if defined (RETURN_ADDR_OFFSET)
2933 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2934 #endif
2936 return addr;
2939 /* Given an actual address in addr_tree, do any necessary encoding
2940 and return the value to be stored in the return address register or
2941 stack slot so the epilogue will return to that address. */
2943 rtx
2944 expand_builtin_frob_return_addr (addr_tree)
2945 tree addr_tree;
2947 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2949 #ifdef POINTERS_EXTEND_UNSIGNED
2950 addr = convert_memory_address (Pmode, addr);
2951 #endif
2953 #ifdef RETURN_ADDR_OFFSET
2954 addr = force_reg (Pmode, addr);
2955 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2956 #endif
2958 return addr;
2961 /* Set up the epilogue with the magic bits we'll need to return to the
2962 exception handler. */
2964 void
2965 expand_builtin_eh_return (stackadj_tree, handler_tree)
2966 tree stackadj_tree, handler_tree;
2968 rtx stackadj, handler;
2970 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2971 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2973 #ifdef POINTERS_EXTEND_UNSIGNED
2974 stackadj = convert_memory_address (Pmode, stackadj);
2975 handler = convert_memory_address (Pmode, handler);
2976 #endif
2978 if (! cfun->eh->ehr_label)
2980 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
2981 cfun->eh->ehr_handler = copy_to_reg (handler);
2982 cfun->eh->ehr_label = gen_label_rtx ();
2984 else
2986 if (stackadj != cfun->eh->ehr_stackadj)
2987 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
2988 if (handler != cfun->eh->ehr_handler)
2989 emit_move_insn (cfun->eh->ehr_handler, handler);
2992 emit_jump (cfun->eh->ehr_label);
2995 void
2996 expand_eh_return ()
2998 rtx sa, ra, around_label;
3000 if (! cfun->eh->ehr_label)
3001 return;
3003 sa = EH_RETURN_STACKADJ_RTX;
3004 if (! sa)
3006 error ("__builtin_eh_return not supported on this target");
3007 return;
3010 current_function_calls_eh_return = 1;
3012 around_label = gen_label_rtx ();
3013 emit_move_insn (sa, const0_rtx);
3014 emit_jump (around_label);
3016 emit_label (cfun->eh->ehr_label);
3017 clobber_return_register ();
3019 #ifdef HAVE_eh_return
3020 if (HAVE_eh_return)
3021 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3022 else
3023 #endif
3025 ra = EH_RETURN_HANDLER_RTX;
3026 if (! ra)
3028 error ("__builtin_eh_return not supported on this target");
3029 ra = gen_reg_rtx (Pmode);
3032 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3033 emit_move_insn (ra, cfun->eh->ehr_handler);
3036 emit_label (around_label);
3039 /* In the following functions, we represent entries in the action table
3040 as 1-based indices. Special cases are:
3042 0: null action record, non-null landing pad; implies cleanups
3043 -1: null action record, null landing pad; implies no action
3044 -2: no call-site entry; implies must_not_throw
3045 -3: we have yet to process outer regions
3047 Further, no special cases apply to the "next" field of the record.
3048 For next, 0 means end of list. */
3050 struct action_record
3052 int offset;
3053 int filter;
3054 int next;
3057 static int
3058 action_record_eq (pentry, pdata)
3059 const PTR pentry;
3060 const PTR pdata;
3062 const struct action_record *entry = (const struct action_record *) pentry;
3063 const struct action_record *data = (const struct action_record *) pdata;
3064 return entry->filter == data->filter && entry->next == data->next;
3067 static hashval_t
3068 action_record_hash (pentry)
3069 const PTR pentry;
3071 const struct action_record *entry = (const struct action_record *) pentry;
3072 return entry->next * 1009 + entry->filter;
3075 static int
3076 add_action_record (ar_hash, filter, next)
3077 htab_t ar_hash;
3078 int filter, next;
3080 struct action_record **slot, *new, tmp;
3082 tmp.filter = filter;
3083 tmp.next = next;
3084 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3086 if ((new = *slot) == NULL)
3088 new = (struct action_record *) xmalloc (sizeof (*new));
3089 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3090 new->filter = filter;
3091 new->next = next;
3092 *slot = new;
3094 /* The filter value goes in untouched. The link to the next
3095 record is a "self-relative" byte offset, or zero to indicate
3096 that there is no next record. So convert the absolute 1-based
3097 indices we've been carrying around into a displacement. */
3099 push_sleb128 (&cfun->eh->action_record_data, filter);
3100 if (next)
3101 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3102 push_sleb128 (&cfun->eh->action_record_data, next);
3105 return new->offset;
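/* A worked example of the encoding above: pushing (filter 1, next 0)
   into an empty array yields offset 1 and the bytes 0x01 0x00.
   Pushing (filter 2, next 1) then yields offset 3; after the filter
   byte the array holds 3 bytes, so the link is encoded as
   1 - (3 + 1) = -3, i.e. the single sleb128 byte 0x7d.  */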
3108 static int
3109 collect_one_action_chain (ar_hash, region)
3110 htab_t ar_hash;
3111 struct eh_region *region;
3113 struct eh_region *c;
3114 int next;
3116 /* If we've reached the top of the region chain, then we have
3117 no actions, and require no landing pad. */
3118 if (region == NULL)
3119 return -1;
3121 switch (region->type)
3123 case ERT_CLEANUP:
3124 /* A cleanup adds a zero filter to the beginning of the chain, but
3125 there are special cases to look out for. If there are *only*
3126 cleanups along a path, then it compresses to a zero action.
3127 Further, if there are multiple cleanups along a path, we only
3128 need to represent one of them, as that is enough to trigger
3129 entry to the landing pad at runtime. */
3130 next = collect_one_action_chain (ar_hash, region->outer);
3131 if (next <= 0)
3132 return 0;
3133 for (c = region->outer; c ; c = c->outer)
3134 if (c->type == ERT_CLEANUP)
3135 return next;
3136 return add_action_record (ar_hash, 0, next);
3138 case ERT_TRY:
3139 /* Process the associated catch regions in reverse order.
3140 If there's a catch-all handler, then we don't need to
3141 search outer regions. Use a magic -3 value to record
3142 that we haven't done the outer search. */
3143 next = -3;
3144 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3146 if (c->u.catch.type == NULL)
3147 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3148 else
3150 if (next == -3)
3152 next = collect_one_action_chain (ar_hash, region->outer);
3154 /* If there is no next action, terminate the chain. */
3155 if (next == -1)
3156 next = 0;
3157 /* If all outer actions are cleanups or must_not_throw,
3158 we'll have no action record for it, since we want to
3159 encode these states in the call-site record directly.
3160 Add a cleanup action to the chain to catch these. */
3161 else if (next <= 0)
3162 next = add_action_record (ar_hash, 0, 0);
3164 next = add_action_record (ar_hash, c->u.catch.filter, next);
3167 return next;
3169 case ERT_ALLOWED_EXCEPTIONS:
3170 /* An exception specification adds its filter to the
3171 beginning of the chain. */
3172 next = collect_one_action_chain (ar_hash, region->outer);
3173 return add_action_record (ar_hash, region->u.allowed.filter,
3174 next < 0 ? 0 : next);
3176 case ERT_MUST_NOT_THROW:
3177 /* A must-not-throw region with no inner handlers or cleanups
3178 requires no call-site entry. Note that this differs from
3179 the no handler or cleanup case in that we do require an lsda
3180 to be generated. Return a magic -2 value to record this. */
3181 return -2;
3183 case ERT_CATCH:
3184 case ERT_THROW:
3185 /* CATCH regions are handled in TRY above. THROW regions are
3186 for optimization information only and produce no output. */
3187 return collect_one_action_chain (ar_hash, region->outer);
3189 default:
3190 abort ();
3194 static int
3195 add_call_site (landing_pad, action)
3196 rtx landing_pad;
3197 int action;
3199 struct call_site_record *data = cfun->eh->call_site_data;
3200 int used = cfun->eh->call_site_data_used;
3201 int size = cfun->eh->call_site_data_size;
3203 if (used >= size)
3205 size = (size ? size * 2 : 64);
3206 data = (struct call_site_record *)
3207 xrealloc (data, sizeof (*data) * size);
3208 cfun->eh->call_site_data = data;
3209 cfun->eh->call_site_data_size = size;
3212 data[used].landing_pad = landing_pad;
3213 data[used].action = action;
3215 cfun->eh->call_site_data_used = used + 1;
3217 return used + call_site_base;
3220 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3221 The new note numbers will not refer to region numbers, but
3222 instead to call site entries. */
3224 void
3225 convert_to_eh_region_ranges ()
3227 rtx insn, iter, note;
3228 htab_t ar_hash;
3229 int last_action = -3;
3230 rtx last_action_insn = NULL_RTX;
3231 rtx last_landing_pad = NULL_RTX;
3232 rtx first_no_action_insn = NULL_RTX;
3233 int call_site = 0;
3235 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3236 return;
3238 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3240 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3242 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3243 if (INSN_P (iter))
3245 struct eh_region *region;
3246 int this_action;
3247 rtx this_landing_pad;
3249 insn = iter;
3250 if (GET_CODE (insn) == INSN
3251 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3252 insn = XVECEXP (PATTERN (insn), 0, 0);
3254 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3255 if (!note)
3257 if (! (GET_CODE (insn) == CALL_INSN
3258 || (flag_non_call_exceptions
3259 && may_trap_p (PATTERN (insn)))))
3260 continue;
3261 this_action = -1;
3262 region = NULL;
3264 else
3266 if (INTVAL (XEXP (note, 0)) <= 0)
3267 continue;
3268 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3269 this_action = collect_one_action_chain (ar_hash, region);
3272 /* The existence of catch handlers or must-not-throw regions
3273 implies that an lsda is needed (even if empty). */
3274 if (this_action != -1)
3275 cfun->uses_eh_lsda = 1;
3277 /* Delay creation of region notes for no-action regions
3278 until we're sure that an lsda will be required. */
3279 else if (last_action == -3)
3281 first_no_action_insn = iter;
3282 last_action = -1;
3285 /* Cleanups and handlers may share action chains but not
3286 landing pads. Collect the landing pad for this region. */
3287 if (this_action >= 0)
3289 struct eh_region *o;
3290 for (o = region; ! o->landing_pad ; o = o->outer)
3291 continue;
3292 this_landing_pad = o->landing_pad;
3294 else
3295 this_landing_pad = NULL_RTX;
3297 /* Differing actions or landing pads implies a change in call-site
3298 info, which implies some EH_REGION note should be emitted. */
3299 if (last_action != this_action
3300 || last_landing_pad != this_landing_pad)
3302 /* If we'd not seen a previous action (-3) or the previous
3303 action was must-not-throw (-2), then we do not need an
3304 end note. */
3305 if (last_action >= -1)
3307 /* If we delayed the creation of the begin, do it now. */
3308 if (first_no_action_insn)
3310 call_site = add_call_site (NULL_RTX, 0);
3311 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3312 first_no_action_insn);
3313 NOTE_EH_HANDLER (note) = call_site;
3314 first_no_action_insn = NULL_RTX;
3317 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3318 last_action_insn);
3319 NOTE_EH_HANDLER (note) = call_site;
3322 /* If the new action is must-not-throw, then no region notes
3323 are created. */
3324 if (this_action >= -1)
3326 call_site = add_call_site (this_landing_pad,
3327 this_action < 0 ? 0 : this_action);
3328 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3329 NOTE_EH_HANDLER (note) = call_site;
3332 last_action = this_action;
3333 last_landing_pad = this_landing_pad;
3335 last_action_insn = iter;
3338 if (last_action >= -1 && ! first_no_action_insn)
3340 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3341 NOTE_EH_HANDLER (note) = call_site;
3344 htab_delete (ar_hash);
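/* To illustrate: two adjacent calls sharing an action chain and a
   landing pad end up inside a single NOTE_INSN_EH_REGION_BEG /
   NOTE_INSN_EH_REGION_END pair whose NOTE_EH_HANDLER is their common
   call-site index; a third call with different call-site data opens
   a new pair.  Leading no-action insns receive notes only if some
   later action forces an lsda to exist.  */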
3348 static void
3349 push_uleb128 (data_area, value)
3350 varray_type *data_area;
3351 unsigned int value;
3355 unsigned char byte = value & 0x7f;
3356 value >>= 7;
3357 if (value)
3358 byte |= 0x80;
3359 VARRAY_PUSH_UCHAR (*data_area, byte);
3361 while (value);
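/* For reference, the inverse of push_uleb128.  This decoder is not
   used by the compiler (the unwinder has its own); it is a minimal
   sketch of how a consumer reads the value back.  */
#if 0
static int
read_uleb128 (p, value)
     const unsigned char *p;
     unsigned int *value;
{
  unsigned int result = 0;
  int shift = 0, count = 0;
  unsigned char byte;

  do
    {
      byte = p[count++];
      result |= (unsigned int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  *value = result;
  return count;
}
#endif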
3364 static void
3365 push_sleb128 (data_area, value)
3366 varray_type *data_area;
3367 int value;
3369 unsigned char byte;
3370 int more;
3374 byte = value & 0x7f;
3375 value >>= 7;
3376 more = ! ((value == 0 && (byte & 0x40) == 0)
3377 || (value == -1 && (byte & 0x40) != 0));
3378 if (more)
3379 byte |= 0x80;
3380 VARRAY_PUSH_UCHAR (*data_area, byte);
3382 while (more);
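/* Again for reference only: the signed counterpart.  The 0x40 bit of
   the final byte carries the sign, exactly mirroring the termination
   test in push_sleb128 above.  */
#if 0
static int
read_sleb128 (p, value)
     const unsigned char *p;
     int *value;
{
  int result = 0, shift = 0, count = 0;
  unsigned char byte;

  do
    {
      byte = p[count++];
      result |= (int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  /* Sign extend, if the sign bit is set and the value did not
     already fill the word.  */
  if (shift < (int) (8 * sizeof (int)) && (byte & 0x40))
    result |= - (1 << shift);

  *value = result;
  return count;
}
#endif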
3386 #ifndef HAVE_AS_LEB128
3387 static int
3388 dw2_size_of_call_site_table ()
3390 int n = cfun->eh->call_site_data_used;
3391 int size = n * (4 + 4 + 4);
3392 int i;
3394 for (i = 0; i < n; ++i)
3396 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3397 size += size_of_uleb128 (cs->action);
3400 return size;
3403 static int
3404 sjlj_size_of_call_site_table ()
3406 int n = cfun->eh->call_site_data_used;
3407 int size = 0;
3408 int i;
3410 for (i = 0; i < n; ++i)
3412 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3413 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3414 size += size_of_uleb128 (cs->action);
3417 return size;
3419 #endif
3421 static void
3422 dw2_output_call_site_table ()
3424 const char *const function_start_lab
3425 = IDENTIFIER_POINTER (current_function_func_begin_label);
3426 int n = cfun->eh->call_site_data_used;
3427 int i;
3429 for (i = 0; i < n; ++i)
3431 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3432 char reg_start_lab[32];
3433 char reg_end_lab[32];
3434 char landing_pad_lab[32];
3436 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3437 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3439 if (cs->landing_pad)
3440 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3441 CODE_LABEL_NUMBER (cs->landing_pad));
3443 /* ??? Perhaps use insn length scaling if the assembler supports
3444 generic arithmetic. */
3445 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3446 data4 if the function is small enough. */
3447 #ifdef HAVE_AS_LEB128
3448 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3449 "region %d start", i);
3450 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3451 "length");
3452 if (cs->landing_pad)
3453 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3454 "landing pad");
3455 else
3456 dw2_asm_output_data_uleb128 (0, "landing pad");
3457 #else
3458 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3459 "region %d start", i);
3460 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3461 if (cs->landing_pad)
3462 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3463 "landing pad");
3464 else
3465 dw2_asm_output_data (4, 0, "landing pad");
3466 #endif
3467 dw2_asm_output_data_uleb128 (cs->action, "action");
3470 call_site_base += n;
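/* With HAVE_AS_LEB128, one iteration of the loop above emits output
   of roughly this shape (label numbers invented for illustration):

	.uleb128 .LEHB0-.LFB1		region 0 start
	.uleb128 .LEHE0-.LEHB0		length
	.uleb128 .L7-.LFB1		landing pad
	.uleb128 0x3			action

   Without leb128 support, the first three entries become 4-byte
   deltas instead.  */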
3473 static void
3474 sjlj_output_call_site_table ()
3476 int n = cfun->eh->call_site_data_used;
3477 int i;
3479 for (i = 0; i < n; ++i)
3481 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3483 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3484 "region %d landing pad", i);
3485 dw2_asm_output_data_uleb128 (cs->action, "action");
3488 call_site_base += n;
3491 void
3492 output_function_exception_table ()
3494 int tt_format, cs_format, lp_format, i, n;
3495 #ifdef HAVE_AS_LEB128
3496 char ttype_label[32];
3497 char cs_after_size_label[32];
3498 char cs_end_label[32];
3499 #else
3500 int call_site_len;
3501 #endif
3502 int have_tt_data;
3503 int funcdef_number;
3504 int tt_format_size = 0;
3506 /* Not all functions need anything. */
3507 if (! cfun->uses_eh_lsda)
3508 return;
3510 funcdef_number = (USING_SJLJ_EXCEPTIONS
3511 ? sjlj_funcdef_number
3512 : current_funcdef_number);
3514 #ifdef IA64_UNWIND_INFO
3515 fputs ("\t.personality\t", asm_out_file);
3516 output_addr_const (asm_out_file, eh_personality_libfunc);
3517 fputs ("\n\t.handlerdata\n", asm_out_file);
3518 /* Note that varasm still thinks we're in the function's code section.
3519 The ".endp" directive that will immediately follow will take us back. */
3520 #else
3521 exception_section ();
3522 #endif
3524 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3525 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3527 /* Indicate the format of the @TType entries. */
3528 if (! have_tt_data)
3529 tt_format = DW_EH_PE_omit;
3530 else
3532 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3533 #ifdef HAVE_AS_LEB128
3534 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3535 #endif
3536 tt_format_size = size_of_encoded_value (tt_format);
3538 assemble_align (tt_format_size * BITS_PER_UNIT);
3541 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3543 /* The LSDA header. */
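/* For orientation, the complete LSDA emitted below has this shape
   (a sketch; bracketed fields may be omitted depending on the
   encodings chosen):

	@LPStart format			1 byte
	[@LPStart]			(omitted: @LPStart == @Start)
	@TType format			1 byte
	[@TType base offset]		uleb128, if type data is present
	call-site format		1 byte
	call-site table length		uleb128 (or a label delta)
	call-site table
	action record table
	[@TType data]			the type table, aligned
	[exception spec table]  */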
3545 /* Indicate the format of the landing pad start pointer. An omitted
3546 field implies @LPStart == @Start. */
3547 /* Currently we always put @LPStart == @Start. This field would
3548 be most useful in moving the landing pads completely out of
3549 line to another section, but it could also be used to minimize
3550 the size of uleb128 landing pad offsets. */
3551 lp_format = DW_EH_PE_omit;
3552 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3553 eh_data_format_name (lp_format));
3555 /* @LPStart pointer would go here. */
3557 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3558 eh_data_format_name (tt_format));
3560 #ifndef HAVE_AS_LEB128
3561 if (USING_SJLJ_EXCEPTIONS)
3562 call_site_len = sjlj_size_of_call_site_table ();
3563 else
3564 call_site_len = dw2_size_of_call_site_table ();
3565 #endif
3567 /* A pc-relative 4-byte displacement to the @TType data. */
3568 if (have_tt_data)
3570 #ifdef HAVE_AS_LEB128
3571 char ttype_after_disp_label[32];
3572 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3573 funcdef_number);
3574 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3575 "@TType base offset");
3576 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3577 #else
3578 /* Ug.  Alignment queers things.  The size of the uleb128 @TType
     offset changes the padding needed to align the type table, which
     in turn changes the offset; iterate until it converges.  */
3579 unsigned int before_disp, after_disp, last_disp, disp;
3581 before_disp = 1 + 1;
3582 after_disp = (1 + size_of_uleb128 (call_site_len)
3583 + call_site_len
3584 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3585 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3586 * tt_format_size));
3588 disp = after_disp;
3591 unsigned int disp_size, pad;
3593 last_disp = disp;
3594 disp_size = size_of_uleb128 (disp);
3595 pad = before_disp + disp_size + after_disp;
3596 if (pad % tt_format_size)
3597 pad = tt_format_size - (pad % tt_format_size);
3598 else
3599 pad = 0;
3600 disp = after_disp + pad;
3602 while (disp != last_disp);
3604 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3605 #endif
3608 /* Indicate the format of the call-site offsets. */
3609 #ifdef HAVE_AS_LEB128
3610 cs_format = DW_EH_PE_uleb128;
3611 #else
3612 cs_format = DW_EH_PE_udata4;
3613 #endif
3614 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3615 eh_data_format_name (cs_format));
3617 #ifdef HAVE_AS_LEB128
3618 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3619 funcdef_number);
3620 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3621 funcdef_number);
3622 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3623 "Call-site table length");
3624 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3625 if (USING_SJLJ_EXCEPTIONS)
3626 sjlj_output_call_site_table ();
3627 else
3628 dw2_output_call_site_table ();
3629 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3630 #else
3631 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3632 if (USING_SJLJ_EXCEPTIONS)
3633 sjlj_output_call_site_table ();
3634 else
3635 dw2_output_call_site_table ();
3636 #endif
3638 /* ??? Decode and interpret the data for flag_debug_asm. */
3639 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3640 for (i = 0; i < n; ++i)
3641 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3642 (i ? NULL : "Action record table"));
3644 if (have_tt_data)
3645 assemble_align (tt_format_size * BITS_PER_UNIT);
3647 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3648 while (i-- > 0)
3650 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3651 rtx value;
3653 if (type == NULL_TREE)
3654 type = integer_zero_node;
3655 else
3656 type = lookup_type_for_runtime (type);
3658 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3659 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3660 assemble_integer (value, tt_format_size,
3661 tt_format_size * BITS_PER_UNIT, 1);
3662 else
3663 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3666 #ifdef HAVE_AS_LEB128
3667 if (have_tt_data)
3668 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3669 #endif
3671 /* ??? Decode and interpret the data for flag_debug_asm. */
3672 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3673 for (i = 0; i < n; ++i)
3674 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3675 (i ? NULL : "Exception specification table"));
3677 function_section (current_function_decl);
3679 if (USING_SJLJ_EXCEPTIONS)
3680 sjlj_funcdef_number += 1;