gcc/except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error has occurred without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
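/* As a rough illustration in the meantime (a sketch of intended use,
   not authoritative documentation), a front end expanding

       try { BODY } catch (T) { HANDLER }

   would drive the region interface in this file approximately so:

       expand_eh_region_start ();     open a region around BODY
       ... expand BODY ...
       expand_start_all_catch ();     end the try region
       expand_start_catch (T);        begin one catch clause
       ... expand HANDLER ...
       expand_end_catch ();
       expand_end_all_catch ();       control resumes after try/catch
*/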
50 #include "config.h"
51 #include "system.h"
52 #include "rtl.h"
53 #include "tree.h"
54 #include "flags.h"
55 #include "function.h"
56 #include "expr.h"
57 #include "insn-config.h"
58 #include "except.h"
59 #include "integrate.h"
60 #include "hard-reg-set.h"
61 #include "basic-block.h"
62 #include "output.h"
63 #include "dwarf2asm.h"
64 #include "dwarf2out.h"
65 #include "dwarf2.h"
66 #include "toplev.h"
67 #include "hashtab.h"
68 #include "intl.h"
69 #include "ggc.h"
70 #include "tm_p.h"
73 /* Provide defaults for stuff that may not be defined when using
74 sjlj exceptions. */
75 #ifndef EH_RETURN_STACKADJ_RTX
76 #define EH_RETURN_STACKADJ_RTX 0
77 #endif
78 #ifndef EH_RETURN_HANDLER_RTX
79 #define EH_RETURN_HANDLER_RTX 0
80 #endif
81 #ifndef EH_RETURN_DATA_REGNO
82 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
83 #endif
86 /* Nonzero means enable synchronous exceptions for non-call instructions. */
87 int flag_non_call_exceptions;
89 /* Protect cleanup actions with must-not-throw regions, with a call
90 to the given failure handler. */
91 tree (*lang_protect_cleanup_actions) PARAMS ((void));
93 /* Return true if type A catches type B. */
94 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
96 /* Map a type to a runtime object to match type. */
97 tree (*lang_eh_runtime_type) PARAMS ((tree));
99 /* A list of labels used for exception handlers. */
100 rtx exception_handler_labels;
102 static int call_site_base;
103 static int sjlj_funcdef_number;
104 static htab_t type_to_runtime_map;
106 /* Describe the SjLj_Function_Context structure. */
107 static tree sjlj_fc_type_node;
108 static int sjlj_fc_call_site_ofs;
109 static int sjlj_fc_data_ofs;
110 static int sjlj_fc_personality_ofs;
111 static int sjlj_fc_lsda_ofs;
112 static int sjlj_fc_jbuf_ofs;
114 /* Describes one exception region. */
115 struct eh_region
117 /* The immediately surrounding region. */
118 struct eh_region *outer;
120 /* The list of immediately contained regions. */
121 struct eh_region *inner;
122 struct eh_region *next_peer;
124 /* An identifier for this region. */
125 int region_number;
127 /* Each region does exactly one thing. */
128 enum eh_region_type
130 ERT_CLEANUP = 1,
131 ERT_TRY,
132 ERT_CATCH,
133 ERT_ALLOWED_EXCEPTIONS,
134 ERT_MUST_NOT_THROW,
135 ERT_THROW,
136 ERT_FIXUP
137 } type;
139 /* Holds the action to perform based on the preceding type. */
140 union {
141 /* A list of catch blocks, a surrounding try block,
142 and the label for continuing after a catch. */
143 struct {
144 struct eh_region *catch;
145 struct eh_region *last_catch;
146 struct eh_region *prev_try;
147 rtx continue_label;
148 } try;
150 /* The list through the catch handlers, the type object
151 matched, and a pointer to the generated code. */
152 struct {
153 struct eh_region *next_catch;
154 struct eh_region *prev_catch;
155 tree type;
156 int filter;
157 } catch;
159 /* A tree_list of allowed types. */
160 struct {
161 tree type_list;
162 int filter;
163 } allowed;
165 /* The type given by a call to "throw foo();", or discovered
166 for a throw. */
167 struct {
168 tree type;
169 } throw;
171 /* Retain the cleanup expression even after expansion so that
172 we can match up fixup regions. */
173 struct {
174 tree exp;
175 } cleanup;
177 /* The real region (by expression and by pointer) that fixup code
178 should live in. */
179 struct {
180 tree cleanup_exp;
181 struct eh_region *real_region;
182 } fixup;
183 } u;
185 /* Entry point for this region's handler before landing pads are built. */
186 rtx label;
188 /* Entry point for this region's handler from the runtime eh library. */
189 rtx landing_pad;
191 /* Entry point for this region's handler from an inner region. */
192 rtx post_landing_pad;
194 /* The RESX insn for handing off control to the next outermost handler,
195 if appropriate. */
196 rtx resume;
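/* Illustrative example (not from the sources): for the nesting

       try { try { A } catch (X) { B } } catch (Y) { C }

   the inner try region's OUTER field points at the outer try region,
   the inner try is reached through the outer region's INNER field,
   and each try's ERT_CATCH regions hang off u.try.catch and
   u.try.last_catch, chained through u.catch.next_catch.  Regions at
   the same depth are linked through NEXT_PEER.  */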
199 /* Used to save exception status for each function. */
200 struct eh_status
202 /* The tree of all regions for this function. */
203 struct eh_region *region_tree;
205 /* The same information as an indexable array. */
206 struct eh_region **region_array;
209 /* The most recently opened region. */
209 struct eh_region *cur_region;
211 /* This is the region for which we are processing catch blocks. */
212 struct eh_region *try_region;
214 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
215 node is itself a TREE_CHAINed list of handlers for regions that
216 are not yet closed; the TREE_VALUE of each entry in that inner
217 list is the handler expression for one such region. */
218 tree protect_list;
220 rtx filter;
221 rtx exc_ptr;
223 int built_landing_pads;
224 int last_region_number;
226 varray_type ttype_data;
227 varray_type ehspec_data;
228 varray_type action_record_data;
230 struct call_site_record
232 rtx landing_pad;
233 int action;
234 } *call_site_data;
235 int call_site_data_used;
236 int call_site_data_size;
238 rtx ehr_stackadj;
239 rtx ehr_handler;
240 rtx ehr_label;
242 rtx sjlj_fc;
243 rtx sjlj_exit_after;
247 static void mark_eh_region PARAMS ((struct eh_region *));
249 static int t2r_eq PARAMS ((const PTR,
250 const PTR));
251 static hashval_t t2r_hash PARAMS ((const PTR));
252 static int t2r_mark_1 PARAMS ((PTR *, PTR));
253 static void t2r_mark PARAMS ((PTR));
254 static void add_type_for_runtime PARAMS ((tree));
255 static tree lookup_type_for_runtime PARAMS ((tree));
257 static struct eh_region *expand_eh_region_end PARAMS ((void));
259 static rtx get_exception_filter PARAMS ((struct function *));
261 static void collect_eh_region_array PARAMS ((void));
262 static void resolve_fixup_regions PARAMS ((void));
263 static void remove_fixup_regions PARAMS ((void));
264 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
266 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
267 struct inline_remap *));
268 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
269 struct eh_region **));
270 static int ttypes_filter_eq PARAMS ((const PTR,
271 const PTR));
272 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
273 static int ehspec_filter_eq PARAMS ((const PTR,
274 const PTR));
275 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
276 static int add_ttypes_entry PARAMS ((htab_t, tree));
277 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
278 tree));
279 static void assign_filter_values PARAMS ((void));
280 static void build_post_landing_pads PARAMS ((void));
281 static void connect_post_landing_pads PARAMS ((void));
282 static void dw2_build_landing_pads PARAMS ((void));
284 struct sjlj_lp_info;
285 static bool sjlj_find_directly_reachable_regions
286 PARAMS ((struct sjlj_lp_info *));
287 static void sjlj_assign_call_site_values
288 PARAMS ((rtx, struct sjlj_lp_info *));
289 static void sjlj_mark_call_sites
290 PARAMS ((struct sjlj_lp_info *));
291 static void sjlj_emit_function_enter PARAMS ((rtx));
292 static void sjlj_emit_function_exit PARAMS ((void));
293 static void sjlj_emit_dispatch_table
294 PARAMS ((rtx, struct sjlj_lp_info *));
295 static void sjlj_build_landing_pads PARAMS ((void));
297 static void remove_exception_handler_label PARAMS ((rtx));
298 static void remove_eh_handler PARAMS ((struct eh_region *));
300 struct reachable_info;
302 /* The return value of reachable_next_level. */
303 enum reachable_code
305 /* The given exception is not processed by the given region. */
306 RNL_NOT_CAUGHT,
307 /* The given exception may need processing by the given region. */
308 RNL_MAYBE_CAUGHT,
309 /* The given exception is completely processed by the given region. */
310 RNL_CAUGHT,
311 /* The given exception is completely processed by the runtime. */
312 RNL_BLOCKED
315 static int check_handled PARAMS ((tree, tree));
316 static void add_reachable_handler
317 PARAMS ((struct reachable_info *, struct eh_region *,
318 struct eh_region *));
319 static enum reachable_code reachable_next_level
320 PARAMS ((struct eh_region *, tree, struct reachable_info *));
322 static int action_record_eq PARAMS ((const PTR,
323 const PTR));
324 static hashval_t action_record_hash PARAMS ((const PTR));
325 static int add_action_record PARAMS ((htab_t, int, int));
326 static int collect_one_action_chain PARAMS ((htab_t,
327 struct eh_region *));
328 static int add_call_site PARAMS ((rtx, int));
330 static void push_uleb128 PARAMS ((varray_type *,
331 unsigned int));
332 static void push_sleb128 PARAMS ((varray_type *, int));
333 #ifndef HAVE_AS_LEB128
334 static int dw2_size_of_call_site_table PARAMS ((void));
335 static int sjlj_size_of_call_site_table PARAMS ((void));
336 #endif
337 static void dw2_output_call_site_table PARAMS ((void));
338 static void sjlj_output_call_site_table PARAMS ((void));
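/* The push_uleb128/push_sleb128 routines declared above append values
   in standard (DWARF) LEB128 form: seven data bits per byte, with the
   high bit set on every byte except the last.  A minimal free-standing
   sketch of the unsigned encoding, assuming a plain output byte buffer
   in place of the varray actually used:

       do
         {
           unsigned char byte = value & 0x7f;
           value >>= 7;
           if (value)
             byte |= 0x80;    /* more bytes follow */
           *buf++ = byte;
         }
       while (value);
*/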
341 /* Routine to see if exception handling is turned on.
342 DO_WARN is non-zero if we want to inform the user that exception
343 handling is turned off.
345 This is used to ensure that -fexceptions has been specified if the
346 compiler tries to use any exception-specific functions. */
348 int
349 doing_eh (do_warn)
350 int do_warn;
352 if (! flag_exceptions)
354 static int warned = 0;
355 if (! warned && do_warn)
357 error ("exception handling disabled, use -fexceptions to enable");
358 warned = 1;
360 return 0;
362 return 1;
366 void
367 init_eh ()
369 ggc_add_rtx_root (&exception_handler_labels, 1);
371 if (! flag_exceptions)
372 return;
374 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
375 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
377 /* Create the SjLj_Function_Context structure. This should match
378 the definition in unwind-sjlj.c. */
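/* In outline, the layout built below corresponds to something like the
   following (a sketch reconstructed from the field definitions here;
   unwind-sjlj.c remains the authoritative definition):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *__prev;
         int __call_site;
         word_mode __data[4];
         void *__personality;
         void *__lsda;
         void *__jbuf[];    (length and alignment are target-dependent)
       };
*/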
379 if (USING_SJLJ_EXCEPTIONS)
381 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
383 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
384 ggc_add_tree_root (&sjlj_fc_type_node, 1);
386 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
387 build_pointer_type (sjlj_fc_type_node));
388 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
390 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
391 integer_type_node);
392 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
394 tmp = build_index_type (build_int_2 (4 - 1, 0));
395 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
396 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
397 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
399 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
400 ptr_type_node);
401 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
403 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
404 ptr_type_node);
405 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
407 #ifdef DONT_USE_BUILTIN_SETJMP
408 #ifdef JMP_BUF_SIZE
409 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
410 #else
411 /* Should be large enough for most systems; if it is not,
412 JMP_BUF_SIZE should be defined with the proper value. This
413 default will also tend to be larger than necessary for most
414 systems; a port wanting an optimal size should define JMP_BUF_SIZE. */
415 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
416 #endif
417 #else
418 /* This is 2 for builtin_setjmp, plus whatever the target requires
419 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
420 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
421 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
422 #endif
423 tmp = build_index_type (tmp);
424 tmp = build_array_type (ptr_type_node, tmp);
425 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
426 #ifdef DONT_USE_BUILTIN_SETJMP
427 /* We don't know what the alignment requirements of the
428 runtime's jmp_buf are. Overestimate. */
429 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
430 DECL_USER_ALIGN (f_jbuf) = 1;
431 #endif
432 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
434 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
435 TREE_CHAIN (f_prev) = f_cs;
436 TREE_CHAIN (f_cs) = f_data;
437 TREE_CHAIN (f_data) = f_per;
438 TREE_CHAIN (f_per) = f_lsda;
439 TREE_CHAIN (f_lsda) = f_jbuf;
441 layout_type (sjlj_fc_type_node);
443 /* Cache the interesting field offsets so that we have
444 easy access from rtl. */
445 sjlj_fc_call_site_ofs
446 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
447 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
448 sjlj_fc_data_ofs
449 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
450 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
451 sjlj_fc_personality_ofs
452 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
453 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
454 sjlj_fc_lsda_ofs
455 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
456 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
457 sjlj_fc_jbuf_ofs
458 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
459 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
463 void
464 init_eh_for_function ()
466 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
469 /* Mark EH for GC. */
471 static void
472 mark_eh_region (region)
473 struct eh_region *region;
475 if (! region)
476 return;
478 switch (region->type)
480 case ERT_CLEANUP:
481 ggc_mark_tree (region->u.cleanup.exp);
482 break;
483 case ERT_TRY:
484 ggc_mark_rtx (region->u.try.continue_label);
485 break;
486 case ERT_CATCH:
487 ggc_mark_tree (region->u.catch.type);
488 break;
489 case ERT_ALLOWED_EXCEPTIONS:
490 ggc_mark_tree (region->u.allowed.type_list);
491 break;
492 case ERT_MUST_NOT_THROW:
493 break;
494 case ERT_THROW:
495 ggc_mark_tree (region->u.throw.type);
496 break;
497 case ERT_FIXUP:
498 ggc_mark_tree (region->u.fixup.cleanup_exp);
499 break;
500 default:
501 abort ();
504 ggc_mark_rtx (region->label);
505 ggc_mark_rtx (region->resume);
506 ggc_mark_rtx (region->landing_pad);
507 ggc_mark_rtx (region->post_landing_pad);
510 void
511 mark_eh_status (eh)
512 struct eh_status *eh;
514 int i;
516 if (eh == 0)
517 return;
519 /* If we've called collect_eh_region_array, use it. Otherwise walk
520 the tree non-recursively. */
521 if (eh->region_array)
523 for (i = eh->last_region_number; i > 0; --i)
525 struct eh_region *r = eh->region_array[i];
526 if (r && r->region_number == i)
527 mark_eh_region (r);
530 else if (eh->region_tree)
532 struct eh_region *r = eh->region_tree;
533 while (1)
535 mark_eh_region (r);
536 if (r->inner)
537 r = r->inner;
538 else if (r->next_peer)
539 r = r->next_peer;
540 else
542 do {
543 r = r->outer;
544 if (r == NULL)
545 goto tree_done;
546 } while (r->next_peer == NULL);
547 r = r->next_peer;
550 tree_done:;
553 ggc_mark_tree (eh->protect_list);
554 ggc_mark_rtx (eh->filter);
555 ggc_mark_rtx (eh->exc_ptr);
556 ggc_mark_tree_varray (eh->ttype_data);
558 if (eh->call_site_data)
560 for (i = eh->call_site_data_used - 1; i >= 0; --i)
561 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
564 ggc_mark_rtx (eh->ehr_stackadj);
565 ggc_mark_rtx (eh->ehr_handler);
566 ggc_mark_rtx (eh->ehr_label);
568 ggc_mark_rtx (eh->sjlj_fc);
569 ggc_mark_rtx (eh->sjlj_exit_after);
572 void
573 free_eh_status (f)
574 struct function *f;
576 struct eh_status *eh = f->eh;
578 if (eh->region_array)
580 int i;
581 for (i = eh->last_region_number; i > 0; --i)
583 struct eh_region *r = eh->region_array[i];
584 /* Mind we don't free a region struct more than once. */
585 if (r && r->region_number == i)
586 free (r);
588 free (eh->region_array);
590 else if (eh->region_tree)
592 struct eh_region *next, *r = eh->region_tree;
593 while (1)
595 if (r->inner)
596 r = r->inner;
597 else if (r->next_peer)
599 next = r->next_peer;
600 free (r);
601 r = next;
603 else
605 do {
606 next = r->outer;
607 free (r);
608 r = next;
609 if (r == NULL)
610 goto tree_done;
611 } while (r->next_peer == NULL);
612 next = r->next_peer;
613 free (r);
614 r = next;
617 tree_done:;
620 VARRAY_FREE (eh->ttype_data);
621 VARRAY_FREE (eh->ehspec_data);
622 VARRAY_FREE (eh->action_record_data);
623 if (eh->call_site_data)
624 free (eh->call_site_data);
626 free (eh);
627 f->eh = NULL;
631 /* Start an exception handling region. All instructions emitted
632 after this point are considered to be part of the region until
633 expand_eh_region_end is invoked. */
635 void
636 expand_eh_region_start ()
638 struct eh_region *new_region;
639 struct eh_region *cur_region;
640 rtx note;
642 if (! doing_eh (0))
643 return;
645 /* Insert a new blank region as a leaf in the tree. */
646 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
647 cur_region = cfun->eh->cur_region;
648 new_region->outer = cur_region;
649 if (cur_region)
651 new_region->next_peer = cur_region->inner;
652 cur_region->inner = new_region;
654 else
656 new_region->next_peer = cfun->eh->region_tree;
657 cfun->eh->region_tree = new_region;
659 cfun->eh->cur_region = new_region;
661 /* Create a note marking the start of this region. */
662 new_region->region_number = ++cfun->eh->last_region_number;
663 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
664 NOTE_EH_HANDLER (note) = new_region->region_number;
667 /* Common code to end a region. Returns the region just ended. */
669 static struct eh_region *
670 expand_eh_region_end ()
672 struct eh_region *cur_region = cfun->eh->cur_region;
673 rtx note;
675 /* Create a note marking the end of this region. */
676 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
677 NOTE_EH_HANDLER (note) = cur_region->region_number;
679 /* Pop. */
680 cfun->eh->cur_region = cur_region->outer;
682 return cur_region;
685 /* End an exception handling region for a cleanup. HANDLER is an
686 expression to expand for the cleanup. */
688 void
689 expand_eh_region_end_cleanup (handler)
690 tree handler;
692 struct eh_region *region;
693 tree protect_cleanup_actions;
694 rtx around_label;
695 rtx data_save[2];
697 if (! doing_eh (0))
698 return;
700 region = expand_eh_region_end ();
701 region->type = ERT_CLEANUP;
702 region->label = gen_label_rtx ();
703 region->u.cleanup.exp = handler;
705 around_label = gen_label_rtx ();
706 emit_jump (around_label);
708 emit_label (region->label);
710 /* Give the language a chance to specify an action to be taken if an
711 exception is thrown that would propagate out of the HANDLER. */
712 protect_cleanup_actions
713 = (lang_protect_cleanup_actions
714 ? (*lang_protect_cleanup_actions) ()
715 : NULL_TREE);
717 if (protect_cleanup_actions)
718 expand_eh_region_start ();
720 /* In case this cleanup involves an inline destructor with a try block in
721 it, we need to save the EH return data registers around it. */
722 data_save[0] = gen_reg_rtx (Pmode);
723 emit_move_insn (data_save[0], get_exception_pointer (cfun));
724 data_save[1] = gen_reg_rtx (word_mode);
725 emit_move_insn (data_save[1], get_exception_filter (cfun));
727 expand_expr (handler, const0_rtx, VOIDmode, 0);
729 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
730 emit_move_insn (cfun->eh->filter, data_save[1]);
732 if (protect_cleanup_actions)
733 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
735 /* We need any stack adjustment complete before the around_label. */
736 do_pending_stack_adjust ();
738 /* We delay the generation of the _Unwind_Resume until we generate
739 landing pads. We emit a marker here so as to get good control
740 flow data in the meantime. */
741 region->resume
742 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
743 emit_barrier ();
745 emit_label (around_label);
748 /* End an exception handling region for a try block, and prepare
749 for subsequent calls to expand_start_catch. */
751 void
752 expand_start_all_catch ()
754 struct eh_region *region;
756 if (! doing_eh (1))
757 return;
759 region = expand_eh_region_end ();
760 region->type = ERT_TRY;
761 region->u.try.prev_try = cfun->eh->try_region;
762 region->u.try.continue_label = gen_label_rtx ();
764 cfun->eh->try_region = region;
766 emit_jump (region->u.try.continue_label);
769 /* Begin a catch clause. TYPE is the type caught, or null if this is
770 a catch-all clause. */
772 void
773 expand_start_catch (type)
774 tree type;
776 struct eh_region *t, *c, *l;
778 if (! doing_eh (0))
779 return;
781 if (type)
782 add_type_for_runtime (type);
783 expand_eh_region_start ();
785 t = cfun->eh->try_region;
786 c = cfun->eh->cur_region;
787 c->type = ERT_CATCH;
788 c->u.catch.type = type;
789 c->label = gen_label_rtx ();
791 l = t->u.try.last_catch;
792 c->u.catch.prev_catch = l;
793 if (l)
794 l->u.catch.next_catch = c;
795 else
796 t->u.try.catch = c;
797 t->u.try.last_catch = c;
799 emit_label (c->label);
802 /* End a catch clause. Control will resume after the try/catch block. */
804 void
805 expand_end_catch ()
807 struct eh_region *try_region, *catch_region;
809 if (! doing_eh (0))
810 return;
812 catch_region = expand_eh_region_end ();
813 try_region = cfun->eh->try_region;
815 emit_jump (try_region->u.try.continue_label);
818 /* End a sequence of catch handlers for a try block. */
820 void
821 expand_end_all_catch ()
823 struct eh_region *try_region;
825 if (! doing_eh (0))
826 return;
828 try_region = cfun->eh->try_region;
829 cfun->eh->try_region = try_region->u.try.prev_try;
831 emit_label (try_region->u.try.continue_label);
834 /* End an exception region for an exception type filter. ALLOWED is a
835 TREE_LIST of types to be matched by the runtime. FAILURE is an
836 expression to invoke if a mismatch occurs. */
838 void
839 expand_eh_region_end_allowed (allowed, failure)
840 tree allowed, failure;
842 struct eh_region *region;
843 rtx around_label;
845 if (! doing_eh (0))
846 return;
848 region = expand_eh_region_end ();
849 region->type = ERT_ALLOWED_EXCEPTIONS;
850 region->u.allowed.type_list = allowed;
851 region->label = gen_label_rtx ();
853 for (; allowed ; allowed = TREE_CHAIN (allowed))
854 add_type_for_runtime (TREE_VALUE (allowed));
856 /* We must emit the call to FAILURE here, so that if this function
857 throws a different exception, it will be processed by the
858 correct region. */
860 around_label = gen_label_rtx ();
861 emit_jump (around_label);
863 emit_label (region->label);
864 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
865 /* We must adjust the stack before we reach the AROUND_LABEL because
866 the call to FAILURE does not occur on all paths to the
867 AROUND_LABEL. */
868 do_pending_stack_adjust ();
870 emit_label (around_label);
873 /* End an exception region for a must-not-throw filter. FAILURE is an
874 expression to invoke if an uncaught exception propagates this far.
876 This is conceptually identical to expand_eh_region_end_allowed with
877 an empty allowed list (if you passed "std::terminate" instead of
878 "__cxa_call_unexpected"), but they are represented differently in
879 the C++ LSDA. */
881 void
882 expand_eh_region_end_must_not_throw (failure)
883 tree failure;
885 struct eh_region *region;
886 rtx around_label;
888 if (! doing_eh (0))
889 return;
891 region = expand_eh_region_end ();
892 region->type = ERT_MUST_NOT_THROW;
893 region->label = gen_label_rtx ();
895 /* We must emit the call to FAILURE here, so that if this function
896 throws a different exception, it will be processed by the
897 correct region. */
899 around_label = gen_label_rtx ();
900 emit_jump (around_label);
902 emit_label (region->label);
903 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
905 emit_label (around_label);
908 /* End an exception region for a throw. No handling goes on here,
909 but it's the easiest way for the front-end to indicate what type
910 is being thrown. */
912 void
913 expand_eh_region_end_throw (type)
914 tree type;
916 struct eh_region *region;
918 if (! doing_eh (0))
919 return;
921 region = expand_eh_region_end ();
922 region->type = ERT_THROW;
923 region->u.throw.type = type;
926 /* End a fixup region. Within this region the cleanups for the immediately
927 enclosing region are _not_ run. This is used for goto cleanup to avoid
928 destroying an object twice.
930 This would be an extraordinarily simple prospect, were it not for the
931 fact that we don't actually know what the immediately enclosing region
932 is. This is because expand_cleanups is currently
933 generating a sequence that it will insert somewhere else. We collect
934 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
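/* For instance (an illustrative sketch): in

       { T obj; ... goto done; ... }   done:;

   the goto must run obj's cleanup on the way out, so that cleanup is
   expanded inside a fixup region; the copy of the same cleanup owned
   by the enclosing cleanup region is then not run a second time.  */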
936 void
937 expand_eh_region_end_fixup (handler)
938 tree handler;
940 struct eh_region *fixup;
942 if (! doing_eh (0))
943 return;
945 fixup = expand_eh_region_end ();
946 fixup->type = ERT_FIXUP;
947 fixup->u.fixup.cleanup_exp = handler;
950 /* Return an rtl expression for a pointer to the exception object
951 within a handler. */
953 rtx
954 get_exception_pointer (fun)
955 struct function *fun;
957 rtx exc_ptr = fun->eh->exc_ptr;
958 if (fun == cfun && ! exc_ptr)
960 exc_ptr = gen_reg_rtx (Pmode);
961 fun->eh->exc_ptr = exc_ptr;
963 return exc_ptr;
966 /* Return an rtl expression for the exception dispatch filter
967 within a handler. */
969 static rtx
970 get_exception_filter (fun)
971 struct function *fun;
973 rtx filter = fun->eh->filter;
974 if (fun == cfun && ! filter)
976 filter = gen_reg_rtx (word_mode);
977 fun->eh->filter = filter;
979 return filter;
982 /* Begin a region that will contain entries created with
983 add_partial_entry. */
985 void
986 begin_protect_partials ()
988 /* Push room for a new list. */
989 cfun->eh->protect_list
990 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
993 /* Start a new exception region for a region of code that has a
994 cleanup action and push the HANDLER for the region onto
995 protect_list. All of the regions created with add_partial_entry
996 will be ended when end_protect_partials is invoked. */
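/* Illustrative use (a sketch, with hypothetical cleanup trees): a
   front end protecting two partially constructed objects would emit

       begin_protect_partials ();
       ... construct first object ...
       add_partial_entry (cleanup_for_first);
       ... construct second object ...
       add_partial_entry (cleanup_for_second);
       end_protect_partials ();

   end_protect_partials then closes both regions, the most recently
   added (innermost) cleanup first.  */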
998 void
999 add_partial_entry (handler)
1000 tree handler;
1002 expand_eh_region_start ();
1004 /* ??? This comment was old before the most recent rewrite. We
1005 really ought to fix the callers at some point. */
1006 /* For backwards compatibility, we allow callers to omit calls to
1007 begin_protect_partials for the outermost region. So, we must
1008 explicitly do so here. */
1009 if (!cfun->eh->protect_list)
1010 begin_protect_partials ();
1012 /* Add this entry to the front of the list. */
1013 TREE_VALUE (cfun->eh->protect_list)
1014 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1017 /* End all the pending exception regions on protect_list. */
1019 void
1020 end_protect_partials ()
1022 tree t;
1024 /* ??? This comment was old before the most recent rewrite. We
1025 really ought to fix the callers at some point. */
1026 /* For backwards compatibility, we allow callers to omit the call to
1027 begin_protect_partials for the outermost region. So,
1028 PROTECT_LIST may be NULL. */
1029 if (!cfun->eh->protect_list)
1030 return;
1032 /* Pop the topmost entry. */
1033 t = TREE_VALUE (cfun->eh->protect_list);
1034 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1036 /* End all the exception regions. */
1037 for (; t; t = TREE_CHAIN (t))
1038 expand_eh_region_end_cleanup (TREE_VALUE (t));
1042 /* This section is for the exception handling specific optimization pass. */
1044 /* Provide random access to the exception region tree. It is just as
1045 simple to collect the regions this way as in expand_eh_region_start,
1046 but without having to realloc memory. */
1048 static void
1049 collect_eh_region_array ()
1051 struct eh_region **array, *i;
1053 i = cfun->eh->region_tree;
1054 if (! i)
1055 return;
1057 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1058 cfun->eh->region_array = array;
1060 while (1)
1062 array[i->region_number] = i;
1064 /* If there are sub-regions, process them. */
1065 if (i->inner)
1066 i = i->inner;
1067 /* If there are peers, process them. */
1068 else if (i->next_peer)
1069 i = i->next_peer;
1070 /* Otherwise, step back up the tree to the next peer. */
1071 else
1073 do {
1074 i = i->outer;
1075 if (i == NULL)
1076 return;
1077 } while (i->next_peer == NULL);
1078 i = i->next_peer;
1083 static void
1084 resolve_fixup_regions ()
1086 int i, j, n = cfun->eh->last_region_number;
1088 for (i = 1; i <= n; ++i)
1090 struct eh_region *fixup = cfun->eh->region_array[i];
1091 struct eh_region *cleanup = 0;
1093 if (! fixup || fixup->type != ERT_FIXUP)
1094 continue;
1096 for (j = 1; j <= n; ++j)
1098 cleanup = cfun->eh->region_array[j];
1099 if (cleanup->type == ERT_CLEANUP
1100 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1101 break;
1103 if (j > n)
1104 abort ();
1106 fixup->u.fixup.real_region = cleanup->outer;
1110 /* Now that we've discovered what region actually encloses a fixup,
1111 we can shuffle pointers and remove them from the tree. */
1113 static void
1114 remove_fixup_regions ()
1116 int i;
1117 rtx insn, note;
1118 struct eh_region *fixup;
1120 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1121 for instructions referencing fixup regions. This is only
1122 strictly necessary for fixup regions with no parent, but
1123 doesn't hurt to do it for all regions. */
1124 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1125 if (INSN_P (insn)
1126 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1127 && INTVAL (XEXP (note, 0)) > 0
1128 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1129 && fixup->type == ERT_FIXUP)
1131 if (fixup->u.fixup.real_region)
1132 XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
1133 else
1134 remove_note (insn, note);
1137 /* Remove the fixup regions from the tree. */
1138 for (i = cfun->eh->last_region_number; i > 0; --i)
1140 fixup = cfun->eh->region_array[i];
1141 if (! fixup)
1142 continue;
1144 /* Allow GC to maybe free some memory. */
1145 if (fixup->type == ERT_CLEANUP)
1146 fixup->u.cleanup.exp = NULL_TREE;
1148 if (fixup->type != ERT_FIXUP)
1149 continue;
1151 if (fixup->inner)
1153 struct eh_region *parent, *p, **pp;
1155 parent = fixup->u.fixup.real_region;
1157 /* Fix up the children's parent pointers; find the end of
1158 the list. */
1159 for (p = fixup->inner; ; p = p->next_peer)
1161 p->outer = parent;
1162 if (! p->next_peer)
1163 break;
1166 /* In the tree of cleanups, only outer-inner ordering matters.
1167 So link the children back in anywhere at the correct level. */
1168 if (parent)
1169 pp = &parent->inner;
1170 else
1171 pp = &cfun->eh->region_tree;
1172 p->next_peer = *pp;
1173 *pp = fixup->inner;
1174 fixup->inner = NULL;
1177 remove_eh_handler (fixup);
1181 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1182 can_throw instruction in the region. */
1184 static void
1185 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1186 rtx *pinsns;
1187 int *orig_sp;
1188 int cur;
1190 int *sp = orig_sp;
1191 rtx insn, next;
1193 for (insn = *pinsns; insn ; insn = next)
1195 next = NEXT_INSN (insn);
1196 if (GET_CODE (insn) == NOTE)
1198 int kind = NOTE_LINE_NUMBER (insn);
1199 if (kind == NOTE_INSN_EH_REGION_BEG
1200 || kind == NOTE_INSN_EH_REGION_END)
1202 if (kind == NOTE_INSN_EH_REGION_BEG)
1204 struct eh_region *r;
1206 *sp++ = cur;
1207 cur = NOTE_EH_HANDLER (insn);
1209 r = cfun->eh->region_array[cur];
1210 if (r->type == ERT_FIXUP)
1212 r = r->u.fixup.real_region;
1213 cur = r ? r->region_number : 0;
1215 else if (r->type == ERT_CATCH)
1217 r = r->outer;
1218 cur = r ? r->region_number : 0;
1221 else
1222 cur = *--sp;
1224 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1225 requires extra care to adjust sequence start. */
1226 if (insn == *pinsns)
1227 *pinsns = next;
1228 remove_insn (insn);
1229 continue;
1232 else if (INSN_P (insn))
1234 if (cur > 0
1235 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1236 /* Calls can always potentially throw exceptions, unless
1237 they have a REG_EH_REGION note with a value of 0 or less,
1238 which should be the only possible kind so far. */
1239 && (GET_CODE (insn) == CALL_INSN
1240 /* If we wanted exceptions for non-call insns, then
1241 any may_trap_p instruction could throw. */
1242 || (flag_non_call_exceptions
1243 && may_trap_p (PATTERN (insn)))))
1245 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1246 REG_NOTES (insn));
1249 if (GET_CODE (insn) == CALL_INSN
1250 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1252 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1253 sp, cur);
1254 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1255 sp, cur);
1256 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1257 sp, cur);
1262 if (sp != orig_sp)
1263 abort ();
1266 void
1267 convert_from_eh_region_ranges ()
1269 int *stack;
1270 rtx insns;
1272 collect_eh_region_array ();
1273 resolve_fixup_regions ();
1275 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1276 insns = get_insns ();
1277 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1278 free (stack);
1280 remove_fixup_regions ();
1283 void
1284 find_exception_handler_labels ()
1286 rtx list = NULL_RTX;
1287 int i;
1289 free_EXPR_LIST_list (&exception_handler_labels);
1291 if (cfun->eh->region_tree == NULL)
1292 return;
1294 for (i = cfun->eh->last_region_number; i > 0; --i)
1296 struct eh_region *region = cfun->eh->region_array[i];
1297 rtx lab;
1299 if (! region)
1300 continue;
1301 if (cfun->eh->built_landing_pads)
1302 lab = region->landing_pad;
1303 else
1304 lab = region->label;
1306 if (lab)
1307 list = alloc_EXPR_LIST (0, lab, list);
1310 /* For sjlj exceptions, need the return label to remain live until
1311 after landing pad generation. */
1312 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1313 list = alloc_EXPR_LIST (0, return_label, list);
1315 exception_handler_labels = list;
1319 static struct eh_region *
1320 duplicate_eh_region_1 (o, map)
1321 struct eh_region *o;
1322 struct inline_remap *map;
1324 struct eh_region *n
1325 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1327 n->region_number = o->region_number + cfun->eh->last_region_number;
1328 n->type = o->type;
1330 switch (n->type)
1332 case ERT_CLEANUP:
1333 case ERT_MUST_NOT_THROW:
1334 break;
1336 case ERT_TRY:
1337 if (o->u.try.continue_label)
1338 n->u.try.continue_label
1339 = get_label_from_map (map,
1340 CODE_LABEL_NUMBER (o->u.try.continue_label));
1341 break;
1343 case ERT_CATCH:
1344 n->u.catch.type = o->u.catch.type;
1345 break;
1347 case ERT_ALLOWED_EXCEPTIONS:
1348 n->u.allowed.type_list = o->u.allowed.type_list;
1349 break;
1351 case ERT_THROW:
1352 n->u.throw.type = o->u.throw.type;
1353 break;
1354 default:
1355 abort ();
1358 if (o->label)
1359 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1360 if (o->resume)
1362 n->resume = map->insn_map[INSN_UID (o->resume)];
1363 if (n->resume == NULL)
1364 abort ();
1367 return n;
1370 static void
1371 duplicate_eh_region_2 (o, n_array)
1372 struct eh_region *o;
1373 struct eh_region **n_array;
1375 struct eh_region *n = n_array[o->region_number];
1377 switch (n->type)
1379 case ERT_TRY:
1380 n->u.try.catch = n_array[o->u.try.catch->region_number];
1381 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1382 break;
1384 case ERT_CATCH:
1385 if (o->u.catch.next_catch)
1386 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1387 if (o->u.catch.prev_catch)
1388 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1389 break;
1391 default:
1392 break;
1395 if (o->outer)
1396 n->outer = n_array[o->outer->region_number];
1397 if (o->inner)
1398 n->inner = n_array[o->inner->region_number];
1399 if (o->next_peer)
1400 n->next_peer = n_array[o->next_peer->region_number];
1403 int
1404 duplicate_eh_regions (ifun, map)
1405 struct function *ifun;
1406 struct inline_remap *map;
1408 int ifun_last_region_number = ifun->eh->last_region_number;
1409 struct eh_region **n_array, *root, *cur;
1410 int i;
1412 if (ifun_last_region_number == 0)
1413 return 0;
1415 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1417 for (i = 1; i <= ifun_last_region_number; ++i)
1419 cur = ifun->eh->region_array[i];
1420 if (!cur || cur->region_number != i)
1421 continue;
1422 n_array[i] = duplicate_eh_region_1 (cur, map);
1424 for (i = 1; i <= ifun_last_region_number; ++i)
1426 cur = ifun->eh->region_array[i];
1427 if (!cur || cur->region_number != i)
1428 continue;
1429 duplicate_eh_region_2 (cur, n_array);
1432 root = n_array[ifun->eh->region_tree->region_number];
1433 cur = cfun->eh->cur_region;
1434 if (cur)
1436 struct eh_region *p = cur->inner;
1437 if (p)
1439 while (p->next_peer)
1440 p = p->next_peer;
1441 p->next_peer = root;
1443 else
1444 cur->inner = root;
1446 for (i = 1; i <= ifun_last_region_number; ++i)
1447 if (n_array[i]->outer == NULL)
1448 n_array[i]->outer = cur;
1450 else
1452 struct eh_region *p = cfun->eh->region_tree;
1453 if (p)
1455 while (p->next_peer)
1456 p = p->next_peer;
1457 p->next_peer = root;
1459 else
1460 cfun->eh->region_tree = root;
1463 free (n_array);
1465 i = cfun->eh->last_region_number;
1466 cfun->eh->last_region_number = i + ifun_last_region_number;
1467 return i;
1471 /* ??? Move from tree.c to tree.h. */
1472 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
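/* That is (an editorial note): a type node hashes to the low-order
   bits of its address (0777777 masks eighteen bits); adequate for
   spreading htab buckets, but not a stable identity across runs.  */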
1474 static int
1475 t2r_eq (pentry, pdata)
1476 const PTR pentry;
1477 const PTR pdata;
1479 tree entry = (tree) pentry;
1480 tree data = (tree) pdata;
1482 return TREE_PURPOSE (entry) == data;
1485 static hashval_t
1486 t2r_hash (pentry)
1487 const PTR pentry;
1489 tree entry = (tree) pentry;
1490 return TYPE_HASH (TREE_PURPOSE (entry));
1493 static int
1494 t2r_mark_1 (slot, data)
1495 PTR *slot;
1496 PTR data ATTRIBUTE_UNUSED;
1498 tree contents = (tree) *slot;
1499 ggc_mark_tree (contents);
1500 return 1;
1503 static void
1504 t2r_mark (addr)
1505 PTR addr;
1507 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1510 static void
1511 add_type_for_runtime (type)
1512 tree type;
1514 tree *slot;
1516 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1517 TYPE_HASH (type), INSERT);
1518 if (*slot == NULL)
1520 tree runtime = (*lang_eh_runtime_type) (type);
1521 *slot = tree_cons (type, runtime, NULL_TREE);
1525 static tree
1526 lookup_type_for_runtime (type)
1527 tree type;
1529 tree *slot;
1531 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1532 TYPE_HASH (type), NO_INSERT);
1534 /* We should have always inserted the data earlier. */
1535 return TREE_VALUE (*slot);
1539 /* Represent an entry in @TTypes for either catch actions
1540 or exception filter actions. */
1541 struct ttypes_filter
1543 tree t;
1544 int filter;
1547 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1548 (a tree) for a @TTypes type node we are thinking about adding. */
1550 static int
1551 ttypes_filter_eq (pentry, pdata)
1552 const PTR pentry;
1553 const PTR pdata;
1555 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1556 tree data = (tree) pdata;
1558 return entry->t == data;
1561 static hashval_t
1562 ttypes_filter_hash (pentry)
1563 const PTR pentry;
1565 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1566 return TYPE_HASH (entry->t);
1569 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1570 exception specification list we are thinking about adding. */
1571 /* ??? Currently we use the type lists in the order given. Someone
1572 should put these in some canonical order. */
1574 static int
1575 ehspec_filter_eq (pentry, pdata)
1576 const PTR pentry;
1577 const PTR pdata;
1579 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1580 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1582 return type_list_equal (entry->t, data->t);
1585 /* Hash function for exception specification lists. */
1587 static hashval_t
1588 ehspec_filter_hash (pentry)
1589 const PTR pentry;
1591 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1592 hashval_t h = 0;
1593 tree list;
1595 for (list = entry->t; list ; list = TREE_CHAIN (list))
1596 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1597 return h;
1600 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1601 up the search. Return the filter value to be used. */
1603 static int
1604 add_ttypes_entry (ttypes_hash, type)
1605 htab_t ttypes_hash;
1606 tree type;
1608 struct ttypes_filter **slot, *n;
1610 slot = (struct ttypes_filter **)
1611 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1613 if ((n = *slot) == NULL)
1615 /* Filter value is a 1-based table index. */
1617 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1618 n->t = type;
1619 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1620 *slot = n;
1622 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1625 return n->filter;
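/* For example (illustrative): the first type added receives filter 1,
   the next distinct type filter 2, and so on; adding a type already
   in the table simply returns its existing filter value.  */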
1628 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1629 to speed up the search. Return the filter value to be used. */
1631 static int
1632 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1633 htab_t ehspec_hash;
1634 htab_t ttypes_hash;
1635 tree list;
1637 struct ttypes_filter **slot, *n;
1638 struct ttypes_filter dummy;
1640 dummy.t = list;
1641 slot = (struct ttypes_filter **)
1642 htab_find_slot (ehspec_hash, &dummy, INSERT);
1644 if ((n = *slot) == NULL)
1646 /* Filter value is a -1-based byte index into a uleb128 buffer. */
1648 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1649 n->t = list;
1650 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1651 *slot = n;
1653 /* Look up each type in the list and encode its filter
1654 value as a uleb128. Terminate the list with 0. */
1655 for (; list ; list = TREE_CHAIN (list))
1656 push_uleb128 (&cfun->eh->ehspec_data,
1657 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1658 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1661 return n->filter;
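/* Worked example (illustrative): starting from an empty ehspec_data
   buffer, adding the specification (A, B), where A and B carry ttype
   filters 1 and 2, pushes the bytes { 1, 2, 0 } and yields the filter
   -1, i.e. minus the 1-based byte index at which the encoded list
   begins.  */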
1664 /* Generate the action filter values to be used for CATCH and
1665 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1666 we use lots of landing pads, and so every type or list can share
1667 the same filter value, which saves table space. */
1669 static void
1670 assign_filter_values ()
1672 int i;
1673 htab_t ttypes, ehspec;
1675 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1676 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1678 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1679 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1681 for (i = cfun->eh->last_region_number; i > 0; --i)
1683 struct eh_region *r = cfun->eh->region_array[i];
1685 /* Mind we don't process a region more than once. */
1686 if (!r || r->region_number != i)
1687 continue;
1689 switch (r->type)
1691 case ERT_CATCH:
1692 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1693 break;
1695 case ERT_ALLOWED_EXCEPTIONS:
1696 r->u.allowed.filter
1697 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1698 break;
1700 default:
1701 break;
1705 htab_delete (ttypes);
1706 htab_delete (ehspec);
1709 static void
1710 build_post_landing_pads ()
1712 int i;
1714 for (i = cfun->eh->last_region_number; i > 0; --i)
1716 struct eh_region *region = cfun->eh->region_array[i];
1717 rtx seq;
1719 /* Mind we don't process a region more than once. */
1720 if (!region || region->region_number != i)
1721 continue;
1723 switch (region->type)
1725 case ERT_TRY:
1726 /* ??? Collect the set of all non-overlapping catch handlers
1727 all the way up the chain until blocked by a cleanup. */
1728 /* ??? Outer try regions can share landing pads with inner
1729 try regions if the types are completely non-overlapping,
1730 and there are no intervening cleanups. */
1732 region->post_landing_pad = gen_label_rtx ();
1734 start_sequence ();
1736 emit_label (region->post_landing_pad);
1738 /* ??? It is mighty inconvenient to call back into the
1739 switch statement generation code in expand_end_case.
1740 Rapid prototyping sez a sequence of ifs. */
1742 struct eh_region *c;
1743 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1745 /* ??? _Unwind_ForcedUnwind wants no match here. */
1746 if (c->u.catch.type == NULL)
1747 emit_jump (c->label);
1748 else
1749 emit_cmp_and_jump_insns (cfun->eh->filter,
1750 GEN_INT (c->u.catch.filter),
1751 EQ, NULL_RTX, word_mode,
1752 0, 0, c->label);
1756 /* We delay the generation of the _Unwind_Resume until we generate
1757 landing pads. We emit a marker here so as to get good control
1758 flow data in the meantime. */
1759 region->resume
1760 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1761 emit_barrier ();
1763 seq = get_insns ();
1764 end_sequence ();
1766 emit_insns_before (seq, region->u.try.catch->label);
1767 break;
1769 case ERT_ALLOWED_EXCEPTIONS:
1770 region->post_landing_pad = gen_label_rtx ();
1772 start_sequence ();
1774 emit_label (region->post_landing_pad);
1776 emit_cmp_and_jump_insns (cfun->eh->filter,
1777 GEN_INT (region->u.allowed.filter),
1778 EQ, NULL_RTX, word_mode, 0, 0,
1779 region->label);
1781 /* We delay the generation of the _Unwind_Resume until we generate
1782 landing pads. We emit a marker here so as to get good control
1783 flow data in the meantime. */
1784 region->resume
1785 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1786 emit_barrier ();
1788 seq = get_insns ();
1789 end_sequence ();
1791 emit_insns_before (seq, region->label);
1792 break;
1794 case ERT_CLEANUP:
1795 case ERT_MUST_NOT_THROW:
1796 region->post_landing_pad = region->label;
1797 break;
1799 case ERT_CATCH:
1800 case ERT_THROW:
1801 /* Nothing to do. */
1802 break;
1804 default:
1805 abort ();
1810 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1811 _Unwind_Resume otherwise. */
1813 static void
1814 connect_post_landing_pads ()
1816 int i;
1818 for (i = cfun->eh->last_region_number; i > 0; --i)
1820 struct eh_region *region = cfun->eh->region_array[i];
1821 struct eh_region *outer;
1822 rtx seq;
1824 /* Mind we don't process a region more than once. */
1825 if (!region || region->region_number != i)
1826 continue;
1828 /* If there is no RESX, or it has been deleted by flow, there's
1829 nothing to fix up. */
1830 if (! region->resume || INSN_DELETED_P (region->resume))
1831 continue;
1833 /* Search for another landing pad in this function. */
1834 for (outer = region->outer; outer ; outer = outer->outer)
1835 if (outer->post_landing_pad)
1836 break;
1838 start_sequence ();
1840 if (outer)
1841 emit_jump (outer->post_landing_pad);
1842 else
1843 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1844 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1846 seq = get_insns ();
1847 end_sequence ();
1848 emit_insns_before (seq, region->resume);
1850 /* Leave the RESX to be deleted by flow. */
1855 static void
1856 dw2_build_landing_pads ()
1858 int i, j;
1860 for (i = cfun->eh->last_region_number; i > 0; --i)
1862 struct eh_region *region = cfun->eh->region_array[i];
1863 rtx seq;
1865 /* Mind we don't process a region more than once. */
1866 if (!region || region->region_number != i)
1867 continue;
1869 if (region->type != ERT_CLEANUP
1870 && region->type != ERT_TRY
1871 && region->type != ERT_ALLOWED_EXCEPTIONS)
1872 continue;
1874 start_sequence ();
1876 region->landing_pad = gen_label_rtx ();
1877 emit_label (region->landing_pad);
1879 #ifdef HAVE_exception_receiver
1880 if (HAVE_exception_receiver)
1881 emit_insn (gen_exception_receiver ());
1882 else
1883 #endif
1884 #ifdef HAVE_nonlocal_goto_receiver
1885 if (HAVE_nonlocal_goto_receiver)
1886 emit_insn (gen_nonlocal_goto_receiver ());
1887 else
1888 #endif
1889 { /* Nothing */ }
1891 /* If the eh_return data registers are call-saved, then we
1892 won't have considered them clobbered from the call that
1893 threw. Kill them now. */
1894 for (j = 0; ; ++j)
1896 unsigned r = EH_RETURN_DATA_REGNO (j);
1897 if (r == INVALID_REGNUM)
1898 break;
1899 if (! call_used_regs[r])
1900 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1903 emit_move_insn (cfun->eh->exc_ptr,
1904 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1905 emit_move_insn (cfun->eh->filter,
1906 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
1908 seq = get_insns ();
1909 end_sequence ();
1911 emit_insns_before (seq, region->post_landing_pad);
1916 struct sjlj_lp_info
1918 int directly_reachable;
1919 int action_index;
1920 int dispatch_index;
1921 int call_site_index;
1924 static bool
1925 sjlj_find_directly_reachable_regions (lp_info)
1926 struct sjlj_lp_info *lp_info;
1928 rtx insn;
1929 bool found_one = false;
1931 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1933 struct eh_region *region;
1934 tree type_thrown;
1935 rtx note;
1937 if (! INSN_P (insn))
1938 continue;
1940 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1941 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1942 continue;
1944 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1946 type_thrown = NULL_TREE;
1947 if (region->type == ERT_THROW)
1949 type_thrown = region->u.throw.type;
1950 region = region->outer;
1953 /* Find the first containing region that might handle the exception.
1954 That's the landing pad to which we will transfer control. */
1955 for (; region; region = region->outer)
1956 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1957 break;
1959 if (region)
1961 lp_info[region->region_number].directly_reachable = 1;
1962 found_one = true;
1966 return found_one;
1969 static void
1970 sjlj_assign_call_site_values (dispatch_label, lp_info)
1971 rtx dispatch_label;
1972 struct sjlj_lp_info *lp_info;
1974 htab_t ar_hash;
1975 int i, index;
1977 /* First task: build the action table. */
1979 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1980 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1982 for (i = cfun->eh->last_region_number; i > 0; --i)
1983 if (lp_info[i].directly_reachable)
1985 struct eh_region *r = cfun->eh->region_array[i];
1986 r->landing_pad = dispatch_label;
1987 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1988 if (lp_info[i].action_index != -1)
1989 cfun->uses_eh_lsda = 1;
1992 htab_delete (ar_hash);
1994 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1995 landing pad label for the region. For sjlj though, there is one
1996 common landing pad from which we dispatch to the post-landing pads.
1998 A region receives a dispatch index if it is directly reachable
1999 and requires in-function processing. Regions that share post-landing
2000 pads may share dispatch indices. */
2001 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2002 (see build_post_landing_pads) so we don't bother checking for it. */
2004 index = 0;
2005 for (i = cfun->eh->last_region_number; i > 0; --i)
2006 if (lp_info[i].directly_reachable
2007 && lp_info[i].action_index >= 0)
2008 lp_info[i].dispatch_index = index++;
2010 /* Finally: assign call-site values. In dwarf2 terms, this would be
2011 the region number assigned by convert_to_eh_region_ranges, but
2012 handles no-action and must-not-throw differently. */
2014 call_site_base = 1;
2015 for (i = cfun->eh->last_region_number; i > 0; --i)
2016 if (lp_info[i].directly_reachable)
2018 int action = lp_info[i].action_index;
2020 /* Map must-not-throw to otherwise unused call-site index 0. */
2021 if (action == -2)
2022 index = 0;
2023 /* Map no-action to otherwise unused call-site index -1. */
2024 else if (action == -1)
2025 index = -1;
2026 /* Otherwise, look it up in the table. */
2027 else
2028 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2030 lp_info[i].call_site_index = index;
2034 static void
2035 sjlj_mark_call_sites (lp_info)
2036 struct sjlj_lp_info *lp_info;
2038 int last_call_site = -2;
2039 rtx insn, mem;
2041 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2042 sjlj_fc_call_site_ofs);
2044 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2046 struct eh_region *region;
2047 int this_call_site;
2048 rtx note, before, p;
2050 /* Reset value tracking at extended basic block boundaries. */
2051 if (GET_CODE (insn) == CODE_LABEL)
2052 last_call_site = -2;
2054 if (! INSN_P (insn))
2055 continue;
2057 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2058 if (!note)
2060 /* Calls (and trapping insns) without notes are outside any
2061 exception handling region in this function. Mark them as
2062 no action. */
2063 if (GET_CODE (insn) == CALL_INSN
2064 || (flag_non_call_exceptions
2065 && may_trap_p (PATTERN (insn))))
2066 this_call_site = -1;
2067 else
2068 continue;
2070 else
2072 /* Calls that are known to not throw need not be marked. */
2073 if (INTVAL (XEXP (note, 0)) <= 0)
2074 continue;
2076 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2077 this_call_site = lp_info[region->region_number].call_site_index;
2080 if (this_call_site == last_call_site)
2081 continue;
2083 /* Don't separate a call from its argument loads. */
2084 before = insn;
2085 if (GET_CODE (insn) == CALL_INSN)
2087 HARD_REG_SET parm_regs;
2088 int nparm_regs;
2090 /* Since different machines initialize their parameter registers
2091 in different orders, assume nothing. Collect the set of all
2092 parameter registers. */
2093 CLEAR_HARD_REG_SET (parm_regs);
2094 nparm_regs = 0;
2095 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
2096 if (GET_CODE (XEXP (p, 0)) == USE
2097 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
2099 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
2100 abort ();
2102 /* We only care about registers which can hold function
2103 arguments. */
2104 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
2105 continue;
2107 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
2108 nparm_regs++;
2111 /* Search backward for the first set of a register in this set. */
2112 while (nparm_regs)
2114 before = PREV_INSN (before);
2116 /* Given that we've done no other optimizations yet,
2117 the arguments should be immediately available. */
2118 if (GET_CODE (before) == CODE_LABEL)
2119 abort ();
2121 p = single_set (before);
2122 if (p && GET_CODE (SET_DEST (p)) == REG
2123 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
2124 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
2126 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
2127 nparm_regs--;
2132 start_sequence ();
2133 emit_move_insn (mem, GEN_INT (this_call_site));
2134 p = get_insns ();
2135 end_sequence ();
2137 emit_insns_before (p, before);
2138 last_call_site = this_call_site;
2142 /* Construct the SjLj_Function_Context. */
2144 static void
2145 sjlj_emit_function_enter (dispatch_label)
2146 rtx dispatch_label;
2148 rtx fn_begin, fc, mem, seq;
2150 fc = cfun->eh->sjlj_fc;
2152 start_sequence ();
2154 /* We're storing this libcall's address into memory instead of
2155 calling it directly. Thus, we must call assemble_external_libcall
2156 here, as we cannot depend on emit_library_call to do it for us. */
2157 assemble_external_libcall (eh_personality_libfunc);
2158 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2159 emit_move_insn (mem, eh_personality_libfunc);
2161 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2162 if (cfun->uses_eh_lsda)
2164 char buf[20];
2165 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2166 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2168 else
2169 emit_move_insn (mem, const0_rtx);
2171 #ifdef DONT_USE_BUILTIN_SETJMP
2173 rtx x, note;
2174 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2175 TYPE_MODE (integer_type_node), 1,
2176 plus_constant (XEXP (fc, 0),
2177 sjlj_fc_jbuf_ofs), Pmode);
2179 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2180 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2182 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2183 TYPE_MODE (integer_type_node), 0, 0,
2184 dispatch_label);
2186 #else
2187 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2188 dispatch_label);
2189 #endif
2191 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2192 1, XEXP (fc, 0), Pmode);
2194 seq = get_insns ();
2195 end_sequence ();
2197 /* ??? Instead of doing this at the beginning of the function,
2198 do this in a block that is at loop level 0 and dominates all
2199 can_throw_internal instructions. */
2201 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2202 if (GET_CODE (fn_begin) == NOTE
2203 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2204 break;
2205 emit_insns_after (seq, fn_begin);
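/* A pseudo-C sketch of the sequence emitted above, assuming builtin
   setjmp is in use; __personality and __LLSDA0 stand for whatever
   eh_personality_libfunc and the LSDA label resolve to:

       fc.personality = __personality;
       fc.lsda = &__LLSDA0;
       if (__builtin_setjmp (fc.jbuf))
         goto dispatch_label;
       _Unwind_SjLj_Register (&fc);

   _Unwind_SjLj_Register being the runtime routine behind
   unwind_sjlj_register_libfunc.  */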
2208 /* Call back from expand_function_end to know where we should put
2209 the call to unwind_sjlj_unregister_libfunc if needed. */
2211 void
2212 sjlj_emit_function_exit_after (after)
2213 rtx after;
2215 cfun->eh->sjlj_exit_after = after;
2218 static void
2219 sjlj_emit_function_exit ()
2221 rtx seq;
2223 start_sequence ();
2225 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2226 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2228 seq = get_insns ();
2229 end_sequence ();
2231 /* ??? Really this can be done in any block at loop level 0 that
2232 post-dominates all can_throw_internal instructions. This is
2233 the last possible moment. */
2235 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2238 static void
2239 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2240 rtx dispatch_label;
2241 struct sjlj_lp_info *lp_info;
2243 int i, first_reachable;
2244 rtx mem, dispatch, seq, fc;
2246 fc = cfun->eh->sjlj_fc;
2248 start_sequence ();
2250 emit_label (dispatch_label);
2252 #ifndef DONT_USE_BUILTIN_SETJMP
2253 expand_builtin_setjmp_receiver (dispatch_label);
2254 #endif
2256 /* Load up dispatch index, exc_ptr and filter values from the
2257 function context. */
2258 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2259 sjlj_fc_call_site_ofs);
2260 dispatch = copy_to_reg (mem);
2262 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2263 if (word_mode != Pmode)
2265 #ifdef POINTERS_EXTEND_UNSIGNED
2266 mem = convert_memory_address (Pmode, mem);
2267 #else
2268 mem = convert_to_mode (Pmode, mem, 0);
2269 #endif
2271 emit_move_insn (cfun->eh->exc_ptr, mem);
2273 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2274 emit_move_insn (cfun->eh->filter, mem);
2276 /* Jump to one of the directly reachable regions. */
2277 /* ??? This really ought to be using a switch statement. */
2279 first_reachable = 0;
2280 for (i = cfun->eh->last_region_number; i > 0; --i)
2282 if (! lp_info[i].directly_reachable
2283 || lp_info[i].action_index < 0)
2284 continue;
2286 if (! first_reachable)
2288 first_reachable = i;
2289 continue;
2292 emit_cmp_and_jump_insns (dispatch,
2293 GEN_INT (lp_info[i].dispatch_index), EQ,
2294 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2295 cfun->eh->region_array[i]->post_landing_pad);
2298 seq = get_insns ();
2299 end_sequence ();
2301 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2302 ->post_landing_pad));
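/* A pseudo-C sketch of the dispatcher emitted above; the region
   numbers and dispatch indices are invented for the example:

       dispatch_label:
         dispatch = fc.call_site;
         exc_ptr = fc.data[0];
         filter = fc.data[1];
         if (dispatch == 2) goto post_landing_pad_2;
         if (dispatch == 3) goto post_landing_pad_3;
         goto post_landing_pad_1;

   The first reachable region needs no comparison because it is the
   only remaining possibility, which is why the loop skips
   first_reachable and the whole sequence is placed just before that
   region's post-landing pad.  */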
2305 static void
2306 sjlj_build_landing_pads ()
2308 struct sjlj_lp_info *lp_info;
2310 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2311 sizeof (struct sjlj_lp_info));
2313 if (sjlj_find_directly_reachable_regions (lp_info))
2315 rtx dispatch_label = gen_label_rtx ();
2317 cfun->eh->sjlj_fc
2318 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2319 int_size_in_bytes (sjlj_fc_type_node),
2320 TYPE_ALIGN (sjlj_fc_type_node));
2322 sjlj_assign_call_site_values (dispatch_label, lp_info);
2323 sjlj_mark_call_sites (lp_info);
2325 sjlj_emit_function_enter (dispatch_label);
2326 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2327 sjlj_emit_function_exit ();
2330 free (lp_info);
2333 void
2334 finish_eh_generation ()
2336 /* Nothing to do if no regions created. */
2337 if (cfun->eh->region_tree == NULL)
2338 return;
2340 /* The object here is to provide find_basic_blocks with detailed
2341 information (via reachable_handlers) on how exception control
2342 flows within the function. In this first pass, we can include
2343 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2344 regions, and hope that it will be useful in deleting unreachable
2345 handlers. Subsequently, we will generate landing pads which will
2346 connect many of the handlers, and then type information will not
2347 be effective. Still, this is a win over previous implementations. */
2349 jump_optimize_minimal (get_insns ());
2350 find_basic_blocks (get_insns (), max_reg_num (), 0);
2351 cleanup_cfg ();
2353 /* These registers are used by the landing pads. Make sure they
2354 have been generated. */
2355 get_exception_pointer (cfun);
2356 get_exception_filter (cfun);
2358 /* Construct the landing pads. */
2360 assign_filter_values ();
2361 build_post_landing_pads ();
2362 connect_post_landing_pads ();
2363 if (USING_SJLJ_EXCEPTIONS)
2364 sjlj_build_landing_pads ();
2365 else
2366 dw2_build_landing_pads ();
2368 cfun->eh->built_landing_pads = 1;
2370 /* We've totally changed the CFG. Start over. */
2371 find_exception_handler_labels ();
2372 jump_optimize_minimal (get_insns ());
2373 find_basic_blocks (get_insns (), max_reg_num (), 0);
2374 cleanup_cfg ();
2377 /* This section handles removing dead code for flow. */
2379 /* Remove LABEL from the exception_handler_labels list. */
2381 static void
2382 remove_exception_handler_label (label)
2383 rtx label;
2385 rtx *pl, l;
2387 for (pl = &exception_handler_labels, l = *pl;
2388 XEXP (l, 0) != label;
2389 pl = &XEXP (l, 1), l = *pl)
2390 continue;
2392 *pl = XEXP (l, 1);
2393 free_EXPR_LIST_node (l);
2396 /* Splice REGION from the region tree etc. */
2398 static void
2399 remove_eh_handler (region)
2400 struct eh_region *region;
2402 struct eh_region **pp, *p;
2403 rtx lab;
2404 int i;
2406 /* For the benefit of efficiently handling REG_EH_REGION notes,
2407 replace this region in the region array with its containing
2408 region. Note that previous region deletions may result in
2409 multiple copies of this region in the array, so we have to
2410 search the whole thing. */
2411 for (i = cfun->eh->last_region_number; i > 0; --i)
2412 if (cfun->eh->region_array[i] == region)
2413 cfun->eh->region_array[i] = region->outer;
2415 if (cfun->eh->built_landing_pads)
2416 lab = region->landing_pad;
2417 else
2418 lab = region->label;
2419 if (lab)
2420 remove_exception_handler_label (lab);
2422 if (region->outer)
2423 pp = &region->outer->inner;
2424 else
2425 pp = &cfun->eh->region_tree;
2426 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2427 continue;
2429 if (region->inner)
2431 for (p = region->inner; p->next_peer ; p = p->next_peer)
2432 p->outer = region->outer;
2433 p->next_peer = region->next_peer;
2434 p->outer = region->outer;
2435 *pp = region->inner;
2437 else
2438 *pp = region->next_peer;
2440 if (region->type == ERT_CATCH)
2442 struct eh_region *try, *next, *prev;
2444 for (try = region->next_peer;
2445 try->type == ERT_CATCH;
2446 try = try->next_peer)
2447 continue;
2448 if (try->type != ERT_TRY)
2449 abort ();
2451 next = region->u.catch.next_catch;
2452 prev = region->u.catch.prev_catch;
2454 if (next)
2455 next->u.catch.prev_catch = prev;
2456 else
2457 try->u.try.last_catch = prev;
2458 if (prev)
2459 prev->u.catch.next_catch = next;
2460 else
2462 try->u.try.catch = next;
2463 if (! next)
2464 remove_eh_handler (try);
2468 free (region);
2471 /* LABEL heads a basic block that is about to be deleted. If this
2472 label corresponds to an exception region, we may be able to
2473 delete the region. */
2475 void
2476 maybe_remove_eh_handler (label)
2477 rtx label;
2479 int i;
2481 /* ??? After generating landing pads, it's not so simple to determine
2482 if the region data is completely unused. One must examine the
2483 landing pad and the post landing pad, and whether an inner try block
2484 is referencing the catch handlers directly. */
2485 if (cfun->eh->built_landing_pads)
2486 return;
2488 for (i = cfun->eh->last_region_number; i > 0; --i)
2490 struct eh_region *region = cfun->eh->region_array[i];
2491 if (region && region->label == label)
2493 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2494 because there is no path to the fallback call to terminate.
2495 But the region continues to affect call-site data until there
2496 are no more contained calls, which we don't see here. */
2497 if (region->type == ERT_MUST_NOT_THROW)
2499 remove_exception_handler_label (region->label);
2500 region->label = NULL_RTX;
2502 else
2503 remove_eh_handler (region);
2504 break;
2510 /* This section describes CFG exception edges for flow. */
2512 /* For communicating between calls to reachable_next_level. */
2513 struct reachable_info
2515 tree types_caught;
2516 tree types_allowed;
2517 rtx handlers;
2520 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2521 base class of TYPE, is in HANDLED. */
2523 static int
2524 check_handled (handled, type)
2525 tree handled, type;
2527 tree t;
2529 /* We can check for exact matches without front-end help. */
2530 if (! lang_eh_type_covers)
2532 for (t = handled; t ; t = TREE_CHAIN (t))
2533 if (TREE_VALUE (t) == type)
2534 return 1;
2536 else
2538 for (t = handled; t ; t = TREE_CHAIN (t))
2539 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2540 return 1;
2543 return 0;
2546 /* A subroutine of reachable_next_level. If we are collecting a list
2547 of handlers, add one. After landing pad generation, reference
2548 it instead of the handlers themselves. Further, the handlers are
2549 all wired together, so by referencing one, we've got them all.
2550 Before landing pad generation we reference each handler individually.
2552 LP_REGION contains the landing pad; REGION is the handler. */
2554 static void
2555 add_reachable_handler (info, lp_region, region)
2556 struct reachable_info *info;
2557 struct eh_region *lp_region;
2558 struct eh_region *region;
2560 if (! info)
2561 return;
2563 if (cfun->eh->built_landing_pads)
2565 if (! info->handlers)
2566 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2568 else
2569 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2572 /* Process one level of exception regions for reachability.
2573 If TYPE_THROWN is non-null, then it is the *exact* type being
2574 propagated. If INFO is non-null, then collect handler labels
2575 and caught/allowed type information between invocations. */
2577 static enum reachable_code
2578 reachable_next_level (region, type_thrown, info)
2579 struct eh_region *region;
2580 tree type_thrown;
2581 struct reachable_info *info;
2583 switch (region->type)
2585 case ERT_CLEANUP:
2586 /* Before landing-pad generation, we model control flow
2587 directly to the individual handlers. In this way we can
2588 see that catch handler types may shadow one another. */
2589 add_reachable_handler (info, region, region);
2590 return RNL_MAYBE_CAUGHT;
2592 case ERT_TRY:
2594 struct eh_region *c;
2595 enum reachable_code ret = RNL_NOT_CAUGHT;
2597 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2599 /* A catch-all handler ends the search. */
2600 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2601 to be run as well. */
2602 if (c->u.catch.type == NULL)
2604 add_reachable_handler (info, region, c);
2605 return RNL_CAUGHT;
2608 if (type_thrown)
2610 /* If we have a type match, end the search. */
2611 if (c->u.catch.type == type_thrown
2612 || (lang_eh_type_covers
2613 && (*lang_eh_type_covers) (c->u.catch.type,
2614 type_thrown)))
2616 add_reachable_handler (info, region, c);
2617 return RNL_CAUGHT;
2620 /* If we have definitive information about a match failure,
2621 the catch won't trigger. */
2622 if (lang_eh_type_covers)
2623 return RNL_NOT_CAUGHT;
2626 if (! info)
2627 ret = RNL_MAYBE_CAUGHT;
2629 /* A type must not have been previously caught. */
2630 else if (! check_handled (info->types_caught, c->u.catch.type))
2632 add_reachable_handler (info, region, c);
2633 info->types_caught = tree_cons (NULL, c->u.catch.type,
2634 info->types_caught);
2636 /* ??? If the catch type is a base class of every allowed
2637 type, then we know we can stop the search. */
2638 ret = RNL_MAYBE_CAUGHT;
2642 return ret;
2645 case ERT_ALLOWED_EXCEPTIONS:
2646 /* An empty list of types definitely ends the search. */
2647 if (region->u.allowed.type_list == NULL_TREE)
2649 add_reachable_handler (info, region, region);
2650 return RNL_CAUGHT;
2653 /* Collect a list of lists of allowed types for use in detecting
2654 when a catch may be transformed into a catch-all. */
2655 if (info)
2656 info->types_allowed = tree_cons (NULL_TREE,
2657 region->u.allowed.type_list,
2658 info->types_allowed);
2660 /* If we have definitive information about the type hierarchy,
2661 then we can tell if the thrown type will pass through the
2662 filter. */
2663 if (type_thrown && lang_eh_type_covers)
2665 if (check_handled (region->u.allowed.type_list, type_thrown))
2666 return RNL_NOT_CAUGHT;
2667 else
2669 add_reachable_handler (info, region, region);
2670 return RNL_CAUGHT;
2674 add_reachable_handler (info, region, region);
2675 return RNL_MAYBE_CAUGHT;
2677 case ERT_CATCH:
2678 /* Catch regions are handled by their controlling try region. */
2679 return RNL_NOT_CAUGHT;
2681 case ERT_MUST_NOT_THROW:
2682 /* Here we end our search, since no exceptions may propagate.
2683 If we've touched down at some landing pad previously, then the
2684 explicit function call we generated may be used. Otherwise
2685 the call is made by the runtime. */
2686 if (info && info->handlers)
2688 add_reachable_handler (info, region, region);
2689 return RNL_CAUGHT;
2691 else
2692 return RNL_BLOCKED;
2694 case ERT_THROW:
2695 case ERT_FIXUP:
2696 /* Shouldn't see these here. */
2697 break;
2700 abort ();
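/* A worked example of the walk the callers below perform: for a throw
   of exact type E from within a cleanup nested in a try that catches
   E, the cleanup yields RNL_MAYBE_CAUGHT (its handler is recorded),
   the enclosing try matches the catch and yields RNL_CAUGHT, and the
   walk stops.  Had a MUST_NOT_THROW region been the innermost one, it
   would yield RNL_BLOCKED before any handler was recorded, and the
   walk would stop with an empty handler list.  */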
2703 /* Retrieve a list of labels of exception handlers which can be
2704 reached by a given insn. */
2706 rtx
2707 reachable_handlers (insn)
2708 rtx insn;
2710 struct reachable_info info;
2711 struct eh_region *region;
2712 tree type_thrown;
2713 int region_number;
2715 if (GET_CODE (insn) == JUMP_INSN
2716 && GET_CODE (PATTERN (insn)) == RESX)
2717 region_number = XINT (PATTERN (insn), 0);
2718 else
2720 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2721 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2722 return NULL;
2723 region_number = INTVAL (XEXP (note, 0));
2726 memset (&info, 0, sizeof (info));
2728 region = cfun->eh->region_array[region_number];
2730 type_thrown = NULL_TREE;
2731 if (region->type == ERT_THROW)
2733 type_thrown = region->u.throw.type;
2734 region = region->outer;
2736 else if (GET_CODE (insn) == JUMP_INSN
2737 && GET_CODE (PATTERN (insn)) == RESX)
2738 region = region->outer;
2740 for (; region; region = region->outer)
2741 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2742 break;
2744 return info.handlers;
2747 /* Determine if the given INSN can throw an exception that is caught
2748 within the function. */
2750 bool
2751 can_throw_internal (insn)
2752 rtx insn;
2754 struct eh_region *region;
2755 tree type_thrown;
2756 rtx note;
2758 if (! INSN_P (insn))
2759 return false;
2761 if (GET_CODE (insn) == INSN
2762 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2763 insn = XVECEXP (PATTERN (insn), 0, 0);
2765 if (GET_CODE (insn) == CALL_INSN
2766 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2768 int i;
2769 for (i = 0; i < 3; ++i)
2771 rtx sub = XEXP (PATTERN (insn), i);
2772 for (; sub ; sub = NEXT_INSN (sub))
2773 if (can_throw_internal (sub))
2774 return true;
2776 return false;
2779 /* Every insn that might throw has an EH_REGION note. */
2780 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2781 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2782 return false;
2784 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2786 type_thrown = NULL_TREE;
2787 if (region->type == ERT_THROW)
2789 type_thrown = region->u.throw.type;
2790 region = region->outer;
2793 /* If this exception is ignored by each and every containing region,
2794 then control passes straight out. The runtime may handle some
2795 regions, which also do not require processing internally. */
2796 for (; region; region = region->outer)
2798 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2799 if (how == RNL_BLOCKED)
2800 return false;
2801 if (how != RNL_NOT_CAUGHT)
2802 return true;
2805 return false;
2808 /* Determine if the given INSN can throw an exception that is
2809 visible outside the function. */
2811 bool
2812 can_throw_external (insn)
2813 rtx insn;
2815 struct eh_region *region;
2816 tree type_thrown;
2817 rtx note;
2819 if (! INSN_P (insn))
2820 return false;
2822 if (GET_CODE (insn) == INSN
2823 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2824 insn = XVECEXP (PATTERN (insn), 0, 0);
2826 if (GET_CODE (insn) == CALL_INSN
2827 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2829 int i;
2830 for (i = 0; i < 3; ++i)
2832 rtx sub = XEXP (PATTERN (insn), i);
2833 for (; sub ; sub = NEXT_INSN (sub))
2834 if (can_throw_external (sub))
2835 return true;
2837 return false;
2840 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2841 if (!note)
2843 /* Calls (and trapping insns) without notes are outside any
2844 exception handling region in this function. We have to
2845 assume it might throw. Given that the front end and middle
2846 ends mark known NOTHROW functions, this isn't so wildly
2847 inaccurate. */
2848 return (GET_CODE (insn) == CALL_INSN
2849 || (flag_non_call_exceptions
2850 && may_trap_p (PATTERN (insn))));
2852 if (INTVAL (XEXP (note, 0)) <= 0)
2853 return false;
2855 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2857 type_thrown = NULL_TREE;
2858 if (region->type == ERT_THROW)
2860 type_thrown = region->u.throw.type;
2861 region = region->outer;
2864 /* If the exception is caught or blocked by any containing region,
2865 then it is not seen by any calling function. */
2866 for (; region ; region = region->outer)
2867 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2868 return false;
2870 return true;
2873 /* True if nothing in this function can throw outside this function. */
2875 bool
2876 nothrow_function_p ()
2878 rtx insn;
2880 if (! flag_exceptions)
2881 return true;
2883 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2884 if (can_throw_external (insn))
2885 return false;
2886 for (insn = current_function_epilogue_delay_list; insn;
2887 insn = XEXP (insn, 1))
2888 if (can_throw_external (insn))
2889 return false;
2891 return true;
2895 /* Various hooks for unwind library. */
2897 /* Do any necessary initialization to access arbitrary stack frames.
2898 On the SPARC, this means flushing the register windows. */
2900 void
2901 expand_builtin_unwind_init ()
2903 /* Set this so all the registers get saved in our frame; we need to be
2904 able to copy the saved values for any registers from frames we unwind. */
2905 current_function_has_nonlocal_label = 1;
2907 #ifdef SETUP_FRAME_ADDRESSES
2908 SETUP_FRAME_ADDRESSES ();
2909 #endif
2912 rtx
2913 expand_builtin_eh_return_data_regno (arglist)
2914 tree arglist;
2916 tree which = TREE_VALUE (arglist);
2917 unsigned HOST_WIDE_INT iwhich;
2919 if (TREE_CODE (which) != INTEGER_CST)
2921 error ("argument of `__builtin_eh_return_data_regno' must be constant");
2922 return constm1_rtx;
2925 iwhich = tree_low_cst (which, 1);
2926 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2927 if (iwhich == INVALID_REGNUM)
2928 return constm1_rtx;
2930 #ifdef DWARF_FRAME_REGNUM
2931 iwhich = DWARF_FRAME_REGNUM (iwhich);
2932 #else
2933 iwhich = DBX_REGISTER_NUMBER (iwhich);
2934 #endif
2936 return GEN_INT (iwhich);
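/* Illustrative use, roughly as it appears in EH personality routines
   (the _Unwind_* names come from the unwind ABI, not from this file):

       _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
                      (_Unwind_Word) exception_header);
       _Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
                      handler_switch_value);

   which stores the exception pointer and filter value in the two
   data registers before transferring control to a landing pad.  */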
2939 /* Given a value extracted from the return address register or stack slot,
2940 return the actual address encoded in that value. */
2942 rtx
2943 expand_builtin_extract_return_addr (addr_tree)
2944 tree addr_tree;
2946 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2948 /* First mask out any unwanted bits. */
2949 #ifdef MASK_RETURN_ADDR
2950 expand_and (addr, MASK_RETURN_ADDR, addr);
2951 #endif
2953 /* Then adjust to find the real return address. */
2954 #if defined (RETURN_ADDR_OFFSET)
2955 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2956 #endif
2958 return addr;
2961 /* Given an actual address in addr_tree, do any necessary encoding
2962 and return the value to be stored in the return address register or
2963 stack slot so the epilogue will return to that address. */
2965 rtx
2966 expand_builtin_frob_return_addr (addr_tree)
2967 tree addr_tree;
2969 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2971 #ifdef POINTERS_EXTEND_UNSIGNED
2972 addr = convert_memory_address (Pmode, addr);
2973 #endif
2975 #ifdef RETURN_ADDR_OFFSET
2976 addr = force_reg (Pmode, addr);
2977 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2978 #endif
2980 return addr;
2983 /* Set up the epilogue with the magic bits we'll need to return to the
2984 exception handler. */
2986 void
2987 expand_builtin_eh_return (stackadj_tree, handler_tree)
2988 tree stackadj_tree, handler_tree;
2990 rtx stackadj, handler;
2992 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2993 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2995 #ifdef POINTERS_EXTEND_UNSIGNED
2996 stackadj = convert_memory_address (Pmode, stackadj);
2997 handler = convert_memory_address (Pmode, handler);
2998 #endif
3000 if (! cfun->eh->ehr_label)
3002 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3003 cfun->eh->ehr_handler = copy_to_reg (handler);
3004 cfun->eh->ehr_label = gen_label_rtx ();
3006 else
3008 if (stackadj != cfun->eh->ehr_stackadj)
3009 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3010 if (handler != cfun->eh->ehr_handler)
3011 emit_move_insn (cfun->eh->ehr_handler, handler);
3014 emit_jump (cfun->eh->ehr_label);
3017 void
3018 expand_eh_return ()
3020 rtx sa, ra, around_label;
3022 if (! cfun->eh->ehr_label)
3023 return;
3025 sa = EH_RETURN_STACKADJ_RTX;
3026 if (! sa)
3028 error ("__builtin_eh_return not supported on this target");
3029 return;
3032 current_function_calls_eh_return = 1;
3034 around_label = gen_label_rtx ();
3035 emit_move_insn (sa, const0_rtx);
3036 emit_jump (around_label);
3038 emit_label (cfun->eh->ehr_label);
3039 clobber_return_register ();
3041 #ifdef HAVE_eh_return
3042 if (HAVE_eh_return)
3043 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3044 else
3045 #endif
3047 ra = EH_RETURN_HANDLER_RTX;
3048 if (! ra)
3050 error ("__builtin_eh_return not supported on this target");
3051 ra = gen_reg_rtx (Pmode);
3054 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3055 emit_move_insn (ra, cfun->eh->ehr_handler);
3058 emit_label (around_label);
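/* A pseudo-C sketch of the code laid out above for targets without an
   eh_return pattern; stackadj and handler stand for the
   EH_RETURN_STACKADJ_RTX and EH_RETURN_HANDLER_RTX locations:

         stackadj = 0;
         goto around;
       ehr_label:
         stackadj = saved_stackadj;
         handler = saved_handler;
       around:
         ...

   The normal return path falls through with a zero adjustment; the
   __builtin_eh_return path jumps to ehr_label, and the epilogue then
   pops the extra stack and returns to the handler.  */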
3061 struct action_record
3063 int offset;
3064 int filter;
3065 int next;
3068 static int
3069 action_record_eq (pentry, pdata)
3070 const PTR pentry;
3071 const PTR pdata;
3073 const struct action_record *entry = (const struct action_record *) pentry;
3074 const struct action_record *data = (const struct action_record *) pdata;
3075 return entry->filter == data->filter && entry->next == data->next;
3078 static hashval_t
3079 action_record_hash (pentry)
3080 const PTR pentry;
3082 const struct action_record *entry = (const struct action_record *) pentry;
3083 return entry->next * 1009 + entry->filter;
3086 static int
3087 add_action_record (ar_hash, filter, next)
3088 htab_t ar_hash;
3089 int filter, next;
3091 struct action_record **slot, *new, tmp;
3093 tmp.filter = filter;
3094 tmp.next = next;
3095 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3097 if ((new = *slot) == NULL)
3099 new = (struct action_record *) xmalloc (sizeof (*new));
3100 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3101 new->filter = filter;
3102 new->next = next;
3103 *slot = new;
3105 /* The filter value goes in untouched. The link to the next
3106 record is a "self-relative" byte offset, or zero to indicate
3107 that there is no next record. So convert the absolute 1-based
3108 indices we've been carrying around into a displacement. */
3110 push_sleb128 (&cfun->eh->action_record_data, filter);
3111 if (next)
3112 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3113 push_sleb128 (&cfun->eh->action_record_data, next);
3116 return new->offset;
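/* A worked example of the displacement encoding, under the assumption
   that the varray starts empty.  Record A (filter 1, no next) is
   pushed first: it gets offset 1 and occupies two bytes, sleb128(1)
   and sleb128(0).  Record B (filter 2, next = A) then gets offset 3;
   after its filter byte is pushed the varray holds 3 bytes, so its
   link is stored as 1 - (3 + 1) = -3, i.e. sleb128(-3) = 0x7d: a
   self-relative hop of three bytes back from the link field's own
   1-based position to record A.  */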
3119 static int
3120 collect_one_action_chain (ar_hash, region)
3121 htab_t ar_hash;
3122 struct eh_region *region;
3124 struct eh_region *c;
3125 int next;
3127 /* If we've reached the top of the region chain, then we have
3128 no actions, and require no landing pad. */
3129 if (region == NULL)
3130 return -1;
3132 switch (region->type)
3134 case ERT_CLEANUP:
3135 /* A cleanup adds a zero filter to the beginning of the chain, but
3136 there are special cases to look out for. If there are *only*
3137 cleanups along a path, then it compresses to a zero action.
3138 Further, if there are multiple cleanups along a path, we only
3139 need to represent one of them, as that is enough to trigger
3140 entry to the landing pad at runtime. */
3141 next = collect_one_action_chain (ar_hash, region->outer);
3142 if (next <= 0)
3143 return 0;
3144 for (c = region->outer; c ; c = c->outer)
3145 if (c->type == ERT_CLEANUP)
3146 return next;
3147 return add_action_record (ar_hash, 0, next);
3149 case ERT_TRY:
3150 /* Process the associated catch regions in reverse order.
3151 If there's a catch-all handler, then we don't need to
3152 search outer regions. Use a magic -3 value to record
3153 that we haven't done the outer search. */
3154 next = -3;
3155 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3157 if (c->u.catch.type == NULL)
3158 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3159 else
3161 if (next == -3)
3163 next = collect_one_action_chain (ar_hash, region->outer);
3164 if (next < 0)
3165 next = 0;
3167 next = add_action_record (ar_hash, c->u.catch.filter, next);
3170 return next;
3172 case ERT_ALLOWED_EXCEPTIONS:
3173 /* An exception specification adds its filter to the
3174 beginning of the chain. */
3175 next = collect_one_action_chain (ar_hash, region->outer);
3176 return add_action_record (ar_hash, region->u.allowed.filter,
3177 next < 0 ? 0 : next);
3179 case ERT_MUST_NOT_THROW:
3180 /* A must-not-throw region with no inner handlers or cleanups
3181 requires no call-site entry. Note that this differs from
3182 the no handler or cleanup case in that we do require an lsda
3183 to be generated. Return a magic -2 value to record this. */
3184 return -2;
3186 case ERT_CATCH:
3187 case ERT_THROW:
3188 /* CATCH regions are handled in TRY above. THROW regions are
3189 for optimization information only and produce no output. */
3190 return collect_one_action_chain (ar_hash, region->outer);
3192 default:
3193 abort ();
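/* A worked example of the chains built above: for an insn inside a
   cleanup whose outer region is a try with a single catch of type T
   (filter 1) and no further enclosing regions, the ERT_TRY case
   produces the record (filter 1, next 0), and the ERT_CLEANUP case
   prepends a zero filter, yielding (filter 0) -> (filter 1, next 0).
   A second cleanup nested inside the first would add nothing, since
   one zero filter already suffices to force entry to the landing
   pad.  */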
3197 static int
3198 add_call_site (landing_pad, action)
3199 rtx landing_pad;
3200 int action;
3202 struct call_site_record *data = cfun->eh->call_site_data;
3203 int used = cfun->eh->call_site_data_used;
3204 int size = cfun->eh->call_site_data_size;
3206 if (used >= size)
3208 size = (size ? size * 2 : 64);
3209 data = (struct call_site_record *)
3210 xrealloc (data, sizeof (*data) * size);
3211 cfun->eh->call_site_data = data;
3212 cfun->eh->call_site_data_size = size;
3215 data[used].landing_pad = landing_pad;
3216 data[used].action = action;
3218 cfun->eh->call_site_data_used = used + 1;
3220 return used + call_site_base;
3223 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3224 The new note numbers will not refer to region numbers, but
3225 instead to call site entries. */
3227 void
3228 convert_to_eh_region_ranges ()
3230 rtx insn, iter, note;
3231 htab_t ar_hash;
3232 int last_action = -3;
3233 rtx last_action_insn = NULL_RTX;
3234 rtx last_landing_pad = NULL_RTX;
3235 rtx first_no_action_insn = NULL_RTX;
3236 int call_site;
3238 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3239 return;
3241 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3243 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3245 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3246 if (INSN_P (iter))
3248 struct eh_region *region;
3249 int this_action;
3250 rtx this_landing_pad;
3252 insn = iter;
3253 if (GET_CODE (insn) == INSN
3254 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3255 insn = XVECEXP (PATTERN (insn), 0, 0);
3257 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3258 if (!note)
3260 if (! (GET_CODE (insn) == CALL_INSN
3261 || (flag_non_call_exceptions
3262 && may_trap_p (PATTERN (insn)))))
3263 continue;
3264 this_action = -1;
3265 region = NULL;
3267 else
3269 if (INTVAL (XEXP (note, 0)) <= 0)
3270 continue;
3271 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3272 this_action = collect_one_action_chain (ar_hash, region);
3275 /* Existence of catch handlers or must-not-throw regions
3276 implies that an lsda is needed (even if empty). */
3277 if (this_action != -1)
3278 cfun->uses_eh_lsda = 1;
3280 /* Delay creation of region notes for no-action regions
3281 until we're sure that an lsda will be required. */
3282 else if (last_action == -3)
3284 first_no_action_insn = iter;
3285 last_action = -1;
3288 /* Cleanups and handlers may share action chains but not
3289 landing pads. Collect the landing pad for this region. */
3290 if (this_action >= 0)
3292 struct eh_region *o;
3293 for (o = region; ! o->landing_pad ; o = o->outer)
3294 continue;
3295 this_landing_pad = o->landing_pad;
3297 else
3298 this_landing_pad = NULL_RTX;
3300 /* Differing actions or landing pads implies a change in call-site
3301 info, which implies some EH_REGION note should be emitted. */
3302 if (last_action != this_action
3303 || last_landing_pad != this_landing_pad)
3305 /* If we'd not seen a previous action (-3) or the previous
3306 action was must-not-throw (-2), then we do not need an
3307 end note. */
3308 if (last_action >= -1)
3310 /* If we delayed the creation of the begin, do it now. */
3311 if (first_no_action_insn)
3313 call_site = add_call_site (NULL_RTX, 0);
3314 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3315 first_no_action_insn);
3316 NOTE_EH_HANDLER (note) = call_site;
3317 first_no_action_insn = NULL_RTX;
3320 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3321 last_action_insn);
3322 NOTE_EH_HANDLER (note) = call_site;
3325 /* If the new action is must-not-throw, then no region notes
3326 are created. */
3327 if (this_action >= -1)
3329 call_site = add_call_site (this_landing_pad,
3330 this_action < 0 ? 0 : this_action);
3331 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3332 NOTE_EH_HANDLER (note) = call_site;
3335 last_action = this_action;
3336 last_landing_pad = this_landing_pad;
3338 last_action_insn = iter;
3341 if (last_action >= -1 && ! first_no_action_insn)
3343 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3344 NOTE_EH_HANDLER (note) = call_site;
3347 htab_delete (ar_hash);
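/* After this pass, a run of insns sharing one call-site entry is
   bracketed by a note pair, e.g. (the call-site number is invented):

       NOTE_INSN_EH_REGION_BEG  (handler 2)
         call foo
         call bar
       NOTE_INSN_EH_REGION_END  (handler 2)

   where 2 is a call-site table index, not an EH region number.  */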
3351 static void
3352 push_uleb128 (data_area, value)
3353 varray_type *data_area;
3354 unsigned int value;
3358 unsigned char byte = value & 0x7f;
3359 value >>= 7;
3360 if (value)
3361 byte |= 0x80;
3362 VARRAY_PUSH_UCHAR (*data_area, byte);
3364 while (value);
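/* For reference, a minimal sketch of the inverse operation; it is not
   compiled and is included only to illustrate the encoding.  Each
   byte carries 7 payload bits, least-significant group first, and a
   set high bit means another byte follows; e.g. 624485 encodes as
   0xe5 0x8e 0x26.  */
#if 0
static unsigned int
sketch_read_uleb128 (p)
     const unsigned char *p;
{
  unsigned int result = 0;
  int shift = 0;
  unsigned char byte;

  do
    {
      byte = *p++;
      result |= (unsigned int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  return result;
}
#endif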
3367 static void
3368 push_sleb128 (data_area, value)
3369 varray_type *data_area;
3370 int value;
3372 unsigned char byte;
3373 int more;
3377 byte = value & 0x7f;
3378 value >>= 7;
3379 more = ! ((value == 0 && (byte & 0x40) == 0)
3380 || (value == -1 && (byte & 0x40) != 0));
3381 if (more)
3382 byte |= 0x80;
3383 VARRAY_PUSH_UCHAR (*data_area, byte);
3385 while (more);
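/* Likewise a sketch (not compiled) of decoding the signed form.  The
   final byte's 0x40 bit carries the sign: 2 encodes as the single
   byte 0x02, while -2 encodes as the single byte 0x7e.  */
#if 0
static int
sketch_read_sleb128 (p)
     const unsigned char *p;
{
  int result = 0;
  int shift = 0;
  unsigned char byte;

  do
    {
      byte = *p++;
      result |= (int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  /* Sign-extend if the final byte's 0x40 bit was set.  */
  if (shift < (int) (sizeof (int) * 8) && (byte & 0x40))
    result |= - (1 << shift);

  return result;
}
#endif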
3389 #ifndef HAVE_AS_LEB128
3390 static int
3391 dw2_size_of_call_site_table ()
3393 int n = cfun->eh->call_site_data_used;
3394 int size = n * (4 + 4 + 4);
3395 int i;
3397 for (i = 0; i < n; ++i)
3399 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3400 size += size_of_uleb128 (cs->action);
3403 return size;
3406 static int
3407 sjlj_size_of_call_site_table ()
3409 int n = cfun->eh->call_site_data_used;
3410 int size = 0;
3411 int i;
3413 for (i = 0; i < n; ++i)
3415 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3416 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3417 size += size_of_uleb128 (cs->action);
3420 return size;
3422 #endif
3424 static void
3425 dw2_output_call_site_table ()
3427 const char *function_start_lab
3428 = IDENTIFIER_POINTER (current_function_func_begin_label);
3429 int n = cfun->eh->call_site_data_used;
3430 int i;
3432 for (i = 0; i < n; ++i)
3434 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3435 char reg_start_lab[32];
3436 char reg_end_lab[32];
3437 char landing_pad_lab[32];
3439 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3440 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3442 if (cs->landing_pad)
3443 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3444 CODE_LABEL_NUMBER (cs->landing_pad));
3446 /* ??? Perhaps use insn length scaling if the assembler supports
3447 generic arithmetic. */
3448 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3449 data4 if the function is small enough. */
3450 #ifdef HAVE_AS_LEB128
3451 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3452 "region %d start", i);
3453 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3454 "length");
3455 if (cs->landing_pad)
3456 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3457 "landing pad");
3458 else
3459 dw2_asm_output_data_uleb128 (0, "landing pad");
3460 #else
3461 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3462 "region %d start", i);
3463 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3464 if (cs->landing_pad)
3465 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3466 "landing pad");
3467 else
3468 dw2_asm_output_data (4, 0, "landing pad");
3469 #endif
3470 dw2_asm_output_data_uleb128 (cs->action, "action");
3473 call_site_base += n;
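/* With a leb128-capable assembler, one record of the table above
   comes out roughly as follows (the label names are examples only):

       .uleb128 .LEHB0-.LFB1      region 0 start
       .uleb128 .LEHE0-.LEHB0     length
       .uleb128 .L7-.LFB1         landing pad
       .uleb128 0x1               action

   i.e. the region start and landing pad are offsets from the function
   begin label, and the action is a 1-based offset into the action
   record table, or 0 for no action.  */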
3476 static void
3477 sjlj_output_call_site_table ()
3479 int n = cfun->eh->call_site_data_used;
3480 int i;
3482 for (i = 0; i < n; ++i)
3484 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3486 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3487 "region %d landing pad", i);
3488 dw2_asm_output_data_uleb128 (cs->action, "action");
3491 call_site_base += n;
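/* The function below emits the language-specific data area (LSDA) for
   one function.  Schematically (fields marked "if present" may be
   omitted according to the preceding format bytes):

       LLSDA<n>:
         byte      @LPStart format   (DW_EH_PE_omit: LPStart == Start)
         byte      @TType format
         uleb128   @TType base offset              if @TType present
         byte      call-site table format
         uleb128   call-site table length
                   ... call-site records ...
                   ... action records, sleb128 (filter, next) pairs ...
                   ... aligned @TType entries, emitted in reverse ...
                   ... exception specification table ...  */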
3494 void
3495 output_function_exception_table ()
3497 int tt_format, cs_format, lp_format, i, n;
3498 #ifdef HAVE_AS_LEB128
3499 char ttype_label[32];
3500 char cs_after_size_label[32];
3501 char cs_end_label[32];
3502 #else
3503 int call_site_len;
3504 #endif
3505 int have_tt_data;
3506 int funcdef_number;
3507 int tt_format_size;
3509 /* Not all functions need anything. */
3510 if (! cfun->uses_eh_lsda)
3511 return;
3513 funcdef_number = (USING_SJLJ_EXCEPTIONS
3514 ? sjlj_funcdef_number
3515 : current_funcdef_number);
3517 #ifdef IA64_UNWIND_INFO
3518 fputs ("\t.personality\t", asm_out_file);
3519 output_addr_const (asm_out_file, eh_personality_libfunc);
3520 fputs ("\n\t.handlerdata\n", asm_out_file);
3521 /* Note that varasm still thinks we're in the function's code section.
3522 The ".endp" directive that will immediately follow will take us back. */
3523 #else
3524 exception_section ();
3525 #endif
3527 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3528 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3530 /* Indicate the format of the @TType entries. */
3531 if (! have_tt_data)
3532 tt_format = DW_EH_PE_omit;
3533 else
3535 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3536 #ifdef HAVE_AS_LEB128
3537 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3538 #endif
3539 tt_format_size = size_of_encoded_value (tt_format);
3541 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3544 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3546 /* The LSDA header. */
3548 /* Indicate the format of the landing pad start pointer. An omitted
3549 field implies @LPStart == @Start. */
3550 /* Currently we always put @LPStart == @Start. This field would
3551 be most useful in moving the landing pads completely out of
3552 line to another section, but it could also be used to minimize
3553 the size of uleb128 landing pad offsets. */
3554 lp_format = DW_EH_PE_omit;
3555 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3556 eh_data_format_name (lp_format));
3558 /* @LPStart pointer would go here. */
3560 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3561 eh_data_format_name (tt_format));
3563 #ifndef HAVE_AS_LEB128
3564 if (USING_SJLJ_EXCEPTIONS)
3565 call_site_len = sjlj_size_of_call_site_table ();
3566 else
3567 call_site_len = dw2_size_of_call_site_table ();
3568 #endif
3570 /* A pc-relative 4-byte displacement to the @TType data. */
3571 if (have_tt_data)
3573 #ifdef HAVE_AS_LEB128
3574 char ttype_after_disp_label[32];
3575 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3576 funcdef_number);
3577 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3578 "@TType base offset");
3579 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3580 #else
3581 /* Ugh. Alignment complicates things. */
3582 unsigned int before_disp, after_disp, last_disp, disp;
3584 before_disp = 1 + 1;
3585 after_disp = (1 + size_of_uleb128 (call_site_len)
3586 + call_site_len
3587 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3588 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3589 * tt_format_size));
3591 disp = after_disp;
3594 unsigned int disp_size, pad;
3596 last_disp = disp;
3597 disp_size = size_of_uleb128 (disp);
3598 pad = before_disp + disp_size + after_disp;
3599 if (pad % tt_format_size)
3600 pad = tt_format_size - (pad % tt_format_size);
3601 else
3602 pad = 0;
3603 disp = after_disp + pad;
3605 while (disp != last_disp);
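/* A worked example of the iteration above, with assumed values
   before_disp == 2, after_disp == 126 and tt_format_size == 4.  Pass
   one: uleb128(126) needs 1 byte, 2 + 1 + 126 = 129 bytes total, so
   3 bytes of padding and disp becomes 129.  Pass two: uleb128(129)
   needs 2 bytes, 2 + 2 + 126 = 130, so 2 bytes of padding and disp
   becomes 128.  Pass three recomputes the same 2-byte size and
   padding, disp stays 128, and the loop terminates.  */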
3607 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3608 #endif
3611 /* Indicate the format of the call-site offsets. */
3612 #ifdef HAVE_AS_LEB128
3613 cs_format = DW_EH_PE_uleb128;
3614 #else
3615 cs_format = DW_EH_PE_udata4;
3616 #endif
3617 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3618 eh_data_format_name (cs_format));
3620 #ifdef HAVE_AS_LEB128
3621 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3622 funcdef_number);
3623 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3624 funcdef_number);
3625 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3626 "Call-site table length");
3627 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3628 if (USING_SJLJ_EXCEPTIONS)
3629 sjlj_output_call_site_table ();
3630 else
3631 dw2_output_call_site_table ();
3632 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3633 #else
3634 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3635 if (USING_SJLJ_EXCEPTIONS)
3636 sjlj_output_call_site_table ();
3637 else
3638 dw2_output_call_site_table ();
3639 #endif
3641 /* ??? Decode and interpret the data for flag_debug_asm. */
3642 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3643 for (i = 0; i < n; ++i)
3644 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3645 (i ? NULL : "Action record table"));
3647 if (have_tt_data)
3648 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3650 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3651 while (i-- > 0)
3653 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3655 if (type == NULL_TREE)
3656 type = integer_zero_node;
3657 else
3658 type = lookup_type_for_runtime (type);
3660 dw2_asm_output_encoded_addr_rtx (tt_format,
3661 expand_expr (type, NULL_RTX, VOIDmode,
3662 EXPAND_INITIALIZER),
3663 NULL);
3666 #ifdef HAVE_AS_LEB128
3667 if (have_tt_data)
3668 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3669 #endif
3671 /* ??? Decode and interpret the data for flag_debug_asm. */
3672 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3673 for (i = 0; i < n; ++i)
3674 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3675 (i ? NULL : "Exception specification table"));
3677 function_section (current_function_decl);
3679 if (USING_SJLJ_EXCEPTIONS)
3680 sjlj_funcdef_number += 1;