[official-gcc.git] / gcc / except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
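/* Illustration (a hypothetical C++ fragment, not part of these
   sources): the control transfer described above.

       void mid (void) { throw 42; }         signal the event
       void top (void)
       {
         try { mid (); }                     the guarded region
         catch (int n) { handle (n); }       control is "caught" here
       }

   Control leaves mid at the throw and resumes at the catch clause in
   top, bypassing the normal return path; handle stands for whatever
   recovery code the caller supplies.  */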
50 #include "config.h"
51 #include "system.h"
52 #include "rtl.h"
53 #include "tree.h"
54 #include "flags.h"
55 #include "function.h"
56 #include "expr.h"
57 #include "insn-config.h"
58 #include "except.h"
59 #include "integrate.h"
60 #include "hard-reg-set.h"
61 #include "basic-block.h"
62 #include "output.h"
63 #include "dwarf2asm.h"
64 #include "dwarf2out.h"
65 #include "dwarf2.h"
66 #include "toplev.h"
67 #include "hashtab.h"
68 #include "intl.h"
69 #include "ggc.h"
70 #include "tm_p.h"
73 /* Provide defaults for stuff that may not be defined when using
74 sjlj exceptions. */
75 #ifndef EH_RETURN_STACKADJ_RTX
76 #define EH_RETURN_STACKADJ_RTX 0
77 #endif
78 #ifndef EH_RETURN_HANDLER_RTX
79 #define EH_RETURN_HANDLER_RTX 0
80 #endif
81 #ifndef EH_RETURN_DATA_REGNO
82 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
83 #endif
86 /* Nonzero means enable synchronous exceptions for non-call instructions. */
87 int flag_non_call_exceptions;
89 /* Protect cleanup actions with must-not-throw regions, with a call
90 to the given failure handler. */
91 tree (*lang_protect_cleanup_actions) PARAMS ((void));
93 /* Return true if type A catches type B. */
94 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
96 /* Map a type to a runtime object to match type. */
97 tree (*lang_eh_runtime_type) PARAMS ((tree));
99 /* A list of labels used for exception handlers. */
100 rtx exception_handler_labels;
102 static int call_site_base;
103 static int sjlj_funcdef_number;
104 static htab_t type_to_runtime_map;
106 /* Describe the SjLj_Function_Context structure. */
107 static tree sjlj_fc_type_node;
108 static int sjlj_fc_call_site_ofs;
109 static int sjlj_fc_data_ofs;
110 static int sjlj_fc_personality_ofs;
111 static int sjlj_fc_lsda_ofs;
112 static int sjlj_fc_jbuf_ofs;
114 /* Describes one exception region. */
115 struct eh_region
117 /* The immediately surrounding region. */
118 struct eh_region *outer;
120 /* The list of immediately contained regions. */
121 struct eh_region *inner;
122 struct eh_region *next_peer;
124 /* An identifier for this region. */
125 int region_number;
127 /* Each region does exactly one thing. */
128 enum eh_region_type
130 ERT_CLEANUP = 1,
131 ERT_TRY,
132 ERT_CATCH,
133 ERT_ALLOWED_EXCEPTIONS,
134 ERT_MUST_NOT_THROW,
135 ERT_THROW,
136 ERT_FIXUP
137 } type;
139 /* Holds the action to perform based on the preceding type. */
140 union {
141 /* A list of catch blocks, a surrounding try block,
142 and the label for continuing after a catch. */
143 struct {
144 struct eh_region *catch;
145 struct eh_region *last_catch;
146 struct eh_region *prev_try;
147 rtx continue_label;
148 } try;
150 /* The list through the catch handlers, the type object
151 matched, and a pointer to the generated code. */
152 struct {
153 struct eh_region *next_catch;
154 struct eh_region *prev_catch;
155 tree type;
156 int filter;
157 } catch;
159 /* A tree_list of allowed types. */
160 struct {
161 tree type_list;
162 int filter;
163 } allowed;
165 /* The type given by a call to "throw foo();", or discovered
166 for a throw. */
167 struct {
168 tree type;
169 } throw;
171 /* Retain the cleanup expression even after expansion so that
172 we can match up fixup regions. */
173 struct {
174 tree exp;
175 } cleanup;
177 /* The real region (by expression and by pointer) that fixup code
178 should live in. */
179 struct {
180 tree cleanup_exp;
181 struct eh_region *real_region;
182 } fixup;
183 } u;
185 /* Entry point for this region's handler before landing pads are built. */
186 rtx label;
188 /* Entry point for this region's handler from the runtime eh library. */
189 rtx landing_pad;
191 /* Entry point for this region's handler from an inner region. */
192 rtx post_landing_pad;
194 /* The RESX insn for handing off control to the next outermost handler,
195 if appropriate. */
196 rtx resume;
199 /* Used to save exception status for each function. */
200 struct eh_status
202 /* The tree of all regions for this function. */
203 struct eh_region *region_tree;
205 /* The same information as an indexable array. */
206 struct eh_region **region_array;
208 /* The most recently open region. */
209 struct eh_region *cur_region;
211 /* This is the region for which we are processing catch blocks. */
212 struct eh_region *try_region;
214 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
215 node is itself a TREE_CHAINed list of handlers for regions that
216 are not yet closed. The TREE_VALUE of each entry contains the
217 handler for the corresponding entry on the ehstack. */
218 tree protect_list;
220 rtx filter;
221 rtx exc_ptr;
223 int built_landing_pads;
224 int last_region_number;
226 varray_type ttype_data;
227 varray_type ehspec_data;
228 varray_type action_record_data;
230 struct call_site_record
232 rtx landing_pad;
233 int action;
234 } *call_site_data;
235 int call_site_data_used;
236 int call_site_data_size;
238 rtx ehr_stackadj;
239 rtx ehr_handler;
240 rtx ehr_label;
242 rtx sjlj_fc;
243 rtx sjlj_exit_after;
247 static void mark_eh_region PARAMS ((struct eh_region *));
249 static int t2r_eq PARAMS ((const PTR,
250 const PTR));
251 static hashval_t t2r_hash PARAMS ((const PTR));
252 static int t2r_mark_1 PARAMS ((PTR *, PTR));
253 static void t2r_mark PARAMS ((PTR));
254 static void add_type_for_runtime PARAMS ((tree));
255 static tree lookup_type_for_runtime PARAMS ((tree));
257 static struct eh_region *expand_eh_region_end PARAMS ((void));
259 static rtx get_exception_filter PARAMS ((void));
261 static void collect_eh_region_array PARAMS ((void));
262 static void resolve_fixup_regions PARAMS ((void));
263 static void remove_fixup_regions PARAMS ((void));
264 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
266 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
267 struct inline_remap *));
268 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
269 struct eh_region **));
270 static int ttypes_filter_eq PARAMS ((const PTR,
271 const PTR));
272 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
273 static int ehspec_filter_eq PARAMS ((const PTR,
274 const PTR));
275 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
276 static int add_ttypes_entry PARAMS ((htab_t, tree));
277 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
278 tree));
279 static void assign_filter_values PARAMS ((void));
280 static void build_post_landing_pads PARAMS ((void));
281 static void connect_post_landing_pads PARAMS ((void));
282 static void dw2_build_landing_pads PARAMS ((void));
284 struct sjlj_lp_info;
285 static bool sjlj_find_directly_reachable_regions
286 PARAMS ((struct sjlj_lp_info *));
287 static void sjlj_assign_call_site_values
288 PARAMS ((rtx, struct sjlj_lp_info *));
289 static void sjlj_mark_call_sites
290 PARAMS ((struct sjlj_lp_info *));
291 static void sjlj_emit_function_enter PARAMS ((rtx));
292 static void sjlj_emit_function_exit PARAMS ((void));
293 static void sjlj_emit_dispatch_table
294 PARAMS ((rtx, struct sjlj_lp_info *));
295 static void sjlj_build_landing_pads PARAMS ((void));
297 static void remove_exception_handler_label PARAMS ((rtx));
298 static void remove_eh_handler PARAMS ((struct eh_region *));
300 struct reachable_info;
302 /* The return value of reachable_next_level. */
303 enum reachable_code
305 /* The given exception is not processed by the given region. */
306 RNL_NOT_CAUGHT,
307 /* The given exception may need processing by the given region. */
308 RNL_MAYBE_CAUGHT,
309 /* The given exception is completely processed by the given region. */
310 RNL_CAUGHT,
311 /* The given exception is completely processed by the runtime. */
312 RNL_BLOCKED
315 static int check_handled PARAMS ((tree, tree));
316 static void add_reachable_handler
317 PARAMS ((struct reachable_info *, struct eh_region *,
318 struct eh_region *));
319 static enum reachable_code reachable_next_level
320 PARAMS ((struct eh_region *, tree, struct reachable_info *));
322 static int action_record_eq PARAMS ((const PTR,
323 const PTR));
324 static hashval_t action_record_hash PARAMS ((const PTR));
325 static int add_action_record PARAMS ((htab_t, int, int));
326 static int collect_one_action_chain PARAMS ((htab_t,
327 struct eh_region *));
328 static int add_call_site PARAMS ((rtx, int));
330 static void push_uleb128 PARAMS ((varray_type *,
331 unsigned int));
332 static void push_sleb128 PARAMS ((varray_type *, int));
333 #ifndef HAVE_AS_LEB128
334 static int dw2_size_of_call_site_table PARAMS ((void));
335 static int sjlj_size_of_call_site_table PARAMS ((void));
336 #endif
337 static void dw2_output_call_site_table PARAMS ((void));
338 static void sjlj_output_call_site_table PARAMS ((void));
341 /* Routine to see if exception handling is turned on.
342 DO_WARN is non-zero if we want to inform the user that exception
343 handling is turned off.
345 This is used to ensure that -fexceptions has been specified if the
346 compiler tries to use any exception-specific functions. */
348 int
349 doing_eh (do_warn)
350 int do_warn;
352 if (! flag_exceptions)
354 static int warned = 0;
355 if (! warned && do_warn)
357 error ("exception handling disabled, use -fexceptions to enable");
358 warned = 1;
360 return 0;
362 return 1;
366 void
367 init_eh ()
369 ggc_add_rtx_root (&exception_handler_labels, 1);
371 if (! flag_exceptions)
372 return;
374 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
375 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
377 /* Create the SjLj_Function_Context structure. This should match
378 the definition in unwind-sjlj.c. */
379 if (USING_SJLJ_EXCEPTIONS)
381 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
383 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
384 ggc_add_tree_root (&sjlj_fc_type_node, 1);
386 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
387 build_pointer_type (sjlj_fc_type_node));
388 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
390 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
391 integer_type_node);
392 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
394 tmp = build_index_type (build_int_2 (4 - 1, 0));
395 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
396 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
397 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
399 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
400 ptr_type_node);
401 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
403 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
404 ptr_type_node);
405 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
407 #ifdef DONT_USE_BUILTIN_SETJMP
408 #ifdef JMP_BUF_SIZE
409 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
410 #else
411 /* This should be large enough for most systems; if it is not,
412 JMP_BUF_SIZE should be defined with the proper value. It will
413 also tend to be larger than necessary for most systems; a more
414 optimal port will define JMP_BUF_SIZE. */
415 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
416 #endif
417 #else
418 /* This is 2 for builtin_setjmp, plus whatever the target requires
419 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
420 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
421 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
422 #endif
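/* A worked instance of the size computation above, for a hypothetical
   32-bit target: if STACK_SAVEAREA_MODE (SAVE_NONLOCAL) is DImode
   (8 bytes) and Pmode is SImode (4 bytes), the expression yields
   8/4 + 2 - 1 = 3, so the index type built below describes a
   4-element array of pointers: two slots for builtin_setjmp plus two
   for the saved stack area.  */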
423 tmp = build_index_type (tmp);
424 tmp = build_array_type (ptr_type_node, tmp);
425 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
426 #ifdef DONT_USE_BUILTIN_SETJMP
427 /* We don't know what the alignment requirements of the
428 runtime's jmp_buf are. Overestimate. */
429 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
430 DECL_USER_ALIGN (f_jbuf) = 1;
431 #endif
432 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
434 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
435 TREE_CHAIN (f_prev) = f_cs;
436 TREE_CHAIN (f_cs) = f_data;
437 TREE_CHAIN (f_data) = f_per;
438 TREE_CHAIN (f_per) = f_lsda;
439 TREE_CHAIN (f_lsda) = f_jbuf;
441 layout_type (sjlj_fc_type_node);
443 /* Cache the interesting field offsets so that we have
444 easy access from rtl. */
445 sjlj_fc_call_site_ofs
446 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
447 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
448 sjlj_fc_data_ofs
449 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
450 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
451 sjlj_fc_personality_ofs
452 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
453 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
454 sjlj_fc_lsda_ofs
455 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
456 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
457 sjlj_fc_jbuf_ofs
458 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
459 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
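/* For reference, the fields built above correspond roughly to this C
   layout (a sketch reconstructed from the declarations; the
   authoritative definition lives in unwind-sjlj.c):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *__prev;
         int __call_site;
         unsigned long __data[4];
         void *__personality;
         void *__lsda;
         void *__jbuf[JBUF_LEN];
       };

   where "unsigned long" stands in for a word_mode integer and
   JBUF_LEN for the target-dependent length computed above.  */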
463 void
464 init_eh_for_function ()
466 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
469 /* Mark EH for GC. */
471 static void
472 mark_eh_region (region)
473 struct eh_region *region;
475 if (! region)
476 return;
478 switch (region->type)
480 case ERT_CLEANUP:
481 ggc_mark_tree (region->u.cleanup.exp);
482 break;
483 case ERT_TRY:
484 ggc_mark_rtx (region->u.try.continue_label);
485 break;
486 case ERT_CATCH:
487 ggc_mark_tree (region->u.catch.type);
488 break;
489 case ERT_ALLOWED_EXCEPTIONS:
490 ggc_mark_tree (region->u.allowed.type_list);
491 break;
492 case ERT_MUST_NOT_THROW:
493 break;
494 case ERT_THROW:
495 ggc_mark_tree (region->u.throw.type);
496 break;
497 case ERT_FIXUP:
498 ggc_mark_tree (region->u.fixup.cleanup_exp);
499 break;
500 default:
501 abort ();
504 ggc_mark_rtx (region->label);
505 ggc_mark_rtx (region->resume);
506 ggc_mark_rtx (region->landing_pad);
507 ggc_mark_rtx (region->post_landing_pad);
510 void
511 mark_eh_status (eh)
512 struct eh_status *eh;
514 int i;
516 if (eh == 0)
517 return;
519 /* If we've called collect_eh_region_array, use it. Otherwise walk
520 the tree non-recursively. */
521 if (eh->region_array)
523 for (i = eh->last_region_number; i > 0; --i)
525 struct eh_region *r = eh->region_array[i];
526 if (r && r->region_number == i)
527 mark_eh_region (r);
530 else if (eh->region_tree)
532 struct eh_region *r = eh->region_tree;
533 while (1)
535 mark_eh_region (r);
536 if (r->inner)
537 r = r->inner;
538 else if (r->next_peer)
539 r = r->next_peer;
540 else
542 do {
543 r = r->outer;
544 if (r == NULL)
545 goto tree_done;
546 } while (r->next_peer == NULL);
547 r = r->next_peer;
550 tree_done:;
553 ggc_mark_tree (eh->protect_list);
554 ggc_mark_rtx (eh->filter);
555 ggc_mark_rtx (eh->exc_ptr);
556 ggc_mark_tree_varray (eh->ttype_data);
558 if (eh->call_site_data)
560 for (i = eh->call_site_data_used - 1; i >= 0; --i)
561 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
564 ggc_mark_rtx (eh->ehr_stackadj);
565 ggc_mark_rtx (eh->ehr_handler);
566 ggc_mark_rtx (eh->ehr_label);
568 ggc_mark_rtx (eh->sjlj_fc);
569 ggc_mark_rtx (eh->sjlj_exit_after);
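/* The inner/next_peer/outer stepping above is a stackless depth-first
   walk of the region tree; the same pattern recurs in free_eh_status
   and collect_eh_region_array below.  A self-contained sketch of the
   idiom over a generic n-ary tree (illustrative names only):

       struct node { struct node *inner, *next_peer, *outer; };

       static void
       walk_nonrecursive (struct node *r, void (*visit) (struct node *))
       {
         if (r == NULL)
           return;
         while (1)
           {
             visit (r);
             if (r->inner)
               r = r->inner;
             else if (r->next_peer)
               r = r->next_peer;
             else
               {
                 do {
                   r = r->outer;
                   if (r == NULL)
                     return;
                 } while (r->next_peer == NULL);
                 r = r->next_peer;
               }
           }
       }
   */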
572 void
573 free_eh_status (f)
574 struct function *f;
576 struct eh_status *eh = f->eh;
578 if (eh->region_array)
580 int i;
581 for (i = eh->last_region_number; i > 0; --i)
583 struct eh_region *r = eh->region_array[i];
584 /* Mind we don't free a region struct more than once. */
585 if (r && r->region_number == i)
586 free (r);
588 free (eh->region_array);
590 else if (eh->region_tree)
592 struct eh_region *next, *r = eh->region_tree;
593 while (1)
595 if (r->inner)
596 r = r->inner;
597 else if (r->next_peer)
599 next = r->next_peer;
600 free (r);
601 r = next;
603 else
605 do {
606 next = r->outer;
607 free (r);
608 r = next;
609 if (r == NULL)
610 goto tree_done;
611 } while (r->next_peer == NULL);
612 next = r->next_peer;
613 free (r);
614 r = next;
617 tree_done:;
620 VARRAY_FREE (eh->ttype_data);
621 VARRAY_FREE (eh->ehspec_data);
622 VARRAY_FREE (eh->action_record_data);
623 if (eh->call_site_data)
624 free (eh->call_site_data);
626 free (eh);
627 f->eh = NULL;
631 /* Start an exception handling region. All instructions emitted
632 after this point are considered to be part of the region until
633 expand_eh_region_end is invoked. */
635 void
636 expand_eh_region_start ()
638 struct eh_region *new_region;
639 struct eh_region *cur_region;
640 rtx note;
642 if (! doing_eh (0))
643 return;
645 /* Insert a new blank region as a leaf in the tree. */
646 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
647 cur_region = cfun->eh->cur_region;
648 new_region->outer = cur_region;
649 if (cur_region)
651 new_region->next_peer = cur_region->inner;
652 cur_region->inner = new_region;
654 else
656 new_region->next_peer = cfun->eh->region_tree;
657 cfun->eh->region_tree = new_region;
659 cfun->eh->cur_region = new_region;
661 /* Create a note marking the start of this region. */
662 new_region->region_number = ++cfun->eh->last_region_number;
663 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
664 NOTE_EH_HANDLER (note) = new_region->region_number;
667 /* Common code to end a region. Returns the region just ended. */
669 static struct eh_region *
670 expand_eh_region_end ()
672 struct eh_region *cur_region = cfun->eh->cur_region;
673 rtx note;
675 /* Create a note marking the end of this region. */
676 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
677 NOTE_EH_HANDLER (note) = cur_region->region_number;
679 /* Pop. */
680 cfun->eh->cur_region = cur_region->outer;
682 return cur_region;
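/* A hypothetical front-end sequence, to show how the calls pair up
   (a sketch, not a verbatim trace of any front end):

       expand_eh_region_start ();
       ... expand the statements being protected ...
       expand_eh_region_end_cleanup (cleanup_expr);

   Each expand_eh_region_start must be matched by exactly one of the
   expand_eh_region_end_* variants that follow, which is what finally
   assigns the region its type.  */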
685 /* End an exception handling region for a cleanup. HANDLER is an
686 expression to expand for the cleanup. */
688 void
689 expand_eh_region_end_cleanup (handler)
690 tree handler;
692 struct eh_region *region;
693 tree protect_cleanup_actions;
694 rtx around_label;
695 rtx data_save[2];
697 if (! doing_eh (0))
698 return;
700 region = expand_eh_region_end ();
701 region->type = ERT_CLEANUP;
702 region->label = gen_label_rtx ();
703 region->u.cleanup.exp = handler;
705 around_label = gen_label_rtx ();
706 emit_jump (around_label);
708 emit_label (region->label);
710 /* Give the language a chance to specify an action to be taken if an
711 exception is thrown that would propagate out of the HANDLER. */
712 protect_cleanup_actions
713 = (lang_protect_cleanup_actions
714 ? (*lang_protect_cleanup_actions) ()
715 : NULL_TREE);
717 if (protect_cleanup_actions)
718 expand_eh_region_start ();
720 /* In case this cleanup involves an inline destructor with a try block in
721 it, we need to save the EH return data registers around it. */
722 data_save[0] = gen_reg_rtx (Pmode);
723 emit_move_insn (data_save[0], get_exception_pointer ());
724 data_save[1] = gen_reg_rtx (word_mode);
725 emit_move_insn (data_save[1], get_exception_filter ());
727 expand_expr (handler, const0_rtx, VOIDmode, 0);
729 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
730 emit_move_insn (cfun->eh->filter, data_save[1]);
732 if (protect_cleanup_actions)
733 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
735 /* We need any stack adjustment complete before the around_label. */
736 do_pending_stack_adjust ();
738 /* We delay the generation of the _Unwind_Resume until we generate
739 landing pads. We emit a marker here so as to get good control
740 flow data in the meantime. */
741 region->resume
742 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
743 emit_barrier ();
745 emit_label (around_label);
748 /* End an exception handling region for a try block, and prepare
749 for subsequent calls to expand_start_catch. */
751 void
752 expand_start_all_catch ()
754 struct eh_region *region;
756 if (! doing_eh (1))
757 return;
759 region = expand_eh_region_end ();
760 region->type = ERT_TRY;
761 region->u.try.prev_try = cfun->eh->try_region;
762 region->u.try.continue_label = gen_label_rtx ();
764 cfun->eh->try_region = region;
766 emit_jump (region->u.try.continue_label);
769 /* Begin a catch clause. TYPE is the type caught, or null if this is
770 a catch-all clause. */
772 void
773 expand_start_catch (type)
774 tree type;
776 struct eh_region *t, *c, *l;
778 if (! doing_eh (0))
779 return;
781 if (type)
782 add_type_for_runtime (type);
783 expand_eh_region_start ();
785 t = cfun->eh->try_region;
786 c = cfun->eh->cur_region;
787 c->type = ERT_CATCH;
788 c->u.catch.type = type;
789 c->label = gen_label_rtx ();
791 l = t->u.try.last_catch;
792 c->u.catch.prev_catch = l;
793 if (l)
794 l->u.catch.next_catch = c;
795 else
796 t->u.try.catch = c;
797 t->u.try.last_catch = c;
799 emit_label (c->label);
802 /* End a catch clause. Control will resume after the try/catch block. */
804 void
805 expand_end_catch ()
807 struct eh_region *try_region, *catch_region;
809 if (! doing_eh (0))
810 return;
812 catch_region = expand_eh_region_end ();
813 try_region = cfun->eh->try_region;
815 emit_jump (try_region->u.try.continue_label);
818 /* End a sequence of catch handlers for a try block. */
820 void
821 expand_end_all_catch ()
823 struct eh_region *try_region;
825 if (! doing_eh (0))
826 return;
828 try_region = cfun->eh->try_region;
829 cfun->eh->try_region = try_region->u.try.prev_try;
831 emit_label (try_region->u.try.continue_label);
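/* Taken together, a front end expanding "try { A } catch (T e) { B }"
   would issue roughly this call sequence (a sketch):

       expand_eh_region_start ();        followed by expanding A
       expand_start_all_catch ();
       expand_start_catch (T);           followed by expanding B
       expand_end_catch ();
       expand_end_all_catch ();

   Whether the try body completes normally or a handler finishes,
   execution resumes at the try region's continue_label.  */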
834 /* End an exception region for an exception type filter. ALLOWED is a
835 TREE_LIST of types to be matched by the runtime. FAILURE is an
836 expression to invoke if a mismatch occurs. */
838 void
839 expand_eh_region_end_allowed (allowed, failure)
840 tree allowed, failure;
842 struct eh_region *region;
843 rtx around_label;
845 if (! doing_eh (0))
846 return;
848 region = expand_eh_region_end ();
849 region->type = ERT_ALLOWED_EXCEPTIONS;
850 region->u.allowed.type_list = allowed;
851 region->label = gen_label_rtx ();
853 for (; allowed ; allowed = TREE_CHAIN (allowed))
854 add_type_for_runtime (TREE_VALUE (allowed));
856 /* We must emit the call to FAILURE here, so that if this function
857 throws a different exception, it will be processed by the
858 correct region. */
860 /* If there are any pending stack adjustments, we must emit them
861 before we branch -- otherwise, we won't know how much adjustment
862 is required later. */
863 do_pending_stack_adjust ();
864 around_label = gen_label_rtx ();
865 emit_jump (around_label);
867 emit_label (region->label);
868 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
869 /* We must adjust the stack before we reach the AROUND_LABEL because
870 the call to FAILURE does not occur on all paths to the
871 AROUND_LABEL. */
872 do_pending_stack_adjust ();
874 emit_label (around_label);
877 /* End an exception region for a must-not-throw filter. FAILURE is an
878 expression to invoke if an uncaught exception propagates this far.
880 This is conceptually identical to expand_eh_region_end_allowed with
881 an empty allowed list (if you passed "std::terminate" instead of
882 "__cxa_call_unexpected"), but they are represented differently in
883 the C++ LSDA. */
885 void
886 expand_eh_region_end_must_not_throw (failure)
887 tree failure;
889 struct eh_region *region;
890 rtx around_label;
892 if (! doing_eh (0))
893 return;
895 region = expand_eh_region_end ();
896 region->type = ERT_MUST_NOT_THROW;
897 region->label = gen_label_rtx ();
899 /* We must emit the call to FAILURE here, so that if this function
900 throws a different exception, it will be processed by the
901 correct region. */
903 around_label = gen_label_rtx ();
904 emit_jump (around_label);
906 emit_label (region->label);
907 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
909 emit_label (around_label);
912 /* End an exception region for a throw. No handling goes on here,
913 but it's the easiest way for the front-end to indicate what type
914 is being thrown. */
916 void
917 expand_eh_region_end_throw (type)
918 tree type;
920 struct eh_region *region;
922 if (! doing_eh (0))
923 return;
925 region = expand_eh_region_end ();
926 region->type = ERT_THROW;
927 region->u.throw.type = type;
930 /* End a fixup region. Within this region the cleanups for the immediately
931 enclosing region are _not_ run. This is used for goto cleanup to avoid
932 destroying an object twice.
934 This would be an extraordinarily simple prospect, were it not for the
935 fact that we don't actually know what the immediately enclosing region
936 is. This surprising fact is because expand_cleanups is currently
937 generating a sequence that it will insert somewhere else. We collect
938 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
940 void
941 expand_eh_region_end_fixup (handler)
942 tree handler;
944 struct eh_region *fixup;
946 if (! doing_eh (0))
947 return;
949 fixup = expand_eh_region_end ();
950 fixup->type = ERT_FIXUP;
951 fixup->u.fixup.cleanup_exp = handler;
954 /* Return an rtl expression for a pointer to the exception object
955 within a handler. */
957 rtx
958 get_exception_pointer ()
960 rtx exc_ptr = cfun->eh->exc_ptr;
961 if (! exc_ptr)
963 exc_ptr = gen_reg_rtx (Pmode);
964 cfun->eh->exc_ptr = exc_ptr;
966 return exc_ptr;
969 /* Return an rtl expression for the exception dispatch filter
970 within a handler. */
972 static rtx
973 get_exception_filter ()
975 rtx filter = cfun->eh->filter;
976 if (! filter)
978 filter = gen_reg_rtx (word_mode);
979 cfun->eh->filter = filter;
981 return filter;
984 /* Begin a region that will contain entries created with
985 add_partial_entry. */
987 void
988 begin_protect_partials ()
990 /* Push room for a new list. */
991 cfun->eh->protect_list
992 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
995 /* Start a new exception region for a region of code that has a
996 cleanup action and push the HANDLER for the region onto
997 protect_list. All of the regions created with add_partial_entry
998 will be ended when end_protect_partials is invoked. */
1000 void
1001 add_partial_entry (handler)
1002 tree handler;
1004 expand_eh_region_start ();
1006 /* ??? This comment was old before the most recent rewrite. We
1007 really ought to fix the callers at some point. */
1008 /* For backwards compatibility, we allow callers to omit calls to
1009 begin_protect_partials for the outermost region. So, we must
1010 explicitly do so here. */
1011 if (!cfun->eh->protect_list)
1012 begin_protect_partials ();
1014 /* Add this entry to the front of the list. */
1015 TREE_VALUE (cfun->eh->protect_list)
1016 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1019 /* End all the pending exception regions on protect_list. */
1021 void
1022 end_protect_partials ()
1024 tree t;
1026 /* ??? This comment was old before the most recent rewrite. We
1027 really ought to fix the callers at some point. */
1028 /* For backwards compatibility, we allow callers to omit the call to
1029 begin_protect_partials for the outermost region. So,
1030 PROTECT_LIST may be NULL. */
1031 if (!cfun->eh->protect_list)
1032 return;
1034 /* Pop the topmost entry. */
1035 t = TREE_VALUE (cfun->eh->protect_list);
1036 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1038 /* End all the exception regions. */
1039 for (; t; t = TREE_CHAIN (t))
1040 expand_eh_region_end_cleanup (TREE_VALUE (t));
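/* A hypothetical use of the partial-entry interface above; dtor_for_a
   and dtor_for_b stand for front-end cleanup expressions:

       begin_protect_partials ();
       add_partial_entry (dtor_for_a);   then construct a
       add_partial_entry (dtor_for_b);   then construct b
       end_protect_partials ();

   Because entries are consed onto the front of the list, the regions
   are closed innermost first: b's cleanup region, opened last, is
   ended before a's.  */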
1044 /* This section is for the exception handling specific optimization pass. */
1046 /* Provide random access to the exception region tree. It's just as simple to
1047 collect the regions this way as in expand_eh_region_start, but
1048 without having to realloc memory. */
1050 static void
1051 collect_eh_region_array ()
1053 struct eh_region **array, *i;
1055 i = cfun->eh->region_tree;
1056 if (! i)
1057 return;
1059 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1060 cfun->eh->region_array = array;
1062 while (1)
1064 array[i->region_number] = i;
1066 /* If there are sub-regions, process them. */
1067 if (i->inner)
1068 i = i->inner;
1069 /* If there are peers, process them. */
1070 else if (i->next_peer)
1071 i = i->next_peer;
1072 /* Otherwise, step back up the tree to the next peer. */
1073 else
1075 do {
1076 i = i->outer;
1077 if (i == NULL)
1078 return;
1079 } while (i->next_peer == NULL);
1080 i = i->next_peer;
1085 static void
1086 resolve_fixup_regions ()
1088 int i, j, n = cfun->eh->last_region_number;
1090 for (i = 1; i <= n; ++i)
1092 struct eh_region *fixup = cfun->eh->region_array[i];
1093 struct eh_region *cleanup;
1095 if (! fixup || fixup->type != ERT_FIXUP)
1096 continue;
1098 for (j = 1; j <= n; ++j)
1100 cleanup = cfun->eh->region_array[j];
1101 if (cleanup->type == ERT_CLEANUP
1102 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1103 break;
1105 if (j > n)
1106 abort ();
1108 fixup->u.fixup.real_region = cleanup->outer;
1112 /* Now that we've discovered what region actually encloses a fixup,
1113 we can shuffle pointers and remove them from the tree. */
1115 static void
1116 remove_fixup_regions ()
1118 int i;
1119 rtx insn, note;
1120 struct eh_region *fixup;
1122 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1123 for instructions referencing fixup regions. This is only
1124 strictly necessary for fixup regions with no parent, but
1125 doesn't hurt to do it for all regions. */
1126 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1127 if (INSN_P (insn)
1128 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1129 && INTVAL (XEXP (note, 0)) > 0
1130 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1131 && fixup->type == ERT_FIXUP)
1133 if (fixup->u.fixup.real_region)
1134 XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
1135 else
1136 remove_note (insn, note);
1139 /* Remove the fixup regions from the tree. */
1140 for (i = cfun->eh->last_region_number; i > 0; --i)
1142 fixup = cfun->eh->region_array[i];
1143 if (! fixup)
1144 continue;
1146 /* Allow GC to maybe free some memory. */
1147 if (fixup->type == ERT_CLEANUP)
1148 fixup->u.cleanup.exp = NULL_TREE;
1150 if (fixup->type != ERT_FIXUP)
1151 continue;
1153 if (fixup->inner)
1155 struct eh_region *parent, *p, **pp;
1157 parent = fixup->u.fixup.real_region;
1159 /* Fix up the children's parent pointers; find the end of
1160 the list. */
1161 for (p = fixup->inner; ; p = p->next_peer)
1163 p->outer = parent;
1164 if (! p->next_peer)
1165 break;
1168 /* In the tree of cleanups, only outer-inner ordering matters.
1169 So link the children back in anywhere at the correct level. */
1170 if (parent)
1171 pp = &parent->inner;
1172 else
1173 pp = &cfun->eh->region_tree;
1174 p->next_peer = *pp;
1175 *pp = fixup->inner;
1176 fixup->inner = NULL;
1179 remove_eh_handler (fixup);
1183 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1184 can_throw instruction in the region. */
1186 static void
1187 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1188 rtx *pinsns;
1189 int *orig_sp;
1190 int cur;
1192 int *sp = orig_sp;
1193 rtx insn, next;
1195 for (insn = *pinsns; insn ; insn = next)
1197 next = NEXT_INSN (insn);
1198 if (GET_CODE (insn) == NOTE)
1200 int kind = NOTE_LINE_NUMBER (insn);
1201 if (kind == NOTE_INSN_EH_REGION_BEG
1202 || kind == NOTE_INSN_EH_REGION_END)
1204 if (kind == NOTE_INSN_EH_REGION_BEG)
1206 struct eh_region *r;
1208 *sp++ = cur;
1209 cur = NOTE_EH_HANDLER (insn);
1211 r = cfun->eh->region_array[cur];
1212 if (r->type == ERT_FIXUP)
1214 r = r->u.fixup.real_region;
1215 cur = r ? r->region_number : 0;
1217 else if (r->type == ERT_CATCH)
1219 r = r->outer;
1220 cur = r ? r->region_number : 0;
1223 else
1224 cur = *--sp;
1226 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1227 requires extra care to adjust sequence start. */
1228 if (insn == *pinsns)
1229 *pinsns = next;
1230 remove_insn (insn);
1231 continue;
1234 else if (INSN_P (insn))
1236 if (cur > 0
1237 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1238 /* Calls can always potentially throw exceptions, unless
1239 they have a REG_EH_REGION note with a value of 0 or less,
1240 which should be the only possible kind so far. */
1241 && (GET_CODE (insn) == CALL_INSN
1242 /* If we wanted exceptions for non-call insns, then
1243 any may_trap_p instruction could throw. */
1244 || (flag_non_call_exceptions
1245 && may_trap_p (PATTERN (insn)))))
1247 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1248 REG_NOTES (insn));
1251 if (GET_CODE (insn) == CALL_INSN
1252 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1254 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1255 sp, cur);
1256 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1257 sp, cur);
1258 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1259 sp, cur);
1264 if (sp != orig_sp)
1265 abort ();
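/* A small worked example of the conversion above: given the note
   nesting

       EH_REGION_BEG 1 .. call_a .. EH_REGION_BEG 2 .. call_b ..
       EH_REGION_END 2 .. call_c .. EH_REGION_END 1

   the notes are deleted, call_a and call_c receive REG_EH_REGION
   notes with value 1, and call_b receives value 2: each potentially
   throwing insn is tagged with its innermost enclosing region.  */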
1268 void
1269 convert_from_eh_region_ranges ()
1271 int *stack;
1272 rtx insns;
1274 collect_eh_region_array ();
1275 resolve_fixup_regions ();
1277 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1278 insns = get_insns ();
1279 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1280 free (stack);
1282 remove_fixup_regions ();
1285 void
1286 find_exception_handler_labels ()
1288 rtx list = NULL_RTX;
1289 int i;
1291 free_EXPR_LIST_list (&exception_handler_labels);
1293 if (cfun->eh->region_tree == NULL)
1294 return;
1296 for (i = cfun->eh->last_region_number; i > 0; --i)
1298 struct eh_region *region = cfun->eh->region_array[i];
1299 rtx lab;
1301 if (! region)
1302 continue;
1303 if (cfun->eh->built_landing_pads)
1304 lab = region->landing_pad;
1305 else
1306 lab = region->label;
1308 if (lab)
1309 list = alloc_EXPR_LIST (0, lab, list);
1312 /* For sjlj exceptions, need the return label to remain live until
1313 after landing pad generation. */
1314 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1315 list = alloc_EXPR_LIST (0, return_label, list);
1317 exception_handler_labels = list;
1321 static struct eh_region *
1322 duplicate_eh_region_1 (o, map)
1323 struct eh_region *o;
1324 struct inline_remap *map;
1326 struct eh_region *n
1327 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1329 n->region_number = o->region_number + cfun->eh->last_region_number;
1330 n->type = o->type;
1332 switch (n->type)
1334 case ERT_CLEANUP:
1335 case ERT_MUST_NOT_THROW:
1336 break;
1338 case ERT_TRY:
1339 if (o->u.try.continue_label)
1340 n->u.try.continue_label
1341 = get_label_from_map (map,
1342 CODE_LABEL_NUMBER (o->u.try.continue_label));
1343 break;
1345 case ERT_CATCH:
1346 n->u.catch.type = o->u.catch.type;
1347 break;
1349 case ERT_ALLOWED_EXCEPTIONS:
1350 n->u.allowed.type_list = o->u.allowed.type_list;
1351 break;
1353 case ERT_THROW:
1354 n->u.throw.type = o->u.throw.type;
1356 default:
1357 abort ();
1360 if (o->label)
1361 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1362 if (o->resume)
1364 n->resume = map->insn_map[INSN_UID (o->resume)];
1365 if (n->resume == NULL)
1366 abort ();
1369 return n;
1372 static void
1373 duplicate_eh_region_2 (o, n_array)
1374 struct eh_region *o;
1375 struct eh_region **n_array;
1377 struct eh_region *n = n_array[o->region_number];
1379 switch (n->type)
1381 case ERT_TRY:
1382 n->u.try.catch = n_array[o->u.try.catch->region_number];
1383 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1384 break;
1386 case ERT_CATCH:
1387 if (o->u.catch.next_catch)
1388 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1389 if (o->u.catch.prev_catch)
1390 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1391 break;
1393 default:
1394 break;
1397 if (o->outer)
1398 n->outer = n_array[o->outer->region_number];
1399 if (o->inner)
1400 n->inner = n_array[o->inner->region_number];
1401 if (o->next_peer)
1402 n->next_peer = n_array[o->next_peer->region_number];
1405 int
1406 duplicate_eh_regions (ifun, map)
1407 struct function *ifun;
1408 struct inline_remap *map;
1410 int ifun_last_region_number = ifun->eh->last_region_number;
1411 struct eh_region **n_array, *root, *cur;
1412 int i;
1414 if (ifun_last_region_number == 0)
1415 return 0;
1417 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1419 for (i = 1; i <= ifun_last_region_number; ++i)
1421 cur = ifun->eh->region_array[i];
1422 if (!cur || cur->region_number != i)
1423 continue;
1424 n_array[i] = duplicate_eh_region_1 (cur, map);
1426 for (i = 1; i <= ifun_last_region_number; ++i)
1428 cur = ifun->eh->region_array[i];
1429 if (!cur || cur->region_number != i)
1430 continue;
1431 duplicate_eh_region_2 (cur, n_array);
1434 root = n_array[ifun->eh->region_tree->region_number];
1435 cur = cfun->eh->cur_region;
1436 if (cur)
1438 struct eh_region *p = cur->inner;
1439 if (p)
1441 while (p->next_peer)
1442 p = p->next_peer;
1443 p->next_peer = root;
1445 else
1446 cur->inner = root;
1448 for (i = 1; i <= ifun_last_region_number; ++i)
1449 if (n_array[i]->outer == NULL)
1450 n_array[i]->outer = cur;
1452 else
1454 struct eh_region *p = cfun->eh->region_tree;
1455 if (p)
1457 while (p->next_peer)
1458 p = p->next_peer;
1459 p->next_peer = root;
1461 else
1462 cfun->eh->region_tree = root;
1465 free (n_array);
1467 i = cfun->eh->last_region_number;
1468 cfun->eh->last_region_number = i + ifun_last_region_number;
1469 return i;
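/* A concrete instance of the renumbering above: if the current
   function already has 5 regions and the inlined IFUN has 3, the
   copies become regions 6..8, last_region_number becomes 8, and the
   returned offset of 5 gives the caller the amount by which to bias
   region references when copying the inlined body.  */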
1473 /* ??? Move from tree.c to tree.h. */
1474 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
1476 static int
1477 t2r_eq (pentry, pdata)
1478 const PTR pentry;
1479 const PTR pdata;
1481 tree entry = (tree) pentry;
1482 tree data = (tree) pdata;
1484 return TREE_PURPOSE (entry) == data;
1487 static hashval_t
1488 t2r_hash (pentry)
1489 const PTR pentry;
1491 tree entry = (tree) pentry;
1492 return TYPE_HASH (TREE_PURPOSE (entry));
1495 static int
1496 t2r_mark_1 (slot, data)
1497 PTR *slot;
1498 PTR data ATTRIBUTE_UNUSED;
1500 tree contents = (tree) *slot;
1501 ggc_mark_tree (contents);
1502 return 1;
1505 static void
1506 t2r_mark (addr)
1507 PTR addr;
1509 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1512 static void
1513 add_type_for_runtime (type)
1514 tree type;
1516 tree *slot;
1518 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1519 TYPE_HASH (type), INSERT);
1520 if (*slot == NULL)
1522 tree runtime = (*lang_eh_runtime_type) (type);
1523 *slot = tree_cons (type, runtime, NULL_TREE);
1527 static tree
1528 lookup_type_for_runtime (type)
1529 tree type;
1531 tree *slot;
1533 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1534 TYPE_HASH (type), NO_INSERT);
1536 /* We should always have inserted the data earlier. */
1537 return TREE_VALUE (*slot);
1541 /* Represent an entry in @TTypes for either catch actions
1542 or exception filter actions. */
1543 struct ttypes_filter
1545 tree t;
1546 int filter;
1549 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1550 (a tree) for a @TTypes type node we are thinking about adding. */
1552 static int
1553 ttypes_filter_eq (pentry, pdata)
1554 const PTR pentry;
1555 const PTR pdata;
1557 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1558 tree data = (tree) pdata;
1560 return entry->t == data;
1563 static hashval_t
1564 ttypes_filter_hash (pentry)
1565 const PTR pentry;
1567 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1568 return TYPE_HASH (entry->t);
1571 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1572 exception specification list we are thinking about adding. */
1573 /* ??? Currently we use the type lists in the order given. Someone
1574 should put these in some canonical order. */
1576 static int
1577 ehspec_filter_eq (pentry, pdata)
1578 const PTR pentry;
1579 const PTR pdata;
1581 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1582 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1584 return type_list_equal (entry->t, data->t);
1587 /* Hash function for exception specification lists. */
1589 static hashval_t
1590 ehspec_filter_hash (pentry)
1591 const PTR pentry;
1593 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1594 hashval_t h = 0;
1595 tree list;
1597 for (list = entry->t; list ; list = TREE_CHAIN (list))
1598 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1599 return h;
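/* The combining step above is, for a 32-bit hashval_t, an exact
   5-bit left rotation: (h << 5) has zero low bits and (h >> 27) fits
   within them, so the addition behaves as a bitwise OR.  Each list
   element's type hash is then mixed in, making the result depend on
   list order.  */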
1602 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1603 up the search. Return the filter value to be used. */
1605 static int
1606 add_ttypes_entry (ttypes_hash, type)
1607 htab_t ttypes_hash;
1608 tree type;
1610 struct ttypes_filter **slot, *n;
1612 slot = (struct ttypes_filter **)
1613 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1615 if ((n = *slot) == NULL)
1617 /* Filter value is a 1-based table index. */
1619 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1620 n->t = type;
1621 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1622 *slot = n;
1624 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1627 return n->filter;
1630 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1631 to speed up the search. Return the filter value to be used. */
1633 static int
1634 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1635 htab_t ehspec_hash;
1636 htab_t ttypes_hash;
1637 tree list;
1639 struct ttypes_filter **slot, *n;
1640 struct ttypes_filter dummy;
1642 dummy.t = list;
1643 slot = (struct ttypes_filter **)
1644 htab_find_slot (ehspec_hash, &dummy, INSERT);
1646 if ((n = *slot) == NULL)
1648 /* Filter value is a -1-based byte index into a uleb128 buffer. */
1650 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1651 n->t = list;
1652 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1653 *slot = n;
1655 /* Look up each type in the list and encode its filter
1656 value as a uleb128. Terminate the list with 0. */
1657 for (; list ; list = TREE_CHAIN (list))
1658 push_uleb128 (&cfun->eh->ehspec_data,
1659 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1660 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1663 return n->filter;
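/* push_uleb128, prototyped above, appends the standard unsigned
   LEB128 variable-length encoding.  A self-contained sketch of the
   encoding itself (illustrative name, not the GCC routine):

       static void
       encode_uleb128 (unsigned char *buf, int *len, unsigned int value)
       {
         do
           {
             unsigned char byte = value & 0x7f;
             value >>= 7;
             if (value)
               byte |= 0x80;
             buf[(*len)++] = byte;
           }
         while (value);
       }

   Each byte carries 7 value bits; the high bit marks continuation.
   For example, 624485 encodes as the bytes 0xe5 0x8e 0x26.  */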
1666 /* Generate the action filter values to be used for CATCH and
1667 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1668 we use lots of landing pads, and so every type or list can share
1669 the same filter value, which saves table space. */
1671 static void
1672 assign_filter_values ()
1674 int i;
1675 htab_t ttypes, ehspec;
1677 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1678 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1680 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1681 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1683 for (i = cfun->eh->last_region_number; i > 0; --i)
1685 struct eh_region *r = cfun->eh->region_array[i];
1687 /* Mind we don't process a region more than once. */
1688 if (!r || r->region_number != i)
1689 continue;
1691 switch (r->type)
1693 case ERT_CATCH:
1694 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1695 break;
1697 case ERT_ALLOWED_EXCEPTIONS:
1698 r->u.allowed.filter
1699 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1700 break;
1702 default:
1703 break;
1707 htab_delete (ttypes);
1708 htab_delete (ehspec);
1711 static void
1712 build_post_landing_pads ()
1714 int i;
1716 for (i = cfun->eh->last_region_number; i > 0; --i)
1718 struct eh_region *region = cfun->eh->region_array[i];
1719 rtx seq;
1721 /* Mind we don't process a region more than once. */
1722 if (!region || region->region_number != i)
1723 continue;
1725 switch (region->type)
1727 case ERT_TRY:
1728 /* ??? Collect the set of all non-overlapping catch handlers
1729 all the way up the chain until blocked by a cleanup. */
1730 /* ??? Outer try regions can share landing pads with inner
1731 try regions if the types are completely non-overlapping,
1732 and there are no intervening cleanups. */
1734 region->post_landing_pad = gen_label_rtx ();
1736 start_sequence ();
1738 emit_label (region->post_landing_pad);
1740 /* ??? It is mighty inconvenient to call back into the
1741 switch statement generation code in expand_end_case.
1742 Rapid prototyping sez a sequence of ifs. */
1744 struct eh_region *c;
1745 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1747 /* ??? _Unwind_ForcedUnwind wants no match here. */
1748 if (c->u.catch.type == NULL)
1749 emit_jump (c->label);
1750 else
1751 emit_cmp_and_jump_insns (cfun->eh->filter,
1752 GEN_INT (c->u.catch.filter),
1753 EQ, NULL_RTX, word_mode,
1754 0, 0, c->label);
1758 /* We delay the generation of the _Unwind_Resume until we generate
1759 landing pads. We emit a marker here so as to get good control
1760 flow data in the meantime. */
1761 region->resume
1762 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1763 emit_barrier ();
1765 seq = get_insns ();
1766 end_sequence ();
1768 emit_insns_before (seq, region->u.try.catch->label);
1769 break;
1771 case ERT_ALLOWED_EXCEPTIONS:
1772 region->post_landing_pad = gen_label_rtx ();
1774 start_sequence ();
1776 emit_label (region->post_landing_pad);
1778 emit_cmp_and_jump_insns (cfun->eh->filter,
1779 GEN_INT (region->u.allowed.filter),
1780 EQ, NULL_RTX, word_mode, 0, 0,
1781 region->label);
1783 /* We delay the generation of the _Unwind_Resume until we generate
1784 landing pads. We emit a marker here so as to get good control
1785 flow data in the meantime. */
1786 region->resume
1787 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1788 emit_barrier ();
1790 seq = get_insns ();
1791 end_sequence ();
1793 emit_insns_before (seq, region->label);
1794 break;
1796 case ERT_CLEANUP:
1797 case ERT_MUST_NOT_THROW:
1798 region->post_landing_pad = region->label;
1799 break;
1801 case ERT_CATCH:
1802 case ERT_THROW:
1803 /* Nothing to do. */
1804 break;
1806 default:
1807 abort ();
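/* For the ERT_TRY case above, the emitted post-landing pad amounts to
   this shape of code (pseudo-C for the generated RTL):

       post_landing_pad:
         if (filter == filter_for_catch_1) goto catch_1;
         if (filter == filter_for_catch_2) goto catch_2;
         resx;

   A catch-all clause (type == NULL) becomes an unconditional jump,
   and the trailing RESX is rewritten later by
   connect_post_landing_pads.  */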
1812 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1813 _Unwind_Resume otherwise. */
1815 static void
1816 connect_post_landing_pads ()
1818 int i;
1820 for (i = cfun->eh->last_region_number; i > 0; --i)
1822 struct eh_region *region = cfun->eh->region_array[i];
1823 struct eh_region *outer;
1824 rtx seq;
1826 /* Mind we don't process a region more than once. */
1827 if (!region || region->region_number != i)
1828 continue;
1830 /* If there is no RESX, or it has been deleted by flow, there's
1831 nothing to fix up. */
1832 if (! region->resume || INSN_DELETED_P (region->resume))
1833 continue;
1835 /* Search for another landing pad in this function. */
1836 for (outer = region->outer; outer ; outer = outer->outer)
1837 if (outer->post_landing_pad)
1838 break;
1840 start_sequence ();
1842 if (outer)
1843 emit_jump (outer->post_landing_pad);
1844 else
1845 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1846 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1848 seq = get_insns ();
1849 end_sequence ();
1850 emit_insns_before (seq, region->resume);
1852 /* Leave the RESX to be deleted by flow. */
1857 static void
1858 dw2_build_landing_pads ()
1860 int i, j;
1862 for (i = cfun->eh->last_region_number; i > 0; --i)
1864 struct eh_region *region = cfun->eh->region_array[i];
1865 rtx seq;
1867 /* Mind we don't process a region more than once. */
1868 if (!region || region->region_number != i)
1869 continue;
1871 if (region->type != ERT_CLEANUP
1872 && region->type != ERT_TRY
1873 && region->type != ERT_ALLOWED_EXCEPTIONS)
1874 continue;
1876 start_sequence ();
1878 region->landing_pad = gen_label_rtx ();
1879 emit_label (region->landing_pad);
1881 #ifdef HAVE_exception_receiver
1882 if (HAVE_exception_receiver)
1883 emit_insn (gen_exception_receiver ());
1884 else
1885 #endif
1886 #ifdef HAVE_nonlocal_goto_receiver
1887 if (HAVE_nonlocal_goto_receiver)
1888 emit_insn (gen_nonlocal_goto_receiver ());
1889 else
1890 #endif
1891 { /* Nothing */ }
1893 /* If the eh_return data registers are call-saved, then we
1894 won't have considered them clobbered from the call that
1895 threw. Kill them now. */
1896 for (j = 0; ; ++j)
1898 unsigned r = EH_RETURN_DATA_REGNO (j);
1899 if (r == INVALID_REGNUM)
1900 break;
1901 if (! call_used_regs[r])
1902 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1905 emit_move_insn (cfun->eh->exc_ptr,
1906 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1907 emit_move_insn (cfun->eh->filter,
1908 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
1910 seq = get_insns ();
1911 end_sequence ();
1913 emit_insns_before (seq, region->post_landing_pad);
1918 struct sjlj_lp_info
1920 int directly_reachable;
1921 int action_index;
1922 int dispatch_index;
1923 int call_site_index;
1926 static bool
1927 sjlj_find_directly_reachable_regions (lp_info)
1928 struct sjlj_lp_info *lp_info;
1930 rtx insn;
1931 bool found_one = false;
1933 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1935 struct eh_region *region;
1936 tree type_thrown;
1937 rtx note;
1939 if (! INSN_P (insn))
1940 continue;
1942 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1943 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1944 continue;
1946 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1948 type_thrown = NULL_TREE;
1949 if (region->type == ERT_THROW)
1951 type_thrown = region->u.throw.type;
1952 region = region->outer;
1955 /* Find the first containing region that might handle the exception.
1956 That's the landing pad to which we will transfer control. */
1957 for (; region; region = region->outer)
1958 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1959 break;
1961 if (region)
1963 lp_info[region->region_number].directly_reachable = 1;
1964 found_one = true;
1968 return found_one;
1971 static void
1972 sjlj_assign_call_site_values (dispatch_label, lp_info)
1973 rtx dispatch_label;
1974 struct sjlj_lp_info *lp_info;
1976 htab_t ar_hash;
1977 int i, index;
1979 /* First task: build the action table. */
1981 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1982 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1984 for (i = cfun->eh->last_region_number; i > 0; --i)
1985 if (lp_info[i].directly_reachable)
1987 struct eh_region *r = cfun->eh->region_array[i];
1988 r->landing_pad = dispatch_label;
1989 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1990 if (lp_info[i].action_index != -1)
1991 cfun->uses_eh_lsda = 1;
1994 htab_delete (ar_hash);
1996 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1997 landing pad label for the region. For sjlj though, there is one
1998 common landing pad from which we dispatch to the post-landing pads.
2000 A region receives a dispatch index if it is directly reachable
2001 and requires in-function processing. Regions that share post-landing
2002 pads may share dispatch indices. */
2003 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2004 (see build_post_landing_pads) so we don't bother checking for it. */
2006 index = 0;
2007 for (i = cfun->eh->last_region_number; i > 0; --i)
2008 if (lp_info[i].directly_reachable
2009 && lp_info[i].action_index >= 0)
2010 lp_info[i].dispatch_index = index++;
2012 /* Finally: assign call-site values. In dwarf2 terms, this would be
2013 the region number assigned by convert_to_eh_region_ranges, but
2014 handles no-action and must-not-throw differently. */
2016 call_site_base = 1;
2017 for (i = cfun->eh->last_region_number; i > 0; --i)
2018 if (lp_info[i].directly_reachable)
2020 int action = lp_info[i].action_index;
2022 /* Map must-not-throw to otherwise unused call-site index 0. */
2023 if (action == -2)
2024 index = 0;
2025 /* Map no-action to otherwise unused call-site index -1. */
2026 else if (action == -1)
2027 index = -1;
2028 /* Otherwise, look it up in the table. */
2029 else
2030 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2032 lp_info[i].call_site_index = index;
2036 static void
2037 sjlj_mark_call_sites (lp_info)
2038 struct sjlj_lp_info *lp_info;
2040 int last_call_site = -2;
2041 rtx insn, mem;
2043 mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2044 plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
2045 sjlj_fc_call_site_ofs));
2047 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2049 struct eh_region *region;
2050 int this_call_site;
2051 rtx note, before, p;
2053 /* Reset value tracking at extended basic block boundaries. */
2054 if (GET_CODE (insn) == CODE_LABEL)
2055 last_call_site = -2;
2057 if (! INSN_P (insn))
2058 continue;
2060 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2061 if (!note)
2063 /* Calls (and trapping insns) without notes are outside any
2064 exception handling region in this function. Mark them as
2065 no action. */
2066 if (GET_CODE (insn) == CALL_INSN
2067 || (flag_non_call_exceptions
2068 && may_trap_p (PATTERN (insn))))
2069 this_call_site = -1;
2070 else
2071 continue;
2073 else
2075 /* Calls that are known to not throw need not be marked. */
2076 if (INTVAL (XEXP (note, 0)) <= 0)
2077 continue;
2079 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2080 this_call_site = lp_info[region->region_number].call_site_index;
2083 if (this_call_site == last_call_site)
2084 continue;
2086 /* Don't separate a call from its argument loads. */
2087 before = insn;
2088 if (GET_CODE (insn) == CALL_INSN)
2090 HARD_REG_SET parm_regs;
2091 int nparm_regs;
2093 /* Since different machines initialize their parameter registers
2094 in different orders, assume nothing. Collect the set of all
2095 parameter registers. */
2096 CLEAR_HARD_REG_SET (parm_regs);
2097 nparm_regs = 0;
2098 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
2099 if (GET_CODE (XEXP (p, 0)) == USE
2100 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
2102 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
2103 abort ();
2105 /* We only care about registers which can hold function
2106 arguments. */
2107 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
2108 continue;
2110 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
2111 nparm_regs++;
2114 /* Search backward for the first set of a register in this set. */
2115 while (nparm_regs)
2117 before = PREV_INSN (before);
2119 /* Given that we've done no other optimizations yet,
2120 the arguments should be immediately available. */
2121 if (GET_CODE (before) == CODE_LABEL)
2122 abort ();
2124 p = single_set (before);
2125 if (p && GET_CODE (SET_DEST (p)) == REG
2126 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
2127 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
2129 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
2130 nparm_regs--;
2135 start_sequence ();
2136 emit_move_insn (mem, GEN_INT (this_call_site));
2137 p = get_insns ();
2138 end_sequence ();
2140 emit_insns_before (p, before);
2141 last_call_site = this_call_site;
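/* The net effect of the pass above is that each insn which may throw
   is preceded by a store of its call-site index into the SjLj
   function context, as if the source read (a sketch; the field name
   is illustrative):

	fc.call_site = 3;
	foo ();			-- foo may throw; the dispatcher sees 3

   Stores that would be redundant within an extended basic block are
   suppressed via LAST_CALL_SITE above. */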
2145 /* Construct the SjLj_Function_Context. */
2147 static void
2148 sjlj_emit_function_enter (dispatch_label)
2149 rtx dispatch_label;
2151 rtx fn_begin, fc, mem, seq;
2153 fc = cfun->eh->sjlj_fc;
2155 start_sequence ();
2157 /* We're storing this libcall's address into memory instead of
2158 calling it directly. Thus, we must call assemble_external_libcall
2159 here, as we cannot depend on emit_library_call to do it for us. */
2160 assemble_external_libcall (eh_personality_libfunc);
2161 mem = change_address (fc, Pmode,
2162 plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
2163 emit_move_insn (mem, eh_personality_libfunc);
2165 mem = change_address (fc, Pmode,
2166 plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
2167 if (cfun->uses_eh_lsda)
2169 char buf[20];
2170 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2171 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2173 else
2174 emit_move_insn (mem, const0_rtx);
2176 #ifdef DONT_USE_BUILTIN_SETJMP
2178 rtx x, note;
2179 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2180 TYPE_MODE (integer_type_node), 1,
2181 plus_constant (XEXP (fc, 0),
2182 sjlj_fc_jbuf_ofs), Pmode);
2184 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2185 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2187 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2188 TYPE_MODE (integer_type_node), 0, 0,
2189 dispatch_label);
2191 #else
2192 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2193 dispatch_label);
2194 #endif
2196 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2197 1, XEXP (fc, 0), Pmode);
2199 seq = get_insns ();
2200 end_sequence ();
2202 /* ??? Instead of doing this at the beginning of the function,
2203 do this in a block that is at loop level 0 and dominates all
2204 can_throw_internal instructions. */
2206 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2207 if (GET_CODE (fn_begin) == NOTE
2208 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2209 break;
2210 emit_insns_after (seq, fn_begin);
2213 /* Call back from expand_function_end to know where we should put
2214 the call to unwind_sjlj_unregister_libfunc if needed. */
2216 void
2217 sjlj_emit_function_exit_after (after)
2218 rtx after;
2220 cfun->eh->sjlj_exit_after = after;
2223 static void
2224 sjlj_emit_function_exit ()
2226 rtx seq;
2228 start_sequence ();
2230 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2231 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2233 seq = get_insns ();
2234 end_sequence ();
2236 /* ??? Really this can be done in any block at loop level 0 that
2237 post-dominates all can_throw_internal instructions. This is
2238 the last possible moment. */
2240 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2243 static void
2244 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2245 rtx dispatch_label;
2246 struct sjlj_lp_info *lp_info;
2248 int i, first_reachable;
2249 rtx mem, dispatch, seq, fc;
2251 fc = cfun->eh->sjlj_fc;
2253 start_sequence ();
2255 emit_label (dispatch_label);
2257 #ifndef DONT_USE_BUILTIN_SETJMP
2258 expand_builtin_setjmp_receiver (dispatch_label);
2259 #endif
2261 /* Load up dispatch index, exc_ptr and filter values from the
2262 function context. */
2263 mem = change_address (fc, TYPE_MODE (integer_type_node),
2264 plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
2265 dispatch = copy_to_reg (mem);
2267 mem = change_address (fc, word_mode,
2268 plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
2269 if (word_mode != Pmode)
2271 #ifdef POINTERS_EXTEND_UNSIGNED
2272 mem = convert_memory_address (Pmode, mem);
2273 #else
2274 mem = convert_to_mode (Pmode, mem, 0);
2275 #endif
2277 emit_move_insn (cfun->eh->exc_ptr, mem);
2279 mem = change_address (fc, word_mode,
2280 plus_constant (XEXP (fc, 0),
2281 sjlj_fc_data_ofs + UNITS_PER_WORD));
2282 emit_move_insn (cfun->eh->filter, mem);
2284 /* Jump to one of the directly reachable regions. */
2285 /* ??? This really ought to be using a switch statement. */
2287 first_reachable = 0;
2288 for (i = cfun->eh->last_region_number; i > 0; --i)
2290 if (! lp_info[i].directly_reachable
2291 || lp_info[i].action_index < 0)
2292 continue;
2294 if (! first_reachable)
2296 first_reachable = i;
2297 continue;
2300 emit_cmp_and_jump_insns (dispatch,
2301 GEN_INT (lp_info[i].dispatch_index), EQ,
2302 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2303 cfun->eh->region_array[i]->post_landing_pad);
2306 seq = get_insns ();
2307 end_sequence ();
2309 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2310 ->post_landing_pad));
2313 static void
2314 sjlj_build_landing_pads ()
2316 struct sjlj_lp_info *lp_info;
2318 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2319 sizeof (struct sjlj_lp_info));
2321 if (sjlj_find_directly_reachable_regions (lp_info))
2323 rtx dispatch_label = gen_label_rtx ();
2325 cfun->eh->sjlj_fc
2326 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2327 int_size_in_bytes (sjlj_fc_type_node),
2328 TYPE_ALIGN (sjlj_fc_type_node));
2330 sjlj_assign_call_site_values (dispatch_label, lp_info);
2331 sjlj_mark_call_sites (lp_info);
2333 sjlj_emit_function_enter (dispatch_label);
2334 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2335 sjlj_emit_function_exit ();
2338 free (lp_info);
2341 void
2342 finish_eh_generation ()
2344 /* Nothing to do if no regions created. */
2345 if (cfun->eh->region_tree == NULL)
2346 return;
2348 /* The object here is to provide find_basic_blocks with detailed
2349 information (via reachable_handlers) on how exception control
2350 flows within the function. In this first pass, we can include
2351 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2352 regions, and hope that it will be useful in deleting unreachable
2353 handlers. Subsequently, we will generate landing pads which will
2354 connect many of the handlers, and then type information will not
2355 be effective. Still, this is a win over previous implementations. */
2357 jump_optimize_minimal (get_insns ());
2358 find_basic_blocks (get_insns (), max_reg_num (), 0);
2359 cleanup_cfg ();
2361 /* These registers are used by the landing pads. Make sure they
2362 have been generated. */
2363 get_exception_pointer ();
2364 get_exception_filter ();
2366 /* Construct the landing pads. */
2368 assign_filter_values ();
2369 build_post_landing_pads ();
2370 connect_post_landing_pads ();
2371 if (USING_SJLJ_EXCEPTIONS)
2372 sjlj_build_landing_pads ();
2373 else
2374 dw2_build_landing_pads ();
2376 cfun->eh->built_landing_pads = 1;
2378 /* We've totally changed the CFG. Start over. */
2379 find_exception_handler_labels ();
2380 jump_optimize_minimal (get_insns ());
2381 find_basic_blocks (get_insns (), max_reg_num (), 0);
2382 cleanup_cfg ();
2385 /* This section handles removing dead code for flow. */
2387 /* Remove LABEL from the exception_handler_labels list. */
2389 static void
2390 remove_exception_handler_label (label)
2391 rtx label;
2393 rtx *pl, l;
2395 for (pl = &exception_handler_labels, l = *pl;
2396 XEXP (l, 0) != label;
2397 pl = &XEXP (l, 1), l = *pl)
2398 continue;
2400 *pl = XEXP (l, 1);
2401 free_EXPR_LIST_node (l);
2404 /* Splice REGION from the region tree etc. */
2406 static void
2407 remove_eh_handler (region)
2408 struct eh_region *region;
2410 struct eh_region **pp, *p;
2411 rtx lab;
2412 int i;
2414 /* For the benefit of efficiently handling REG_EH_REGION notes,
2415 replace this region in the region array with its containing
2416 region. Note that previous region deletions may result in
2417 multiple copies of this region in the array, so we have to
2418 search the whole thing. */
2419 for (i = cfun->eh->last_region_number; i > 0; --i)
2420 if (cfun->eh->region_array[i] == region)
2421 cfun->eh->region_array[i] = region->outer;
2423 if (cfun->eh->built_landing_pads)
2424 lab = region->landing_pad;
2425 else
2426 lab = region->label;
2427 if (lab)
2428 remove_exception_handler_label (lab);
2430 if (region->outer)
2431 pp = &region->outer->inner;
2432 else
2433 pp = &cfun->eh->region_tree;
2434 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2435 continue;
2437 if (region->inner)
2439 for (p = region->inner; p->next_peer ; p = p->next_peer)
2440 p->outer = region->outer;
2441 p->next_peer = region->next_peer;
2442 p->outer = region->outer;
2443 *pp = region->inner;
2445 else
2446 *pp = region->next_peer;
2448 if (region->type == ERT_CATCH)
2450 struct eh_region *try, *next, *prev;
2452 for (try = region->next_peer;
2453 try->type == ERT_CATCH;
2454 try = try->next_peer)
2455 continue;
2456 if (try->type != ERT_TRY)
2457 abort ();
2459 next = region->u.catch.next_catch;
2460 prev = region->u.catch.prev_catch;
2462 if (next)
2463 next->u.catch.prev_catch = prev;
2464 else
2465 try->u.try.last_catch = prev;
2466 if (prev)
2467 prev->u.catch.next_catch = next;
2468 else
2470 try->u.try.catch = next;
2471 if (! next)
2472 remove_eh_handler (try);
2476 free (region);
2479 /* LABEL heads a basic block that is about to be deleted. If this
2480 label corresponds to an exception region, we may be able to
2481 delete the region. */
2483 void
2484 maybe_remove_eh_handler (label)
2485 rtx label;
2487 int i;
2489 /* ??? After generating landing pads, it's not so simple to determine
2490 if the region data is completely unused. One must examine the
2491 landing pad and the post landing pad, and whether an inner try block
2492 is referencing the catch handlers directly. */
2493 if (cfun->eh->built_landing_pads)
2494 return;
2496 for (i = cfun->eh->last_region_number; i > 0; --i)
2498 struct eh_region *region = cfun->eh->region_array[i];
2499 if (region && region->label == label)
2501 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2502 because there is no path to the fallback call to terminate.
2503 But the region continues to affect call-site data until there
2504 are no more contained calls, which we don't see here. */
2505 if (region->type == ERT_MUST_NOT_THROW)
2507 remove_exception_handler_label (region->label);
2508 region->label = NULL_RTX;
2510 else
2511 remove_eh_handler (region);
2512 break;
2518 /* This section describes CFG exception edges for flow. */
2520 /* For communicating between calls to reachable_next_level. */
2521 struct reachable_info
2523 tree types_caught;
2524 tree types_allowed;
2525 rtx handlers;
2528 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2529 base class of TYPE, is in HANDLED. */
2531 static int
2532 check_handled (handled, type)
2533 tree handled, type;
2535 tree t;
2537 /* We can check for exact matches without front-end help. */
2538 if (! lang_eh_type_covers)
2540 for (t = handled; t ; t = TREE_CHAIN (t))
2541 if (TREE_VALUE (t) == type)
2542 return 1;
2544 else
2546 for (t = handled; t ; t = TREE_CHAIN (t))
2547 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2548 return 1;
2551 return 0;
2554 /* A subroutine of reachable_next_level. If we are collecting a list
2555 of handlers, add one. After landing pad generation, reference
2556 the landing pad instead of the handlers themselves. Further, the handlers are
2557 all wired together, so by referencing one, we've got them all.
2558 Before landing pad generation we reference each handler individually.
2560 LP_REGION contains the landing pad; REGION is the handler. */
2562 static void
2563 add_reachable_handler (info, lp_region, region)
2564 struct reachable_info *info;
2565 struct eh_region *lp_region;
2566 struct eh_region *region;
2568 if (! info)
2569 return;
2571 if (cfun->eh->built_landing_pads)
2573 if (! info->handlers)
2574 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2576 else
2577 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2580 /* Process one level of exception regions for reachability.
2581 If TYPE_THROWN is non-null, then it is the *exact* type being
2582 propagated. If INFO is non-null, then collect handler labels
2583 and caught/allowed type information between invocations. */
2585 static enum reachable_code
2586 reachable_next_level (region, type_thrown, info)
2587 struct eh_region *region;
2588 tree type_thrown;
2589 struct reachable_info *info;
2591 switch (region->type)
2593 case ERT_CLEANUP:
2594 /* Before landing-pad generation, we model control flow
2595 directly to the individual handlers. In this way we can
2596 see that catch handler types may shadow one another. */
2597 add_reachable_handler (info, region, region);
2598 return RNL_MAYBE_CAUGHT;
2600 case ERT_TRY:
2602 struct eh_region *c;
2603 enum reachable_code ret = RNL_NOT_CAUGHT;
2605 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2607 /* A catch-all handler ends the search. */
2608 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2609 to be run as well. */
2610 if (c->u.catch.type == NULL)
2612 add_reachable_handler (info, region, c);
2613 return RNL_CAUGHT;
2616 if (type_thrown)
2618 /* If we have a type match, end the search. */
2619 if (c->u.catch.type == type_thrown
2620 || (lang_eh_type_covers
2621 && (*lang_eh_type_covers) (c->u.catch.type,
2622 type_thrown)))
2624 add_reachable_handler (info, region, c);
2625 return RNL_CAUGHT;
2628 /* If we have definitive information about a match failure,
2629 the catch won't trigger. */
2630 if (lang_eh_type_covers)
2631 return RNL_NOT_CAUGHT;
2634 if (! info)
2635 ret = RNL_MAYBE_CAUGHT;
2637 /* A type must not have been previously caught. */
2638 else if (! check_handled (info->types_caught, c->u.catch.type))
2640 add_reachable_handler (info, region, c);
2641 info->types_caught = tree_cons (NULL, c->u.catch.type,
2642 info->types_caught);
2644 /* ??? If the catch type is a base class of every allowed
2645 type, then we know we can stop the search. */
2646 ret = RNL_MAYBE_CAUGHT;
2650 return ret;
2653 case ERT_ALLOWED_EXCEPTIONS:
2654 /* An empty list of types definitely ends the search. */
2655 if (region->u.allowed.type_list == NULL_TREE)
2657 add_reachable_handler (info, region, region);
2658 return RNL_CAUGHT;
2661 /* Collect a list of lists of allowed types for use in detecting
2662 when a catch may be transformed into a catch-all. */
2663 if (info)
2664 info->types_allowed = tree_cons (NULL_TREE,
2665 region->u.allowed.type_list,
2666 info->types_allowed);
2668 /* If we have definitive information about the type hierarchy,
2669 then we can tell if the thrown type will pass through the
2670 filter. */
2671 if (type_thrown && lang_eh_type_covers)
2673 if (check_handled (region->u.allowed.type_list, type_thrown))
2674 return RNL_NOT_CAUGHT;
2675 else
2677 add_reachable_handler (info, region, region);
2678 return RNL_CAUGHT;
2682 add_reachable_handler (info, region, region);
2683 return RNL_MAYBE_CAUGHT;
2685 case ERT_CATCH:
2686 /* Catch regions are handled by their controlling try region. */
2687 return RNL_NOT_CAUGHT;
2689 case ERT_MUST_NOT_THROW:
2690 /* Here we end our search, since no exceptions may propagate.
2691 If we've touched down at some landing pad previously, then the
2692 explicit function call we generated may be used. Otherwise
2693 the call is made by the runtime. */
2694 if (info && info->handlers)
2696 add_reachable_handler (info, region, region);
2697 return RNL_CAUGHT;
2699 else
2700 return RNL_BLOCKED;
2702 case ERT_THROW:
2703 case ERT_FIXUP:
2704 /* Shouldn't see these here. */
2705 break;
2708 abort ();
2711 /* Retrieve a list of labels of exception handlers which can be
2712 reached by a given insn. */
2715 reachable_handlers (insn)
2716 rtx insn;
2718 struct reachable_info info;
2719 struct eh_region *region;
2720 tree type_thrown;
2721 int region_number;
2723 if (GET_CODE (insn) == JUMP_INSN
2724 && GET_CODE (PATTERN (insn)) == RESX)
2725 region_number = XINT (PATTERN (insn), 0);
2726 else
2728 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2729 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2730 return NULL;
2731 region_number = INTVAL (XEXP (note, 0));
2734 memset (&info, 0, sizeof (info));
2736 region = cfun->eh->region_array[region_number];
2738 type_thrown = NULL_TREE;
2739 if (region->type == ERT_THROW)
2741 type_thrown = region->u.throw.type;
2742 region = region->outer;
2744 else if (GET_CODE (insn) == JUMP_INSN
2745 && GET_CODE (PATTERN (insn)) == RESX)
2746 region = region->outer;
2748 for (; region; region = region->outer)
2749 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2750 break;
2752 return info.handlers;
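/* The list returned above is an INSN_LIST of handler labels; a
   consumer can walk it in the usual way (a sketch; what is done with
   each label, e.g. edge creation in flow, is up to the caller):

	rtx x;
	for (x = reachable_handlers (insn); x ; x = XEXP (x, 1))
	  ... XEXP (x, 0) is a reachable handler label ...
 */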
2755 /* Determine if the given INSN can throw an exception that is caught
2756 within the function. */
2758 bool
2759 can_throw_internal (insn)
2760 rtx insn;
2762 struct eh_region *region;
2763 tree type_thrown;
2764 rtx note;
2766 if (! INSN_P (insn))
2767 return false;
2769 if (GET_CODE (insn) == INSN
2770 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2771 insn = XVECEXP (PATTERN (insn), 0, 0);
2773 if (GET_CODE (insn) == CALL_INSN
2774 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2776 int i;
2777 for (i = 0; i < 3; ++i)
2779 rtx sub = XEXP (PATTERN (insn), i);
2780 for (; sub ; sub = NEXT_INSN (sub))
2781 if (can_throw_internal (sub))
2782 return true;
2784 return false;
2787 /* Every insn that might throw has an EH_REGION note. */
2788 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2789 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2790 return false;
2792 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2794 type_thrown = NULL_TREE;
2795 if (region->type == ERT_THROW)
2797 type_thrown = region->u.throw.type;
2798 region = region->outer;
2801 /* If this exception is ignored by each and every containing region,
2802 then control passes straight out. The runtime may handle some
2803 regions, which also do not require processing internally. */
2804 for (; region; region = region->outer)
2806 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2807 if (how == RNL_BLOCKED)
2808 return false;
2809 if (how != RNL_NOT_CAUGHT)
2810 return true;
2813 return false;
2816 /* Determine if the given INSN can throw an exception that is
2817 visible outside the function. */
2819 bool
2820 can_throw_external (insn)
2821 rtx insn;
2823 struct eh_region *region;
2824 tree type_thrown;
2825 rtx note;
2827 if (! INSN_P (insn))
2828 return false;
2830 if (GET_CODE (insn) == INSN
2831 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2832 insn = XVECEXP (PATTERN (insn), 0, 0);
2834 if (GET_CODE (insn) == CALL_INSN
2835 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2837 int i;
2838 for (i = 0; i < 3; ++i)
2840 rtx sub = XEXP (PATTERN (insn), i);
2841 for (; sub ; sub = NEXT_INSN (sub))
2842 if (can_throw_external (sub))
2843 return true;
2845 return false;
2848 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2849 if (!note)
2851 /* Calls (and trapping insns) without notes are outside any
2852 exception handling region in this function. We have to
2853 assume it might throw. Given that the front end and middle
2854 ends mark known NOTHROW functions, this isn't so wildly
2855 inaccurate. */
2856 return (GET_CODE (insn) == CALL_INSN
2857 || (flag_non_call_exceptions
2858 && may_trap_p (PATTERN (insn))));
2860 if (INTVAL (XEXP (note, 0)) <= 0)
2861 return false;
2863 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2865 type_thrown = NULL_TREE;
2866 if (region->type == ERT_THROW)
2868 type_thrown = region->u.throw.type;
2869 region = region->outer;
2872 /* If the exception is caught or blocked by any containing region,
2873 then it is not seen by any calling function. */
2874 for (; region ; region = region->outer)
2875 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2876 return false;
2878 return true;
2881 /* True if nothing in this function can throw outside this function. */
2883 bool
2884 nothrow_function_p ()
2886 rtx insn;
2888 if (! flag_exceptions)
2889 return true;
2891 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2892 if (can_throw_external (insn))
2893 return false;
2894 for (insn = current_function_epilogue_delay_list; insn;
2895 insn = XEXP (insn, 1))
2896 if (can_throw_external (insn))
2897 return false;
2899 return true;
2903 /* Various hooks for unwind library. */
2905 /* Do any necessary initialization to access arbitrary stack frames.
2906 On the SPARC, this means flushing the register windows. */
2908 void
2909 expand_builtin_unwind_init ()
2911 /* Set this so all the registers get saved in our frame; we need to be
2912 able to copy the saved values for any registers from frames we unwind. */
2913 current_function_has_nonlocal_label = 1;
2915 #ifdef SETUP_FRAME_ADDRESSES
2916 SETUP_FRAME_ADDRESSES ();
2917 #endif
2921 expand_builtin_eh_return_data_regno (arglist)
2922 tree arglist;
2924 tree which = TREE_VALUE (arglist);
2925 unsigned HOST_WIDE_INT iwhich;
2927 if (TREE_CODE (which) != INTEGER_CST)
2929 error ("argument of `__builtin_eh_return_regno' must be constant");
2930 return constm1_rtx;
2933 iwhich = tree_low_cst (which, 1);
2934 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2935 if (iwhich == INVALID_REGNUM)
2936 return constm1_rtx;
2938 #ifdef DWARF_FRAME_REGNUM
2939 iwhich = DWARF_FRAME_REGNUM (iwhich);
2940 #else
2941 iwhich = DBX_REGISTER_NUMBER (iwhich);
2942 #endif
2944 return GEN_INT (iwhich);
2947 /* Given a value extracted from the return address register or stack slot,
2948 return the actual address encoded in that value. */
2951 expand_builtin_extract_return_addr (addr_tree)
2952 tree addr_tree;
2954 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2956 /* First mask out any unwanted bits. */
2957 #ifdef MASK_RETURN_ADDR
2958 expand_and (addr, MASK_RETURN_ADDR, addr);
2959 #endif
2961 /* Then adjust to find the real return address. */
2962 #if defined (RETURN_ADDR_OFFSET)
2963 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2964 #endif
2966 return addr;
2969 /* Given an actual address in addr_tree, do any necessary encoding
2970 and return the value to be stored in the return address register or
2971 stack slot so the epilogue will return to that address. */
2974 expand_builtin_frob_return_addr (addr_tree)
2975 tree addr_tree;
2977 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2979 #ifdef RETURN_ADDR_OFFSET
2980 addr = force_reg (Pmode, addr);
2981 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2982 #endif
2984 return addr;
2987 /* Set up the epilogue with the magic bits we'll need to return to the
2988 exception handler. */
2990 void
2991 expand_builtin_eh_return (stackadj_tree, handler_tree)
2992 tree stackadj_tree, handler_tree;
2994 rtx stackadj, handler;
2996 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2997 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2999 if (! cfun->eh->ehr_label)
3001 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3002 cfun->eh->ehr_handler = copy_to_reg (handler);
3003 cfun->eh->ehr_label = gen_label_rtx ();
3005 else
3007 if (stackadj != cfun->eh->ehr_stackadj)
3008 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3009 if (handler != cfun->eh->ehr_handler)
3010 emit_move_insn (cfun->eh->ehr_handler, handler);
3013 emit_jump (cfun->eh->ehr_label);
3016 void
3017 expand_eh_return ()
3019 rtx sa, ra, around_label;
3021 if (! cfun->eh->ehr_label)
3022 return;
3024 sa = EH_RETURN_STACKADJ_RTX;
3025 if (! sa)
3027 error ("__builtin_eh_return not supported on this target");
3028 return;
3031 current_function_calls_eh_return = 1;
3033 around_label = gen_label_rtx ();
3034 emit_move_insn (sa, const0_rtx);
3035 emit_jump (around_label);
3037 emit_label (cfun->eh->ehr_label);
3038 clobber_return_register ();
3040 #ifdef HAVE_eh_return
3041 if (HAVE_eh_return)
3042 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3043 else
3044 #endif
3046 rtx handler;
3048 ra = EH_RETURN_HANDLER_RTX;
3049 if (! ra)
3051 error ("__builtin_eh_return not supported on this target");
3052 ra = gen_reg_rtx (Pmode);
3055 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3057 handler = cfun->eh->ehr_handler;
3058 if (GET_MODE (ra) != Pmode)
3060 #ifdef POINTERS_EXTEND_UNSIGNED
3061 handler = convert_memory_address (GET_MODE (ra), handler);
3062 #else
3063 handler = convert_to_mode (GET_MODE (ra), handler, 0);
3064 #endif
3066 emit_move_insn (ra, handler);
3069 emit_label (around_label);
3072 struct action_record
3074 int offset;
3075 int filter;
3076 int next;
3079 static int
3080 action_record_eq (pentry, pdata)
3081 const PTR pentry;
3082 const PTR pdata;
3084 const struct action_record *entry = (const struct action_record *) pentry;
3085 const struct action_record *data = (const struct action_record *) pdata;
3086 return entry->filter == data->filter && entry->next == data->next;
3089 static hashval_t
3090 action_record_hash (pentry)
3091 const PTR pentry;
3093 const struct action_record *entry = (const struct action_record *) pentry;
3094 return entry->next * 1009 + entry->filter;
3097 static int
3098 add_action_record (ar_hash, filter, next)
3099 htab_t ar_hash;
3100 int filter, next;
3102 struct action_record **slot, *new, tmp;
3104 tmp.filter = filter;
3105 tmp.next = next;
3106 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3108 if ((new = *slot) == NULL)
3110 new = (struct action_record *) xmalloc (sizeof (*new));
3111 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3112 new->filter = filter;
3113 new->next = next;
3114 *slot = new;
3116 /* The filter value goes in untouched. The link to the next
3117 record is a "self-relative" byte offset, or zero to indicate
3118 that there is no next record. So convert the absolute 1-based
3119 indices we've been carrying around into a displacement. */
3121 push_sleb128 (&cfun->eh->action_record_data, filter);
3122 if (next)
3123 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3124 push_sleb128 (&cfun->eh->action_record_data, next);
3127 return new->offset;
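/* A worked example of the encoding above: starting from an empty
   table, adding a record with filter 1 and no successor pushes the
   bytes {1, 0} and returns offset 1. Adding a record with filter 2
   chained to it then pushes {2, 0x7d} and returns offset 3: the link
   byte sits at 1-based position 4 and encodes -3, which is exactly
   the first record's position relative to the link byte itself. */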
3130 static int
3131 collect_one_action_chain (ar_hash, region)
3132 htab_t ar_hash;
3133 struct eh_region *region;
3135 struct eh_region *c;
3136 int next;
3138 /* If we've reached the top of the region chain, then we have
3139 no actions, and require no landing pad. */
3140 if (region == NULL)
3141 return -1;
3143 switch (region->type)
3145 case ERT_CLEANUP:
3146 /* A cleanup adds a zero filter to the beginning of the chain, but
3147 there are special cases to look out for. If there are *only*
3148 cleanups along a path, then it compresses to a zero action.
3149 Further, if there are multiple cleanups along a path, we only
3150 need to represent one of them, as that is enough to trigger
3151 entry to the landing pad at runtime. */
3152 next = collect_one_action_chain (ar_hash, region->outer);
3153 if (next <= 0)
3154 return 0;
3155 for (c = region->outer; c ; c = c->outer)
3156 if (c->type == ERT_CLEANUP)
3157 return next;
3158 return add_action_record (ar_hash, 0, next);
3160 case ERT_TRY:
3161 /* Process the associated catch regions in reverse order.
3162 If there's a catch-all handler, then we don't need to
3163 search outer regions. Use a magic -3 value to record
3164 that we haven't done the outer search. */
3165 next = -3;
3166 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3168 if (c->u.catch.type == NULL)
3169 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3170 else
3172 if (next == -3)
3174 next = collect_one_action_chain (ar_hash, region->outer);
3175 if (next < 0)
3176 next = 0;
3178 next = add_action_record (ar_hash, c->u.catch.filter, next);
3181 return next;
3183 case ERT_ALLOWED_EXCEPTIONS:
3184 /* An exception specification adds its filter to the
3185 beginning of the chain. */
3186 next = collect_one_action_chain (ar_hash, region->outer);
3187 return add_action_record (ar_hash, region->u.allowed.filter,
3188 next < 0 ? 0 : next);
3190 case ERT_MUST_NOT_THROW:
3191 /* A must-not-throw region with no inner handlers or cleanups
3192 requires no call-site entry. Note that this differs from
3193 the no handler or cleanup case in that we do require an lsda
3194 to be generated. Return a magic -2 value to record this. */
3195 return -2;
3197 case ERT_CATCH:
3198 case ERT_THROW:
3199 /* CATCH regions are handled in TRY above. THROW regions are
3200 for optimization information only and produce no output. */
3201 return collect_one_action_chain (ar_hash, region->outer);
3203 default:
3204 abort ();
3208 static int
3209 add_call_site (landing_pad, action)
3210 rtx landing_pad;
3211 int action;
3213 struct call_site_record *data = cfun->eh->call_site_data;
3214 int used = cfun->eh->call_site_data_used;
3215 int size = cfun->eh->call_site_data_size;
3217 if (used >= size)
3219 size = (size ? size * 2 : 64);
3220 data = (struct call_site_record *)
3221 xrealloc (data, sizeof (*data) * size);
3222 cfun->eh->call_site_data = data;
3223 cfun->eh->call_site_data_size = size;
3226 data[used].landing_pad = landing_pad;
3227 data[used].action = action;
3229 cfun->eh->call_site_data_used = used + 1;
3231 return used + call_site_base;
3234 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3235 The new note numbers will not refer to region numbers, but
3236 instead to call site entries. */
3238 void
3239 convert_to_eh_region_ranges ()
3241 rtx insn, iter, note;
3242 htab_t ar_hash;
3243 int last_action = -3;
3244 rtx last_action_insn = NULL_RTX;
3245 rtx last_landing_pad = NULL_RTX;
3246 rtx first_no_action_insn = NULL_RTX;
3247 int call_site;
3249 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3250 return;
3252 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3254 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3256 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3257 if (INSN_P (iter))
3259 struct eh_region *region;
3260 int this_action;
3261 rtx this_landing_pad;
3263 insn = iter;
3264 if (GET_CODE (insn) == INSN
3265 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3266 insn = XVECEXP (PATTERN (insn), 0, 0);
3268 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3269 if (!note)
3271 if (! (GET_CODE (insn) == CALL_INSN
3272 || (flag_non_call_exceptions
3273 && may_trap_p (PATTERN (insn)))))
3274 continue;
3275 this_action = -1;
3276 region = NULL;
3278 else
3280 if (INTVAL (XEXP (note, 0)) <= 0)
3281 continue;
3282 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3283 this_action = collect_one_action_chain (ar_hash, region);
3286 /* The existence of catch handlers or must-not-throw regions
3287 implies that an lsda is needed (even if empty). */
3288 if (this_action != -1)
3289 cfun->uses_eh_lsda = 1;
3291 /* Delay creation of region notes for no-action regions
3292 until we're sure that an lsda will be required. */
3293 else if (last_action == -3)
3295 first_no_action_insn = iter;
3296 last_action = -1;
3299 /* Cleanups and handlers may share action chains but not
3300 landing pads. Collect the landing pad for this region. */
3301 if (this_action >= 0)
3303 struct eh_region *o;
3304 for (o = region; ! o->landing_pad ; o = o->outer)
3305 continue;
3306 this_landing_pad = o->landing_pad;
3308 else
3309 this_landing_pad = NULL_RTX;
3311 /* Differing actions or landing pads implies a change in call-site
3312 info, which implies some EH_REGION note should be emitted. */
3313 if (last_action != this_action
3314 || last_landing_pad != this_landing_pad)
3316 /* If we'd not seen a previous action (-3) or the previous
3317 action was must-not-throw (-2), then we do not need an
3318 end note. */
3319 if (last_action >= -1)
3321 /* If we delayed the creation of the begin, do it now. */
3322 if (first_no_action_insn)
3324 call_site = add_call_site (NULL_RTX, 0);
3325 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3326 first_no_action_insn);
3327 NOTE_EH_HANDLER (note) = call_site;
3328 first_no_action_insn = NULL_RTX;
3331 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3332 last_action_insn);
3333 NOTE_EH_HANDLER (note) = call_site;
3336 /* If the new action is must-not-throw, then no region notes
3337 are created. */
3338 if (this_action >= -1)
3340 call_site = add_call_site (this_landing_pad,
3341 this_action < 0 ? 0 : this_action);
3342 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3343 NOTE_EH_HANDLER (note) = call_site;
3346 last_action = this_action;
3347 last_landing_pad = this_landing_pad;
3349 last_action_insn = iter;
3352 if (last_action >= -1 && ! first_no_action_insn)
3354 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3355 NOTE_EH_HANDLER (note) = call_site;
3358 htab_delete (ar_hash);
3362 static void
3363 push_uleb128 (data_area, value)
3364 varray_type *data_area;
3365 unsigned int value;
3369 unsigned char byte = value & 0x7f;
3370 value >>= 7;
3371 if (value)
3372 byte |= 0x80;
3373 VARRAY_PUSH_UCHAR (*data_area, byte);
3375 while (value);
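/* For illustration only, a minimal decoder matching the unsigned
   encoding above. It is a sketch and is not used in this file; the
   name and the buffer-based interface are hypothetical. */

static const unsigned char *
read_uleb128 (p, value)
     const unsigned char *p;
     unsigned int *value;
{
  unsigned int result = 0;
  unsigned int shift = 0;
  unsigned char byte;

  do
    {
      /* Accumulate seven payload bits per byte, low-order first.  */
      byte = *p++;
      result |= (unsigned int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  *value = result;
  return p;
}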
3378 static void
3379 push_sleb128 (data_area, value)
3380 varray_type *data_area;
3381 int value;
3383 unsigned char byte;
3384 int more;
3388 byte = value & 0x7f;
3389 value >>= 7;
3390 more = ! ((value == 0 && (byte & 0x40) == 0)
3391 || (value == -1 && (byte & 0x40) != 0));
3392 if (more)
3393 byte |= 0x80;
3394 VARRAY_PUSH_UCHAR (*data_area, byte);
3396 while (more);
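/* Worked examples of the encodings above: push_uleb128 (&a, 624485)
   appends the bytes 0xe5 0x8e 0x26, and push_sleb128 (&a, -2)
   appends the single byte 0x7e. Note that the signed encoder assumes
   `>>' performs an arithmetic shift on negative values, as it does
   on the hosts GCC supports. */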
3400 #ifndef HAVE_AS_LEB128
3401 static int
3402 dw2_size_of_call_site_table ()
3404 int n = cfun->eh->call_site_data_used;
3405 int size = n * (4 + 4 + 4);
3406 int i;
3408 for (i = 0; i < n; ++i)
3410 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3411 size += size_of_uleb128 (cs->action);
3414 return size;
3417 static int
3418 sjlj_size_of_call_site_table ()
3420 int n = cfun->eh->call_site_data_used;
3421 int size = 0;
3422 int i;
3424 for (i = 0; i < n; ++i)
3426 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3427 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3428 size += size_of_uleb128 (cs->action);
3431 return size;
3433 #endif
3435 static void
3436 dw2_output_call_site_table ()
3438 const char *function_start_lab
3439 = IDENTIFIER_POINTER (current_function_func_begin_label);
3440 int n = cfun->eh->call_site_data_used;
3441 int i;
3443 for (i = 0; i < n; ++i)
3445 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3446 char reg_start_lab[32];
3447 char reg_end_lab[32];
3448 char landing_pad_lab[32];
3450 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3451 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3453 if (cs->landing_pad)
3454 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3455 CODE_LABEL_NUMBER (cs->landing_pad));
3457 /* ??? Perhaps use insn length scaling if the assembler supports
3458 generic arithmetic. */
3459 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3460 data4 if the function is small enough. */
3461 #ifdef HAVE_AS_LEB128
3462 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3463 "region %d start", i);
3464 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3465 "length");
3466 if (cs->landing_pad)
3467 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3468 "landing pad");
3469 else
3470 dw2_asm_output_data_uleb128 (0, "landing pad");
3471 #else
3472 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3473 "region %d start", i);
3474 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3475 if (cs->landing_pad)
3476 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3477 "landing pad");
3478 else
3479 dw2_asm_output_data (4, 0, "landing pad");
3480 #endif
3481 dw2_asm_output_data_uleb128 (cs->action, "action");
3484 call_site_base += n;
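/* Each record emitted above thus has four fields: the region start
   (an offset from the function start), the region length (its
   extent), the landing pad (likewise function-relative, or 0 if
   there is none), and the action (a 1-based offset into the action
   record table, or 0 for no action). The first three are uleb128
   deltas or 4-byte data depending on HAVE_AS_LEB128; the action is
   always uleb128. */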
3487 static void
3488 sjlj_output_call_site_table ()
3490 int n = cfun->eh->call_site_data_used;
3491 int i;
3493 for (i = 0; i < n; ++i)
3495 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3497 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3498 "region %d landing pad", i);
3499 dw2_asm_output_data_uleb128 (cs->action, "action");
3502 call_site_base += n;
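/* The sjlj variant needs no address ranges: the index stored by
   sjlj_mark_call_sites selects a record here, whose "landing pad"
   field is the dispatch index used by sjlj_emit_dispatch_table
   rather than an address. */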
3505 void
3506 output_function_exception_table ()
3508 int tt_format, cs_format, lp_format, i, n;
3509 #ifdef HAVE_AS_LEB128
3510 char ttype_label[32];
3511 char cs_after_size_label[32];
3512 char cs_end_label[32];
3513 #else
3514 int call_site_len;
3515 #endif
3516 int have_tt_data;
3517 int funcdef_number;
3518 int tt_format_size;
3520 /* Not all functions need anything. */
3521 if (! cfun->uses_eh_lsda)
3522 return;
3524 funcdef_number = (USING_SJLJ_EXCEPTIONS
3525 ? sjlj_funcdef_number
3526 : current_funcdef_number);
3528 #ifdef IA64_UNWIND_INFO
3529 fputs ("\t.personality\t", asm_out_file);
3530 output_addr_const (asm_out_file, eh_personality_libfunc);
3531 fputs ("\n\t.handlerdata\n", asm_out_file);
3532 /* Note that varasm still thinks we're in the function's code section.
3533 The ".endp" directive that will immediately follow will take us back. */
3534 #else
3535 exception_section ();
3536 #endif
3538 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3539 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3541 /* Indicate the format of the @TType entries. */
3542 if (! have_tt_data)
3543 tt_format = DW_EH_PE_omit;
3544 else
3546 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3547 #ifdef HAVE_AS_LEB128
3548 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3549 #endif
3550 tt_format_size = size_of_encoded_value (tt_format);
3552 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3555 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3557 /* The LSDA header. */
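/* Schematically, the output below is (a sketch, with optional parts
   in brackets):

	.byte		@LPStart format		(DW_EH_PE_omit here)
	[@LPStart pointer, omitted]
	.byte		@TType format
	[.uleb128	@TType base offset]
	.byte		call-site format
	.uleb128	call-site table length
	  <call-site table>
	  <action record table>
	  [<@TType table, aligned, emitted in reverse>]
	  <exception specification table>
 */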
3559 /* Indicate the format of the landing pad start pointer. An omitted
3560 field implies @LPStart == @Start. */
3561 /* Currently we always put @LPStart == @Start. This field would
3562 be most useful in moving the landing pads completely out of
3563 line to another section, but it could also be used to minimize
3564 the size of uleb128 landing pad offsets. */
3565 lp_format = DW_EH_PE_omit;
3566 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3567 eh_data_format_name (lp_format));
3569 /* @LPStart pointer would go here. */
3571 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3572 eh_data_format_name (tt_format));
3574 #ifndef HAVE_AS_LEB128
3575 if (USING_SJLJ_EXCEPTIONS)
3576 call_site_len = sjlj_size_of_call_site_table ();
3577 else
3578 call_site_len = dw2_size_of_call_site_table ();
3579 #endif
3581 /* A pc-relative 4-byte displacement to the @TType data. */
3582 if (have_tt_data)
3584 #ifdef HAVE_AS_LEB128
3585 char ttype_after_disp_label[32];
3586 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3587 funcdef_number);
3588 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3589 "@TType base offset");
3590 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3591 #else
3592 /* Ug. Alignment queers things. */
3593 unsigned int before_disp, after_disp, last_disp, disp;
3595 before_disp = 1 + 1;
3596 after_disp = (1 + size_of_uleb128 (call_site_len)
3597 + call_site_len
3598 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3599 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3600 * tt_format_size));
3602 disp = after_disp;
3605 unsigned int disp_size, pad;
3607 last_disp = disp;
3608 disp_size = size_of_uleb128 (disp);
3609 pad = before_disp + disp_size + after_disp;
3610 if (pad % tt_format_size)
3611 pad = tt_format_size - (pad % tt_format_size);
3612 else
3613 pad = 0;
3614 disp = after_disp + pad;
3616 while (disp != last_disp);
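/* The loop above is needed because the uleb128 encoding of DISP can
   itself change size, which changes the padding, which changes DISP.
   Iterating until DISP is stable finds a consistent value; since
   size_of_uleb128 grows monotonically and the padding is bounded by
   tt_format_size - 1, this settles after a few iterations. */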
3618 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3619 #endif
3622 /* Indicate the format of the call-site offsets. */
3623 #ifdef HAVE_AS_LEB128
3624 cs_format = DW_EH_PE_uleb128;
3625 #else
3626 cs_format = DW_EH_PE_udata4;
3627 #endif
3628 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3629 eh_data_format_name (cs_format));
3631 #ifdef HAVE_AS_LEB128
3632 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3633 funcdef_number);
3634 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3635 funcdef_number);
3636 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3637 "Call-site table length");
3638 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3639 if (USING_SJLJ_EXCEPTIONS)
3640 sjlj_output_call_site_table ();
3641 else
3642 dw2_output_call_site_table ();
3643 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3644 #else
3645 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3646 if (USING_SJLJ_EXCEPTIONS)
3647 sjlj_output_call_site_table ();
3648 else
3649 dw2_output_call_site_table ();
3650 #endif
3652 /* ??? Decode and interpret the data for flag_debug_asm. */
3653 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3654 for (i = 0; i < n; ++i)
3655 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3656 (i ? NULL : "Action record table"));
3658 if (have_tt_data)
3659 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3661 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3662 while (i-- > 0)
3664 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3666 if (type == NULL_TREE)
3667 type = integer_zero_node;
3668 else
3669 type = lookup_type_for_runtime (type);
3671 dw2_asm_output_encoded_addr_rtx (tt_format,
3672 expand_expr (type, NULL_RTX, VOIDmode,
3673 EXPAND_INITIALIZER),
3674 NULL);
3677 #ifdef HAVE_AS_LEB128
3678 if (have_tt_data)
3679 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3680 #endif
3682 /* ??? Decode and interpret the data for flag_debug_asm. */
3683 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3684 for (i = 0; i < n; ++i)
3685 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3686 (i ? NULL : "Exception specification table"));
3688 function_section (current_function_decl);
3690 if (USING_SJLJ_EXCEPTIONS)
3691 sjlj_funcdef_number += 1;