/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
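
/* A minimal usage sketch (an illustration, not part of the original
   sources): a front end wraps a statement in a cleanup region by
   bracketing its expansion with the region functions defined below.
   BODY and CLEANUP are hypothetical trees supplied by the front end.

     expand_eh_region_start ();
     expand_expr (body, const0_rtx, VOIDmode, 0);
     expand_eh_region_end_cleanup (cleanup);

   Try/catch constructs follow the same pattern: the try body is
   bracketed by expand_eh_region_start and expand_start_all_catch,
   each handler by expand_start_catch and expand_end_catch, and the
   whole construct closed with expand_end_all_catch.  */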
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static unsigned int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};
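
/* An illustrative sketch (an assumption drawn from the expansion code
   below, not from the original sources) of the region tree built for
   one try block with two catch clauses, all inside a cleanup:

     ERT_CLEANUP
       ERT_TRY      u.try.catch/last_catch point at the catches
       ERT_CATCH    first handler; a peer of the try region
       ERT_CATCH    second handler; u.catch.prev_catch -> first

   The catch regions end up as peers of the try region (the try is
   popped before expand_start_catch opens them), linked to it through
   u.try.catch/last_catch and chained via u.catch.next_catch.  */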
/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static void mark_eh_region PARAMS ((struct eh_region *));

static int t2r_eq PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash PARAMS ((const PTR));
static int t2r_mark_1 PARAMS ((PTR *, PTR));
static void t2r_mark PARAMS ((PTR));
static void add_type_for_runtime PARAMS ((tree));
static tree lookup_type_for_runtime PARAMS ((tree));

static struct eh_region *expand_eh_region_end PARAMS ((void));

static rtx get_exception_filter PARAMS ((struct function *));

static void collect_eh_region_array PARAMS ((void));
static void resolve_fixup_regions PARAMS ((void));
static void remove_fixup_regions PARAMS ((void));
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
							struct inline_remap *));
static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
					   struct eh_region **));
static int ttypes_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash PARAMS ((const PTR));
static int ehspec_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash PARAMS ((const PTR));
static int add_ttypes_entry PARAMS ((htab_t, tree));
static int add_ehspec_entry PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values PARAMS ((void));
static void build_post_landing_pads PARAMS ((void));
static void connect_post_landing_pads PARAMS ((void));
static void dw2_build_landing_pads PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter PARAMS ((rtx));
static void sjlj_emit_function_exit PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads PARAMS ((void));

static void remove_exception_handler_label PARAMS ((rtx));
static void remove_eh_handler PARAMS ((struct eh_region *));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash PARAMS ((const PTR));
static int add_action_record PARAMS ((htab_t, int, int));
static int collect_one_action_chain PARAMS ((htab_t, struct eh_region *));
static int add_call_site PARAMS ((rtx, int));

static void push_uleb128 PARAMS ((varray_type *, unsigned int));
static void push_sleb128 PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table PARAMS ((void));
static int sjlj_size_of_call_site_table PARAMS ((void));
#endif
static void dw2_output_call_site_table PARAMS ((void));
static void sjlj_output_call_site_table PARAMS ((void));
/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh ()
{
  ggc_add_rtx_root (&exception_handler_labels, 1);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
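
/* For reference, an approximate C rendering of the record built above
   (an assumption derived from the field declarations, not copied from
   unwind-sjlj.c; the jbuf size and the data element type are
   target-dependent):

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *__prev;
       int __call_site;
       unsigned long __data[4];
       void *__personality;
       void *__lsda;
       void *__jbuf[JBUF_LEN];
     };  */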
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
/* Mark EH for GC.  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_UNKNOWN:
      /* This can happen if a nested function is inside the body of a region
	 and we do a GC as part of processing it.  */
      break;
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type_list);
      ggc_mark_tree (region->u.catch.filter_list);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}
void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}
void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	  else
	    {
	      do {
		next = r->outer;
		free (r);
		r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with several exception types,
   which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
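
/* An illustrative scenario (an assumption, not from the original
   sources): for a C++-style block

     {
       Obj o;
       if (cond)
	 goto done;	// expand_cleanups emits o's destructor here
     }
   done:;

   the destructor call emitted for the goto must not itself be covered
   by o's cleanup region, or an exception from it would run the cleanup
   twice.  The fixup region marks that code so it is later re-parented
   to the region enclosing the cleanup.  */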
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (Pmode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}

/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit calls to
     begin_protect_partials for the outermost region.  So, we must
     explicitly do so here.  */
  if (!cfun->eh->protect_list)
    begin_protect_partials ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}

/* End all the pending exception regions on protect_list.  */

void
end_protect_partials ()
{
  tree t;

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit the call to
     begin_protect_partials for the outermost region.  So,
     PROTECT_LIST may be NULL.  */
  if (!cfun->eh->protect_list)
    return;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}
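
/* A usage sketch (an illustration, not part of the original sources)
   of the partials API above, protecting two statements with their own
   cleanups.  CLEANUP1 and CLEANUP2 are hypothetical handler trees:

     begin_protect_partials ();
     add_partial_entry (cleanup1);	// opens region 1
     ... expand first statement ...
     add_partial_entry (cleanup2);	// opens region 2
     ... expand second statement ...
     end_protect_partials ();		// ends region 2, then region 1

   Because each add_partial_entry prepends to the list, the loop in
   end_protect_partials closes the regions innermost-first.  */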
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less,
		 which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}
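
/* For illustration (not from the original sources), the conversion
   above turns the note-delimited form

     NOTE_INSN_EH_REGION_BEG 3
       (call_insn ...)
     NOTE_INSN_EH_REGION_END 3

   into a bare call_insn carrying a REG_EH_REGION note with value 3;
   the region notes themselves are removed.  */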
void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
}
void
find_exception_handler_labels ()
{
  rtx list = NULL_RTX;
  int i;

  free_EXPR_LIST_list (&exception_handler_labels);

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	list = alloc_EXPR_LIST (0, lab, list);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    list = alloc_EXPR_LIST (0, return_label, list);

  exception_handler_labels = list;
}
static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static int
t2r_mark_1 (slot, data)
     PTR *slot;
     PTR data ATTRIBUTE_UNUSED;
{
  tree contents = (tree) *slot;
  ggc_mark_tree (contents);
  return 1;
}

static void
t2r_mark (addr)
     PTR addr;
{
  htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
}

static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
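
/* An encoding example (an illustration, not from the original
   sources): for an exception specification listing types A and B
   whose ttypes filters are 1 and 2, the bytes appended to
   ehspec_data are

     0x01 0x02 0x00	-- uleb128 (1), uleb128 (2), terminator

   and the returned filter is the negated 1-based index of the first
   byte, e.g. -1 if the buffer was previously empty.  */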
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	     all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* ??? _Unwind_ForcedUnwind wants no match here.  */
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      emit_insns_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
static void
dw2_build_landing_pads ()
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insns_before (seq, region->post_landing_pad);
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (lp_info)
     struct sjlj_lp_info *lp_info;
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
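
/* Schematically (an illustration, not lifted from the original
   sources), the resulting sjlj scheme has every protected call record
   its call-site index in the function context before the call, and a
   single shared dispatch point that examines the value the unwinder
   leaves in the context and jumps to the matching region, roughly:

     dispatch:
       switch (fc.__call_site)
	 {
	 case 0: goto post_landing_pad_of_region_A;
	 case 1: goto post_landing_pad_of_region_B;
	 ...
	 }  */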
2120 static void
2121 sjlj_mark_call_sites (lp_info)
2122 struct sjlj_lp_info *lp_info;
2124 int last_call_site = -2;
2125 rtx insn, mem;
2127 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2129 struct eh_region *region;
2130 int this_call_site;
2131 rtx note, before, p;
2133 /* Reset value tracking at extended basic block boundaries. */
2134 if (GET_CODE (insn) == CODE_LABEL)
2135 last_call_site = -2;
2137 if (! INSN_P (insn))
2138 continue;
2140 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2141 if (!note)
2143 /* Calls (and trapping insns) without notes are outside any
2144 exception handling region in this function. Mark them as
2145 no action. */
2146 if (GET_CODE (insn) == CALL_INSN
2147 || (flag_non_call_exceptions
2148 && may_trap_p (PATTERN (insn))))
2149 this_call_site = -1;
2150 else
2151 continue;
2153 else
2155 /* Calls that are known to not throw need not be marked. */
2156 if (INTVAL (XEXP (note, 0)) <= 0)
2157 continue;
2159 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2160 this_call_site = lp_info[region->region_number].call_site_index;
2163 if (this_call_site == last_call_site)
2164 continue;
2166 /* Don't separate a call from its argument loads. */
2167 before = insn;
2168 if (GET_CODE (insn) == CALL_INSN)
2169 before = find_first_parameter_load (insn, NULL_RTX);
2171 start_sequence ();
2172 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2173 sjlj_fc_call_site_ofs);
2174 emit_move_insn (mem, GEN_INT (this_call_site));
2175 p = get_insns ();
2176 end_sequence ();
2178 emit_insns_before (p, before);
2179 last_call_site = this_call_site;
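/* The net effect, sketched as pseudo-C for illustration (foo and bar
   stand for arbitrary throwing calls):

	fc.call_site = 2;
	foo ();			-- dispatcher will read 2 if this throws
	fc.call_site = 3;
	bar ();			-- dispatcher will read 3 if this throws

   Consecutive insns that share a call-site index get a single store;
   that is what the last_call_site tracking above buys us.  */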
2183 /* Construct the SjLj_Function_Context. */
2185 static void
2186 sjlj_emit_function_enter (dispatch_label)
2187 rtx dispatch_label;
2189 rtx fn_begin, fc, mem, seq;
2191 fc = cfun->eh->sjlj_fc;
2193 start_sequence ();
2195 /* We're storing this libcall's address into memory instead of
2196 calling it directly. Thus, we must call assemble_external_libcall
2197 here, as we cannot depend on emit_library_call to do it for us. */
2198 assemble_external_libcall (eh_personality_libfunc);
2199 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2200 emit_move_insn (mem, eh_personality_libfunc);
2202 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2203 if (cfun->uses_eh_lsda)
2205 char buf[20];
2206 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2207 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2209 else
2210 emit_move_insn (mem, const0_rtx);
2212 #ifdef DONT_USE_BUILTIN_SETJMP
2214 rtx x, note;
2215 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2216 TYPE_MODE (integer_type_node), 1,
2217 plus_constant (XEXP (fc, 0),
2218 sjlj_fc_jbuf_ofs), Pmode);
2220 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2221 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2223 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2224 TYPE_MODE (integer_type_node), 0, dispatch_label);
2226 #else
2227 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2228 dispatch_label);
2229 #endif
2231 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2232 1, XEXP (fc, 0), Pmode);
2234 seq = get_insns ();
2235 end_sequence ();
2237 /* ??? Instead of doing this at the beginning of the function,
2238 do this in a block that is at loop level 0 and dominates all
2239 can_throw_internal instructions. */
2241 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2242 if (GET_CODE (fn_begin) == NOTE
2243 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2244 break;
2245 emit_insns_after (seq, fn_begin);
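/* Roughly, the sequence built above corresponds to this pseudo-C,
   where fc is the on-stack SjLj function context and the names stand
   in for whatever the language and target actually provide:

	fc.personality = eh_personality_libfunc;
	fc.lsda = &LLSDA_label;		-- or 0 if no lsda is used
	if (setjmp (fc.jbuf))
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);
   */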
2248 /* Call back from expand_function_end to know where we should put
2249 the call to unwind_sjlj_unregister_libfunc if needed. */
2251 void
2252 sjlj_emit_function_exit_after (after)
2253 rtx after;
2255 cfun->eh->sjlj_exit_after = after;
2258 static void
2259 sjlj_emit_function_exit ()
2261 rtx seq;
2263 start_sequence ();
2265 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2266 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2268 seq = get_insns ();
2269 end_sequence ();
2271 /* ??? Really this can be done in any block at loop level 0 that
2272 post-dominates all can_throw_internal instructions. This is
2273 the last possible moment. */
2275 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2278 static void
2279 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2280 rtx dispatch_label;
2281 struct sjlj_lp_info *lp_info;
2283 int i, first_reachable;
2284 rtx mem, dispatch, seq, fc;
2286 fc = cfun->eh->sjlj_fc;
2288 start_sequence ();
2290 emit_label (dispatch_label);
2292 #ifndef DONT_USE_BUILTIN_SETJMP
2293 expand_builtin_setjmp_receiver (dispatch_label);
2294 #endif
2296 /* Load up dispatch index, exc_ptr and filter values from the
2297 function context. */
2298 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2299 sjlj_fc_call_site_ofs);
2300 dispatch = copy_to_reg (mem);
2302 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2303 if (word_mode != Pmode)
2305 #ifdef POINTERS_EXTEND_UNSIGNED
2306 mem = convert_memory_address (Pmode, mem);
2307 #else
2308 mem = convert_to_mode (Pmode, mem, 0);
2309 #endif
2311 emit_move_insn (cfun->eh->exc_ptr, mem);
2313 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2314 emit_move_insn (cfun->eh->filter, mem);
2316 /* Jump to one of the directly reachable regions. */
2317 /* ??? This really ought to be using a switch statement. */
2319 first_reachable = 0;
2320 for (i = cfun->eh->last_region_number; i > 0; --i)
2322 if (! lp_info[i].directly_reachable)
2323 continue;
2325 if (! first_reachable)
2327 first_reachable = i;
2328 continue;
2331 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2332 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2333 cfun->eh->region_array[i]->post_landing_pad);
2336 seq = get_insns ();
2337 end_sequence ();
2339 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2340 ->post_landing_pad));
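/* For illustration, the dispatch code assembled above amounts to:

	dispatch_label:
	  dispatch = fc.call_site;
	  exc_ptr = fc.data[0];
	  filter = fc.data[1];
	  if (dispatch == index_2) goto post_landing_pad_2;
	  if (dispatch == index_3) goto post_landing_pad_3;
	  ...

   falling through into the post-landing pad of the first reachable
   region, for which no comparison is emitted.  Hence the ??? note
   above about wanting a switch statement.  */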
2343 static void
2344 sjlj_build_landing_pads ()
2346 struct sjlj_lp_info *lp_info;
2348 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2349 sizeof (struct sjlj_lp_info));
2351 if (sjlj_find_directly_reachable_regions (lp_info))
2353 rtx dispatch_label = gen_label_rtx ();
2355 cfun->eh->sjlj_fc
2356 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2357 int_size_in_bytes (sjlj_fc_type_node),
2358 TYPE_ALIGN (sjlj_fc_type_node));
2360 sjlj_assign_call_site_values (dispatch_label, lp_info);
2361 sjlj_mark_call_sites (lp_info);
2363 sjlj_emit_function_enter (dispatch_label);
2364 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2365 sjlj_emit_function_exit ();
2368 free (lp_info);
2371 void
2372 finish_eh_generation ()
2374 /* Nothing to do if no regions created. */
2375 if (cfun->eh->region_tree == NULL)
2376 return;
2378 /* The object here is to provide find_basic_blocks with detailed
2379 information (via reachable_handlers) on how exception control
2380 flows within the function. In this first pass, we can include
2381 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2382 regions, and hope that it will be useful in deleting unreachable
2383 handlers. Subsequently, we will generate landing pads which will
2384 connect many of the handlers, and then type information will not
2385 be effective. Still, this is a win over previous implementations. */
2387 rebuild_jump_labels (get_insns ());
2388 find_basic_blocks (get_insns (), max_reg_num (), 0);
2389 cleanup_cfg (CLEANUP_PRE_LOOP);
2391 /* These registers are used by the landing pads. Make sure they
2392 have been generated. */
2393 get_exception_pointer (cfun);
2394 get_exception_filter (cfun);
2396 /* Construct the landing pads. */
2398 assign_filter_values ();
2399 build_post_landing_pads ();
2400 connect_post_landing_pads ();
2401 if (USING_SJLJ_EXCEPTIONS)
2402 sjlj_build_landing_pads ();
2403 else
2404 dw2_build_landing_pads ();
2406 cfun->eh->built_landing_pads = 1;
2408 /* We've totally changed the CFG. Start over. */
2409 find_exception_handler_labels ();
2410 rebuild_jump_labels (get_insns ());
2411 find_basic_blocks (get_insns (), max_reg_num (), 0);
2412 cleanup_cfg (CLEANUP_PRE_LOOP);
2415 /* This section handles removing dead code for flow. */
2417 /* Remove LABEL from the exception_handler_labels list. */
2419 static void
2420 remove_exception_handler_label (label)
2421 rtx label;
2423 rtx *pl, l;
2425 for (pl = &exception_handler_labels, l = *pl;
2426 XEXP (l, 0) != label;
2427 pl = &XEXP (l, 1), l = *pl)
2428 continue;
2430 *pl = XEXP (l, 1);
2431 free_EXPR_LIST_node (l);
2434 /* Splice REGION from the region tree etc. */
2436 static void
2437 remove_eh_handler (region)
2438 struct eh_region *region;
2440 struct eh_region **pp, *p;
2441 rtx lab;
2442 int i;
2444 /* For the benefit of efficiently handling REG_EH_REGION notes,
2445 replace this region in the region array with its containing
2446 region. Note that previous region deletions may result in
2447 multiple copies of this region in the array, so we have to
2448 search the whole thing. */
2449 for (i = cfun->eh->last_region_number; i > 0; --i)
2450 if (cfun->eh->region_array[i] == region)
2451 cfun->eh->region_array[i] = region->outer;
2453 if (cfun->eh->built_landing_pads)
2454 lab = region->landing_pad;
2455 else
2456 lab = region->label;
2457 if (lab)
2458 remove_exception_handler_label (lab);
2460 if (region->outer)
2461 pp = &region->outer->inner;
2462 else
2463 pp = &cfun->eh->region_tree;
2464 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2465 continue;
2467 if (region->inner)
2469 for (p = region->inner; p->next_peer ; p = p->next_peer)
2470 p->outer = region->outer;
2471 p->next_peer = region->next_peer;
2472 p->outer = region->outer;
2473 *pp = region->inner;
2475 else
2476 *pp = region->next_peer;
2478 if (region->type == ERT_CATCH)
2480 struct eh_region *try, *next, *prev;
2482 for (try = region->next_peer;
2483 try->type == ERT_CATCH;
2484 try = try->next_peer)
2485 continue;
2486 if (try->type != ERT_TRY)
2487 abort ();
2489 next = region->u.catch.next_catch;
2490 prev = region->u.catch.prev_catch;
2492 if (next)
2493 next->u.catch.prev_catch = prev;
2494 else
2495 try->u.try.last_catch = prev;
2496 if (prev)
2497 prev->u.catch.next_catch = next;
2498 else
2500 try->u.try.catch = next;
2501 if (! next)
2502 remove_eh_handler (try);
2506 free (region);
2509 /* LABEL heads a basic block that is about to be deleted. If this
2510 label corresponds to an exception region, we may be able to
2511 delete the region. */
2513 void
2514 maybe_remove_eh_handler (label)
2515 rtx label;
2517 int i;
2519 /* ??? After generating landing pads, it's not so simple to determine
2520 if the region data is completely unused. One must examine the
2521 landing pad and the post landing pad, and whether an inner try block
2522 is referencing the catch handlers directly. */
2523 if (cfun->eh->built_landing_pads)
2524 return;
2526 for (i = cfun->eh->last_region_number; i > 0; --i)
2528 struct eh_region *region = cfun->eh->region_array[i];
2529 if (region && region->label == label)
2531 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2532 because there is no path to the fallback call to terminate.
2533 But the region continues to affect call-site data until there
2534 are no more contained calls, which we don't see here. */
2535 if (region->type == ERT_MUST_NOT_THROW)
2537 remove_exception_handler_label (region->label);
2538 region->label = NULL_RTX;
2540 else
2541 remove_eh_handler (region);
2542 break;
2548 /* This section describes CFG exception edges for flow. */
2550 /* For communicating between calls to reachable_next_level. */
2551 struct reachable_info
2553 tree types_caught;
2554 tree types_allowed;
2555 rtx handlers;
2558 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2559 base class of TYPE, is in HANDLED. */
2561 static int
2562 check_handled (handled, type)
2563 tree handled, type;
2565 tree t;
2567 /* We can check for exact matches without front-end help. */
2568 if (! lang_eh_type_covers)
2570 for (t = handled; t ; t = TREE_CHAIN (t))
2571 if (TREE_VALUE (t) == type)
2572 return 1;
2574 else
2576 for (t = handled; t ; t = TREE_CHAIN (t))
2577 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2578 return 1;
2581 return 0;
2584 /* A subroutine of reachable_next_level. If we are collecting a list
2585 of handlers, add one. After landing pad generation, we reference
2586 the landing pad instead of the handlers themselves. Further, the handlers are
2587 all wired together, so by referencing one, we've got them all.
2588 Before landing pad generation we reference each handler individually.
2590 LP_REGION contains the landing pad; REGION is the handler. */
2592 static void
2593 add_reachable_handler (info, lp_region, region)
2594 struct reachable_info *info;
2595 struct eh_region *lp_region;
2596 struct eh_region *region;
2598 if (! info)
2599 return;
2601 if (cfun->eh->built_landing_pads)
2603 if (! info->handlers)
2604 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2606 else
2607 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2610 /* Process one level of exception regions for reachability.
2611 If TYPE_THROWN is non-null, then it is the *exact* type being
2612 propagated. If INFO is non-null, then collect handler labels
2613 and caught/allowed type information between invocations. */
2615 static enum reachable_code
2616 reachable_next_level (region, type_thrown, info)
2617 struct eh_region *region;
2618 tree type_thrown;
2619 struct reachable_info *info;
2621 switch (region->type)
2623 case ERT_CLEANUP:
2624 /* Before landing-pad generation, we model control flow
2625 directly to the individual handlers. In this way we can
2626 see that catch handler types may shadow one another. */
2627 add_reachable_handler (info, region, region);
2628 return RNL_MAYBE_CAUGHT;
2630 case ERT_TRY:
2632 struct eh_region *c;
2633 enum reachable_code ret = RNL_NOT_CAUGHT;
2635 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2637 /* A catch-all handler ends the search. */
2638 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2639 to be run as well. */
2640 if (c->u.catch.type_list == NULL)
2642 add_reachable_handler (info, region, c);
2643 return RNL_CAUGHT;
2646 if (type_thrown)
2648 /* If we have at least one type match, end the search. */
2649 tree tp_node = c->u.catch.type_list;
2651 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2653 tree type = TREE_VALUE (tp_node);
2655 if (type == type_thrown
2656 || (lang_eh_type_covers
2657 && (*lang_eh_type_covers) (type, type_thrown)))
2659 add_reachable_handler (info, region, c);
2660 return RNL_CAUGHT;
2664 /* If we have definitive information about a match failure,
2665 the catch won't trigger. */
2666 if (lang_eh_type_covers)
2667 return RNL_NOT_CAUGHT;
2670 /* At this point, we either don't know what type is thrown or
2671 don't have front-end assistance to help decide whether it is
2672 covered by one of the types in the list for this region.
2674 We'd then like to add this region to the list of reachable
2675 handlers since it is indeed potentially reachable based on the
2676 information we have.
2678 Actually, this handler is for sure not reachable if all the
2679 types it matches have already been caught. That is, it is only
2680 potentially reachable if at least one of the types it catches
2681 has not been previously caught. */
2683 if (! info)
2684 ret = RNL_MAYBE_CAUGHT;
2685 else
2687 tree tp_node = c->u.catch.type_list;
2688 bool maybe_reachable = false;
2690 /* Compute the potential reachability of this handler and
2691 update the list of types caught at the same time. */
2692 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2694 tree type = TREE_VALUE (tp_node);
2696 if (! check_handled (info->types_caught, type))
2698 info->types_caught
2699 = tree_cons (NULL, type, info->types_caught);
2701 maybe_reachable = true;
2705 if (maybe_reachable)
2707 add_reachable_handler (info, region, c);
2709 /* ??? If the catch type is a base class of every allowed
2710 type, then we know we can stop the search. */
2711 ret = RNL_MAYBE_CAUGHT;
2716 return ret;
2719 case ERT_ALLOWED_EXCEPTIONS:
2720 /* An empty list of types definitely ends the search. */
2721 if (region->u.allowed.type_list == NULL_TREE)
2723 add_reachable_handler (info, region, region);
2724 return RNL_CAUGHT;
2727 /* Collect a list of lists of allowed types for use in detecting
2728 when a catch may be transformed into a catch-all. */
2729 if (info)
2730 info->types_allowed = tree_cons (NULL_TREE,
2731 region->u.allowed.type_list,
2732 info->types_allowed);
2734 /* If we have definitive information about the type hierarchy,
2735 then we can tell if the thrown type will pass through the
2736 filter. */
2737 if (type_thrown && lang_eh_type_covers)
2739 if (check_handled (region->u.allowed.type_list, type_thrown))
2740 return RNL_NOT_CAUGHT;
2741 else
2743 add_reachable_handler (info, region, region);
2744 return RNL_CAUGHT;
2748 add_reachable_handler (info, region, region);
2749 return RNL_MAYBE_CAUGHT;
2751 case ERT_CATCH:
2752 /* Catch regions are handled by their controlling try region. */
2753 return RNL_NOT_CAUGHT;
2755 case ERT_MUST_NOT_THROW:
2756 /* Here we end our search, since no exceptions may propagate.
2757 If we've touched down at some landing pad previously, then the
2758 explicit function call we generated may be used. Otherwise
2759 the call is made by the runtime. */
2760 if (info && info->handlers)
2762 add_reachable_handler (info, region, region);
2763 return RNL_CAUGHT;
2765 else
2766 return RNL_BLOCKED;
2768 case ERT_THROW:
2769 case ERT_FIXUP:
2770 case ERT_UNKNOWN:
2771 /* Shouldn't see these here. */
2772 break;
2775 abort ();
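/* To summarize the codes returned above:

     RNL_NOT_CAUGHT	this region cannot handle the exception;
			keep searching outward.
     RNL_MAYBE_CAUGHT	this region might handle it; it is recorded
			in INFO (if any), but the search continues.
     RNL_CAUGHT		this region definitely handles it; stop.
     RNL_BLOCKED	a must-not-throw region stops propagation.

   Callers rely on the ordering, e.g. testing >= RNL_CAUGHT to mean
   "no outer region can see this exception".  */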
2778 /* Retrieve a list of labels of exception handlers which can be
2779 reached by a given insn. */
2782 reachable_handlers (insn)
2783 rtx insn;
2785 struct reachable_info info;
2786 struct eh_region *region;
2787 tree type_thrown;
2788 int region_number;
2790 if (GET_CODE (insn) == JUMP_INSN
2791 && GET_CODE (PATTERN (insn)) == RESX)
2792 region_number = XINT (PATTERN (insn), 0);
2793 else
2795 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2796 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2797 return NULL;
2798 region_number = INTVAL (XEXP (note, 0));
2801 memset (&info, 0, sizeof (info));
2803 region = cfun->eh->region_array[region_number];
2805 type_thrown = NULL_TREE;
2806 if (GET_CODE (insn) == JUMP_INSN
2807 && GET_CODE (PATTERN (insn)) == RESX)
2809 /* A RESX leaves a region instead of entering it. Thus the
2810 region itself may have been deleted out from under us. */
2811 if (region == NULL)
2812 return NULL;
2813 region = region->outer;
2815 else if (region->type == ERT_THROW)
2817 type_thrown = region->u.throw.type;
2818 region = region->outer;
2821 for (; region; region = region->outer)
2822 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2823 break;
2825 return info.handlers;
2828 /* Determine if the given INSN can throw an exception that is caught
2829 within the function. */
2831 bool
2832 can_throw_internal (insn)
2833 rtx insn;
2835 struct eh_region *region;
2836 tree type_thrown;
2837 rtx note;
2839 if (! INSN_P (insn))
2840 return false;
2842 if (GET_CODE (insn) == INSN
2843 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2844 insn = XVECEXP (PATTERN (insn), 0, 0);
2846 if (GET_CODE (insn) == CALL_INSN
2847 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2849 int i;
2850 for (i = 0; i < 3; ++i)
2852 rtx sub = XEXP (PATTERN (insn), i);
2853 for (; sub ; sub = NEXT_INSN (sub))
2854 if (can_throw_internal (sub))
2855 return true;
2857 return false;
2860 /* Every insn that might throw has an EH_REGION note. */
2861 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2862 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2863 return false;
2865 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2867 type_thrown = NULL_TREE;
2868 if (region->type == ERT_THROW)
2870 type_thrown = region->u.throw.type;
2871 region = region->outer;
2874 /* If this exception is ignored by each and every containing region,
2875 then control passes straight out. The runtime may handle some
2876 regions, which also do not require processing internally. */
2877 for (; region; region = region->outer)
2879 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2880 if (how == RNL_BLOCKED)
2881 return false;
2882 if (how != RNL_NOT_CAUGHT)
2883 return true;
2886 return false;
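/* Note the asymmetry with can_throw_external below: an exception is
   internal if some containing region may handle it (RNL_CAUGHT or
   RNL_MAYBE_CAUGHT), but external only if no containing region
   returns RNL_CAUGHT or RNL_BLOCKED.  An insn whose best match is
   merely RNL_MAYBE_CAUGHT therefore counts as both.  */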
2889 /* Determine if the given INSN can throw an exception that is
2890 visible outside the function. */
2892 bool
2893 can_throw_external (insn)
2894 rtx insn;
2896 struct eh_region *region;
2897 tree type_thrown;
2898 rtx note;
2900 if (! INSN_P (insn))
2901 return false;
2903 if (GET_CODE (insn) == INSN
2904 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2905 insn = XVECEXP (PATTERN (insn), 0, 0);
2907 if (GET_CODE (insn) == CALL_INSN
2908 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2910 int i;
2911 for (i = 0; i < 3; ++i)
2913 rtx sub = XEXP (PATTERN (insn), i);
2914 for (; sub ; sub = NEXT_INSN (sub))
2915 if (can_throw_external (sub))
2916 return true;
2918 return false;
2921 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2922 if (!note)
2924 /* Calls (and trapping insns) without notes are outside any
2925 exception handling region in this function. We have to
2926 assume they might throw. Given that the front end and middle
2927 ends mark known NOTHROW functions, this isn't so wildly
2928 inaccurate. */
2929 return (GET_CODE (insn) == CALL_INSN
2930 || (flag_non_call_exceptions
2931 && may_trap_p (PATTERN (insn))));
2933 if (INTVAL (XEXP (note, 0)) <= 0)
2934 return false;
2936 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2938 type_thrown = NULL_TREE;
2939 if (region->type == ERT_THROW)
2941 type_thrown = region->u.throw.type;
2942 region = region->outer;
2945 /* If the exception is caught or blocked by any containing region,
2946 then it is not seen by any calling function. */
2947 for (; region ; region = region->outer)
2948 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2949 return false;
2951 return true;
2954 /* True if nothing in this function can throw outside this function. */
2956 bool
2957 nothrow_function_p ()
2959 rtx insn;
2961 if (! flag_exceptions)
2962 return true;
2964 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2965 if (can_throw_external (insn))
2966 return false;
2967 for (insn = current_function_epilogue_delay_list; insn;
2968 insn = XEXP (insn, 1))
2969 if (can_throw_external (insn))
2970 return false;
2972 return true;
2976 /* Various hooks for unwind library. */
2978 /* Do any necessary initialization to access arbitrary stack frames.
2979 On the SPARC, this means flushing the register windows. */
2981 void
2982 expand_builtin_unwind_init ()
2984 /* Set this so all the registers get saved in our frame; we need to be
2985 able to copy the saved values for any registers from frames we unwind. */
2986 current_function_has_nonlocal_label = 1;
2988 #ifdef SETUP_FRAME_ADDRESSES
2989 SETUP_FRAME_ADDRESSES ();
2990 #endif
2994 expand_builtin_eh_return_data_regno (arglist)
2995 tree arglist;
2997 tree which = TREE_VALUE (arglist);
2998 unsigned HOST_WIDE_INT iwhich;
3000 if (TREE_CODE (which) != INTEGER_CST)
3002 error ("argument of `__builtin_eh_return_regno' must be constant");
3003 return constm1_rtx;
3006 iwhich = tree_low_cst (which, 1);
3007 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3008 if (iwhich == INVALID_REGNUM)
3009 return constm1_rtx;
3011 #ifdef DWARF_FRAME_REGNUM
3012 iwhich = DWARF_FRAME_REGNUM (iwhich);
3013 #else
3014 iwhich = DBX_REGISTER_NUMBER (iwhich);
3015 #endif
3017 return GEN_INT (iwhich);
3020 /* Given a value extracted from the return address register or stack slot,
3021 return the actual address encoded in that value. */
3024 expand_builtin_extract_return_addr (addr_tree)
3025 tree addr_tree;
3027 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3029 /* First mask out any unwanted bits. */
3030 #ifdef MASK_RETURN_ADDR
3031 expand_and (addr, MASK_RETURN_ADDR, addr);
3032 #endif
3034 /* Then adjust to find the real return address. */
3035 #if defined (RETURN_ADDR_OFFSET)
3036 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3037 #endif
3039 return addr;
3042 /* Given an actual address in ADDR_TREE, do any necessary encoding
3043 and return the value to be stored in the return address register or
3044 stack slot so the epilogue will return to that address. */
3047 expand_builtin_frob_return_addr (addr_tree)
3048 tree addr_tree;
3050 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3052 #ifdef POINTERS_EXTEND_UNSIGNED
3053 if (GET_MODE (addr) != Pmode)
3054 addr = convert_memory_address (Pmode, addr);
3055 #endif
3057 #ifdef RETURN_ADDR_OFFSET
3058 addr = force_reg (Pmode, addr);
3059 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3060 #endif
3062 return addr;
3065 /* Set up the epilogue with the magic bits we'll need to return to the
3066 exception handler. */
3068 void
3069 expand_builtin_eh_return (stackadj_tree, handler_tree)
3070 tree stackadj_tree, handler_tree;
3072 rtx stackadj, handler;
3074 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3075 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3077 #ifdef POINTERS_EXTEND_UNSIGNED
3078 if (GET_MODE (stackadj) != Pmode)
3079 stackadj = convert_memory_address (Pmode, stackadj);
3081 if (GET_MODE (handler) != Pmode)
3082 handler = convert_memory_address (Pmode, handler);
3083 #endif
3085 if (! cfun->eh->ehr_label)
3087 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3088 cfun->eh->ehr_handler = copy_to_reg (handler);
3089 cfun->eh->ehr_label = gen_label_rtx ();
3091 else
3093 if (stackadj != cfun->eh->ehr_stackadj)
3094 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3095 if (handler != cfun->eh->ehr_handler)
3096 emit_move_insn (cfun->eh->ehr_handler, handler);
3099 emit_jump (cfun->eh->ehr_label);
3102 void
3103 expand_eh_return ()
3105 rtx sa, ra, around_label;
3107 if (! cfun->eh->ehr_label)
3108 return;
3110 sa = EH_RETURN_STACKADJ_RTX;
3111 if (! sa)
3113 error ("__builtin_eh_return not supported on this target");
3114 return;
3117 current_function_calls_eh_return = 1;
3119 around_label = gen_label_rtx ();
3120 emit_move_insn (sa, const0_rtx);
3121 emit_jump (around_label);
3123 emit_label (cfun->eh->ehr_label);
3124 clobber_return_register ();
3126 #ifdef HAVE_eh_return
3127 if (HAVE_eh_return)
3128 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3129 else
3130 #endif
3132 ra = EH_RETURN_HANDLER_RTX;
3133 if (! ra)
3135 error ("__builtin_eh_return not supported on this target");
3136 ra = gen_reg_rtx (Pmode);
3139 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3140 emit_move_insn (ra, cfun->eh->ehr_handler);
3143 emit_label (around_label);
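/* The rtl emitted above behaves like this pseudo-C, with SA and RA
   standing for EH_RETURN_STACKADJ_RTX and the handler location:

	  SA = 0;
	  goto around;
	ehr_label:
	  SA = ehr_stackadj;
	  RA = ehr_handler;	-- or the target's eh_return pattern
	around:
	  (normal epilogue)

   so the common, non-EH return path pays only the "SA = 0" store.  */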
3146 /* In the following functions, we represent entries in the action table
3147 as 1-based indices. Special cases are:
3149 0: null action record, non-null landing pad; implies cleanups
3150 -1: null action record, null landing pad; implies no action
3151 -2: no call-site entry; implies must_not_throw
3152 -3: we have yet to process outer regions
3154 Further, no special cases apply to the "next" field of the record.
3155 For next, 0 means end of list. */
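/* A hypothetical example: a call inside try { ... } catch (A) catch (B),
   all wrapped in a cleanup, might build (with filters 1 and 2 being
   whatever assign_filter_values picked for A and B):

	record r0:  filter 0, next 0	-- cleanup, end of chain
	record r1:  filter 2, next r0	-- catch (B)
	record r2:  filter 1, next r1	-- catch (A)

   collect_one_action_chain below would return r2's 1-based offset,
   and the runtime personality walks A, then B, then the cleanup.  */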
3157 struct action_record
3159 int offset;
3160 int filter;
3161 int next;
3164 static int
3165 action_record_eq (pentry, pdata)
3166 const PTR pentry;
3167 const PTR pdata;
3169 const struct action_record *entry = (const struct action_record *) pentry;
3170 const struct action_record *data = (const struct action_record *) pdata;
3171 return entry->filter == data->filter && entry->next == data->next;
3174 static hashval_t
3175 action_record_hash (pentry)
3176 const PTR pentry;
3178 const struct action_record *entry = (const struct action_record *) pentry;
3179 return entry->next * 1009 + entry->filter;
3182 static int
3183 add_action_record (ar_hash, filter, next)
3184 htab_t ar_hash;
3185 int filter, next;
3187 struct action_record **slot, *new, tmp;
3189 tmp.filter = filter;
3190 tmp.next = next;
3191 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3193 if ((new = *slot) == NULL)
3195 new = (struct action_record *) xmalloc (sizeof (*new));
3196 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3197 new->filter = filter;
3198 new->next = next;
3199 *slot = new;
3201 /* The filter value goes in untouched. The link to the next
3202 record is a "self-relative" byte offset, or zero to indicate
3203 that there is no next record. So convert the absolute 1-based
3204 indices we've been carrying around into a displacement. */
3206 push_sleb128 (&cfun->eh->action_record_data, filter);
3207 if (next)
3208 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3209 push_sleb128 (&cfun->eh->action_record_data, next);
3212 return new->offset;
3215 static int
3216 collect_one_action_chain (ar_hash, region)
3217 htab_t ar_hash;
3218 struct eh_region *region;
3220 struct eh_region *c;
3221 int next;
3223 /* If we've reached the top of the region chain, then we have
3224 no actions, and require no landing pad. */
3225 if (region == NULL)
3226 return -1;
3228 switch (region->type)
3230 case ERT_CLEANUP:
3231 /* A cleanup adds a zero filter to the beginning of the chain, but
3232 there are special cases to look out for. If there are *only*
3233 cleanups along a path, then it compresses to a zero action.
3234 Further, if there are multiple cleanups along a path, we only
3235 need to represent one of them, as that is enough to trigger
3236 entry to the landing pad at runtime. */
3237 next = collect_one_action_chain (ar_hash, region->outer);
3238 if (next <= 0)
3239 return 0;
3240 for (c = region->outer; c ; c = c->outer)
3241 if (c->type == ERT_CLEANUP)
3242 return next;
3243 return add_action_record (ar_hash, 0, next);
3245 case ERT_TRY:
3246 /* Process the associated catch regions in reverse order.
3247 If there's a catch-all handler, then we don't need to
3248 search outer regions. Use a magic -3 value to record
3249 that we haven't done the outer search. */
3250 next = -3;
3251 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3253 if (c->u.catch.type_list == NULL)
3255 /* Retrieve the filter from the head of the filter list
3256 where we have stored it (see assign_filter_values). */
3257 int filter
3258 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3260 next = add_action_record (ar_hash, filter, 0);
3262 else
3264 /* Once the outer search is done, trigger an action record for
3265 each filter we have. */
3266 tree flt_node;
3268 if (next == -3)
3270 next = collect_one_action_chain (ar_hash, region->outer);
3272 /* If there is no next action, terminate the chain. */
3273 if (next == -1)
3274 next = 0;
3275 /* If all outer actions are cleanups or must_not_throw,
3276 we'll have no action record for it, since we had wanted
3277 to encode these states in the call-site record directly.
3278 Add a cleanup action to the chain to catch these. */
3279 else if (next <= 0)
3280 next = add_action_record (ar_hash, 0, 0);
3283 flt_node = c->u.catch.filter_list;
3284 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3286 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3287 next = add_action_record (ar_hash, filter, next);
3291 return next;
3293 case ERT_ALLOWED_EXCEPTIONS:
3294 /* An exception specification adds its filter to the
3295 beginning of the chain. */
3296 next = collect_one_action_chain (ar_hash, region->outer);
3297 return add_action_record (ar_hash, region->u.allowed.filter,
3298 next < 0 ? 0 : next);
3300 case ERT_MUST_NOT_THROW:
3301 /* A must-not-throw region with no inner handlers or cleanups
3302 requires no call-site entry. Note that this differs from
3303 the no handler or cleanup case in that we do require an lsda
3304 to be generated. Return a magic -2 value to record this. */
3305 return -2;
3307 case ERT_CATCH:
3308 case ERT_THROW:
3309 /* CATCH regions are handled in TRY above. THROW regions are
3310 for optimization information only and produce no output. */
3311 return collect_one_action_chain (ar_hash, region->outer);
3313 default:
3314 abort ();
3318 static int
3319 add_call_site (landing_pad, action)
3320 rtx landing_pad;
3321 int action;
3323 struct call_site_record *data = cfun->eh->call_site_data;
3324 int used = cfun->eh->call_site_data_used;
3325 int size = cfun->eh->call_site_data_size;
3327 if (used >= size)
3329 size = (size ? size * 2 : 64);
3330 data = (struct call_site_record *)
3331 xrealloc (data, sizeof (*data) * size);
3332 cfun->eh->call_site_data = data;
3333 cfun->eh->call_site_data_size = size;
3336 data[used].landing_pad = landing_pad;
3337 data[used].action = action;
3339 cfun->eh->call_site_data_used = used + 1;
3341 return used + call_site_base;
3344 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3345 The new note numbers will not refer to region numbers, but
3346 instead to call site entries. */
3348 void
3349 convert_to_eh_region_ranges ()
3351 rtx insn, iter, note;
3352 htab_t ar_hash;
3353 int last_action = -3;
3354 rtx last_action_insn = NULL_RTX;
3355 rtx last_landing_pad = NULL_RTX;
3356 rtx first_no_action_insn = NULL_RTX;
3357 int call_site = 0;
3359 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3360 return;
3362 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3364 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3366 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3367 if (INSN_P (iter))
3369 struct eh_region *region;
3370 int this_action;
3371 rtx this_landing_pad;
3373 insn = iter;
3374 if (GET_CODE (insn) == INSN
3375 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3376 insn = XVECEXP (PATTERN (insn), 0, 0);
3378 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3379 if (!note)
3381 if (! (GET_CODE (insn) == CALL_INSN
3382 || (flag_non_call_exceptions
3383 && may_trap_p (PATTERN (insn)))))
3384 continue;
3385 this_action = -1;
3386 region = NULL;
3388 else
3390 if (INTVAL (XEXP (note, 0)) <= 0)
3391 continue;
3392 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3393 this_action = collect_one_action_chain (ar_hash, region);
3396 /* Existence of catch handlers or must-not-throw regions
3397 implies that an lsda is needed (even if empty). */
3398 if (this_action != -1)
3399 cfun->uses_eh_lsda = 1;
3401 /* Delay creation of region notes for no-action regions
3402 until we're sure that an lsda will be required. */
3403 else if (last_action == -3)
3405 first_no_action_insn = iter;
3406 last_action = -1;
3409 /* Cleanups and handlers may share action chains but not
3410 landing pads. Collect the landing pad for this region. */
3411 if (this_action >= 0)
3413 struct eh_region *o;
3414 for (o = region; ! o->landing_pad ; o = o->outer)
3415 continue;
3416 this_landing_pad = o->landing_pad;
3418 else
3419 this_landing_pad = NULL_RTX;
3421 /* Differing actions or landing pads imply a change in call-site
3422 info, which implies some EH_REGION note should be emitted. */
3423 if (last_action != this_action
3424 || last_landing_pad != this_landing_pad)
3426 /* If we'd not seen a previous action (-3) or the previous
3427 action was must-not-throw (-2), then we do not need an
3428 end note. */
3429 if (last_action >= -1)
3431 /* If we delayed the creation of the begin, do it now. */
3432 if (first_no_action_insn)
3434 call_site = add_call_site (NULL_RTX, 0);
3435 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3436 first_no_action_insn);
3437 NOTE_EH_HANDLER (note) = call_site;
3438 first_no_action_insn = NULL_RTX;
3441 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3442 last_action_insn);
3443 NOTE_EH_HANDLER (note) = call_site;
3446 /* If the new action is must-not-throw, then no region notes
3447 are created. */
3448 if (this_action >= -1)
3450 call_site = add_call_site (this_landing_pad,
3451 this_action < 0 ? 0 : this_action);
3452 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3453 NOTE_EH_HANDLER (note) = call_site;
3456 last_action = this_action;
3457 last_landing_pad = this_landing_pad;
3459 last_action_insn = iter;
3462 if (last_action >= -1 && ! first_no_action_insn)
3464 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3465 NOTE_EH_HANDLER (note) = call_site;
3468 htab_delete (ar_hash);
3472 static void
3473 push_uleb128 (data_area, value)
3474 varray_type *data_area;
3475 unsigned int value;
3479 unsigned char byte = value & 0x7f;
3480 value >>= 7;
3481 if (value)
3482 byte |= 0x80;
3483 VARRAY_PUSH_UCHAR (*data_area, byte);
3485 while (value);
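/* For example, 624485 (0x98765) is pushed as the three bytes
   0xe5 0x8e 0x26: seven bits at a time, least significant group
   first, with the high bit set on every byte but the last.  */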
3488 static void
3489 push_sleb128 (data_area, value)
3490 varray_type *data_area;
3491 int value;
3493 unsigned char byte;
3494 int more;
3498 byte = value & 0x7f;
3499 value >>= 7;
3500 more = ! ((value == 0 && (byte & 0x40) == 0)
3501 || (value == -1 && (byte & 0x40) != 0));
3502 if (more)
3503 byte |= 0x80;
3504 VARRAY_PUSH_UCHAR (*data_area, byte);
3506 while (more);
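/* For example, -2 is pushed as the single byte 0x7e, since bit 0x40
   doubles as the sign; 64, however, needs two bytes, 0xc0 0x00,
   precisely because 0x40 in a final byte would read back negative.  */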
3510 #ifndef HAVE_AS_LEB128
3511 static int
3512 dw2_size_of_call_site_table ()
3514 int n = cfun->eh->call_site_data_used;
3515 int size = n * (4 + 4 + 4);
3516 int i;
3518 for (i = 0; i < n; ++i)
3520 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3521 size += size_of_uleb128 (cs->action);
3524 return size;
3527 static int
3528 sjlj_size_of_call_site_table ()
3530 int n = cfun->eh->call_site_data_used;
3531 int size = 0;
3532 int i;
3534 for (i = 0; i < n; ++i)
3536 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3537 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3538 size += size_of_uleb128 (cs->action);
3541 return size;
3543 #endif
3545 static void
3546 dw2_output_call_site_table ()
3548 const char *const function_start_lab
3549 = IDENTIFIER_POINTER (current_function_func_begin_label);
3550 int n = cfun->eh->call_site_data_used;
3551 int i;
3553 for (i = 0; i < n; ++i)
3555 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3556 char reg_start_lab[32];
3557 char reg_end_lab[32];
3558 char landing_pad_lab[32];
3560 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3561 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3563 if (cs->landing_pad)
3564 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3565 CODE_LABEL_NUMBER (cs->landing_pad));
3567 /* ??? Perhaps use insn length scaling if the assembler supports
3568 generic arithmetic. */
3569 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3570 data4 if the function is small enough. */
3571 #ifdef HAVE_AS_LEB128
3572 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3573 "region %d start", i);
3574 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3575 "length");
3576 if (cs->landing_pad)
3577 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3578 "landing pad");
3579 else
3580 dw2_asm_output_data_uleb128 (0, "landing pad");
3581 #else
3582 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3583 "region %d start", i);
3584 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3585 if (cs->landing_pad)
3586 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3587 "landing pad");
3588 else
3589 dw2_asm_output_data (4, 0, "landing pad");
3590 #endif
3591 dw2_asm_output_data_uleb128 (cs->action, "action");
3594 call_site_base += n;
3597 static void
3598 sjlj_output_call_site_table ()
3600 int n = cfun->eh->call_site_data_used;
3601 int i;
3603 for (i = 0; i < n; ++i)
3605 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3607 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3608 "region %d landing pad", i);
3609 dw2_asm_output_data_uleb128 (cs->action, "action");
3612 call_site_base += n;
3615 void
3616 output_function_exception_table ()
3618 int tt_format, cs_format, lp_format, i, n;
3619 #ifdef HAVE_AS_LEB128
3620 char ttype_label[32];
3621 char cs_after_size_label[32];
3622 char cs_end_label[32];
3623 #else
3624 int call_site_len;
3625 #endif
3626 int have_tt_data;
3627 int funcdef_number;
3628 int tt_format_size = 0;
3630 /* Not all functions need anything. */
3631 if (! cfun->uses_eh_lsda)
3632 return;
3634 funcdef_number = (USING_SJLJ_EXCEPTIONS
3635 ? sjlj_funcdef_number
3636 : current_funcdef_number);
3638 #ifdef IA64_UNWIND_INFO
3639 fputs ("\t.personality\t", asm_out_file);
3640 output_addr_const (asm_out_file, eh_personality_libfunc);
3641 fputs ("\n\t.handlerdata\n", asm_out_file);
3642 /* Note that varasm still thinks we're in the function's code section.
3643 The ".endp" directive that will immediately follow will take us back. */
3644 #else
3645 (*targetm.asm_out.exception_section) ();
3646 #endif
3648 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3649 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3651 /* Indicate the format of the @TType entries. */
3652 if (! have_tt_data)
3653 tt_format = DW_EH_PE_omit;
3654 else
3656 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3657 #ifdef HAVE_AS_LEB128
3658 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3659 #endif
3660 tt_format_size = size_of_encoded_value (tt_format);
3662 assemble_align (tt_format_size * BITS_PER_UNIT);
3665 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3667 /* The LSDA header. */
3669 /* Indicate the format of the landing pad start pointer. An omitted
3670 field implies @LPStart == @Start. */
3671 /* Currently we always put @LPStart == @Start. This field would
3672 be most useful in moving the landing pads completely out of
3673 line to another section, but it could also be used to minimize
3674 the size of uleb128 landing pad offsets. */
3675 lp_format = DW_EH_PE_omit;
3676 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3677 eh_data_format_name (lp_format));
3679 /* @LPStart pointer would go here. */
3681 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3682 eh_data_format_name (tt_format));
3684 #ifndef HAVE_AS_LEB128
3685 if (USING_SJLJ_EXCEPTIONS)
3686 call_site_len = sjlj_size_of_call_site_table ();
3687 else
3688 call_site_len = dw2_size_of_call_site_table ();
3689 #endif
3691 /* A pc-relative 4-byte displacement to the @TType data. */
3692 if (have_tt_data)
3694 #ifdef HAVE_AS_LEB128
3695 char ttype_after_disp_label[32];
3696 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3697 funcdef_number);
3698 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3699 "@TType base offset");
3700 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3701 #else
3702 /* Ug. Alignment complicates things. */
3703 unsigned int before_disp, after_disp, last_disp, disp;
3705 before_disp = 1 + 1;
3706 after_disp = (1 + size_of_uleb128 (call_site_len)
3707 + call_site_len
3708 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3709 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3710 * tt_format_size));
3712 disp = after_disp;
3715 unsigned int disp_size, pad;
3717 last_disp = disp;
3718 disp_size = size_of_uleb128 (disp);
3719 pad = before_disp + disp_size + after_disp;
3720 if (pad % tt_format_size)
3721 pad = tt_format_size - (pad % tt_format_size);
3722 else
3723 pad = 0;
3724 disp = after_disp + pad;
3726 while (disp != last_disp);
3728 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3729 #endif
3732 /* Indicate the format of the call-site offsets. */
3733 #ifdef HAVE_AS_LEB128
3734 cs_format = DW_EH_PE_uleb128;
3735 #else
3736 cs_format = DW_EH_PE_udata4;
3737 #endif
3738 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3739 eh_data_format_name (cs_format));
3741 #ifdef HAVE_AS_LEB128
3742 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3743 funcdef_number);
3744 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3745 funcdef_number);
3746 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3747 "Call-site table length");
3748 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3749 if (USING_SJLJ_EXCEPTIONS)
3750 sjlj_output_call_site_table ();
3751 else
3752 dw2_output_call_site_table ();
3753 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3754 #else
3755 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3756 if (USING_SJLJ_EXCEPTIONS)
3757 sjlj_output_call_site_table ();
3758 else
3759 dw2_output_call_site_table ();
3760 #endif
3762 /* ??? Decode and interpret the data for flag_debug_asm. */
3763 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3764 for (i = 0; i < n; ++i)
3765 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3766 (i ? NULL : "Action record table"));
3768 if (have_tt_data)
3769 assemble_align (tt_format_size * BITS_PER_UNIT);
3771 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3772 while (i-- > 0)
3774 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3775 rtx value;
3777 if (type == NULL_TREE)
3778 type = integer_zero_node;
3779 else
3780 type = lookup_type_for_runtime (type);
3782 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3783 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3784 assemble_integer (value, tt_format_size,
3785 tt_format_size * BITS_PER_UNIT, 1);
3786 else
3787 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3790 #ifdef HAVE_AS_LEB128
3791 if (have_tt_data)
3792 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3793 #endif
3795 /* ??? Decode and interpret the data for flag_debug_asm. */
3796 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3797 for (i = 0; i < n; ++i)
3798 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3799 (i ? NULL : "Exception specification table"));
3801 function_section (current_function_decl);
3803 if (USING_SJLJ_EXCEPTIONS)
3804 sjlj_funcdef_number += 1;
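/* For reference, the LSDA emitted by the function above is laid out:

	@LPStart format byte		(always DW_EH_PE_omit here)
	@TType format byte
	@TType base offset		(uleb128; only if have_tt_data)
	call-site format byte
	call-site table length		(uleb128)
	call-site table
	action record table		(sleb128 filter/next pairs)
	padding to tt_format alignment
	@TType table			(type entries, in reverse order)
	exception specification table	(uleb128 data)
   */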