1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
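/* An illustrative sketch, not from the original sources: a front end
   expanding "try { body } catch (T) { handler }" might drive the
   region-expansion entry points defined below roughly as follows.
   The exact sequence of calls is up to each front end.

       expand_eh_region_start ();
         ... expand BODY ...
       expand_start_all_catch ();
       expand_start_catch (T_type);    T_type is NULL for a catch-all
         ... expand HANDLER ...
       expand_end_catch ();
       expand_end_all_catch ();
*/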
50 #include "config.h"
51 #include "system.h"
52 #include "rtl.h"
53 #include "tree.h"
54 #include "flags.h"
55 #include "function.h"
56 #include "expr.h"
57 #include "insn-config.h"
58 #include "except.h"
59 #include "integrate.h"
60 #include "hard-reg-set.h"
61 #include "basic-block.h"
62 #include "output.h"
63 #include "dwarf2asm.h"
64 #include "dwarf2out.h"
65 #include "toplev.h"
66 #include "hashtab.h"
67 #include "intl.h"
68 #include "ggc.h"
69 #include "tm_p.h"
72 /* Provide defaults for stuff that may not be defined when using
73 sjlj exceptions. */
74 #ifndef EH_RETURN_STACKADJ_RTX
75 #define EH_RETURN_STACKADJ_RTX 0
76 #endif
77 #ifndef EH_RETURN_HANDLER_RTX
78 #define EH_RETURN_HANDLER_RTX 0
79 #endif
80 #ifndef EH_RETURN_DATA_REGNO
81 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
82 #endif
85 /* Nonzero means enable synchronous exceptions for non-call instructions. */
86 int flag_non_call_exceptions;
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 tree protect_cleanup_actions;
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) PARAMS ((tree));
98 /* A list of labels used for exception handlers. */
99 rtx exception_handler_labels;
101 static int call_site_base;
102 static int sjlj_funcdef_number;
103 static htab_t type_to_runtime_map;
105 /* Describe the SjLj_Function_Context structure. */
106 static tree sjlj_fc_type_node;
107 static int sjlj_fc_call_site_ofs;
108 static int sjlj_fc_data_ofs;
109 static int sjlj_fc_personality_ofs;
110 static int sjlj_fc_lsda_ofs;
111 static int sjlj_fc_jbuf_ofs;
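/* For reference, a rough sketch of the layout that init_eh constructs
   below; the authoritative definition lives in unwind-sjlj.c, and the
   field types here are approximations (in particular the element type
   and length of the jbuf array are target-dependent):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;     __prev
         int call_site;                          __call_site
         word data[4];                           __data
         void *personality;                      __personality
         void *lsda;                             __lsda
         void *jbuf[JBUF_LEN];                   __jbuf
       };
*/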
113 /* Describes one exception region. */
114 struct eh_region
116 /* The immediately surrounding region. */
117 struct eh_region *outer;
119 /* The list of immediately contained regions. */
120 struct eh_region *inner;
121 struct eh_region *next_peer;
123 /* An identifier for this region. */
124 int region_number;
126 /* Each region does exactly one thing. */
127 enum eh_region_type
129 ERT_CLEANUP = 1,
130 ERT_TRY,
131 ERT_CATCH,
132 ERT_ALLOWED_EXCEPTIONS,
133 ERT_MUST_NOT_THROW,
134 ERT_THROW,
135 ERT_FIXUP
136 } type;
 138   /* Holds the action to perform based on the preceding type.  */
139 union {
140 /* A list of catch blocks, a surrounding try block,
141 and the label for continuing after a catch. */
142 struct {
143 struct eh_region *catch;
144 struct eh_region *last_catch;
145 struct eh_region *prev_try;
146 rtx continue_label;
147 } try;
149 /* The list through the catch handlers, the type object
150 matched, and a pointer to the generated code. */
151 struct {
152 struct eh_region *next_catch;
153 struct eh_region *prev_catch;
154 tree type;
155 int filter;
156 } catch;
158 /* A tree_list of allowed types. */
159 struct {
160 tree type_list;
161 int filter;
162 } allowed;
164 /* The type given by a call to "throw foo();", or discovered
165 for a throw. */
166 struct {
167 tree type;
168 } throw;
170 /* Retain the cleanup expression even after expansion so that
171 we can match up fixup regions. */
172 struct {
173 tree exp;
174 } cleanup;
176 /* The real region (by expression and by pointer) that fixup code
177 should live in. */
178 struct {
179 tree cleanup_exp;
180 struct eh_region *real_region;
181 } fixup;
182 } u;
 184   /* The code generated for, or contained within, the region.  */
185 rtx label, last;
187 /* Entry point for this region from the runtime eh library. */
188 rtx landing_pad;
190 /* Entry point for this region from an inner region. */
191 rtx post_landing_pad;
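/* A sketch of the linkage above (illustrative, not from this file):
   each region points up via OUTER, down to its first child via INNER,
   and across to a sibling via NEXT_PEER, e.g.

       R1 --next_peer--> R2
       |
     inner
       |
       v
       R3 --next_peer--> R4         (R3->outer == R4->outer == R1)
*/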
194 /* Used to save exception status for each function. */
195 struct eh_status
197 /* The tree of all regions for this function. */
198 struct eh_region *region_tree;
200 /* The same information as an indexable array. */
201 struct eh_region **region_array;
203 /* The most recently open region. */
204 struct eh_region *cur_region;
206 /* This is the region for which we are processing catch blocks. */
207 struct eh_region *try_region;
209 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
210 node is itself a TREE_CHAINed list of handlers for regions that
211 are not yet closed. The TREE_VALUE of each entry contains the
212 handler for the corresponding entry on the ehstack. */
213 tree protect_list;
215 rtx filter;
216 rtx exc_ptr;
218 int built_landing_pads;
219 int last_region_number;
221 varray_type ttype_data;
222 varray_type ehspec_data;
223 varray_type action_record_data;
225 struct call_site_record
227 rtx landing_pad;
228 int action;
229 } *call_site_data;
230 int call_site_data_used;
231 int call_site_data_size;
233 rtx ehr_stackadj;
234 rtx ehr_handler;
235 rtx ehr_label;
237 rtx sjlj_fc;
238 rtx sjlj_exit_after;
242 static void mark_eh_region PARAMS ((struct eh_region *));
244 static int t2r_eq PARAMS ((const PTR,
245 const PTR));
246 static hashval_t t2r_hash PARAMS ((const PTR));
247 static int t2r_mark_1 PARAMS ((PTR *, PTR));
248 static void t2r_mark PARAMS ((PTR));
249 static void add_type_for_runtime PARAMS ((tree));
250 static tree lookup_type_for_runtime PARAMS ((tree));
252 static struct eh_region *expand_eh_region_end PARAMS ((void));
254 static void collect_eh_region_array PARAMS ((void));
255 static void resolve_fixup_regions PARAMS ((void));
256 static void remove_fixup_regions PARAMS ((void));
257 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
259 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
260 struct inline_remap *));
261 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
262 struct eh_region **));
263 static int ttypes_filter_eq PARAMS ((const PTR,
264 const PTR));
265 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
266 static int ehspec_filter_eq PARAMS ((const PTR,
267 const PTR));
268 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
269 static int add_ttypes_entry PARAMS ((htab_t, tree));
270 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
271 tree));
272 static void assign_filter_values PARAMS ((void));
273 static void build_post_landing_pads PARAMS ((void));
274 static void connect_post_landing_pads PARAMS ((void));
275 static void dw2_build_landing_pads PARAMS ((void));
277 struct sjlj_lp_info;
278 static bool sjlj_find_directly_reachable_regions
279 PARAMS ((struct sjlj_lp_info *));
280 static void sjlj_assign_call_site_values
281 PARAMS ((rtx, struct sjlj_lp_info *));
282 static void sjlj_mark_call_sites
283 PARAMS ((struct sjlj_lp_info *));
284 static void sjlj_emit_function_enter PARAMS ((rtx));
285 static void sjlj_emit_function_exit PARAMS ((void));
286 static void sjlj_emit_dispatch_table
287 PARAMS ((rtx, struct sjlj_lp_info *));
288 static void sjlj_build_landing_pads PARAMS ((void));
290 static void remove_exception_handler_label PARAMS ((rtx));
291 static void remove_eh_handler PARAMS ((struct eh_region *));
293 struct reachable_info;
295 /* The return value of reachable_next_level. */
296 enum reachable_code
298 /* The given exception is not processed by the given region. */
299 RNL_NOT_CAUGHT,
300 /* The given exception may need processing by the given region. */
301 RNL_MAYBE_CAUGHT,
302 /* The given exception is completely processed by the given region. */
303 RNL_CAUGHT,
304 /* The given exception is completely processed by the runtime. */
305 RNL_BLOCKED
308 static int check_handled PARAMS ((tree, tree));
309 static void add_reachable_handler
310 PARAMS ((struct reachable_info *, struct eh_region *,
311 struct eh_region *));
312 static enum reachable_code reachable_next_level
313 PARAMS ((struct eh_region *, tree, struct reachable_info *));
315 static int action_record_eq PARAMS ((const PTR,
316 const PTR));
317 static hashval_t action_record_hash PARAMS ((const PTR));
318 static int add_action_record PARAMS ((htab_t, int, int));
319 static int collect_one_action_chain PARAMS ((htab_t,
320 struct eh_region *));
321 static int add_call_site PARAMS ((rtx, int));
323 static void push_uleb128 PARAMS ((varray_type *,
324 unsigned int));
325 static void push_sleb128 PARAMS ((varray_type *, int));
326 static const char *eh_data_format_name PARAMS ((int));
327 #ifndef HAVE_AS_LEB128
328 static int dw2_size_of_call_site_table PARAMS ((void));
329 static int sjlj_size_of_call_site_table PARAMS ((void));
330 #endif
331 static void dw2_output_call_site_table PARAMS ((void));
332 static void sjlj_output_call_site_table PARAMS ((void));
335 /* Routine to see if exception handling is turned on.
336 DO_WARN is non-zero if we want to inform the user that exception
337 handling is turned off.
339 This is used to ensure that -fexceptions has been specified if the
340 compiler tries to use any exception-specific functions. */
 342 int
 343 doing_eh (do_warn)
344 int do_warn;
346 if (! flag_exceptions)
348 static int warned = 0;
349 if (! warned && do_warn)
351 error ("exception handling disabled, use -fexceptions to enable");
352 warned = 1;
354 return 0;
356 return 1;
360 void
361 init_eh ()
363 ggc_add_rtx_root (&exception_handler_labels, 1);
364 ggc_add_tree_root (&protect_cleanup_actions, 1);
366 if (! flag_exceptions)
367 return;
369 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
370 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
372 /* Create the SjLj_Function_Context structure. This should match
373 the definition in unwind-sjlj.c. */
374 if (USING_SJLJ_EXCEPTIONS)
376 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
378 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
379 ggc_add_tree_root (&sjlj_fc_type_node, 1);
381 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
382 build_pointer_type (sjlj_fc_type_node));
383 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
385 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
386 integer_type_node);
387 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
389 tmp = build_index_type (build_int_2 (4 - 1, 0));
390 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
391 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
392 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
394 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
395 ptr_type_node);
396 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
398 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
399 ptr_type_node);
400 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
402 #ifdef DONT_USE_BUILTIN_SETJMP
403 #ifdef JMP_BUF_SIZE
404 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
405 #else
 406       /* This should be large enough for most systems; if it is not,
 407          JMP_BUF_SIZE should be defined with the proper value.  It will
 408          also tend to be larger than necessary for most systems; a more
 409          optimal port will define JMP_BUF_SIZE.  */
410 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
411 #endif
412 #else
413 /* This is 2 for builtin_setjmp, plus whatever the target requires
414 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
415 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
416 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
417 #endif
418 tmp = build_index_type (tmp);
419 tmp = build_array_type (ptr_type_node, tmp);
420 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
421 #ifdef DONT_USE_BUILTIN_SETJMP
422 /* We don't know what the alignment requirements of the
 423          runtime's jmp_buf are.  Overestimate.  */
424 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
425 DECL_USER_ALIGN (f_jbuf) = 1;
426 #endif
427 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
429 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
430 TREE_CHAIN (f_prev) = f_cs;
431 TREE_CHAIN (f_cs) = f_data;
432 TREE_CHAIN (f_data) = f_per;
433 TREE_CHAIN (f_per) = f_lsda;
434 TREE_CHAIN (f_lsda) = f_jbuf;
436 layout_type (sjlj_fc_type_node);
438 /* Cache the interesting field offsets so that we have
439 easy access from rtl. */
440 sjlj_fc_call_site_ofs
441 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
442 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
443 sjlj_fc_data_ofs
444 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
445 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
446 sjlj_fc_personality_ofs
447 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
448 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
449 sjlj_fc_lsda_ofs
450 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
451 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
452 sjlj_fc_jbuf_ofs
453 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
454 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
458 void
459 init_eh_for_function ()
461 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
464 /* Mark EH for GC. */
466 static void
467 mark_eh_region (region)
468 struct eh_region *region;
470 if (! region)
471 return;
473 switch (region->type)
475 case ERT_CLEANUP:
476 ggc_mark_tree (region->u.cleanup.exp);
477 break;
478 case ERT_TRY:
479 ggc_mark_rtx (region->u.try.continue_label);
480 break;
481 case ERT_CATCH:
482 ggc_mark_tree (region->u.catch.type);
483 break;
484 case ERT_ALLOWED_EXCEPTIONS:
485 ggc_mark_tree (region->u.allowed.type_list);
486 break;
487 case ERT_MUST_NOT_THROW:
488 break;
489 case ERT_THROW:
490 ggc_mark_tree (region->u.throw.type);
491 break;
492 case ERT_FIXUP:
493 ggc_mark_tree (region->u.fixup.cleanup_exp);
494 break;
495 default:
496 abort ();
499 ggc_mark_rtx (region->label);
500 ggc_mark_rtx (region->last);
501 ggc_mark_rtx (region->landing_pad);
502 ggc_mark_rtx (region->post_landing_pad);
505 void
506 mark_eh_status (eh)
507 struct eh_status *eh;
509 int i;
511 if (eh == 0)
512 return;
514 /* If we've called collect_eh_region_array, use it. Otherwise walk
515 the tree non-recursively. */
516 if (eh->region_array)
518 for (i = eh->last_region_number; i > 0; --i)
520 struct eh_region *r = eh->region_array[i];
521 if (r && r->region_number == i)
522 mark_eh_region (r);
525 else if (eh->region_tree)
527 struct eh_region *r = eh->region_tree;
528 while (1)
530 mark_eh_region (r);
531 if (r->inner)
532 r = r->inner;
533 else if (r->next_peer)
534 r = r->next_peer;
535 else
537 do {
538 r = r->outer;
539 if (r == NULL)
540 goto tree_done;
541 } while (r->next_peer == NULL);
542 r = r->next_peer;
545 tree_done:;
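      /* For the region sketch given near struct eh_region, the walk
	 above visits R1, R3, R4, then backs out through OUTER to find
	 R2; INNER is always explored before NEXT_PEER.  */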
548 ggc_mark_tree (eh->protect_list);
549 ggc_mark_rtx (eh->filter);
550 ggc_mark_rtx (eh->exc_ptr);
551 ggc_mark_tree_varray (eh->ttype_data);
553 if (eh->call_site_data)
555 for (i = eh->call_site_data_used - 1; i >= 0; --i)
556 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
559 ggc_mark_rtx (eh->ehr_stackadj);
560 ggc_mark_rtx (eh->ehr_handler);
561 ggc_mark_rtx (eh->ehr_label);
563 ggc_mark_rtx (eh->sjlj_fc);
564 ggc_mark_rtx (eh->sjlj_exit_after);
567 void
568 free_eh_status (f)
569 struct function *f;
571 struct eh_status *eh = f->eh;
573 if (eh->region_array)
575 int i;
576 for (i = eh->last_region_number; i > 0; --i)
578 struct eh_region *r = eh->region_array[i];
579 /* Mind we don't free a region struct more than once. */
580 if (r && r->region_number == i)
581 free (r);
583 free (eh->region_array);
585 else if (eh->region_tree)
587 struct eh_region *next, *r = eh->region_tree;
588 while (1)
590 if (r->inner)
591 r = r->inner;
592 else if (r->next_peer)
594 next = r->next_peer;
595 free (r);
596 r = next;
598 else
600 do {
601 next = r->outer;
602 free (r);
603 r = next;
604 if (r == NULL)
605 goto tree_done;
606 } while (r->next_peer == NULL);
607 next = r->next_peer;
608 free (r);
609 r = next;
612 tree_done:;
615 VARRAY_FREE (eh->ttype_data);
616 VARRAY_FREE (eh->ehspec_data);
617 VARRAY_FREE (eh->action_record_data);
618 if (eh->call_site_data)
619 free (eh->call_site_data);
621 free (eh);
622 f->eh = NULL;
626 /* Start an exception handling region. All instructions emitted
627 after this point are considered to be part of the region until
628 expand_eh_region_end is invoked. */
630 void
631 expand_eh_region_start ()
633 struct eh_region *new_region;
634 struct eh_region *cur_region;
635 rtx note;
637 if (! doing_eh (0))
638 return;
640 /* We need a new block to record the start and end of the dynamic
641 handler chain. We also want to prevent jumping into a try block. */
642 expand_start_bindings (2);
644 /* But we don't need or want a new temporary level. */
645 pop_temp_slots ();
647 /* Mark this block as created by expand_eh_region_start. This is so
648 that we can pop the block with expand_end_bindings automatically. */
649 mark_block_as_eh_region ();
651 /* Insert a new blank region as a leaf in the tree. */
652 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
653 cur_region = cfun->eh->cur_region;
654 new_region->outer = cur_region;
655 if (cur_region)
657 new_region->next_peer = cur_region->inner;
658 cur_region->inner = new_region;
660 else
662 new_region->next_peer = cfun->eh->region_tree;
663 cfun->eh->region_tree = new_region;
665 cfun->eh->cur_region = new_region;
667 /* Create a note marking the start of this region. */
668 new_region->region_number = ++cfun->eh->last_region_number;
669 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
670 NOTE_EH_HANDLER (note) = new_region->region_number;
673 /* Common code to end a region. Returns the region just ended. */
675 static struct eh_region *
676 expand_eh_region_end ()
678 struct eh_region *cur_region = cfun->eh->cur_region;
679 rtx note;
 681   /* Create a note marking the end of this region.  */
682 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
683 NOTE_EH_HANDLER (note) = cur_region->region_number;
685 /* Pop. */
686 cfun->eh->cur_region = cur_region->outer;
688 /* If we have already started ending the bindings, don't recurse. */
689 if (is_eh_region ())
691 /* Because we don't need or want a new temporary level and
692 because we didn't create one in expand_eh_region_start,
693 create a fake one now to avoid removing one in
694 expand_end_bindings. */
695 push_temp_slots ();
697 mark_block_as_not_eh_region ();
699 expand_end_bindings (NULL_TREE, 0, 0);
702 return cur_region;
705 /* End an exception handling region for a cleanup. HANDLER is an
706 expression to expand for the cleanup. */
708 void
709 expand_eh_region_end_cleanup (handler)
710 tree handler;
712 struct eh_region *region;
713 rtx around_label;
715 if (! doing_eh (0))
716 return;
718 region = expand_eh_region_end ();
719 region->type = ERT_CLEANUP;
720 region->label = gen_label_rtx ();
721 region->u.cleanup.exp = handler;
723 around_label = gen_label_rtx ();
724 emit_jump (around_label);
726 emit_label (region->label);
728 if (protect_cleanup_actions)
729 expand_eh_region_start ();
731 expand_expr (handler, const0_rtx, VOIDmode, 0);
733 if (protect_cleanup_actions)
734 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
736 /* We delay the generation of the _Unwind_Resume until we generate
737 landing pads. We emit a marker here so as to get good control
738 flow data in the meantime. */
739 emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
740 emit_barrier ();
742 region->last = get_last_insn ();
744 emit_label (around_label);
 747 /* End an exception handling region for a try block, and prepare
748 for subsequent calls to expand_start_catch. */
750 void
751 expand_start_all_catch ()
753 struct eh_region *region;
755 if (! doing_eh (1))
756 return;
758 region = expand_eh_region_end ();
759 region->type = ERT_TRY;
760 region->u.try.prev_try = cfun->eh->try_region;
761 region->u.try.continue_label = gen_label_rtx ();
763 cfun->eh->try_region = region;
765 emit_jump (region->u.try.continue_label);
768 /* Begin a catch clause. TYPE is the type caught, or null if this is
769 a catch-all clause. */
771 void
772 expand_start_catch (type)
773 tree type;
775 struct eh_region *t, *c, *l;
777 if (! doing_eh (0))
778 return;
780 if (type)
781 add_type_for_runtime (type);
782 expand_eh_region_start ();
784 t = cfun->eh->try_region;
785 c = cfun->eh->cur_region;
786 c->type = ERT_CATCH;
787 c->u.catch.type = type;
788 c->label = gen_label_rtx ();
790 l = t->u.try.last_catch;
791 c->u.catch.prev_catch = l;
792 if (l)
793 l->u.catch.next_catch = c;
794 else
795 t->u.try.catch = c;
796 t->u.try.last_catch = c;
798 emit_label (c->label);
801 /* End a catch clause. Control will resume after the try/catch block. */
803 void
804 expand_end_catch ()
806 struct eh_region *try_region, *catch_region;
808 if (! doing_eh (0))
809 return;
811 catch_region = expand_eh_region_end ();
812 try_region = cfun->eh->try_region;
814 emit_jump (try_region->u.try.continue_label);
816 catch_region->last = get_last_insn ();
819 /* End a sequence of catch handlers for a try block. */
821 void
822 expand_end_all_catch ()
824 struct eh_region *try_region;
826 if (! doing_eh (0))
827 return;
829 try_region = cfun->eh->try_region;
830 cfun->eh->try_region = try_region->u.try.prev_try;
832 emit_label (try_region->u.try.continue_label);
835 /* End an exception region for an exception type filter. ALLOWED is a
836 TREE_LIST of types to be matched by the runtime. FAILURE is an
 837    expression to invoke if a mismatch occurs.  */
839 void
840 expand_eh_region_end_allowed (allowed, failure)
841 tree allowed, failure;
843 struct eh_region *region;
844 rtx around_label;
846 if (! doing_eh (0))
847 return;
849 region = expand_eh_region_end ();
850 region->type = ERT_ALLOWED_EXCEPTIONS;
851 region->u.allowed.type_list = allowed;
852 region->label = gen_label_rtx ();
854 for (; allowed ; allowed = TREE_CHAIN (allowed))
855 add_type_for_runtime (TREE_VALUE (allowed));
857 /* We must emit the call to FAILURE here, so that if this function
 858      throws a different exception, it will be processed by the
859 correct region. */
861 around_label = gen_label_rtx ();
862 emit_jump (around_label);
864 emit_label (region->label);
865 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
867 region->last = get_last_insn ();
869 emit_label (around_label);
872 /* End an exception region for a must-not-throw filter. FAILURE is an
 873    expression to invoke if an uncaught exception propagates this far.
875 This is conceptually identical to expand_eh_region_end_allowed with
876 an empty allowed list (if you passed "std::terminate" instead of
877 "__cxa_call_unexpected"), but they are represented differently in
878 the C++ LSDA. */
880 void
881 expand_eh_region_end_must_not_throw (failure)
882 tree failure;
884 struct eh_region *region;
885 rtx around_label;
887 if (! doing_eh (0))
888 return;
890 region = expand_eh_region_end ();
891 region->type = ERT_MUST_NOT_THROW;
892 region->label = gen_label_rtx ();
894 /* We must emit the call to FAILURE here, so that if this function
 895      throws a different exception, it will be processed by the
896 correct region. */
898 around_label = gen_label_rtx ();
899 emit_jump (around_label);
901 emit_label (region->label);
902 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
904 region->last = get_last_insn ();
906 emit_label (around_label);
909 /* End an exception region for a throw. No handling goes on here,
910 but it's the easiest way for the front-end to indicate what type
911 is being thrown. */
913 void
914 expand_eh_region_end_throw (type)
915 tree type;
917 struct eh_region *region;
919 if (! doing_eh (0))
920 return;
922 region = expand_eh_region_end ();
923 region->type = ERT_THROW;
924 region->u.throw.type = type;
927 /* End a fixup region. Within this region the cleanups for the immediately
928 enclosing region are _not_ run. This is used for goto cleanup to avoid
929 destroying an object twice.
931 This would be an extraordinarily simple prospect, were it not for the
932 fact that we don't actually know what the immediately enclosing region
933 is. This surprising fact is because expand_cleanups is currently
934 generating a sequence that it will insert somewhere else. We collect
935 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
937 void
938 expand_eh_region_end_fixup (handler)
939 tree handler;
941 struct eh_region *fixup;
943 if (! doing_eh (0))
944 return;
946 fixup = expand_eh_region_end ();
947 fixup->type = ERT_FIXUP;
948 fixup->u.fixup.cleanup_exp = handler;
 951 /* Return an rtx for a pointer to the exception object
952 within a handler. */
 954 rtx
 955 get_exception_pointer ()
957 rtx exc_ptr = cfun->eh->exc_ptr;
958 if (! exc_ptr)
960 exc_ptr = gen_reg_rtx (Pmode);
961 cfun->eh->exc_ptr = exc_ptr;
963 return exc_ptr;
967 /* Begin a region that will contain entries created with
968 add_partial_entry. */
970 void
971 begin_protect_partials ()
973 /* Push room for a new list. */
974 cfun->eh->protect_list
975 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
978 /* Start a new exception region for a region of code that has a
979 cleanup action and push the HANDLER for the region onto
980 protect_list. All of the regions created with add_partial_entry
981 will be ended when end_protect_partials is invoked. */
983 void
984 add_partial_entry (handler)
985 tree handler;
987 expand_eh_region_start ();
989 /* ??? This comment was old before the most recent rewrite. We
990 really ought to fix the callers at some point. */
991 /* For backwards compatibility, we allow callers to omit calls to
992 begin_protect_partials for the outermost region. So, we must
993 explicitly do so here. */
994 if (!cfun->eh->protect_list)
995 begin_protect_partials ();
997 /* Add this entry to the front of the list. */
998 TREE_VALUE (cfun->eh->protect_list)
999 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1002 /* End all the pending exception regions on protect_list. */
1004 void
1005 end_protect_partials ()
1007 tree t;
1009 /* ??? This comment was old before the most recent rewrite. We
1010 really ought to fix the callers at some point. */
1011 /* For backwards compatibility, we allow callers to omit the call to
1012 begin_protect_partials for the outermost region. So,
1013 PROTECT_LIST may be NULL. */
1014 if (!cfun->eh->protect_list)
1015 return;
1017 /* Pop the topmost entry. */
1018 t = TREE_VALUE (cfun->eh->protect_list);
1019 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1021 /* End all the exception regions. */
1022 for (; t; t = TREE_CHAIN (t))
1023 expand_eh_region_end_cleanup (TREE_VALUE (t));
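/* An illustrative pairing of the three partial-entry routines, with
   hypothetical cleanup expressions; not code from this file:

       begin_protect_partials ();
         ... construct first subobject ...
       add_partial_entry (cleanup_for_first);
         ... construct second subobject ...
       add_partial_entry (cleanup_for_second);
         ... construction complete ...
       end_protect_partials ();
*/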
1027 /* This section is for the exception handling specific optimization pass. */
 1029 /* Provide random access to the exception region tree.  It's just as
1030 collect the regions this way as in expand_eh_region_start, but
1031 without having to realloc memory. */
1033 static void
1034 collect_eh_region_array ()
1036 struct eh_region **array, *i;
1038 i = cfun->eh->region_tree;
1039 if (! i)
1040 return;
1042 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1043 cfun->eh->region_array = array;
1045 while (1)
1047 array[i->region_number] = i;
1049 /* If there are sub-regions, process them. */
1050 if (i->inner)
1051 i = i->inner;
1052 /* If there are peers, process them. */
1053 else if (i->next_peer)
1054 i = i->next_peer;
1055 /* Otherwise, step back up the tree to the next peer. */
1056 else
1058 do {
1059 i = i->outer;
1060 if (i == NULL)
1061 return;
1062 } while (i->next_peer == NULL);
1063 i = i->next_peer;
1068 static void
1069 resolve_fixup_regions ()
1071 int i, j, n = cfun->eh->last_region_number;
1073 for (i = 1; i <= n; ++i)
1075 struct eh_region *fixup = cfun->eh->region_array[i];
1076 struct eh_region *cleanup;
1078 if (! fixup || fixup->type != ERT_FIXUP)
1079 continue;
1081 for (j = 1; j <= n; ++j)
1083 cleanup = cfun->eh->region_array[j];
1084 if (cleanup->type == ERT_CLEANUP
1085 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1086 break;
1088 if (j > n)
1089 abort ();
1091 fixup->u.fixup.real_region = cleanup->outer;
1095 /* Now that we've discovered what region actually encloses a fixup,
1096 we can shuffle pointers and remove them from the tree. */
1098 static void
1099 remove_fixup_regions ()
1101 int i;
1103 for (i = cfun->eh->last_region_number; i > 0; --i)
1105 struct eh_region *fixup = cfun->eh->region_array[i];
1107 if (! fixup)
1108 continue;
1110 /* Allow GC to maybe free some memory. */
1111 if (fixup->type == ERT_CLEANUP)
1112 fixup->u.cleanup.exp = NULL_TREE;
1114 if (fixup->type != ERT_FIXUP)
1115 continue;
1117 if (fixup->inner)
1119 struct eh_region *parent, *p, **pp;
1121 parent = fixup->u.fixup.real_region;
1123 /* Fix up the children's parent pointers; find the end of
1124 the list. */
1125 for (p = fixup->inner; ; p = p->next_peer)
1127 p->outer = parent;
1128 if (! p->next_peer)
1129 break;
1132 /* In the tree of cleanups, only outer-inner ordering matters.
1133 So link the children back in anywhere at the correct level. */
1134 if (parent)
1135 pp = &parent->inner;
1136 else
1137 pp = &cfun->eh->region_tree;
1138 p->next_peer = *pp;
1139 *pp = fixup->inner;
1140 fixup->inner = NULL;
1143 remove_eh_handler (fixup);
1147 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1148 can_throw instruction in the region. */
1150 static void
1151 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1152 rtx *pinsns;
1153 int *orig_sp;
1154 int cur;
1156 int *sp = orig_sp;
1157 rtx insn, next;
1159 for (insn = *pinsns; insn ; insn = next)
1161 next = NEXT_INSN (insn);
1162 if (GET_CODE (insn) == NOTE)
1164 int kind = NOTE_LINE_NUMBER (insn);
1165 if (kind == NOTE_INSN_EH_REGION_BEG
1166 || kind == NOTE_INSN_EH_REGION_END)
1168 if (kind == NOTE_INSN_EH_REGION_BEG)
1170 struct eh_region *r;
1172 *sp++ = cur;
1173 cur = NOTE_EH_HANDLER (insn);
1175 r = cfun->eh->region_array[cur];
1176 if (r->type == ERT_FIXUP)
1178 r = r->u.fixup.real_region;
1179 cur = r ? r->region_number : 0;
1181 else if (r->type == ERT_CATCH)
1183 r = r->outer;
1184 cur = r ? r->region_number : 0;
1187 else
1188 cur = *--sp;
1190 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1191 requires extra care to adjust sequence start. */
1192 if (insn == *pinsns)
1193 *pinsns = next;
1194 remove_insn (insn);
1195 continue;
1198 else if (INSN_P (insn))
1200 if (cur > 0
1201 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1202 /* Calls can always potentially throw exceptions, unless
1203 they have a REG_EH_REGION note with a value of 0 or less.
1204 Which should be the only possible kind so far. */
1205 && (GET_CODE (insn) == CALL_INSN
1206 /* If we wanted exceptions for non-call insns, then
1207 any may_trap_p instruction could throw. */
1208 || (flag_non_call_exceptions
1209 && may_trap_p (PATTERN (insn)))))
1211 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1212 REG_NOTES (insn));
1215 if (GET_CODE (insn) == CALL_INSN
1216 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1218 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1219 sp, cur);
1220 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1221 sp, cur);
1222 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1223 sp, cur);
1228 if (sp != orig_sp)
1229 abort ();
1232 void
1233 convert_from_eh_region_ranges ()
1235 int *stack;
1236 rtx insns;
1238 collect_eh_region_array ();
1239 resolve_fixup_regions ();
1241 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1242 insns = get_insns ();
1243 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1244 free (stack);
1246 remove_fixup_regions ();
1249 void
1250 find_exception_handler_labels ()
1252 rtx list = NULL_RTX;
1253 int i;
1255 free_EXPR_LIST_list (&exception_handler_labels);
1257 if (cfun->eh->region_tree == NULL)
1258 return;
1260 for (i = cfun->eh->last_region_number; i > 0; --i)
1262 struct eh_region *region = cfun->eh->region_array[i];
1263 rtx lab;
1265 if (! region)
1266 continue;
1267 if (cfun->eh->built_landing_pads)
1268 lab = region->landing_pad;
1269 else
1270 lab = region->label;
1272 if (lab)
1273 list = alloc_EXPR_LIST (0, lab, list);
1276 /* For sjlj exceptions, need the return label to remain live until
1277 after landing pad generation. */
1278 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1279 list = alloc_EXPR_LIST (0, return_label, list);
1281 exception_handler_labels = list;
1285 static struct eh_region *
1286 duplicate_eh_region_1 (o, map)
1287 struct eh_region *o;
1288 struct inline_remap *map;
1290 struct eh_region *n
1291 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1293 n->region_number = o->region_number + cfun->eh->last_region_number;
1294 n->type = o->type;
1296 switch (n->type)
1298 case ERT_CLEANUP:
1299 case ERT_MUST_NOT_THROW:
1300 break;
1302 case ERT_TRY:
1303 if (o->u.try.continue_label)
1304 n->u.try.continue_label
1305 = get_label_from_map (map,
1306 CODE_LABEL_NUMBER (o->u.try.continue_label));
1307 break;
1309 case ERT_CATCH:
1310 n->u.catch.type = o->u.catch.type;
1311 break;
1313 case ERT_ALLOWED_EXCEPTIONS:
1314 n->u.allowed.type_list = o->u.allowed.type_list;
1315 break;
1317 case ERT_THROW:
1318 n->u.throw.type = o->u.throw.type;
1320 default:
1321 abort ();
1324 if (o->label)
1325 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1326 if (o->last)
1328 n->last = map->insn_map[INSN_UID (o->last)];
1329 if (n->last == NULL)
1330 abort ();
1333 return n;
1336 static void
1337 duplicate_eh_region_2 (o, n_array)
1338 struct eh_region *o;
1339 struct eh_region **n_array;
1341 struct eh_region *n = n_array[o->region_number];
1343 switch (n->type)
1345 case ERT_TRY:
1346 n->u.try.catch = n_array[o->u.try.catch->region_number];
1347 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1348 break;
1350 case ERT_CATCH:
1351 if (o->u.catch.next_catch)
1352 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1353 if (o->u.catch.prev_catch)
1354 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1355 break;
1357 default:
1358 break;
1361 if (o->outer)
1362 n->outer = n_array[o->outer->region_number];
1363 if (o->inner)
1364 n->inner = n_array[o->inner->region_number];
1365 if (o->next_peer)
1366 n->next_peer = n_array[o->next_peer->region_number];
 1369 int
 1370 duplicate_eh_regions (ifun, map)
1371 struct function *ifun;
1372 struct inline_remap *map;
1374 int ifun_last_region_number = ifun->eh->last_region_number;
1375 struct eh_region **n_array, *root, *cur;
1376 int i;
1378 if (ifun_last_region_number == 0)
1379 return 0;
1381 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1383 for (i = 1; i <= ifun_last_region_number; ++i)
1385 cur = ifun->eh->region_array[i];
1386 if (!cur || cur->region_number != i)
1387 continue;
1388 n_array[i] = duplicate_eh_region_1 (cur, map);
1390 for (i = 1; i <= ifun_last_region_number; ++i)
1392 cur = ifun->eh->region_array[i];
1393 if (!cur || cur->region_number != i)
1394 continue;
1395 duplicate_eh_region_2 (cur, n_array);
1398 root = n_array[ifun->eh->region_tree->region_number];
1399 cur = cfun->eh->cur_region;
1400 if (cur)
1402 struct eh_region *p = cur->inner;
1403 if (p)
1405 while (p->next_peer)
1406 p = p->next_peer;
1407 p->next_peer = root;
1409 else
1410 cur->inner = root;
1412 for (i = 1; i <= ifun_last_region_number; ++i)
1413 if (n_array[i]->outer == NULL)
1414 n_array[i]->outer = cur;
1416 else
1418 struct eh_region *p = cfun->eh->region_tree;
1419 if (p)
1421 while (p->next_peer)
1422 p = p->next_peer;
1423 p->next_peer = root;
1425 else
1426 cfun->eh->region_tree = root;
1429 free (n_array);
1431 i = cfun->eh->last_region_number;
1432 cfun->eh->last_region_number = i + ifun_last_region_number;
1433 return i;
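/* The value returned above is the bias that callers add to region
   numbers copied from IFUN: inlined region N becomes region N + BIAS
   in the current function, matching the renumbering performed in
   duplicate_eh_region_1.  */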
1437 /* ??? Move from tree.c to tree.h. */
1438 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
1440 static int
1441 t2r_eq (pentry, pdata)
1442 const PTR pentry;
1443 const PTR pdata;
1445 tree entry = (tree) pentry;
1446 tree data = (tree) pdata;
1448 return TREE_PURPOSE (entry) == data;
1451 static hashval_t
1452 t2r_hash (pentry)
1453 const PTR pentry;
1455 tree entry = (tree) pentry;
1456 return TYPE_HASH (TREE_PURPOSE (entry));
1459 static int
1460 t2r_mark_1 (slot, data)
1461 PTR *slot;
1462 PTR data ATTRIBUTE_UNUSED;
1464 tree contents = (tree) *slot;
1465 ggc_mark_tree (contents);
1466 return 1;
1469 static void
1470 t2r_mark (addr)
1471 PTR addr;
1473 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1476 static void
1477 add_type_for_runtime (type)
1478 tree type;
1480 tree *slot;
1482 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1483 TYPE_HASH (type), INSERT);
1484 if (*slot == NULL)
1486 tree runtime = (*lang_eh_runtime_type) (type);
1487 *slot = tree_cons (type, runtime, NULL_TREE);
1491 static tree
1492 lookup_type_for_runtime (type)
1493 tree type;
1495 tree *slot;
1497 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1498 TYPE_HASH (type), NO_INSERT);
 1500   /* We should always have inserted the data earlier.  */
1501 return TREE_VALUE (*slot);
1505 /* Represent an entry in @TTypes for either catch actions
1506 or exception filter actions. */
1507 struct ttypes_filter
1509 tree t;
1510 int filter;
1513 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1514 (a tree) for a @TTypes type node we are thinking about adding. */
1516 static int
1517 ttypes_filter_eq (pentry, pdata)
1518 const PTR pentry;
1519 const PTR pdata;
1521 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1522 tree data = (tree) pdata;
1524 return entry->t == data;
1527 static hashval_t
1528 ttypes_filter_hash (pentry)
1529 const PTR pentry;
1531 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1532 return TYPE_HASH (entry->t);
1535 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1536 exception specification list we are thinking about adding. */
1537 /* ??? Currently we use the type lists in the order given. Someone
1538 should put these in some canonical order. */
1540 static int
1541 ehspec_filter_eq (pentry, pdata)
1542 const PTR pentry;
1543 const PTR pdata;
1545 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1546 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1548 return type_list_equal (entry->t, data->t);
1551 /* Hash function for exception specification lists. */
1553 static hashval_t
1554 ehspec_filter_hash (pentry)
1555 const PTR pentry;
1557 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1558 hashval_t h = 0;
1559 tree list;
1561 for (list = entry->t; list ; list = TREE_CHAIN (list))
1562 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1563 return h;
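/* The combining step above is, assuming a 32-bit hashval_t, a 5-bit
   left rotate of H plus each type's hash: a cheap mix that remains
   sensitive to the order of the list.  */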
1566 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1567 up the search. Return the filter value to be used. */
1569 static int
1570 add_ttypes_entry (ttypes_hash, type)
1571 htab_t ttypes_hash;
1572 tree type;
1574 struct ttypes_filter **slot, *n;
1576 slot = (struct ttypes_filter **)
1577 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1579 if ((n = *slot) == NULL)
 1581       /* Filter value is a 1-based table index.  */
1583 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1584 n->t = type;
1585 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1586 *slot = n;
1588 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1591 return n->filter;
1594 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1595 to speed up the search. Return the filter value to be used. */
1597 static int
1598 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1599 htab_t ehspec_hash;
1600 htab_t ttypes_hash;
1601 tree list;
1603 struct ttypes_filter **slot, *n;
1604 struct ttypes_filter dummy;
1606 dummy.t = list;
1607 slot = (struct ttypes_filter **)
1608 htab_find_slot (ehspec_hash, &dummy, INSERT);
1610 if ((n = *slot) == NULL)
 1612       /* Filter value is a -1-based byte index into a uleb128 buffer.  */
1614 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1615 n->t = list;
1616 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1617 *slot = n;
1619 /* Look up each type in the list and encode its filter
1620 value as a uleb128. Terminate the list with 0. */
1621 for (; list ; list = TREE_CHAIN (list))
1622 push_uleb128 (&cfun->eh->ehspec_data,
1623 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1624 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1627 return n->filter;
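/* A worked example, assuming types A and B were assigned ttype
   filters 1 and 2: the specification list (A, B) pushes the uleb128
   bytes 0x01 0x02 followed by the 0x00 terminator onto ehspec_data,
   and the filter returned is the negated 1-based offset of the first
   of those bytes within the buffer.  */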
1630 /* Generate the action filter values to be used for CATCH and
1631 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1632 we use lots of landing pads, and so every type or list can share
1633 the same filter value, which saves table space. */
1635 static void
1636 assign_filter_values ()
1638 int i;
1639 htab_t ttypes, ehspec;
1641 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1642 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1644 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1645 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1647 for (i = cfun->eh->last_region_number; i > 0; --i)
1649 struct eh_region *r = cfun->eh->region_array[i];
1651 /* Mind we don't process a region more than once. */
1652 if (!r || r->region_number != i)
1653 continue;
1655 switch (r->type)
1657 case ERT_CATCH:
1658 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1659 break;
1661 case ERT_ALLOWED_EXCEPTIONS:
1662 r->u.allowed.filter
1663 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1664 break;
1666 default:
1667 break;
1671 htab_delete (ttypes);
1672 htab_delete (ehspec);
1675 static void
1676 build_post_landing_pads ()
1678 int i;
1680 for (i = cfun->eh->last_region_number; i > 0; --i)
1682 struct eh_region *region = cfun->eh->region_array[i];
1683 rtx seq;
1685 /* Mind we don't process a region more than once. */
1686 if (!region || region->region_number != i)
1687 continue;
1689 switch (region->type)
1691 case ERT_TRY:
1692 /* ??? Collect the set of all non-overlapping catch handlers
1693 all the way up the chain until blocked by a cleanup. */
1694 /* ??? Outer try regions can share landing pads with inner
1695 try regions if the types are completely non-overlapping,
 1696          and there are no intervening cleanups.  */
1698 region->post_landing_pad = gen_label_rtx ();
1700 start_sequence ();
1702 emit_label (region->post_landing_pad);
1704 /* ??? It is mighty inconvenient to call back into the
1705 switch statement generation code in expand_end_case.
1706 Rapid prototyping sez a sequence of ifs. */
1708 struct eh_region *c;
1709 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1711 /* ??? _Unwind_ForcedUnwind wants no match here. */
1712 if (c->u.catch.type == NULL)
1713 emit_jump (c->label);
1714 else
1715 emit_cmp_and_jump_insns (cfun->eh->filter,
1716 GEN_INT (c->u.catch.filter),
1717 EQ, NULL_RTX, word_mode,
1718 0, 0, c->label);
1722 seq = get_insns ();
1723 end_sequence ();
1725 region->last = emit_insns_before (seq, region->u.try.catch->label);
1726 break;
1728 case ERT_ALLOWED_EXCEPTIONS:
1729 region->post_landing_pad = gen_label_rtx ();
1731 start_sequence ();
1733 emit_label (region->post_landing_pad);
1735 emit_cmp_and_jump_insns (cfun->eh->filter,
1736 GEN_INT (region->u.allowed.filter),
1737 EQ, NULL_RTX, word_mode, 0, 0,
1738 region->label);
1740 seq = get_insns ();
1741 end_sequence ();
1743 region->last = emit_insns_before (seq, region->label);
1744 break;
1746 case ERT_CLEANUP:
1747 case ERT_MUST_NOT_THROW:
1748 region->post_landing_pad = region->label;
1749 break;
1751 case ERT_CATCH:
1752 case ERT_THROW:
1753 /* Nothing to do. */
1754 break;
1756 default:
1757 abort ();
1762 static void
1763 connect_post_landing_pads ()
1765 int i;
1767 for (i = cfun->eh->last_region_number; i > 0; --i)
1769 struct eh_region *region = cfun->eh->region_array[i];
1770 struct eh_region *outer;
1771 rtx before = NULL_RTX, after = NULL_RTX, seq;
1773 /* Mind we don't process a region more than once. */
1774 if (!region || region->region_number != i)
1775 continue;
1777 switch (region->type)
1779 case ERT_CLEANUP:
1780 after = region->last;
1781 if (GET_CODE (after) == BARRIER
1782 && GET_CODE (PREV_INSN (after)) == JUMP_INSN
1783 && GET_CODE (PATTERN (PREV_INSN (after))) == RESX)
1785 before = PREV_INSN (after);
1786 after = NULL_RTX;
1788 break;
1790 case ERT_TRY:
1791 after = region->last;
1792 break;
1794 case ERT_ALLOWED_EXCEPTIONS:
1795 before = region->label;
1796 break;
1798 case ERT_MUST_NOT_THROW:
1799 case ERT_CATCH:
1800 case ERT_THROW:
1801 continue;
1803 default:
1804 abort ();
1807 /* If there's no fallthru, no need to add branches. */
1808 if (after && GET_CODE (after) == BARRIER)
1809 continue;
1811 /* Search for another landing pad in this function. */
1812 for (outer = region->outer; outer ; outer = outer->outer)
1813 if (outer->post_landing_pad)
1814 break;
1816 start_sequence ();
1818 if (outer)
1819 emit_jump (outer->post_landing_pad);
1820 else
1821 emit_library_call (unwind_resume_libfunc, LCT_NORETURN,
1822 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1824 seq = get_insns ();
1825 end_sequence ();
1826 if (before)
1827 emit_insns_before (seq, before);
1828 else
1829 emit_insns_after (seq, after);
1834 static void
1835 dw2_build_landing_pads ()
1837 int i, j;
1839 for (i = cfun->eh->last_region_number; i > 0; --i)
1841 struct eh_region *region = cfun->eh->region_array[i];
1842 rtx seq;
1844 /* Mind we don't process a region more than once. */
1845 if (!region || region->region_number != i)
1846 continue;
1848 if (region->type != ERT_CLEANUP
1849 && region->type != ERT_TRY
1850 && region->type != ERT_ALLOWED_EXCEPTIONS)
1851 continue;
1853 start_sequence ();
1855 region->landing_pad = gen_label_rtx ();
1856 emit_label (region->landing_pad);
1858 #ifdef HAVE_exception_receiver
1859 if (HAVE_exception_receiver)
1860 emit_insn (gen_exception_receiver ());
1861 else
1862 #endif
1863 #ifdef HAVE_nonlocal_goto_receiver
1864 if (HAVE_nonlocal_goto_receiver)
1865 emit_insn (gen_nonlocal_goto_receiver ());
1866 else
1867 #endif
1868 { /* Nothing */ }
1870 /* If the eh_return data registers are call-saved, then we
1871 won't have considered them clobbered from the call that
1872 threw. Kill them now. */
1873 for (j = 0; ; ++j)
1875 unsigned r = EH_RETURN_DATA_REGNO (j);
1876 if (r == INVALID_REGNUM)
1877 break;
1878 if (! call_used_regs[r])
1879 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1882 emit_move_insn (cfun->eh->exc_ptr,
1883 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1884 emit_move_insn (cfun->eh->filter,
1885 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (1)));
1887 seq = get_insns ();
1888 end_sequence ();
1890 emit_insns_before (seq, region->post_landing_pad);
1895 struct sjlj_lp_info
1897 int directly_reachable;
1898 int action_index;
1899 int dispatch_index;
1900 int call_site_index;
1903 static bool
1904 sjlj_find_directly_reachable_regions (lp_info)
1905 struct sjlj_lp_info *lp_info;
1907 rtx insn;
1908 bool found_one = false;
1910 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1912 struct eh_region *region;
1913 tree type_thrown;
1914 rtx note;
1916 if (! INSN_P (insn))
1917 continue;
1919 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1920 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1921 continue;
1923 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1925 type_thrown = NULL_TREE;
1926 if (region->type == ERT_THROW)
1928 type_thrown = region->u.throw.type;
1929 region = region->outer;
1932 /* Find the first containing region that might handle the exception.
1933 That's the landing pad to which we will transfer control. */
1934 for (; region; region = region->outer)
1935 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1936 break;
1938 if (region)
1940 lp_info[region->region_number].directly_reachable = 1;
1941 found_one = true;
1945 return found_one;
1948 static void
1949 sjlj_assign_call_site_values (dispatch_label, lp_info)
1950 rtx dispatch_label;
1951 struct sjlj_lp_info *lp_info;
1953 htab_t ar_hash;
1954 int i, index;
1956 /* First task: build the action table. */
1958 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1959 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1961 for (i = cfun->eh->last_region_number; i > 0; --i)
1962 if (lp_info[i].directly_reachable)
1964 struct eh_region *r = cfun->eh->region_array[i];
1965 r->landing_pad = dispatch_label;
1966 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1967 if (lp_info[i].action_index != -1)
1968 cfun->uses_eh_lsda = 1;
1971 htab_delete (ar_hash);
1973 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1974 landing pad label for the region. For sjlj though, there is one
1975 common landing pad from which we dispatch to the post-landing pads.
1977 A region receives a dispatch index if it is directly reachable
1978 and requires in-function processing. Regions that share post-landing
 1979      pads may share dispatch indices.  */
1980 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1981 (see build_post_landing_pads) so we don't bother checking for it. */
1983 index = 0;
1984 for (i = cfun->eh->last_region_number; i > 0; --i)
1985 if (lp_info[i].directly_reachable
1986 && lp_info[i].action_index >= 0)
1987 lp_info[i].dispatch_index = index++;
 1989   /* Finally: assign call-site values.  In dwarf2 terms, this would be
1990 the region number assigned by convert_to_eh_region_ranges, but
1991 handles no-action and must-not-throw differently. */
1993 call_site_base = 1;
1994 for (i = cfun->eh->last_region_number; i > 0; --i)
1995 if (lp_info[i].directly_reachable)
1997 int action = lp_info[i].action_index;
1999 /* Map must-not-throw to otherwise unused call-site index 0. */
2000 if (action == -2)
2001 index = 0;
2002 /* Map no-action to otherwise unused call-site index -1. */
2003 else if (action == -1)
2004 index = -1;
2005 /* Otherwise, look it up in the table. */
2006 else
2007 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2009 lp_info[i].call_site_index = index;
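  /* Summary of the mapping established above; the action values come
     from collect_one_action_chain:

	 action_index -2 (must not throw)  ->  call-site index 0
	 action_index -1 (no action)       ->  call-site index -1
	 action_index >= 0                 ->  index from add_call_site,
					       leaving positive indices
					       for real entries (note
					       call_site_base is set to
					       1 above).  */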
2013 static void
2014 sjlj_mark_call_sites (lp_info)
2015 struct sjlj_lp_info *lp_info;
2017 int last_call_site = -2;
2018 rtx insn, mem;
2020 mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2021 plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
2022 sjlj_fc_call_site_ofs));
2024 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2026 struct eh_region *region;
2027 int this_call_site;
2028 rtx note, before, p;
2030 /* Reset value tracking at extended basic block boundaries. */
2031 if (GET_CODE (insn) == CODE_LABEL)
2032 last_call_site = -2;
2034 if (! INSN_P (insn))
2035 continue;
2037 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2038 if (!note)
2040 /* Calls (and trapping insns) without notes are outside any
2041 exception handling region in this function. Mark them as
2042 no action. */
2043 if (GET_CODE (insn) == CALL_INSN
2044 || (flag_non_call_exceptions
2045 && may_trap_p (PATTERN (insn))))
2046 this_call_site = -1;
2047 else
2048 continue;
2050 else
2052 /* Calls that are known to not throw need not be marked. */
2053 if (INTVAL (XEXP (note, 0)) <= 0)
2054 continue;
2056 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2057 this_call_site = lp_info[region->region_number].call_site_index;
2060 if (this_call_site == last_call_site)
2061 continue;
 2063       /* Don't separate a call from its argument loads.  */
2064 before = insn;
2065 if (GET_CODE (insn) == CALL_INSN)
2067 HARD_REG_SET parm_regs;
2068 int nparm_regs;
2070 /* Since different machines initialize their parameter registers
2071 in different orders, assume nothing. Collect the set of all
2072 parameter registers. */
2073 CLEAR_HARD_REG_SET (parm_regs);
2074 nparm_regs = 0;
2075 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
2076 if (GET_CODE (XEXP (p, 0)) == USE
2077 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
2079 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
2080 abort ();
2082 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
2083 nparm_regs++;
2086 /* Search backward for the first set of a register in this set. */
2087 while (nparm_regs)
2089 before = PREV_INSN (before);
2091 /* Given that we've done no other optimizations yet,
2092 the arguments should be immediately available. */
2093 if (GET_CODE (before) == CODE_LABEL)
2094 abort ();
2096 p = single_set (before);
2097 if (p && GET_CODE (SET_DEST (p)) == REG
2098 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
2099 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
2101 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
2102 nparm_regs--;
2107 start_sequence ();
2108 emit_move_insn (mem, GEN_INT (this_call_site));
2109 p = get_insns ();
2110 end_sequence ();
2112 emit_insns_before (p, before);
2113 last_call_site = this_call_site;
2117 /* Construct the SjLj_Function_Context. */
2119 static void
2120 sjlj_emit_function_enter (dispatch_label)
2121 rtx dispatch_label;
2123 rtx fn_begin, fc, mem, seq;
2125 fc = cfun->eh->sjlj_fc;
2127 start_sequence ();
2129 mem = change_address (fc, Pmode,
2130 plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
2131 emit_move_insn (mem, eh_personality_libfunc);
2133 mem = change_address (fc, Pmode,
2134 plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
2135 if (cfun->uses_eh_lsda)
2137 char buf[20];
2138 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2139 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2141 else
2142 emit_move_insn (mem, const0_rtx);
2144 #ifdef DONT_USE_BUILTIN_SETJMP
2146 rtx x, note;
2147 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2148 TYPE_MODE (integer_type_node), 1,
2149 plus_constant (XEXP (fc, 0),
2150 sjlj_fc_jbuf_ofs), Pmode);
2152 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2153 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2155 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2156 TYPE_MODE (integer_type_node), 0, 0,
2157 dispatch_label);
2159 #else
2160 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2161 dispatch_label);
2162 #endif
2164 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2165 1, XEXP (fc, 0), Pmode);
2167 seq = get_insns ();
2168 end_sequence ();
2170 /* ??? Instead of doing this at the beginning of the function,
2171 do this in a block that is at loop level 0 and dominates all
2172 can_throw_internal instructions. */
2174 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2175 if (GET_CODE (fn_begin) == NOTE
2176 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2177 break;
2178 emit_insns_after (seq, fn_begin);
2181 /* Call back from expand_function_end to know where we should put
2182 the call to unwind_sjlj_unregister_libfunc if needed. */
2184 void
2185 sjlj_emit_function_exit_after (after)
2186 rtx after;
2188 cfun->eh->sjlj_exit_after = after;
2191 static void
2192 sjlj_emit_function_exit ()
2194 rtx seq;
2196 start_sequence ();
2198 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2199 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2201 seq = get_insns ();
2202 end_sequence ();
2204 /* ??? Really this can be done in any block at loop level 0 that
2205 post-dominates all can_throw_internal instructions. This is
2206 the last possible moment. */
2208 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2211 static void
2212 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2213 rtx dispatch_label;
2214 struct sjlj_lp_info *lp_info;
2216 int i, first_reachable;
2217 rtx mem, dispatch, seq, fc;
2219 fc = cfun->eh->sjlj_fc;
2221 start_sequence ();
2223 emit_label (dispatch_label);
2225 #ifndef DONT_USE_BUILTIN_SETJMP
2226 expand_builtin_setjmp_receiver (dispatch_label);
2227 #endif
2229 /* Load up dispatch index, exc_ptr and filter values from the
2230 function context. */
2231 mem = change_address (fc, TYPE_MODE (integer_type_node),
2232 plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
2233 dispatch = copy_to_reg (mem);
2235 mem = change_address (fc, word_mode,
2236 plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
2237 if (word_mode != Pmode)
2239 #ifdef POINTERS_EXTEND_UNSIGNED
2240 mem = convert_memory_address (Pmode, mem);
2241 #else
2242 mem = convert_to_mode (Pmode, mem, 0);
2243 #endif
2245 emit_move_insn (cfun->eh->exc_ptr, mem);
2247 mem = change_address (fc, word_mode,
2248 plus_constant (XEXP (fc, 0),
2249 sjlj_fc_data_ofs + UNITS_PER_WORD));
2250 emit_move_insn (cfun->eh->filter, mem);
2252 /* Jump to one of the directly reachable regions. */
2253 /* ??? This really ought to be using a switch statement. */
2255 first_reachable = 0;
2256 for (i = cfun->eh->last_region_number; i > 0; --i)
2258 if (! lp_info[i].directly_reachable
2259 || lp_info[i].action_index < 0)
2260 continue;
2262 if (! first_reachable)
2264 first_reachable = i;
2265 continue;
2268 emit_cmp_and_jump_insns (dispatch,
2269 GEN_INT (lp_info[i].dispatch_index), EQ,
2270 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2271 cfun->eh->region_array[i]->post_landing_pad);
2274 seq = get_insns ();
2275 end_sequence ();
2277 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2278 ->post_landing_pad));
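/* Illustrative shape of the emitted dispatch (a sketch with made-up
   label names, not a literal dump): after the receiver, the call-site
   index loaded from the function context is compared against each
   reachable region's dispatch index in turn,

       if (dispatch == 2) goto post_landing_pad_2;
       if (dispatch == 3) goto post_landing_pad_3;

   and the whole sequence is emitted immediately before the first
   reachable region's post landing pad, so that case needs no
   comparison and simply falls through.  */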
2281 static void
2282 sjlj_build_landing_pads ()
2284 struct sjlj_lp_info *lp_info;
2286 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2287 sizeof (struct sjlj_lp_info));
2289 if (sjlj_find_directly_reachable_regions (lp_info))
2291 rtx dispatch_label = gen_label_rtx ();
2293 cfun->eh->sjlj_fc
2294 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2295 int_size_in_bytes (sjlj_fc_type_node),
2296 TYPE_ALIGN (sjlj_fc_type_node));
2298 sjlj_assign_call_site_values (dispatch_label, lp_info);
2299 sjlj_mark_call_sites (lp_info);
2301 sjlj_emit_function_enter (dispatch_label);
2302 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2303 sjlj_emit_function_exit ();
2306 free (lp_info);
2309 void
2310 finish_eh_generation ()
2312 /* Nothing to do if no regions created. */
2313 if (cfun->eh->region_tree == NULL)
2314 return;
2316 /* The object here is to provide find_basic_blocks with detailed
2317 information (via reachable_handlers) on how exception control
2318 flows within the function. In this first pass, we can include
2319 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2320 regions, and hope that it will be useful in deleting unreachable
2321 handlers. Subsequently, we will generate landing pads which will
2322 connect many of the handlers, and then type information will not
2323 be effective. Still, this is a win over previous implementations. */
2325 jump_optimize_minimal (get_insns ());
2326 find_basic_blocks (get_insns (), max_reg_num (), 0);
2327 cleanup_cfg ();
2329 /* These registers are used by the landing pads. Make sure they
2330 have been generated. */
2331 get_exception_pointer ();
2332 cfun->eh->filter = gen_reg_rtx (word_mode);
2334 /* Construct the landing pads. */
2336 assign_filter_values ();
2337 build_post_landing_pads ();
2338 connect_post_landing_pads ();
2339 if (USING_SJLJ_EXCEPTIONS)
2340 sjlj_build_landing_pads ();
2341 else
2342 dw2_build_landing_pads ();
2344 cfun->eh->built_landing_pads = 1;
2346 /* We've totally changed the CFG. Start over. */
2347 find_exception_handler_labels ();
2348 jump_optimize_minimal (get_insns ());
2349 find_basic_blocks (get_insns (), max_reg_num (), 0);
2350 cleanup_cfg ();
2353 /* This section handles removing dead code for flow. */
2355 /* Remove LABEL from the exception_handler_labels list. */
2357 static void
2358 remove_exception_handler_label (label)
2359 rtx label;
2361 rtx *pl, l;
2363 for (pl = &exception_handler_labels, l = *pl;
2364 XEXP (l, 0) != label;
2365 pl = &XEXP (l, 1), l = *pl)
2366 continue;
2368 *pl = XEXP (l, 1);
2369 free_EXPR_LIST_node (l);
2372 /* Splice REGION from the region tree etc. */
2374 static void
2375 remove_eh_handler (region)
2376 struct eh_region *region;
2378 struct eh_region **pp, *p;
2379 rtx lab;
2380 int i;
2382 /* For the benefit of efficiently handling REG_EH_REGION notes,
2383 replace this region in the region array with its containing
2384 region. Note that previous region deletions may result in
2385 multiple copies of this region in the array, so we have to
2386 search the whole thing. */
2387 for (i = cfun->eh->last_region_number; i > 0; --i)
2388 if (cfun->eh->region_array[i] == region)
2389 cfun->eh->region_array[i] = region->outer;
2391 if (cfun->eh->built_landing_pads)
2392 lab = region->landing_pad;
2393 else
2394 lab = region->label;
2395 if (lab)
2396 remove_exception_handler_label (lab);
2398 if (region->outer)
2399 pp = &region->outer->inner;
2400 else
2401 pp = &cfun->eh->region_tree;
2402 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2403 continue;
2405 if (region->inner)
2407 for (p = region->inner; p->next_peer ; p = p->next_peer)
2408 p->outer = region->outer;
2409 p->next_peer = region->next_peer;
2410 p->outer = region->outer;
2411 *pp = region->inner;
2413 else
2414 *pp = region->next_peer;
2416 if (region->type == ERT_CATCH)
2418 struct eh_region *try, *next, *prev;
2420 for (try = region->next_peer;
2421 try->type == ERT_CATCH;
2422 try = try->next_peer)
2423 continue;
2424 if (try->type != ERT_TRY)
2425 abort ();
2427 next = region->u.catch.next_catch;
2428 prev = region->u.catch.prev_catch;
2430 if (next)
2431 next->u.catch.prev_catch = prev;
2432 else
2433 try->u.try.last_catch = prev;
2434 if (prev)
2435 prev->u.catch.next_catch = next;
2436 else
2438 try->u.try.catch = next;
2439 if (! next)
2440 remove_eh_handler (try);
2444 free (region);
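/* Worked example (illustrative): given try { ... } catch (A) catch (B)
   catch (C), removing B's region relinks A's next_catch to C and C's
   prev_catch to A.  Removing the only remaining catch leaves the try
   with no handlers at all, in which case the try region itself is
   removed recursively (the "if (! next)" case above).  */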
2447 /* LABEL heads a basic block that is about to be deleted. If this
2448 label corresponds to an exception region, we may be able to
2449 delete the region. */
2451 void
2452 maybe_remove_eh_handler (label)
2453 rtx label;
2455 int i;
2457 /* ??? After generating landing pads, it's not so simple to determine
2458 if the region data is completely unused. One must examine the
2459 landing pad and the post landing pad, and whether an inner try block
2460 is referencing the catch handlers directly. */
2461 if (cfun->eh->built_landing_pads)
2462 return;
2464 for (i = cfun->eh->last_region_number; i > 0; --i)
2466 struct eh_region *region = cfun->eh->region_array[i];
2467 if (region && region->label == label)
2469 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2470 because there is no path to the fallback call to terminate.
2471 But the region continues to affect call-site data until there
2472 are no more contained calls, which we don't see here. */
2473 if (region->type == ERT_MUST_NOT_THROW)
2475 remove_exception_handler_label (region->label);
2476 region->label = NULL_RTX;
2478 else
2479 remove_eh_handler (region);
2480 break;
2486 /* This section describes CFG exception edges for flow. */
2488 /* For communicating between calls to reachable_next_level. */
2489 struct reachable_info
2491 tree types_caught;
2492 tree types_allowed;
2493 rtx handlers;
2496 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2497 base class of TYPE, is in HANDLED. */
2499 static int
2500 check_handled (handled, type)
2501 tree handled, type;
2503 tree t;
2505 /* We can check for exact matches without front-end help. */
2506 if (! lang_eh_type_covers)
2508 for (t = handled; t ; t = TREE_CHAIN (t))
2509 if (TREE_VALUE (t) == type)
2510 return 1;
2512 else
2514 for (t = handled; t ; t = TREE_CHAIN (t))
2515 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2516 return 1;
2519 return 0;
2522 /* A subroutine of reachable_next_level. If we are collecting a list
2523 of handlers, add one. After landing pad generation, we reference
2524 the landing pad instead of the handlers themselves; since the
2525 handlers are all wired together, referencing one reaches them all.
2526 Before landing pad generation we reference each handler individually.
2528 LP_REGION contains the landing pad; REGION is the handler. */
2530 static void
2531 add_reachable_handler (info, lp_region, region)
2532 struct reachable_info *info;
2533 struct eh_region *lp_region;
2534 struct eh_region *region;
2536 if (! info)
2537 return;
2539 if (cfun->eh->built_landing_pads)
2541 if (! info->handlers)
2542 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2544 else
2545 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2548 /* Process one level of exception regions for reachability.
2549 If TYPE_THROWN is non-null, then it is the *exact* type being
2550 propagated. If INFO is non-null, then collect handler labels
2551 and caught/allowed type information between invocations. */
2553 static enum reachable_code
2554 reachable_next_level (region, type_thrown, info)
2555 struct eh_region *region;
2556 tree type_thrown;
2557 struct reachable_info *info;
2559 switch (region->type)
2561 case ERT_CLEANUP:
2562 /* Before landing-pad generation, we model control flow
2563 directly to the individual handlers. In this way we can
2564 see that catch handler types may shadow one another. */
2565 add_reachable_handler (info, region, region);
2566 return RNL_MAYBE_CAUGHT;
2568 case ERT_TRY:
2570 struct eh_region *c;
2571 enum reachable_code ret = RNL_NOT_CAUGHT;
2573 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2575 /* A catch-all handler ends the search. */
2576 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2577 to be run as well. */
2578 if (c->u.catch.type == NULL)
2580 add_reachable_handler (info, region, c);
2581 return RNL_CAUGHT;
2584 if (type_thrown)
2586 /* If we have a type match, end the search. */
2587 if (c->u.catch.type == type_thrown
2588 || (lang_eh_type_covers
2589 && (*lang_eh_type_covers) (c->u.catch.type,
2590 type_thrown)))
2592 add_reachable_handler (info, region, c);
2593 return RNL_CAUGHT;
2596 /* If we have definitive information that the match fails,
2597 the catch won't trigger. */
2598 if (lang_eh_type_covers)
2599 return RNL_NOT_CAUGHT;
2602 if (! info)
2603 ret = RNL_MAYBE_CAUGHT;
2605 /* A type must not have been previously caught. */
2606 else if (! check_handled (info->types_caught, c->u.catch.type))
2608 add_reachable_handler (info, region, c);
2609 info->types_caught = tree_cons (NULL, c->u.catch.type,
2610 info->types_caught);
2612 /* ??? If the catch type is a base class of every allowed
2613 type, then we know we can stop the search. */
2614 ret = RNL_MAYBE_CAUGHT;
2618 return ret;
2621 case ERT_ALLOWED_EXCEPTIONS:
2622 /* An empty list of types definitely ends the search. */
2623 if (region->u.allowed.type_list == NULL_TREE)
2625 add_reachable_handler (info, region, region);
2626 return RNL_CAUGHT;
2629 /* Collect a list of lists of allowed types for use in detecting
2630 when a catch may be transformed into a catch-all. */
2631 if (info)
2632 info->types_allowed = tree_cons (NULL_TREE,
2633 region->u.allowed.type_list,
2634 info->types_allowed);
2636 /* If we have definitive information about the type hierarchy,
2637 then we can tell if the thrown type will pass through the
2638 filter. */
2639 if (type_thrown && lang_eh_type_covers)
2641 if (check_handled (region->u.allowed.type_list, type_thrown))
2642 return RNL_NOT_CAUGHT;
2643 else
2645 add_reachable_handler (info, region, region);
2646 return RNL_CAUGHT;
2650 add_reachable_handler (info, region, region);
2651 return RNL_MAYBE_CAUGHT;
2653 case ERT_CATCH:
2654 /* Catch regions are handled by their controlling try region. */
2655 return RNL_NOT_CAUGHT;
2657 case ERT_MUST_NOT_THROW:
2658 /* Here we end our search, since no exceptions may propagate.
2659 If we've touched down at some landing pad previously, then the
2660 explicit function call we generated may be used. Otherwise
2661 the call is made by the runtime. */
2662 if (info && info->handlers)
2664 add_reachable_handler (info, region, region);
2665 return RNL_CAUGHT;
2667 else
2668 return RNL_BLOCKED;
2670 case ERT_THROW:
2671 case ERT_FIXUP:
2672 /* Shouldn't see these here. */
2673 break;
2676 abort ();
2679 /* Retrieve a list of labels of exception handlers which can be
2680 reached by a given insn. */
2682 rtx
2683 reachable_handlers (insn)
2684 rtx insn;
2686 struct reachable_info info;
2687 struct eh_region *region;
2688 tree type_thrown;
2689 int region_number;
2691 if (GET_CODE (insn) == JUMP_INSN
2692 && GET_CODE (PATTERN (insn)) == RESX)
2693 region_number = XINT (PATTERN (insn), 0);
2694 else
2696 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2697 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2698 return NULL;
2699 region_number = INTVAL (XEXP (note, 0));
2702 memset (&info, 0, sizeof (info));
2704 region = cfun->eh->region_array[region_number];
2706 type_thrown = NULL_TREE;
2707 if (region->type == ERT_THROW)
2709 type_thrown = region->u.throw.type;
2710 region = region->outer;
2713 for (; region; region = region->outer)
2714 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2715 break;
2717 return info.handlers;
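/* Illustrative result: before landing pads are built, the returned
   list carries one label per handler that might receive the
   exception; afterwards it collapses to the single landing-pad
   label, since the pads wire the handlers together (see
   add_reachable_handler above).  */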
2720 /* Determine if the given INSN can throw an exception that is caught
2721 within the function. */
2723 bool
2724 can_throw_internal (insn)
2725 rtx insn;
2727 struct eh_region *region;
2728 tree type_thrown;
2729 rtx note;
2731 if (! INSN_P (insn))
2732 return false;
2734 if (GET_CODE (insn) == INSN
2735 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2736 insn = XVECEXP (PATTERN (insn), 0, 0);
2738 if (GET_CODE (insn) == CALL_INSN
2739 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2741 int i;
2742 for (i = 0; i < 3; ++i)
2744 rtx sub = XEXP (PATTERN (insn), i);
2745 for (; sub ; sub = NEXT_INSN (sub))
2746 if (can_throw_internal (sub))
2747 return true;
2749 return false;
2752 /* Every insn that might throw has an EH_REGION note. */
2753 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2754 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2755 return false;
2757 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2759 type_thrown = NULL_TREE;
2760 if (region->type == ERT_THROW)
2762 type_thrown = region->u.throw.type;
2763 region = region->outer;
2766 /* If this exception is ignored by each and every containing region,
2767 then control passes straight out. The runtime may handle some
2768 regions, which also do not require processing internally. */
2769 for (; region; region = region->outer)
2771 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2772 if (how == RNL_BLOCKED)
2773 return false;
2774 if (how != RNL_NOT_CAUGHT)
2775 return true;
2778 return false;
2781 /* Determine if the given INSN can throw an exception that is
2782 visible outside the function. */
2784 bool
2785 can_throw_external (insn)
2786 rtx insn;
2788 struct eh_region *region;
2789 tree type_thrown;
2790 rtx note;
2792 if (! INSN_P (insn))
2793 return false;
2795 if (GET_CODE (insn) == INSN
2796 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2797 insn = XVECEXP (PATTERN (insn), 0, 0);
2799 if (GET_CODE (insn) == CALL_INSN
2800 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2802 int i;
2803 for (i = 0; i < 3; ++i)
2805 rtx sub = XEXP (PATTERN (insn), i);
2806 for (; sub ; sub = NEXT_INSN (sub))
2807 if (can_throw_external (sub))
2808 return true;
2810 return false;
2813 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2814 if (!note)
2816 /* Calls (and trapping insns) without notes are outside any
2817 exception handling region in this function. We have to
2818 assume they might throw. Given that the front end and middle
2819 ends mark known NOTHROW functions, this isn't so wildly
2820 inaccurate. */
2821 return (GET_CODE (insn) == CALL_INSN
2822 || (flag_non_call_exceptions
2823 && may_trap_p (PATTERN (insn))));
2825 if (INTVAL (XEXP (note, 0)) <= 0)
2826 return false;
2828 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2830 type_thrown = NULL_TREE;
2831 if (region->type == ERT_THROW)
2833 type_thrown = region->u.throw.type;
2834 region = region->outer;
2837 /* If the exception is caught or blocked by any containing region,
2838 then it is not seen by any calling function. */
2839 for (; region ; region = region->outer)
2840 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2841 return false;
2843 return true;
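/* Illustrative contrast between the two predicates: for a call inside
   try { ... } catch (...) a matching throw is caught within the
   function, so can_throw_internal is true and can_throw_external is
   false.  For a call with no enclosing handlers nothing is reachable
   internally and the exception escapes, so only can_throw_external
   is true.  */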
2846 /* True if nothing in this function can throw outside this function. */
2848 bool
2849 nothrow_function_p ()
2851 rtx insn;
2853 if (! flag_exceptions)
2854 return true;
2856 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2857 if (can_throw_external (insn))
2858 return false;
2859 for (insn = current_function_epilogue_delay_list; insn;
2860 insn = XEXP (insn, 1))
2861 if (can_throw_external (insn))
2862 return false;
2864 return true;
2868 /* Various hooks for unwind library. */
2870 /* Do any necessary initialization to access arbitrary stack frames.
2871 On the SPARC, this means flushing the register windows. */
2873 void
2874 expand_builtin_unwind_init ()
2876 /* Set this so all the registers get saved in our frame; we need to be
2877 able to copy the saved values for any registers from frames we unwind. */
2878 current_function_has_nonlocal_label = 1;
2880 #ifdef SETUP_FRAME_ADDRESSES
2881 SETUP_FRAME_ADDRESSES ();
2882 #endif
2885 rtx
2886 expand_builtin_eh_return_data_regno (arglist)
2887 tree arglist;
2889 tree which = TREE_VALUE (arglist);
2890 unsigned HOST_WIDE_INT iwhich;
2892 if (TREE_CODE (which) != INTEGER_CST)
2894 error ("argument of `__builtin_eh_return_data_regno' must be constant");
2895 return constm1_rtx;
2898 iwhich = tree_low_cst (which, 1);
2899 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2900 if (iwhich == INVALID_REGNUM)
2901 return constm1_rtx;
2903 #ifdef DWARF_FRAME_REGNUM
2904 iwhich = DWARF_FRAME_REGNUM (iwhich);
2905 #else
2906 iwhich = DBX_REGISTER_NUMBER (iwhich);
2907 #endif
2909 return GEN_INT (iwhich);
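/* Usage sketch (illustrative): __builtin_eh_return_data_regno (0)
   folds at compile time to the DWARF number of the first exception
   data register, or to -1 on targets where EH_RETURN_DATA_REGNO
   yields INVALID_REGNUM.  */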
2912 /* Given a value extracted from the return address register or stack slot,
2913 return the actual address encoded in that value. */
2915 rtx
2916 expand_builtin_extract_return_addr (addr_tree)
2917 tree addr_tree;
2919 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2921 /* First mask out any unwanted bits. */
2922 #ifdef MASK_RETURN_ADDR
2923 expand_and (addr, MASK_RETURN_ADDR, addr);
2924 #endif
2926 /* Then adjust to find the real return address. */
2927 #if defined (RETURN_ADDR_OFFSET)
2928 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2929 #endif
2931 return addr;
2934 /* Given an actual address in addr_tree, do any necessary encoding
2935 and return the value to be stored in the return address register or
2936 stack slot so the epilogue will return to that address. */
2938 rtx
2939 expand_builtin_frob_return_addr (addr_tree)
2940 tree addr_tree;
2942 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2944 #ifdef RETURN_ADDR_OFFSET
2945 addr = force_reg (Pmode, addr);
2946 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2947 #endif
2949 return addr;
2952 /* Set up the epilogue with the magic bits we'll need to return to the
2953 exception handler. */
2955 void
2956 expand_builtin_eh_return (stackadj_tree, handler_tree)
2957 tree stackadj_tree, handler_tree;
2959 rtx stackadj, handler;
2961 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2962 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2964 if (! cfun->eh->ehr_label)
2966 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
2967 cfun->eh->ehr_handler = copy_to_reg (handler);
2968 cfun->eh->ehr_label = gen_label_rtx ();
2970 else
2972 if (stackadj != cfun->eh->ehr_stackadj)
2973 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
2974 if (handler != cfun->eh->ehr_handler)
2975 emit_move_insn (cfun->eh->ehr_handler, handler);
2978 emit_jump (cfun->eh->ehr_label);
2981 void
2982 expand_eh_return ()
2984 rtx sa, ra, around_label;
2986 if (! cfun->eh->ehr_label)
2987 return;
2989 sa = EH_RETURN_STACKADJ_RTX;
2990 if (! sa)
2992 error ("__builtin_eh_return not supported on this target");
2993 return;
2996 current_function_calls_eh_return = 1;
2998 around_label = gen_label_rtx ();
2999 emit_move_insn (sa, const0_rtx);
3000 emit_jump (around_label);
3002 emit_label (cfun->eh->ehr_label);
3003 clobber_return_register ();
3005 #ifdef HAVE_eh_return
3006 if (HAVE_eh_return)
3007 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3008 else
3009 #endif
3011 ra = EH_RETURN_HANDLER_RTX;
3012 if (! ra)
3014 error ("__builtin_eh_return not supported on this target");
3015 ra = gen_reg_rtx (Pmode);
3018 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3019 emit_move_insn (ra, cfun->eh->ehr_handler);
3022 emit_label (around_label);
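/* Usage sketch (illustrative; STACKADJ and HANDLER are placeholders,
   and in practice only the unwind runtime invokes the builtin after
   restoring the target frame's registers):

       __builtin_eh_return (stackadj, handler);

   This records the two values via expand_builtin_eh_return above and
   makes the epilogue, through expand_eh_return, either use the
   target's eh_return pattern or store into the EH_RETURN_*_RTX
   registers.  */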
3025 struct action_record
3027 int offset;
3028 int filter;
3029 int next;
3032 static int
3033 action_record_eq (pentry, pdata)
3034 const PTR pentry;
3035 const PTR pdata;
3037 const struct action_record *entry = (const struct action_record *) pentry;
3038 const struct action_record *data = (const struct action_record *) pdata;
3039 return entry->filter == data->filter && entry->next == data->next;
3042 static hashval_t
3043 action_record_hash (pentry)
3044 const PTR pentry;
3046 const struct action_record *entry = (const struct action_record *) pentry;
3047 return entry->next * 1009 + entry->filter;
3050 static int
3051 add_action_record (ar_hash, filter, next)
3052 htab_t ar_hash;
3053 int filter, next;
3055 struct action_record **slot, *new, tmp;
3057 tmp.filter = filter;
3058 tmp.next = next;
3059 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3061 if ((new = *slot) == NULL)
3063 new = (struct action_record *) xmalloc (sizeof (*new));
3064 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3065 new->filter = filter;
3066 new->next = next;
3067 *slot = new;
3069 /* The filter value goes in untouched. The link to the next
3070 record is a "self-relative" byte offset, or zero to indicate
3071 that there is no next record. So convert the absolute 1-based
3072 indices we've been carrying around into a displacement. */
3074 push_sleb128 (&cfun->eh->action_record_data, filter);
3075 if (next)
3076 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3077 push_sleb128 (&cfun->eh->action_record_data, next);
3080 return new->offset;
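/* Worked example (illustrative): suppose record A's filter and next
   bytes occupy 1-based positions 1 and 2 of action_record_data, and a
   later record B at position 3 chains to A.  B's next field lands at
   position 4, so the stored displacement is 1 - 4 = -3; a consumer of
   the table steps back three bytes from that field to reach A, while
   a stored zero would end the chain.  */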
3083 static int
3084 collect_one_action_chain (ar_hash, region)
3085 htab_t ar_hash;
3086 struct eh_region *region;
3088 struct eh_region *c;
3089 int next;
3091 /* If we've reached the top of the region chain, then we have
3092 no actions, and require no landing pad. */
3093 if (region == NULL)
3094 return -1;
3096 switch (region->type)
3098 case ERT_CLEANUP:
3099 /* A cleanup adds a zero filter to the beginning of the chain, but
3100 there are special cases to look out for. If there are *only*
3101 cleanups along a path, then it compresses to a zero action.
3102 Further, if there are multiple cleanups along a path, we only
3103 need to represent one of them, as that is enough to trigger
3104 entry to the landing pad at runtime. */
3105 next = collect_one_action_chain (ar_hash, region->outer);
3106 if (next <= 0)
3107 return 0;
3108 for (c = region->outer; c ; c = c->outer)
3109 if (c->type == ERT_CLEANUP)
3110 return next;
3111 return add_action_record (ar_hash, 0, next);
3113 case ERT_TRY:
3114 /* Process the associated catch regions in reverse order.
3115 If there's a catch-all handler, then we don't need to
3116 search outer regions. Use a magic -3 value to record
3117 that we haven't done the outer search. */
3118 next = -3;
3119 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3121 if (c->u.catch.type == NULL)
3122 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3123 else
3125 if (next == -3)
3127 next = collect_one_action_chain (ar_hash, region->outer);
3128 if (next < 0)
3129 next = 0;
3131 next = add_action_record (ar_hash, c->u.catch.filter, next);
3134 return next;
3136 case ERT_ALLOWED_EXCEPTIONS:
3137 /* An exception specification adds its filter to the
3138 beginning of the chain. */
3139 next = collect_one_action_chain (ar_hash, region->outer);
3140 return add_action_record (ar_hash, region->u.allowed.filter,
3141 next < 0 ? 0 : next);
3143 case ERT_MUST_NOT_THROW:
3144 /* A must-not-throw region with no inner handlers or cleanups
3145 requires no call-site entry. Note that this differs from
3146 the no handler or cleanup case in that we do require an lsda
3147 to be generated. Return a magic -2 value to record this. */
3148 return -2;
3150 case ERT_CATCH:
3151 case ERT_THROW:
3152 /* CATCH regions are handled in TRY above. THROW regions are
3153 for optimization information only and produce no output. */
3154 return collect_one_action_chain (ar_hash, region->outer);
3156 default:
3157 abort ();
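/* Worked example (illustrative): a call whose innermost region is a
   lone cleanup gets next == -1 from the empty outer chain, so the
   cleanup compresses to a zero action.  The same cleanup nested
   inside a try with a catch-all instead yields a record {filter, 0}
   for the catch-all, with a zero-filter record chained in front of
   it for the cleanup.  */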
3161 static int
3162 add_call_site (landing_pad, action)
3163 rtx landing_pad;
3164 int action;
3166 struct call_site_record *data = cfun->eh->call_site_data;
3167 int used = cfun->eh->call_site_data_used;
3168 int size = cfun->eh->call_site_data_size;
3170 if (used >= size)
3172 size = (size ? size * 2 : 64);
3173 data = (struct call_site_record *)
3174 xrealloc (data, sizeof (*data) * size);
3175 cfun->eh->call_site_data = data;
3176 cfun->eh->call_site_data_size = size;
3179 data[used].landing_pad = landing_pad;
3180 data[used].action = action;
3182 cfun->eh->call_site_data_used = used + 1;
3184 return used + call_site_base;
3187 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3188 The new note numbers will not refer to region numbers, but
3189 instead to call site entries. */
3191 void
3192 convert_to_eh_region_ranges ()
3194 rtx insn, iter, note;
3195 htab_t ar_hash;
3196 int last_action = -3;
3197 rtx last_action_insn = NULL_RTX;
3198 rtx last_landing_pad = NULL_RTX;
3199 rtx first_no_action_insn = NULL_RTX;
3200 int call_site;
3202 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3203 return;
3205 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3207 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3209 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3210 if (INSN_P (iter))
3212 struct eh_region *region;
3213 int this_action;
3214 rtx this_landing_pad;
3216 insn = iter;
3217 if (GET_CODE (insn) == INSN
3218 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3219 insn = XVECEXP (PATTERN (insn), 0, 0);
3221 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3222 if (!note)
3224 if (! (GET_CODE (insn) == CALL_INSN
3225 || (flag_non_call_exceptions
3226 && may_trap_p (PATTERN (insn)))))
3227 continue;
3228 this_action = -1;
3229 region = NULL;
3231 else
3233 if (INTVAL (XEXP (note, 0)) <= 0)
3234 continue;
3235 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3236 this_action = collect_one_action_chain (ar_hash, region);
3239 /* The existence of catch handlers or must-not-throw regions
3240 implies that an lsda is needed (even if empty). */
3241 if (this_action != -1)
3242 cfun->uses_eh_lsda = 1;
3244 /* Delay creation of region notes for no-action regions
3245 until we're sure that an lsda will be required. */
3246 else if (last_action == -3)
3248 first_no_action_insn = iter;
3249 last_action = -1;
3252 /* Cleanups and handlers may share action chains but not
3253 landing pads. Collect the landing pad for this region. */
3254 if (this_action >= 0)
3256 struct eh_region *o;
3257 for (o = region; ! o->landing_pad ; o = o->outer)
3258 continue;
3259 this_landing_pad = o->landing_pad;
3261 else
3262 this_landing_pad = NULL_RTX;
3264 /* Differing actions or landing pads implies a change in call-site
3265 info, which implies some EH_REGION note should be emitted. */
3266 if (last_action != this_action
3267 || last_landing_pad != this_landing_pad)
3269 /* If we'd not seen a previous action (-3) or the previous
3270 action was must-not-throw (-2), then we do not need an
3271 end note. */
3272 if (last_action >= -1)
3274 /* If we delayed the creation of the begin, do it now. */
3275 if (first_no_action_insn)
3277 call_site = add_call_site (NULL_RTX, 0);
3278 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3279 first_no_action_insn);
3280 NOTE_EH_HANDLER (note) = call_site;
3281 first_no_action_insn = NULL_RTX;
3284 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3285 last_action_insn);
3286 NOTE_EH_HANDLER (note) = call_site;
3289 /* If the new action is must-not-throw, then no region notes
3290 are created. */
3291 if (this_action >= -1)
3293 call_site = add_call_site (this_landing_pad,
3294 this_action < 0 ? 0 : this_action);
3295 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3296 NOTE_EH_HANDLER (note) = call_site;
3299 last_action = this_action;
3300 last_landing_pad = this_landing_pad;
3302 last_action_insn = iter;
3305 if (last_action >= -1 && ! first_no_action_insn)
3307 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3308 NOTE_EH_HANDLER (note) = call_site;
3311 htab_delete (ar_hash);
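/* Illustrative outcome: a maximal run of insns sharing one action
   chain and one landing pad ends up bracketed by a single
   NOTE_INSN_EH_REGION_BEG/END pair whose NOTE_EH_HANDLER is the call
   site index, and each such pair later becomes one row of the
   call-site table.  */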
3315 static void
3316 push_uleb128 (data_area, value)
3317 varray_type *data_area;
3318 unsigned int value;
3322 unsigned char byte = value & 0x7f;
3323 value >>= 7;
3324 if (value)
3325 byte |= 0x80;
3326 VARRAY_PUSH_UCHAR (*data_area, byte);
3328 while (value);
3331 static void
3332 push_sleb128 (data_area, value)
3333 varray_type *data_area;
3334 int value;
3336 unsigned char byte;
3337 int more;
3341 byte = value & 0x7f;
3342 value >>= 7;
3343 more = ! ((value == 0 && (byte & 0x40) == 0)
3344 || (value == -1 && (byte & 0x40) != 0));
3345 if (more)
3346 byte |= 0x80;
3347 VARRAY_PUSH_UCHAR (*data_area, byte);
3349 while (more);
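/* Encoding examples (illustrative): push_uleb128 turns 624485 into
   the bytes 0xe5 0x8e 0x26 -- low-order 7-bit groups first, with the
   high bit set on every byte but the last.  push_sleb128 turns -2
   into the single byte 0x7e: after one group the value has shifted
   to -1 and the sign bit (0x40) of the byte is set, so no
   continuation is needed.  */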
3353 #define DW_EH_PE_absptr 0x00
3354 #define DW_EH_PE_omit 0xff
3356 #define DW_EH_PE_uleb128 0x01
3357 #define DW_EH_PE_udata2 0x02
3358 #define DW_EH_PE_udata4 0x03
3359 #define DW_EH_PE_udata8 0x04
3360 #define DW_EH_PE_sleb128 0x09
3361 #define DW_EH_PE_sdata2 0x0A
3362 #define DW_EH_PE_sdata4 0x0B
3363 #define DW_EH_PE_sdata8 0x0C
3364 #define DW_EH_PE_signed 0x08
3366 #define DW_EH_PE_pcrel 0x10
3367 #define DW_EH_PE_textrel 0x20
3368 #define DW_EH_PE_datarel 0x30
3369 #define DW_EH_PE_funcrel 0x40
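/* In these encodings the low nibble selects the data format and the
   high nibble the base the value is relative to.  For example,
   (DW_EH_PE_sdata4 | DW_EH_PE_pcrel) == 0x1B is a signed 4-byte
   pc-relative offset, reported as "pcrel sdata4" by
   eh_data_format_name below.  */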
3371 static const char *
3372 eh_data_format_name (format)
3373 int format;
3375 switch (format)
3377 case DW_EH_PE_absptr: return "absolute";
3378 case DW_EH_PE_omit: return "omit";
3380 case DW_EH_PE_uleb128: return "uleb128";
3381 case DW_EH_PE_udata2: return "udata2";
3382 case DW_EH_PE_udata4: return "udata4";
3383 case DW_EH_PE_udata8: return "udata8";
3384 case DW_EH_PE_sleb128: return "sleb128";
3385 case DW_EH_PE_sdata2: return "sdata2";
3386 case DW_EH_PE_sdata4: return "sdata4";
3387 case DW_EH_PE_sdata8: return "sdata8";
3389 case DW_EH_PE_uleb128 | DW_EH_PE_pcrel: return "pcrel uleb128";
3390 case DW_EH_PE_udata2 | DW_EH_PE_pcrel: return "pcrel udata2";
3391 case DW_EH_PE_udata4 | DW_EH_PE_pcrel: return "pcrel udata4";
3392 case DW_EH_PE_udata8 | DW_EH_PE_pcrel: return "pcrel udata8";
3393 case DW_EH_PE_sleb128 | DW_EH_PE_pcrel: return "pcrel sleb128";
3394 case DW_EH_PE_sdata2 | DW_EH_PE_pcrel: return "pcrel sdata2";
3395 case DW_EH_PE_sdata4 | DW_EH_PE_pcrel: return "pcrel sdata4";
3396 case DW_EH_PE_sdata8 | DW_EH_PE_pcrel: return "pcrel sdata8";
3398 case DW_EH_PE_uleb128 | DW_EH_PE_textrel: return "textrel uleb128";
3399 case DW_EH_PE_udata2 | DW_EH_PE_textrel: return "textrel udata2";
3400 case DW_EH_PE_udata4 | DW_EH_PE_textrel: return "textrel udata4";
3401 case DW_EH_PE_udata8 | DW_EH_PE_textrel: return "textrel udata8";
3402 case DW_EH_PE_sleb128 | DW_EH_PE_textrel: return "textrel sleb128";
3403 case DW_EH_PE_sdata2 | DW_EH_PE_textrel: return "textrel sdata2";
3404 case DW_EH_PE_sdata4 | DW_EH_PE_textrel: return "textrel sdata4";
3405 case DW_EH_PE_sdata8 | DW_EH_PE_textrel: return "textrel sdata8";
3407 case DW_EH_PE_uleb128 | DW_EH_PE_datarel: return "datarel uleb128";
3408 case DW_EH_PE_udata2 | DW_EH_PE_datarel: return "datarel udata2";
3409 case DW_EH_PE_udata4 | DW_EH_PE_datarel: return "datarel udata4";
3410 case DW_EH_PE_udata8 | DW_EH_PE_datarel: return "datarel udata8";
3411 case DW_EH_PE_sleb128 | DW_EH_PE_datarel: return "datarel sleb128";
3412 case DW_EH_PE_sdata2 | DW_EH_PE_datarel: return "datarel sdata2";
3413 case DW_EH_PE_sdata4 | DW_EH_PE_datarel: return "datarel sdata4";
3414 case DW_EH_PE_sdata8 | DW_EH_PE_datarel: return "datarel sdata8";
3416 case DW_EH_PE_uleb128 | DW_EH_PE_funcrel: return "funcrel uleb128";
3417 case DW_EH_PE_udata2 | DW_EH_PE_funcrel: return "funcrel udata2";
3418 case DW_EH_PE_udata4 | DW_EH_PE_funcrel: return "funcrel udata4";
3419 case DW_EH_PE_udata8 | DW_EH_PE_funcrel: return "funcrel udata8";
3420 case DW_EH_PE_sleb128 | DW_EH_PE_funcrel: return "funcrel sleb128";
3421 case DW_EH_PE_sdata2 | DW_EH_PE_funcrel: return "funcrel sdata2";
3422 case DW_EH_PE_sdata4 | DW_EH_PE_funcrel: return "funcrel sdata4";
3423 case DW_EH_PE_sdata8 | DW_EH_PE_funcrel: return "funcrel sdata8";
3425 default:
3426 abort ();
3430 #ifndef HAVE_AS_LEB128
3431 static int
3432 dw2_size_of_call_site_table ()
3434 int n = cfun->eh->call_site_data_used;
3435 int size = n * (4 + 4 + 4);
3436 int i;
3438 for (i = 0; i < n; ++i)
3440 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3441 size += size_of_uleb128 (cs->action);
3444 return size;
3447 static int
3448 sjlj_size_of_call_site_table ()
3450 int n = cfun->eh->call_site_data_used;
3451 int size = 0;
3452 int i;
3454 for (i = 0; i < n; ++i)
3456 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3457 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3458 size += size_of_uleb128 (cs->action);
3461 return size;
3463 #endif
3465 static void
3466 dw2_output_call_site_table ()
3468 const char *function_start_lab
3469 = IDENTIFIER_POINTER (current_function_func_begin_label);
3470 int n = cfun->eh->call_site_data_used;
3471 int i;
3473 for (i = 0; i < n; ++i)
3475 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3476 char reg_start_lab[32];
3477 char reg_end_lab[32];
3478 char landing_pad_lab[32];
3480 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3481 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3483 if (cs->landing_pad)
3484 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3485 CODE_LABEL_NUMBER (cs->landing_pad));
3487 /* ??? Perhaps use insn length scaling if the assembler supports
3488 generic arithmetic. */
3489 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3490 data4 if the function is small enough. */
3491 #ifdef HAVE_AS_LEB128
3492 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3493 "region %d start", i);
3494 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3495 "length");
3496 if (cs->landing_pad)
3497 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3498 "landing pad");
3499 else
3500 dw2_asm_output_data_uleb128 (0, "landing pad");
3501 #else
3502 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3503 "region %d start", i);
3504 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3505 if (cs->landing_pad)
3506 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3507 "landing pad");
3508 else
3509 dw2_asm_output_data (4, 0, "landing pad");
3510 #endif
3511 dw2_asm_output_data_uleb128 (cs->action, "action");
3514 call_site_base += n;
3517 static void
3518 sjlj_output_call_site_table ()
3520 int n = cfun->eh->call_site_data_used;
3521 int i;
3523 for (i = 0; i < n; ++i)
3525 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3527 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3528 "region %d landing pad", i);
3529 dw2_asm_output_data_uleb128 (cs->action, "action");
3532 call_site_base += n;
3535 void
3536 output_function_exception_table ()
3538 int format, i, n;
3539 #ifdef HAVE_AS_LEB128
3540 char ttype_label[32];
3541 char cs_after_size_label[32];
3542 char cs_end_label[32];
3543 #else
3544 int call_site_len;
3545 #endif
3546 int have_tt_data;
3547 int funcdef_number;
3549 /* Not all functions need anything. */
3550 if (! cfun->uses_eh_lsda)
3551 return;
3553 funcdef_number = (USING_SJLJ_EXCEPTIONS
3554 ? sjlj_funcdef_number
3555 : current_funcdef_number);
3557 exception_section ();
3559 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3560 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3562 if (have_tt_data)
3563 assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));
3565 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3567 /* The LSDA header. */
3569 /* Indicate the format of the landing pad start pointer. An omitted
3570 field implies @LPStart == @Start. */
3571 /* Currently we always put @LPStart == @Start. This field would
3572 be most useful in moving the landing pads completely out of
3573 line to another section, but it could also be used to minimize
3574 the size of uleb128 landing pad offsets. */
3575 format = DW_EH_PE_omit;
3576 dw2_asm_output_data (1, format, "@LPStart format (%s)",
3577 eh_data_format_name (format));
3579 /* @LPStart pointer would go here. */
3581 /* Indicate the format of the @TType entries. */
3582 if (! have_tt_data)
3583 format = DW_EH_PE_omit;
3584 else
3586 /* ??? Define an ASM_PREFERRED_DATA_FORMAT to say what
3587 sort of dynamic-relocation-free reference to emit. */
3588 format = 0;
3589 #ifdef HAVE_AS_LEB128
3590 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3591 #endif
3593 dw2_asm_output_data (1, format, "@TType format (%s)",
3594 eh_data_format_name (format));
3596 #ifndef HAVE_AS_LEB128
3597 if (USING_SJLJ_EXCEPTIONS)
3598 call_site_len = sjlj_size_of_call_site_table ();
3599 else
3600 call_site_len = dw2_size_of_call_site_table ();
3601 #endif
3603 /* A self-relative uleb128 displacement to the end of the @TType data. */
3604 if (have_tt_data)
3606 #ifdef HAVE_AS_LEB128
3607 char ttype_after_disp_label[32];
3608 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3609 funcdef_number);
3610 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3611 "@TType base offset");
3612 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3613 #else
3614 /* Ugh. Alignment complicates things. */
3615 unsigned int before_disp, after_disp, last_disp, disp, align;
3617 align = POINTER_SIZE / BITS_PER_UNIT;
3618 before_disp = 1 + 1;
3619 after_disp = (1 + size_of_uleb128 (call_site_len)
3620 + call_site_len
3621 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3622 + VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) * align);
3624 disp = after_disp;
3627 unsigned int disp_size, pad;
3629 last_disp = disp;
3630 disp_size = size_of_uleb128 (disp);
3631 pad = before_disp + disp_size + after_disp;
3632 if (pad % align)
3633 pad = align - (pad % align);
3634 else
3635 pad = 0;
3636 disp = after_disp + pad;
3638 while (disp != last_disp);
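/* Illustrative iteration (assuming align == 4, before_disp == 2 and
   after_disp == 120): the first pass finds disp_size == 1, a total
   of 123 bytes, hence pad == 1 and disp == 121; the second pass
   recomputes the same 121 and the loop stops.  The iteration is
   needed because the uleb128 size of DISP itself feeds back into
   the alignment.  */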
3640 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3641 #endif
3644 /* Indicate the format of the call-site offsets. */
3645 #ifdef HAVE_AS_LEB128
3646 format = DW_EH_PE_uleb128;
3647 #else
3648 format = DW_EH_PE_udata4;
3649 #endif
3650 dw2_asm_output_data (1, format, "call-site format (%s)",
3651 eh_data_format_name (format));
3653 #ifdef HAVE_AS_LEB128
3654 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3655 funcdef_number);
3656 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3657 funcdef_number);
3658 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3659 "Call-site table length");
3660 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3661 if (USING_SJLJ_EXCEPTIONS)
3662 sjlj_output_call_site_table ();
3663 else
3664 dw2_output_call_site_table ();
3665 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3666 #else
3667 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3668 if (USING_SJLJ_EXCEPTIONS)
3669 sjlj_output_call_site_table ();
3670 else
3671 dw2_output_call_site_table ();
3672 #endif
3674 /* ??? Decode and interpret the data for flag_debug_asm. */
3675 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3676 for (i = 0; i < n; ++i)
3677 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3678 (i ? NULL : "Action record table"));
3680 if (have_tt_data)
3681 assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));
3683 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3684 while (i-- > 0)
3686 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3688 if (type == NULL_TREE)
3689 type = integer_zero_node;
3690 else
3691 type = lookup_type_for_runtime (type);
3693 /* ??? Handle ASM_PREFERRED_DATA_FORMAT. */
3694 output_constant (type, GET_MODE_SIZE (ptr_mode));
3697 #ifdef HAVE_AS_LEB128
3698 if (have_tt_data)
3699 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3700 #endif
3702 /* ??? Decode and interpret the data for flag_debug_asm. */
3703 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3704 for (i = 0; i < n; ++i)
3705 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3706 (i ? NULL : "Exception specification table"));
3708 function_section (current_function_decl);
3710 if (USING_SJLJ_EXCEPTIONS)
3711 sjlj_funcdef_number += 1;