gcc/except.c
/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error has occurred without adding
   global state variables and error checks around every object
   construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
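
/* As a rough illustration of the intended flow (a hedged sketch, not
   authoritative documentation): a front end brackets the RTL expansion
   of a protected construct with the entry points defined below.  For a
   cleanup region, hypothetically:

       expand_eh_region_start ();
       ... expand the protected statement ...
       expand_eh_region_end_cleanup (cleanup_expr);

   A try/catch construct follows the same shape: expand_eh_region_start,
   the try body, expand_start_all_catch, then for each handler
   expand_start_catch (type) ... expand_end_catch, and finally
   expand_end_all_catch.  */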
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A hash table of label to region number.  */

struct ehl_map_entry
{
  rtx label;
  struct eh_region *region;
};

static htab_t exception_handler_label_map;

static int call_site_base;
static unsigned int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};
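
/* An illustrative example (added for exposition, not an original
   comment): for a C++ fragment such as

       try { f (); } catch (A &) { } catch (B &) { }

   the try body would be an ERT_TRY region whose u.try.catch and
   u.try.last_catch point to the first and last of two ERT_CATCH
   regions, chained through u.catch.next_catch/prev_catch, each holding
   the caught type in u.catch.type_list.  */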
/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static void mark_eh_region PARAMS ((struct eh_region *));
static int mark_ehl_map_entry PARAMS ((PTR *, PTR));
static void mark_ehl_map PARAMS ((void *));

static void free_region PARAMS ((struct eh_region *));

static int t2r_eq PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash PARAMS ((const PTR));
static int t2r_mark_1 PARAMS ((PTR *, PTR));
static void t2r_mark PARAMS ((PTR));
static void add_type_for_runtime PARAMS ((tree));
static tree lookup_type_for_runtime PARAMS ((tree));

static struct eh_region *expand_eh_region_end PARAMS ((void));

static rtx get_exception_filter PARAMS ((struct function *));

static void collect_eh_region_array PARAMS ((void));
static void resolve_fixup_regions PARAMS ((void));
static void remove_fixup_regions PARAMS ((void));
static void remove_unreachable_regions PARAMS ((rtx));
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
							struct inline_remap *));
static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
					   struct eh_region **));
static int ttypes_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash PARAMS ((const PTR));
static int ehspec_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash PARAMS ((const PTR));
static int add_ttypes_entry PARAMS ((htab_t, tree));
static int add_ehspec_entry PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values PARAMS ((void));
static void build_post_landing_pads PARAMS ((void));
static void connect_post_landing_pads PARAMS ((void));
static void dw2_build_landing_pads PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter PARAMS ((rtx));
static void sjlj_emit_function_exit PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads PARAMS ((void));

static hashval_t ehl_hash PARAMS ((const PTR));
static int ehl_eq PARAMS ((const PTR, const PTR));
static void ehl_free PARAMS ((PTR));
static void add_ehl_entry PARAMS ((rtx, struct eh_region *));
static void remove_exception_handler_label PARAMS ((rtx));
static void remove_eh_handler PARAMS ((struct eh_region *));
static int for_each_eh_label_1 PARAMS ((PTR *, PTR));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash PARAMS ((const PTR));
static int add_action_record PARAMS ((htab_t, int, int));
static int collect_one_action_chain PARAMS ((htab_t, struct eh_region *));
static int add_call_site PARAMS ((rtx, int));

static void push_uleb128 PARAMS ((varray_type *, unsigned int));
static void push_sleb128 PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table PARAMS ((void));
static int sjlj_size_of_call_site_table PARAMS ((void));
#endif
static void dw2_output_call_site_table PARAMS ((void));
static void sjlj_output_call_site_table PARAMS ((void));
/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
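
/* Usage note (inferred from the callers below, not an original comment):
   entry points that correspond to an explicit user EH construct pass a
   DO_WARN of 1 (see expand_start_all_catch), while the rest pass 0, so
   the "use -fexceptions" diagnostic appears once and only for code that
   genuinely requires exception support.  */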
void
init_eh ()
{
  ggc_add_root (&exception_handler_label_map, 1, 1, mark_ehl_map);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
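
/* For reference, a hedged sketch of the C-level structure the fields
   built above are meant to mirror (the authoritative definition lives
   in unwind-sjlj.c; in particular, the representation of jbuf depends
   on DONT_USE_BUILTIN_SETJMP and on the target):

       struct SjLj_Function_Context
       {
	 struct SjLj_Function_Context *prev;	// __prev
	 int call_site;				// __call_site
	 word data[4];				// __data
	 void *personality;			// __personality
	 void *lsda;				// __lsda
	 void *jbuf[];				// __jbuf (or jmp_buf)
       };
*/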
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}

/* Mark EH for GC.  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_UNKNOWN:
      /* This can happen if a nested function is inside the body of a region
	 and we do a GC as part of processing it.  */
      break;
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type_list);
      ggc_mark_tree (region->u.catch.filter_list);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}
static int
mark_ehl_map_entry (pentry, data)
     PTR *pentry;
     PTR data ATTRIBUTE_UNUSED;
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  ggc_mark_rtx (entry->label);
  return 1;
}

static void
mark_ehl_map (pp)
     void *pp;
{
  htab_t map = *(htab_t *) pp;
  if (map)
    htab_traverse (map, mark_ehl_map_entry, NULL);
}

void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}
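
/* A note on the walk above (added for exposition): the region tree is
   traversed iteratively, with no auxiliary stack.  The same pattern
   reappears in free_eh_status and collect_eh_region_array below:

       visit (r);
       if (r->inner)		// descend to the first child
	 r = r->inner;
       else if (r->next_peer)	// advance to the next sibling
	 r = r->next_peer;
       else			// climb until an ancestor has a sibling
	 {
	   do {
	     r = r->outer;
	     if (r == NULL)
	       goto done;
	   } while (r->next_peer == NULL);
	   r = r->next_peer;
	 }

   Each region is visited exactly once.  */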
static inline void
free_region (r)
     struct eh_region *r;
{
  /* Note that the aka bitmap is freed by regset_release_memory.  But if
     we ever replace with a non-obstack implementation, this would be
     the place to do it.  */
  free (r);
}

void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free_region (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free_region (r);
	      r = next;
	    }
	  else
	    {
	      do {
		next = r->outer;
		free_region (r);
		r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free_region (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;

  if (exception_handler_label_map)
    {
      htab_delete (exception_handler_label_map);
      exception_handler_label_map = NULL;
    }
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with several exception types,
   which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (Pmode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
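
/* Illustrative note (an observation drawn from later in this file, not
   an original comment): EXC_PTR and FILTER are the pseudos that the
   landing-pad code loads from the runtime's return data registers, as
   in dw2_build_landing_pads below:

       emit_move_insn (cfun->eh->exc_ptr,
		       gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
       emit_move_insn (cfun->eh->filter,
		       gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
*/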
/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}

/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.

   ??? The only difference between this purpose and that of
   expand_decl_cleanup is that in this case, we only want the cleanup to
   run if an exception is thrown.  This should also be handled using
   binding levels.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}

/* End all the pending exception regions on protect_list.  */

void
end_protect_partials ()
{
  tree t;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (insns)
     rtx insns;
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}
void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}
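
/* Illustrative sketch (added for exposition): after this pass, region
   membership is recorded on the throwing instructions themselves
   rather than by bracketing notes.  Roughly:

       before:  NOTE_INSN_EH_REGION_BEG 3
		(call_insn ...)
		NOTE_INSN_EH_REGION_END 3

       after:   (call_insn ...)  with a REG_EH_REGION note (const_int 3)

   The notes are removed, and fixup and unreachable regions are cleaned
   up by the two calls above.  */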
static void
add_ehl_entry (label, region)
     rtx label;
     struct eh_region *region;
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = (struct ehl_map_entry *) xmalloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}

static void
ehl_free (pentry)
     PTR pentry;
{
  struct ehl_map_entry *entry = (struct ehl_map_entry *)pentry;
  LABEL_PRESERVE_P (entry->label) = 0;
  free (entry);
}

void
find_exception_handler_labels ()
{
  int i;

  if (exception_handler_label_map)
    htab_empty (exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      exception_handler_label_map
	= htab_create (cfun->eh->last_region_number * 3 / 2,
		       ehl_hash, ehl_eq, ehl_free);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
bool
current_function_has_exception_handlers ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static int
t2r_mark_1 (slot, data)
     PTR *slot;
     PTR data ATTRIBUTE_UNUSED;
{
  tree contents = (tree) *slot;
  ggc_mark_tree (contents);
  return 1;
}

static void
t2r_mark (addr)
     PTR addr;
{
  htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
}

static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
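
/* Worked example (added for exposition): push_uleb128 stores the
   standard unsigned LEB128 encoding, low-order 7 bits first with the
   high bit of each byte marking continuation.  A filter value of 137
   (binary 10001001) is therefore pushed as the two bytes 0x89 0x01,
   and an exception specification whose types received filters 1 and 2
   occupies the bytes 0x01 0x02 0x00 in ehspec_data.  */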
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
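
/* Illustrative example (exposition only): for a function containing
   catch (A) and catch (B) plus an exception specification listing A
   and B, the types A and B would receive ttype filters 1 and 2
   (1-based indices into ttype_data), while the specification would
   receive a negative filter, e.g. -1, the byte offset of its
   uleb128-encoded type list within ehspec_data (see add_ehspec_entry
   above).  */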
static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* ??? _Unwind_ForcedUnwind wants no match here.  */
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      emit_insns_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
2093 static void
2094 dw2_build_landing_pads ()
2096 int i;
2097 unsigned int j;
2099 for (i = cfun->eh->last_region_number; i > 0; --i)
2101 struct eh_region *region = cfun->eh->region_array[i];
2102 rtx seq;
2103 bool clobbers_hard_regs = false;
2105 /* Mind we don't process a region more than once. */
2106 if (!region || region->region_number != i)
2107 continue;
2109 if (region->type != ERT_CLEANUP
2110 && region->type != ERT_TRY
2111 && region->type != ERT_ALLOWED_EXCEPTIONS)
2112 continue;
2114 start_sequence ();
2116 region->landing_pad = gen_label_rtx ();
2117 emit_label (region->landing_pad);
2119 #ifdef HAVE_exception_receiver
2120 if (HAVE_exception_receiver)
2121 emit_insn (gen_exception_receiver ());
2122 else
2123 #endif
2124 #ifdef HAVE_nonlocal_goto_receiver
2125 if (HAVE_nonlocal_goto_receiver)
2126 emit_insn (gen_nonlocal_goto_receiver ());
2127 else
2128 #endif
2129 { /* Nothing */ }
2131 /* If the eh_return data registers are call-saved, then we
2132 won't have considered them clobbered from the call that
2133 threw. Kill them now. */
2134 for (j = 0; ; ++j)
2136 unsigned r = EH_RETURN_DATA_REGNO (j);
2137 if (r == INVALID_REGNUM)
2138 break;
2139 if (! call_used_regs[r])
2141 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
2142 clobbers_hard_regs = true;
2146 if (clobbers_hard_regs)
2148 /* @@@ This is a kludge. Not all machine descriptions define a
2149 blockage insn, but we must not allow the code we just generated
2150 to be reordered by scheduling. So emit an ASM_INPUT to act as
2151 a blockage insn. */
2152 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2155 emit_move_insn (cfun->eh->exc_ptr,
2156 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
2157 emit_move_insn (cfun->eh->filter,
2158 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
2160 seq = get_insns ();
2161 end_sequence ();
2163 emit_insns_before (seq, region->post_landing_pad);
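/* Each dwarf2 landing pad built above thus has the shape (a sketch):

	landing_pad:
	  exception_receiver or nonlocal_goto_receiver insn,
	    when the target provides one
	  clobbers of call-saved EH data registers, if any
	  exc_ptr = reg:Pmode EH_RETURN_DATA_REGNO (0)
	  filter  = reg:word_mode EH_RETURN_DATA_REGNO (1)
	  -- falls through to region->post_landing_pad  */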
2168 struct sjlj_lp_info
2170 int directly_reachable;
2171 int action_index;
2172 int dispatch_index;
2173 int call_site_index;
2176 static bool
2177 sjlj_find_directly_reachable_regions (lp_info)
2178 struct sjlj_lp_info *lp_info;
2180 rtx insn;
2181 bool found_one = false;
2183 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2185 struct eh_region *region;
2186 enum reachable_code rc;
2187 tree type_thrown;
2188 rtx note;
2190 if (! INSN_P (insn))
2191 continue;
2193 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2194 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2195 continue;
2197 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2199 type_thrown = NULL_TREE;
2200 if (region->type == ERT_THROW)
2202 type_thrown = region->u.throw.type;
2203 region = region->outer;
2206 /* Find the first containing region that might handle the exception.
2207 That's the landing pad to which we will transfer control. */
2208 rc = RNL_NOT_CAUGHT;
2209 for (; region; region = region->outer)
2211 rc = reachable_next_level (region, type_thrown, 0);
2212 if (rc != RNL_NOT_CAUGHT)
2213 break;
2215 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2217 lp_info[region->region_number].directly_reachable = 1;
2218 found_one = true;
2222 return found_one;
2225 static void
2226 sjlj_assign_call_site_values (dispatch_label, lp_info)
2227 rtx dispatch_label;
2228 struct sjlj_lp_info *lp_info;
2230 htab_t ar_hash;
2231 int i, index;
2233 /* First task: build the action table. */
2235 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2236 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2238 for (i = cfun->eh->last_region_number; i > 0; --i)
2239 if (lp_info[i].directly_reachable)
2241 struct eh_region *r = cfun->eh->region_array[i];
2242 r->landing_pad = dispatch_label;
2243 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2244 if (lp_info[i].action_index != -1)
2245 cfun->uses_eh_lsda = 1;
2248 htab_delete (ar_hash);
2250 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2251 landing pad label for the region. For sjlj though, there is one
2252 common landing pad from which we dispatch to the post-landing pads.
2254 A region receives a dispatch index if it is directly reachable
2255 and requires in-function processing. Regions that share post-landing
2256 pads may share dispatch indices. */
2257 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2258 (see build_post_landing_pads) so we don't bother checking for it. */
2260 index = 0;
2261 for (i = cfun->eh->last_region_number; i > 0; --i)
2262 if (lp_info[i].directly_reachable)
2263 lp_info[i].dispatch_index = index++;
2265 /* Finally: assign call-site values. In dwarf2 terms, this would be
2266 the region number assigned by convert_to_eh_region_ranges, but
2267 handles no-action and must-not-throw differently. */
2269 call_site_base = 1;
2270 for (i = cfun->eh->last_region_number; i > 0; --i)
2271 if (lp_info[i].directly_reachable)
2273 int action = lp_info[i].action_index;
2275 /* Map must-not-throw to otherwise unused call-site index 0. */
2276 if (action == -2)
2277 index = 0;
2278 /* Map no-action to otherwise unused call-site index -1. */
2279 else if (action == -1)
2280 index = -1;
2281 /* Otherwise, look it up in the table. */
2282 else
2283 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2285 lp_info[i].call_site_index = index;
2289 static void
2290 sjlj_mark_call_sites (lp_info)
2291 struct sjlj_lp_info *lp_info;
2293 int last_call_site = -2;
2294 rtx insn, mem;
2296 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2298 struct eh_region *region;
2299 int this_call_site;
2300 rtx note, before, p;
2302 /* Reset value tracking at extended basic block boundaries. */
2303 if (GET_CODE (insn) == CODE_LABEL)
2304 last_call_site = -2;
2306 if (! INSN_P (insn))
2307 continue;
2309 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2310 if (!note)
2312 /* Calls (and trapping insns) without notes are outside any
2313 exception handling region in this function. Mark them as
2314 no action. */
2315 if (GET_CODE (insn) == CALL_INSN
2316 || (flag_non_call_exceptions
2317 && may_trap_p (PATTERN (insn))))
2318 this_call_site = -1;
2319 else
2320 continue;
2322 else
2324 /* Calls that are known to not throw need not be marked. */
2325 if (INTVAL (XEXP (note, 0)) <= 0)
2326 continue;
2328 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2329 this_call_site = lp_info[region->region_number].call_site_index;
2332 if (this_call_site == last_call_site)
2333 continue;
2335 /* Don't separate a call from its argument loads. */
2336 before = insn;
2337 if (GET_CODE (insn) == CALL_INSN)
2338 before = find_first_parameter_load (insn, NULL_RTX);
2340 start_sequence ();
2341 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2342 sjlj_fc_call_site_ofs);
2343 emit_move_insn (mem, GEN_INT (this_call_site));
2344 p = get_insns ();
2345 end_sequence ();
2347 emit_insns_before (p, before);
2348 last_call_site = this_call_site;
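/* The net effect is that ahead of each potentially-throwing insn
   we emit, in C terms (a sketch):

	fc.call_site = this_call_site;

   a single integer store into the SjLj function context, omitted
   when the value already stored in this extended basic block is
   known to be the same.  */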
2352 /* Construct the SjLj_Function_Context. */
2354 static void
2355 sjlj_emit_function_enter (dispatch_label)
2356 rtx dispatch_label;
2358 rtx fn_begin, fc, mem, seq;
2360 fc = cfun->eh->sjlj_fc;
2362 start_sequence ();
2364 /* We're storing this libcall's address into memory instead of
2365 calling it directly. Thus, we must call assemble_external_libcall
2366 here, as we cannot depend on emit_library_call to do it for us. */
2367 assemble_external_libcall (eh_personality_libfunc);
2368 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2369 emit_move_insn (mem, eh_personality_libfunc);
2371 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2372 if (cfun->uses_eh_lsda)
2374 char buf[20];
2375 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2376 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2378 else
2379 emit_move_insn (mem, const0_rtx);
2381 #ifdef DONT_USE_BUILTIN_SETJMP
2383 rtx x, note;
2384 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2385 TYPE_MODE (integer_type_node), 1,
2386 plus_constant (XEXP (fc, 0),
2387 sjlj_fc_jbuf_ofs), Pmode);
2389 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2390 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2392 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2393 TYPE_MODE (integer_type_node), 0, dispatch_label);
2395 #else
2396 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2397 dispatch_label);
2398 #endif
2400 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2401 1, XEXP (fc, 0), Pmode);
2403 seq = get_insns ();
2404 end_sequence ();
2406 /* ??? Instead of doing this at the beginning of the function,
2407 do this in a block that is at loop level 0 and dominates all
2408 can_throw_internal instructions. */
2410 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2411 if (GET_CODE (fn_begin) == NOTE
2412 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2413 break;
2414 emit_insns_after (seq, fn_begin);
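/* In outline, the entry code just emitted behaves like this sketch
   in C terms, where personality stands for whatever routine
   eh_personality_libfunc names:

	fc.personality = personality;
	fc.lsda = &LLSDAnnn;		-- or 0 when no lsda is used
	if (setjmp (fc.jbuf))		-- or the builtin setjmp variant
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);  */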
2417 /* Call back from expand_function_end to know where we should put
2418 the call to unwind_sjlj_unregister_libfunc if needed. */
2420 void
2421 sjlj_emit_function_exit_after (after)
2422 rtx after;
2424 cfun->eh->sjlj_exit_after = after;
2427 static void
2428 sjlj_emit_function_exit ()
2430 rtx seq;
2432 start_sequence ();
2434 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2435 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2437 seq = get_insns ();
2438 end_sequence ();
2440 /* ??? Really this can be done in any block at loop level 0 that
2441 post-dominates all can_throw_internal instructions. This is
2442 the last possible moment. */
2444 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2447 static void
2448 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2449 rtx dispatch_label;
2450 struct sjlj_lp_info *lp_info;
2452 int i, first_reachable;
2453 rtx mem, dispatch, seq, fc;
2455 fc = cfun->eh->sjlj_fc;
2457 start_sequence ();
2459 emit_label (dispatch_label);
2461 #ifndef DONT_USE_BUILTIN_SETJMP
2462 expand_builtin_setjmp_receiver (dispatch_label);
2463 #endif
2465 /* Load up dispatch index, exc_ptr and filter values from the
2466 function context. */
2467 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2468 sjlj_fc_call_site_ofs);
2469 dispatch = copy_to_reg (mem);
2471 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2472 if (word_mode != Pmode)
2474 #ifdef POINTERS_EXTEND_UNSIGNED
2475 mem = convert_memory_address (Pmode, mem);
2476 #else
2477 mem = convert_to_mode (Pmode, mem, 0);
2478 #endif
2480 emit_move_insn (cfun->eh->exc_ptr, mem);
2482 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2483 emit_move_insn (cfun->eh->filter, mem);
2485 /* Jump to one of the directly reachable regions. */
2486 /* ??? This really ought to be using a switch statement. */
2488 first_reachable = 0;
2489 for (i = cfun->eh->last_region_number; i > 0; --i)
2491 if (! lp_info[i].directly_reachable)
2492 continue;
2494 if (! first_reachable)
2496 first_reachable = i;
2497 continue;
2500 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2501 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2502 cfun->eh->region_array[i]->post_landing_pad);
2505 seq = get_insns ();
2506 end_sequence ();
2508 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2509 ->post_landing_pad));
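/* So the dispatcher emitted above amounts to (a sketch):

	dispatch_label:
	  dispatch = fc.call_site;
	  exc_ptr  = fc.data[0];
	  filter   = fc.data[1];
	  if (dispatch == D_i) goto post_landing_pad_i;	-- per region
	  ...
	  -- fall through to the first reachable region's pad

   The final case needs no comparison, which is why the sequence is
   placed immediately before that region's post-landing pad.  */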
2512 static void
2513 sjlj_build_landing_pads ()
2515 struct sjlj_lp_info *lp_info;
2517 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2518 sizeof (struct sjlj_lp_info));
2520 if (sjlj_find_directly_reachable_regions (lp_info))
2522 rtx dispatch_label = gen_label_rtx ();
2524 cfun->eh->sjlj_fc
2525 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2526 int_size_in_bytes (sjlj_fc_type_node),
2527 TYPE_ALIGN (sjlj_fc_type_node));
2529 sjlj_assign_call_site_values (dispatch_label, lp_info);
2530 sjlj_mark_call_sites (lp_info);
2532 sjlj_emit_function_enter (dispatch_label);
2533 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2534 sjlj_emit_function_exit ();
2537 free (lp_info);
2540 void
2541 finish_eh_generation ()
2543 /* Nothing to do if no regions created. */
2544 if (cfun->eh->region_tree == NULL)
2545 return;
2547 /* The object here is to provide find_basic_blocks with detailed
2548 information (via reachable_handlers) on how exception control
2549 flows within the function. In this first pass, we can include
2550 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2551 regions, and hope that it will be useful in deleting unreachable
2552 handlers. Subsequently, we will generate landing pads which will
2553 connect many of the handlers, and then type information will not
2554 be effective. Still, this is a win over previous implementations. */
2556 rebuild_jump_labels (get_insns ());
2557 find_basic_blocks (get_insns (), max_reg_num (), 0);
2558 cleanup_cfg (CLEANUP_PRE_LOOP);
2560 /* These registers are used by the landing pads. Make sure they
2561 have been generated. */
2562 get_exception_pointer (cfun);
2563 get_exception_filter (cfun);
2565 /* Construct the landing pads. */
2567 assign_filter_values ();
2568 build_post_landing_pads ();
2569 connect_post_landing_pads ();
2570 if (USING_SJLJ_EXCEPTIONS)
2571 sjlj_build_landing_pads ();
2572 else
2573 dw2_build_landing_pads ();
2575 cfun->eh->built_landing_pads = 1;
2577 /* We've totally changed the CFG. Start over. */
2578 find_exception_handler_labels ();
2579 rebuild_jump_labels (get_insns ());
2580 find_basic_blocks (get_insns (), max_reg_num (), 0);
2581 cleanup_cfg (CLEANUP_PRE_LOOP);
2584 static hashval_t
2585 ehl_hash (pentry)
2586 const PTR pentry;
2588 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2590 /* 2^32 * ((sqrt(5) - 1) / 2) */
2591 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2592 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2595 static int
2596 ehl_eq (pentry, pdata)
2597 const PTR pentry;
2598 const PTR pdata;
2600 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2601 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2603 return entry->label == data->label;
2606 /* This section handles removing dead code for flow. */
2608 /* Remove LABEL from exception_handler_label_map. */
2610 static void
2611 remove_exception_handler_label (label)
2612 rtx label;
2614 struct ehl_map_entry **slot, tmp;
2616 /* If exception_handler_label_map was not built yet,
2617 there is nothing to do. */
2618 if (exception_handler_label_map == NULL)
2619 return;
2621 tmp.label = label;
2622 slot = (struct ehl_map_entry **)
2623 htab_find_slot (exception_handler_label_map, &tmp, NO_INSERT);
2624 if (! slot)
2625 abort ();
2627 htab_clear_slot (exception_handler_label_map, (void **) slot);
2630 /* Splice REGION from the region tree etc. */
2632 static void
2633 remove_eh_handler (region)
2634 struct eh_region *region;
2636 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2637 rtx lab;
2639 /* For the benefit of efficiently handling REG_EH_REGION notes,
2640 replace this region in the region array with its containing
2641 region. Note that previous region deletions may result in
2642 multiple copies of this region in the array, so we have a
2643 list of alternate numbers by which we are known. */
2645 outer = region->outer;
2646 cfun->eh->region_array[region->region_number] = outer;
2647 if (region->aka)
2649 int i;
2650 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2651 { cfun->eh->region_array[i] = outer; });
2654 if (outer)
2656 if (!outer->aka)
2657 outer->aka = BITMAP_XMALLOC ();
2658 if (region->aka)
2659 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2660 bitmap_set_bit (outer->aka, region->region_number);
2663 if (cfun->eh->built_landing_pads)
2664 lab = region->landing_pad;
2665 else
2666 lab = region->label;
2667 if (lab)
2668 remove_exception_handler_label (lab);
2670 if (outer)
2671 pp_start = &outer->inner;
2672 else
2673 pp_start = &cfun->eh->region_tree;
2674 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2675 continue;
2676 *pp = region->next_peer;
2678 inner = region->inner;
2679 if (inner)
2681 for (p = inner; p->next_peer ; p = p->next_peer)
2682 p->outer = outer;
2683 p->outer = outer;
2685 p->next_peer = *pp_start;
2686 *pp_start = inner;
2689 if (region->type == ERT_CATCH)
2691 struct eh_region *try, *next, *prev;
2693 for (try = region->next_peer;
2694 try->type == ERT_CATCH;
2695 try = try->next_peer)
2696 continue;
2697 if (try->type != ERT_TRY)
2698 abort ();
2700 next = region->u.catch.next_catch;
2701 prev = region->u.catch.prev_catch;
2703 if (next)
2704 next->u.catch.prev_catch = prev;
2705 else
2706 try->u.try.last_catch = prev;
2707 if (prev)
2708 prev->u.catch.next_catch = next;
2709 else
2711 try->u.try.catch = next;
2712 if (! next)
2713 remove_eh_handler (try);
2717 free_region (region);
2720 /* LABEL heads a basic block that is about to be deleted. If this
2721 label corresponds to an exception region, we may be able to
2722 delete the region. */
2724 void
2725 maybe_remove_eh_handler (label)
2726 rtx label;
2728 struct ehl_map_entry **slot, tmp;
2729 struct eh_region *region;
2731 /* ??? After generating landing pads, it's not so simple to determine
2732 if the region data is completely unused. One must examine the
2733 landing pad and the post landing pad, and whether an inner try block
2734 is referencing the catch handlers directly. */
2735 if (cfun->eh->built_landing_pads)
2736 return;
2738 tmp.label = label;
2739 slot = (struct ehl_map_entry **)
2740 htab_find_slot (exception_handler_label_map, &tmp, NO_INSERT);
2741 if (! slot)
2742 return;
2743 region = (*slot)->region;
2744 if (! region)
2745 return;
2747 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2748 because there is no path to the fallback call to terminate.
2749 But the region continues to affect call-site data until there
2750 are no more contained calls, which we don't see here. */
2751 if (region->type == ERT_MUST_NOT_THROW)
2753 htab_clear_slot (exception_handler_label_map, (void **) slot);
2754 region->label = NULL_RTX;
2756 else
2757 remove_eh_handler (region);
2760 /* Invokes CALLBACK for every exception handler label. Only used by old
2761 loop hackery; should not be used by new code. */
2763 void
2764 for_each_eh_label (callback)
2765 void (*callback) PARAMS ((rtx));
2767 htab_traverse (exception_handler_label_map, for_each_eh_label_1,
2768 (void *) callback);
2771 static int
2772 for_each_eh_label_1 (pentry, data)
2773 PTR *pentry;
2774 PTR data;
2776 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2777 void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;
2779 (*callback) (entry->label);
2780 return 1;
2783 /* This section describes CFG exception edges for flow. */
2785 /* For communicating between calls to reachable_next_level. */
2786 struct reachable_info
2788 tree types_caught;
2789 tree types_allowed;
2790 rtx handlers;
2793 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2794 base class of TYPE, is in HANDLED. */
2796 static int
2797 check_handled (handled, type)
2798 tree handled, type;
2800 tree t;
2802 /* We can check for exact matches without front-end help. */
2803 if (! lang_eh_type_covers)
2805 for (t = handled; t ; t = TREE_CHAIN (t))
2806 if (TREE_VALUE (t) == type)
2807 return 1;
2809 else
2811 for (t = handled; t ; t = TREE_CHAIN (t))
2812 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2813 return 1;
2816 return 0;
2819 /* A subroutine of reachable_next_level. If we are collecting a list
2820 of handlers, add one. After landing pad generation, reference
2821 it instead of the handlers themselves. Further, the handlers are
2822 all wired together, so by referencing one, we've got them all.
2823 Before landing pad generation we reference each handler individually.
2825 LP_REGION contains the landing pad; REGION is the handler. */
2827 static void
2828 add_reachable_handler (info, lp_region, region)
2829 struct reachable_info *info;
2830 struct eh_region *lp_region;
2831 struct eh_region *region;
2833 if (! info)
2834 return;
2836 if (cfun->eh->built_landing_pads)
2838 if (! info->handlers)
2839 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2841 else
2842 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2845 /* Process one level of exception regions for reachability.
2846 If TYPE_THROWN is non-null, then it is the *exact* type being
2847 propagated. If INFO is non-null, then collect handler labels
2848 and caught/allowed type information between invocations. */
2850 static enum reachable_code
2851 reachable_next_level (region, type_thrown, info)
2852 struct eh_region *region;
2853 tree type_thrown;
2854 struct reachable_info *info;
2856 switch (region->type)
2858 case ERT_CLEANUP:
2859 /* Before landing-pad generation, we model control flow
2860 directly to the individual handlers. In this way we can
2861 see that catch handler types may shadow one another. */
2862 add_reachable_handler (info, region, region);
2863 return RNL_MAYBE_CAUGHT;
2865 case ERT_TRY:
2867 struct eh_region *c;
2868 enum reachable_code ret = RNL_NOT_CAUGHT;
2870 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2872 /* A catch-all handler ends the search. */
2873 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2874 to be run as well. */
2875 if (c->u.catch.type_list == NULL)
2877 add_reachable_handler (info, region, c);
2878 return RNL_CAUGHT;
2881 if (type_thrown)
2883 /* If we have at least one type match, end the search. */
2884 tree tp_node = c->u.catch.type_list;
2886 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2888 tree type = TREE_VALUE (tp_node);
2890 if (type == type_thrown
2891 || (lang_eh_type_covers
2892 && (*lang_eh_type_covers) (type, type_thrown)))
2894 add_reachable_handler (info, region, c);
2895 return RNL_CAUGHT;
2899 /* If we have definitive information of a match failure,
2900 the catch won't trigger. */
2901 if (lang_eh_type_covers)
2902 return RNL_NOT_CAUGHT;
2905 /* At this point, we either don't know what type is thrown or
2906 don't have front-end assistance to help decide whether it is
2907 covered by one of the types in the list for this region.
2909 We'd then like to add this region to the list of reachable
2910 handlers since it is indeed potentially reachable based on the
2911 information we have.
2913 Actually, this handler is for sure not reachable if all the
2914 types it matches have already been caught. That is, it is only
2915 potentially reachable if at least one of the types it catches
2916 has not been previously caught. */
2918 if (! info)
2919 ret = RNL_MAYBE_CAUGHT;
2920 else
2922 tree tp_node = c->u.catch.type_list;
2923 bool maybe_reachable = false;
2925 /* Compute the potential reachability of this handler and
2926 update the list of types caught at the same time. */
2927 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2929 tree type = TREE_VALUE (tp_node);
2931 if (! check_handled (info->types_caught, type))
2933 info->types_caught
2934 = tree_cons (NULL, type, info->types_caught);
2936 maybe_reachable = true;
2940 if (maybe_reachable)
2942 add_reachable_handler (info, region, c);
2944 /* ??? If the catch type is a base class of every allowed
2945 type, then we know we can stop the search. */
2946 ret = RNL_MAYBE_CAUGHT;
2951 return ret;
2954 case ERT_ALLOWED_EXCEPTIONS:
2955 /* An empty list of types definitely ends the search. */
2956 if (region->u.allowed.type_list == NULL_TREE)
2958 add_reachable_handler (info, region, region);
2959 return RNL_CAUGHT;
2962 /* Collect a list of lists of allowed types for use in detecting
2963 when a catch may be transformed into a catch-all. */
2964 if (info)
2965 info->types_allowed = tree_cons (NULL_TREE,
2966 region->u.allowed.type_list,
2967 info->types_allowed);
2969 /* If we have definitive information about the type hierarchy,
2970 then we can tell if the thrown type will pass through the
2971 filter. */
2972 if (type_thrown && lang_eh_type_covers)
2974 if (check_handled (region->u.allowed.type_list, type_thrown))
2975 return RNL_NOT_CAUGHT;
2976 else
2978 add_reachable_handler (info, region, region);
2979 return RNL_CAUGHT;
2983 add_reachable_handler (info, region, region);
2984 return RNL_MAYBE_CAUGHT;
2986 case ERT_CATCH:
2987 /* Catch regions are handled by their controlling try region. */
2988 return RNL_NOT_CAUGHT;
2990 case ERT_MUST_NOT_THROW:
2991 /* Here we end our search, since no exceptions may propagate.
2992 If we've already touched down at some landing pad, then the
2993 explicit function call we generated may be used. Otherwise
2994 the call is made by the runtime. */
2995 if (info && info->handlers)
2997 add_reachable_handler (info, region, region);
2998 return RNL_CAUGHT;
3000 else
3001 return RNL_BLOCKED;
3003 case ERT_THROW:
3004 case ERT_FIXUP:
3005 case ERT_UNKNOWN:
3006 /* Shouldn't see these here. */
3007 break;
3010 abort ();
3013 /* Retrieve a list of labels of exception handlers which can be
3014 reached by a given insn. */
3016 rtx
3017 reachable_handlers (insn)
3018 rtx insn;
3020 struct reachable_info info;
3021 struct eh_region *region;
3022 tree type_thrown;
3023 int region_number;
3025 if (GET_CODE (insn) == JUMP_INSN
3026 && GET_CODE (PATTERN (insn)) == RESX)
3027 region_number = XINT (PATTERN (insn), 0);
3028 else
3030 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3031 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3032 return NULL;
3033 region_number = INTVAL (XEXP (note, 0));
3036 memset (&info, 0, sizeof (info));
3038 region = cfun->eh->region_array[region_number];
3040 type_thrown = NULL_TREE;
3041 if (GET_CODE (insn) == JUMP_INSN
3042 && GET_CODE (PATTERN (insn)) == RESX)
3044 /* A RESX leaves a region instead of entering it. Thus the
3045 region itself may have been deleted out from under us. */
3046 if (region == NULL)
3047 return NULL;
3048 region = region->outer;
3050 else if (region->type == ERT_THROW)
3052 type_thrown = region->u.throw.type;
3053 region = region->outer;
3056 for (; region; region = region->outer)
3057 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
3058 break;
3060 return info.handlers;
3063 /* Determine if the given INSN can throw an exception that is caught
3064 within the function. */
3066 bool
3067 can_throw_internal (insn)
3068 rtx insn;
3070 struct eh_region *region;
3071 tree type_thrown;
3072 rtx note;
3074 if (! INSN_P (insn))
3075 return false;
3077 if (GET_CODE (insn) == INSN
3078 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3079 insn = XVECEXP (PATTERN (insn), 0, 0);
3081 if (GET_CODE (insn) == CALL_INSN
3082 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3084 int i;
3085 for (i = 0; i < 3; ++i)
3087 rtx sub = XEXP (PATTERN (insn), i);
3088 for (; sub ; sub = NEXT_INSN (sub))
3089 if (can_throw_internal (sub))
3090 return true;
3092 return false;
3095 /* Every insn that might throw has an EH_REGION note. */
3096 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3097 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3098 return false;
3100 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3102 type_thrown = NULL_TREE;
3103 if (region->type == ERT_THROW)
3105 type_thrown = region->u.throw.type;
3106 region = region->outer;
3109 /* If this exception is ignored by each and every containing region,
3110 then control passes straight out. Regions handled by the runtime
3111 likewise require no processing internally. */
3112 for (; region; region = region->outer)
3114 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
3115 if (how == RNL_BLOCKED)
3116 return false;
3117 if (how != RNL_NOT_CAUGHT)
3118 return true;
3121 return false;
3124 /* Determine if the given INSN can throw an exception that is
3125 visible outside the function. */
3127 bool
3128 can_throw_external (insn)
3129 rtx insn;
3131 struct eh_region *region;
3132 tree type_thrown;
3133 rtx note;
3135 if (! INSN_P (insn))
3136 return false;
3138 if (GET_CODE (insn) == INSN
3139 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3140 insn = XVECEXP (PATTERN (insn), 0, 0);
3142 if (GET_CODE (insn) == CALL_INSN
3143 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3145 int i;
3146 for (i = 0; i < 3; ++i)
3148 rtx sub = XEXP (PATTERN (insn), i);
3149 for (; sub ; sub = NEXT_INSN (sub))
3150 if (can_throw_external (sub))
3151 return true;
3153 return false;
3156 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3157 if (!note)
3159 /* Calls (and trapping insns) without notes are outside any
3160 exception handling region in this function. We have to
3161 assume it might throw. Given that the front end and middle
3162 end mark known NOTHROW functions, this isn't so wildly
3163 inaccurate. */
3164 return (GET_CODE (insn) == CALL_INSN
3165 || (flag_non_call_exceptions
3166 && may_trap_p (PATTERN (insn))));
3168 if (INTVAL (XEXP (note, 0)) <= 0)
3169 return false;
3171 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3173 type_thrown = NULL_TREE;
3174 if (region->type == ERT_THROW)
3176 type_thrown = region->u.throw.type;
3177 region = region->outer;
3180 /* If the exception is caught or blocked by any containing region,
3181 then it is not seen by any calling function. */
3182 for (; region ; region = region->outer)
3183 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
3184 return false;
3186 return true;
3189 /* True if nothing in this function can throw outside this function. */
3191 bool
3192 nothrow_function_p ()
3194 rtx insn;
3196 if (! flag_exceptions)
3197 return true;
3199 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3200 if (can_throw_external (insn))
3201 return false;
3202 for (insn = current_function_epilogue_delay_list; insn;
3203 insn = XEXP (insn, 1))
3204 if (can_throw_external (insn))
3205 return false;
3207 return true;
3211 /* Various hooks for unwind library. */
3213 /* Do any necessary initialization to access arbitrary stack frames.
3214 On the SPARC, this means flushing the register windows. */
3216 void
3217 expand_builtin_unwind_init ()
3219 /* Set this so all the registers get saved in our frame; we need to be
3220 able to copy the saved values for any registers from frames we unwind. */
3221 current_function_has_nonlocal_label = 1;
3223 #ifdef SETUP_FRAME_ADDRESSES
3224 SETUP_FRAME_ADDRESSES ();
3225 #endif
3228 rtx
3229 expand_builtin_eh_return_data_regno (arglist)
3230 tree arglist;
3232 tree which = TREE_VALUE (arglist);
3233 unsigned HOST_WIDE_INT iwhich;
3235 if (TREE_CODE (which) != INTEGER_CST)
3237 error ("argument of `__builtin_eh_return_regno' must be constant");
3238 return constm1_rtx;
3241 iwhich = tree_low_cst (which, 1);
3242 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3243 if (iwhich == INVALID_REGNUM)
3244 return constm1_rtx;
3246 #ifdef DWARF_FRAME_REGNUM
3247 iwhich = DWARF_FRAME_REGNUM (iwhich);
3248 #else
3249 iwhich = DBX_REGISTER_NUMBER (iwhich);
3250 #endif
3252 return GEN_INT (iwhich);
3255 /* Given a value extracted from the return address register or stack slot,
3256 return the actual address encoded in that value. */
3258 rtx
3259 expand_builtin_extract_return_addr (addr_tree)
3260 tree addr_tree;
3262 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3264 /* First mask out any unwanted bits. */
3265 #ifdef MASK_RETURN_ADDR
3266 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3267 #endif
3269 /* Then adjust to find the real return address. */
3270 #if defined (RETURN_ADDR_OFFSET)
3271 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3272 #endif
3274 return addr;
3277 /* Given an actual address in addr_tree, do any necessary encoding
3278 and return the value to be stored in the return address register or
3279 stack slot so the epilogue will return to that address. */
3281 rtx
3282 expand_builtin_frob_return_addr (addr_tree)
3283 tree addr_tree;
3285 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3287 #ifdef POINTERS_EXTEND_UNSIGNED
3288 if (GET_MODE (addr) != Pmode)
3289 addr = convert_memory_address (Pmode, addr);
3290 #endif
3292 #ifdef RETURN_ADDR_OFFSET
3293 addr = force_reg (Pmode, addr);
3294 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3295 #endif
3297 return addr;
3300 /* Set up the epilogue with the magic bits we'll need to return to the
3301 exception handler. */
3303 void
3304 expand_builtin_eh_return (stackadj_tree, handler_tree)
3305 tree stackadj_tree, handler_tree;
3307 rtx stackadj, handler;
3309 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3310 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3312 #ifdef POINTERS_EXTEND_UNSIGNED
3313 if (GET_MODE (stackadj) != Pmode)
3314 stackadj = convert_memory_address (Pmode, stackadj);
3316 if (GET_MODE (handler) != Pmode)
3317 handler = convert_memory_address (Pmode, handler);
3318 #endif
3320 if (! cfun->eh->ehr_label)
3322 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3323 cfun->eh->ehr_handler = copy_to_reg (handler);
3324 cfun->eh->ehr_label = gen_label_rtx ();
3326 else
3328 if (stackadj != cfun->eh->ehr_stackadj)
3329 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3330 if (handler != cfun->eh->ehr_handler)
3331 emit_move_insn (cfun->eh->ehr_handler, handler);
3334 emit_jump (cfun->eh->ehr_label);
3337 void
3338 expand_eh_return ()
3340 rtx sa, ra, around_label;
3342 if (! cfun->eh->ehr_label)
3343 return;
3345 sa = EH_RETURN_STACKADJ_RTX;
3346 if (! sa)
3348 error ("__builtin_eh_return not supported on this target");
3349 return;
3352 current_function_calls_eh_return = 1;
3354 around_label = gen_label_rtx ();
3355 emit_move_insn (sa, const0_rtx);
3356 emit_jump (around_label);
3358 emit_label (cfun->eh->ehr_label);
3359 clobber_return_register ();
3361 #ifdef HAVE_eh_return
3362 if (HAVE_eh_return)
3363 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3364 else
3365 #endif
3367 ra = EH_RETURN_HANDLER_RTX;
3368 if (! ra)
3370 error ("__builtin_eh_return not supported on this target");
3371 ra = gen_reg_rtx (Pmode);
3374 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3375 emit_move_insn (ra, cfun->eh->ehr_handler);
3378 emit_label (around_label);
3381 /* In the following functions, we represent entries in the action table
3382 as 1-based indices. Special cases are:
3384 0: null action record, non-null landing pad; implies cleanups
3385 -1: null action record, null landing pad; implies no action
3386 -2: no call-site entry; implies must_not_throw
3387 -3: we have yet to process outer regions
3389 Further, no special cases apply to the "next" field of the record.
3390 For next, 0 means end of list. */
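/* For example (a sketch): for a try with handlers for types A and
   B, nested inside a cleanup, collect_one_action_chain below
   produces a chain of three records

	{ filter_A, next } -> { filter_B, next } -> { 0, 0 }

   where filter_A and filter_B are the values assigned by
   assign_filter_values and the trailing zero filter stands for the
   cleanup.  */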
3392 struct action_record
3394 int offset;
3395 int filter;
3396 int next;
3399 static int
3400 action_record_eq (pentry, pdata)
3401 const PTR pentry;
3402 const PTR pdata;
3404 const struct action_record *entry = (const struct action_record *) pentry;
3405 const struct action_record *data = (const struct action_record *) pdata;
3406 return entry->filter == data->filter && entry->next == data->next;
3409 static hashval_t
3410 action_record_hash (pentry)
3411 const PTR pentry;
3413 const struct action_record *entry = (const struct action_record *) pentry;
3414 return entry->next * 1009 + entry->filter;
3417 static int
3418 add_action_record (ar_hash, filter, next)
3419 htab_t ar_hash;
3420 int filter, next;
3422 struct action_record **slot, *new, tmp;
3424 tmp.filter = filter;
3425 tmp.next = next;
3426 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3428 if ((new = *slot) == NULL)
3430 new = (struct action_record *) xmalloc (sizeof (*new));
3431 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3432 new->filter = filter;
3433 new->next = next;
3434 *slot = new;
3436 /* The filter value goes in untouched. The link to the next
3437 record is a "self-relative" byte offset, or zero to indicate
3438 that there is no next record. So convert the absolute 1-based
3439 indices we've been carrying around into a displacement. */
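/* For instance (a sketch): if the record being linked to starts at
   1-based offset 4 and the link field we are about to push will
   itself start at 1-based offset 11, we emit 4 - 11 = -7, letting
   the consumer find the next record relative to the link field.  */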
3441 push_sleb128 (&cfun->eh->action_record_data, filter);
3442 if (next)
3443 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3444 push_sleb128 (&cfun->eh->action_record_data, next);
3447 return new->offset;
3450 static int
3451 collect_one_action_chain (ar_hash, region)
3452 htab_t ar_hash;
3453 struct eh_region *region;
3455 struct eh_region *c;
3456 int next;
3458 /* If we've reached the top of the region chain, then we have
3459 no actions, and require no landing pad. */
3460 if (region == NULL)
3461 return -1;
3463 switch (region->type)
3465 case ERT_CLEANUP:
3466 /* A cleanup adds a zero filter to the beginning of the chain, but
3467 there are special cases to look out for. If there are *only*
3468 cleanups along a path, then it compresses to a zero action.
3469 Further, if there are multiple cleanups along a path, we only
3470 need to represent one of them, as that is enough to trigger
3471 entry to the landing pad at runtime. */
3472 next = collect_one_action_chain (ar_hash, region->outer);
3473 if (next <= 0)
3474 return 0;
3475 for (c = region->outer; c ; c = c->outer)
3476 if (c->type == ERT_CLEANUP)
3477 return next;
3478 return add_action_record (ar_hash, 0, next);
3480 case ERT_TRY:
3481 /* Process the associated catch regions in reverse order.
3482 If there's a catch-all handler, then we don't need to
3483 search outer regions. Use a magic -3 value to record
3484 that we haven't done the outer search. */
3485 next = -3;
3486 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3488 if (c->u.catch.type_list == NULL)
3490 /* Retrieve the filter from the head of the filter list
3491 where we have stored it (see assign_filter_values). */
3492 int filter
3493 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3495 next = add_action_record (ar_hash, filter, 0);
3497 else
3499 /* Once the outer search is done, trigger an action record for
3500 each filter we have. */
3501 tree flt_node;
3503 if (next == -3)
3505 next = collect_one_action_chain (ar_hash, region->outer);
3507 /* If there is no next action, terminate the chain. */
3508 if (next == -1)
3509 next = 0;
3510 /* If all outer actions are cleanups or must_not_throw,
3511 we'll have no action record for them, since we encode those
3512 states directly in the call-site record.
3513 Add a cleanup action to the chain to catch these. */
3514 else if (next <= 0)
3515 next = add_action_record (ar_hash, 0, 0);
3518 flt_node = c->u.catch.filter_list;
3519 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3521 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3522 next = add_action_record (ar_hash, filter, next);
3526 return next;
3528 case ERT_ALLOWED_EXCEPTIONS:
3529 /* An exception specification adds its filter to the
3530 beginning of the chain. */
3531 next = collect_one_action_chain (ar_hash, region->outer);
3532 return add_action_record (ar_hash, region->u.allowed.filter,
3533 next < 0 ? 0 : next);
3535 case ERT_MUST_NOT_THROW:
3536 /* A must-not-throw region with no inner handlers or cleanups
3537 requires no call-site entry. Note that this differs from
3538 the no handler or cleanup case in that we do require an lsda
3539 to be generated. Return a magic -2 value to record this. */
3540 return -2;
3542 case ERT_CATCH:
3543 case ERT_THROW:
3544 /* CATCH regions are handled in TRY above. THROW regions are
3545 for optimization information only and produce no output. */
3546 return collect_one_action_chain (ar_hash, region->outer);
3548 default:
3549 abort ();
3553 static int
3554 add_call_site (landing_pad, action)
3555 rtx landing_pad;
3556 int action;
3558 struct call_site_record *data = cfun->eh->call_site_data;
3559 int used = cfun->eh->call_site_data_used;
3560 int size = cfun->eh->call_site_data_size;
3562 if (used >= size)
3564 size = (size ? size * 2 : 64);
3565 data = (struct call_site_record *)
3566 xrealloc (data, sizeof (*data) * size);
3567 cfun->eh->call_site_data = data;
3568 cfun->eh->call_site_data_size = size;
3571 data[used].landing_pad = landing_pad;
3572 data[used].action = action;
3574 cfun->eh->call_site_data_used = used + 1;
3576 return used + call_site_base;
3579 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3580 The new note numbers will not refer to region numbers, but
3581 instead to call site entries. */
3583 void
3584 convert_to_eh_region_ranges ()
3586 rtx insn, iter, note;
3587 htab_t ar_hash;
3588 int last_action = -3;
3589 rtx last_action_insn = NULL_RTX;
3590 rtx last_landing_pad = NULL_RTX;
3591 rtx first_no_action_insn = NULL_RTX;
3592 int call_site = 0;
3594 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3595 return;
3597 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3599 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3601 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3602 if (INSN_P (iter))
3604 struct eh_region *region;
3605 int this_action;
3606 rtx this_landing_pad;
3608 insn = iter;
3609 if (GET_CODE (insn) == INSN
3610 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3611 insn = XVECEXP (PATTERN (insn), 0, 0);
3613 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3614 if (!note)
3616 if (! (GET_CODE (insn) == CALL_INSN
3617 || (flag_non_call_exceptions
3618 && may_trap_p (PATTERN (insn)))))
3619 continue;
3620 this_action = -1;
3621 region = NULL;
3623 else
3625 if (INTVAL (XEXP (note, 0)) <= 0)
3626 continue;
3627 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3628 this_action = collect_one_action_chain (ar_hash, region);
3631 /* Existence of catch handlers or must-not-throw regions
3632 implies that an lsda is needed (even if empty). */
3633 if (this_action != -1)
3634 cfun->uses_eh_lsda = 1;
3636 /* Delay creation of region notes for no-action regions
3637 until we're sure that an lsda will be required. */
3638 else if (last_action == -3)
3640 first_no_action_insn = iter;
3641 last_action = -1;
3644 /* Cleanups and handlers may share action chains but not
3645 landing pads. Collect the landing pad for this region. */
3646 if (this_action >= 0)
3648 struct eh_region *o;
3649 for (o = region; ! o->landing_pad ; o = o->outer)
3650 continue;
3651 this_landing_pad = o->landing_pad;
3653 else
3654 this_landing_pad = NULL_RTX;
3656 /* Differing actions or landing pads implies a change in call-site
3657 info, which implies some EH_REGION note should be emitted. */
3658 if (last_action != this_action
3659 || last_landing_pad != this_landing_pad)
3661 /* If we'd not seen a previous action (-3) or the previous
3662 action was must-not-throw (-2), then we do not need an
3663 end note. */
3664 if (last_action >= -1)
3666 /* If we delayed the creation of the begin, do it now. */
3667 if (first_no_action_insn)
3669 call_site = add_call_site (NULL_RTX, 0);
3670 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3671 first_no_action_insn);
3672 NOTE_EH_HANDLER (note) = call_site;
3673 first_no_action_insn = NULL_RTX;
3676 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3677 last_action_insn);
3678 NOTE_EH_HANDLER (note) = call_site;
3681 /* If the new action is must-not-throw, then no region notes
3682 are created. */
3683 if (this_action >= -1)
3685 call_site = add_call_site (this_landing_pad,
3686 this_action < 0 ? 0 : this_action);
3687 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3688 NOTE_EH_HANDLER (note) = call_site;
3691 last_action = this_action;
3692 last_landing_pad = this_landing_pad;
3694 last_action_insn = iter;
3697 if (last_action >= -1 && ! first_no_action_insn)
3699 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3700 NOTE_EH_HANDLER (note) = call_site;
3703 htab_delete (ar_hash);
3707 static void
3708 push_uleb128 (data_area, value)
3709 varray_type *data_area;
3710 unsigned int value;
3714 unsigned char byte = value & 0x7f;
3715 value >>= 7;
3716 if (value)
3717 byte |= 0x80;
3718 VARRAY_PUSH_UCHAR (*data_area, byte);
3720 while (value);
3723 static void
3724 push_sleb128 (data_area, value)
3725 varray_type *data_area;
3726 int value;
3728 unsigned char byte;
3729 int more;
3733 byte = value & 0x7f;
3734 value >>= 7;
3735 more = ! ((value == 0 && (byte & 0x40) == 0)
3736 || (value == -1 && (byte & 0x40) != 0));
3737 if (more)
3738 byte |= 0x80;
3739 VARRAY_PUSH_UCHAR (*data_area, byte);
3741 while (more);
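/* Worked examples: push_uleb128 with value 624485 emits the bytes
   0xe5 0x8e 0x26; push_sleb128 with value -2 emits the single byte
   0x7e.  Each byte carries seven payload bits, least significant
   group first, with the high bit set on all but the last byte.  */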
3745 #ifndef HAVE_AS_LEB128
3746 static int
3747 dw2_size_of_call_site_table ()
3749 int n = cfun->eh->call_site_data_used;
3750 int size = n * (4 + 4 + 4);
3751 int i;
3753 for (i = 0; i < n; ++i)
3755 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3756 size += size_of_uleb128 (cs->action);
3759 return size;
3762 static int
3763 sjlj_size_of_call_site_table ()
3765 int n = cfun->eh->call_site_data_used;
3766 int size = 0;
3767 int i;
3769 for (i = 0; i < n; ++i)
3771 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3772 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3773 size += size_of_uleb128 (cs->action);
3776 return size;
3778 #endif
3780 static void
3781 dw2_output_call_site_table ()
3783 const char *const function_start_lab
3784 = IDENTIFIER_POINTER (current_function_func_begin_label);
3785 int n = cfun->eh->call_site_data_used;
3786 int i;
3788 for (i = 0; i < n; ++i)
3790 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3791 char reg_start_lab[32];
3792 char reg_end_lab[32];
3793 char landing_pad_lab[32];
3795 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3796 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3798 if (cs->landing_pad)
3799 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3800 CODE_LABEL_NUMBER (cs->landing_pad));
3802 /* ??? Perhaps use insn length scaling if the assembler supports
3803 generic arithmetic. */
3804 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3805 data4 if the function is small enough. */
3806 #ifdef HAVE_AS_LEB128
3807 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3808 "region %d start", i);
3809 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3810 "length");
3811 if (cs->landing_pad)
3812 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3813 "landing pad");
3814 else
3815 dw2_asm_output_data_uleb128 (0, "landing pad");
3816 #else
3817 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3818 "region %d start", i);
3819 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3820 if (cs->landing_pad)
3821 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3822 "landing pad");
3823 else
3824 dw2_asm_output_data (4, 0, "landing pad");
3825 #endif
3826 dw2_asm_output_data_uleb128 (cs->action, "action");
3829 call_site_base += n;
3832 static void
3833 sjlj_output_call_site_table ()
3835 int n = cfun->eh->call_site_data_used;
3836 int i;
3838 for (i = 0; i < n; ++i)
3840 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3842 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3843 "region %d landing pad", i);
3844 dw2_asm_output_data_uleb128 (cs->action, "action");
3847 call_site_base += n;
3850 void
3851 output_function_exception_table ()
3853 int tt_format, cs_format, lp_format, i, n;
3854 #ifdef HAVE_AS_LEB128
3855 char ttype_label[32];
3856 char cs_after_size_label[32];
3857 char cs_end_label[32];
3858 #else
3859 int call_site_len;
3860 #endif
3861 int have_tt_data;
3862 int funcdef_number;
3863 int tt_format_size = 0;
3865 /* Not all functions need anything. */
3866 if (! cfun->uses_eh_lsda)
3867 return;
3869 funcdef_number = (USING_SJLJ_EXCEPTIONS
3870 ? sjlj_funcdef_number
3871 : current_funcdef_number);
3873 #ifdef IA64_UNWIND_INFO
3874 fputs ("\t.personality\t", asm_out_file);
3875 output_addr_const (asm_out_file, eh_personality_libfunc);
3876 fputs ("\n\t.handlerdata\n", asm_out_file);
3877 /* Note that varasm still thinks we're in the function's code section.
3878 The ".endp" directive that will immediately follow will take us back. */
3879 #else
3880 (*targetm.asm_out.exception_section) ();
3881 #endif
3883 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3884 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3886 /* Indicate the format of the @TType entries. */
3887 if (! have_tt_data)
3888 tt_format = DW_EH_PE_omit;
3889 else
3891 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3892 #ifdef HAVE_AS_LEB128
3893 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3894 #endif
3895 tt_format_size = size_of_encoded_value (tt_format);
3897 assemble_align (tt_format_size * BITS_PER_UNIT);
3900 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3902 /* The LSDA header. */
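/* As a sketch, the complete LSDA emitted below is laid out so:

	LLSDAnnn:
	  @LPStart format		(1 byte; DW_EH_PE_omit here)
	  @TType format			(1 byte)
	  @TType base offset		(uleb128; only with @TType data)
	  call-site format		(1 byte)
	  call-site table length	(uleb128 or label delta)
	  call-site table
	  action record table
	  @TType table			(aligned; emitted in reverse)
	  exception specification table  */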
3904 /* Indicate the format of the landing pad start pointer. An omitted
3905 field implies @LPStart == @Start. */
3906 /* Currently we always put @LPStart == @Start. This field would
3907 be most useful in moving the landing pads completely out of
3908 line to another section, but it could also be used to minimize
3909 the size of uleb128 landing pad offsets. */
3910 lp_format = DW_EH_PE_omit;
3911 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3912 eh_data_format_name (lp_format));
3914 /* @LPStart pointer would go here. */
3916 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3917 eh_data_format_name (tt_format));
3919 #ifndef HAVE_AS_LEB128
3920 if (USING_SJLJ_EXCEPTIONS)
3921 call_site_len = sjlj_size_of_call_site_table ();
3922 else
3923 call_site_len = dw2_size_of_call_site_table ();
3924 #endif
3926 /* A pc-relative 4-byte displacement to the @TType data. */
3927 if (have_tt_data)
3929 #ifdef HAVE_AS_LEB128
3930 char ttype_after_disp_label[32];
3931 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3932 funcdef_number);
3933 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3934 "@TType base offset");
3935 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3936 #else
3937 /* Ugh. Alignment complicates things. */
3938 unsigned int before_disp, after_disp, last_disp, disp;
3940 before_disp = 1 + 1;
3941 after_disp = (1 + size_of_uleb128 (call_site_len)
3942 + call_site_len
3943 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3944 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3945 * tt_format_size));
3947 disp = after_disp;
3950 unsigned int disp_size, pad;
3952 last_disp = disp;
3953 disp_size = size_of_uleb128 (disp);
3954 pad = before_disp + disp_size + after_disp;
3955 if (pad % tt_format_size)
3956 pad = tt_format_size - (pad % tt_format_size);
3957 else
3958 pad = 0;
3959 disp = after_disp + pad;
3961 while (disp != last_disp);
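/* A worked instance of the iteration above (a sketch): with
   before_disp = 2, after_disp = 130 and tt_format_size = 4,
   disp = 130 encodes in two uleb128 bytes, so 2 + 2 + 130 = 134
   bytes precede the @TType data; thus pad = 2 and disp becomes
   132.  The second round reproduces 132 and the loop terminates.  */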
3963 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3964 #endif
3967 /* Indicate the format of the call-site offsets. */
3968 #ifdef HAVE_AS_LEB128
3969 cs_format = DW_EH_PE_uleb128;
3970 #else
3971 cs_format = DW_EH_PE_udata4;
3972 #endif
3973 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3974 eh_data_format_name (cs_format));
3976 #ifdef HAVE_AS_LEB128
3977 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3978 funcdef_number);
3979 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3980 funcdef_number);
3981 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3982 "Call-site table length");
3983 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3984 if (USING_SJLJ_EXCEPTIONS)
3985 sjlj_output_call_site_table ();
3986 else
3987 dw2_output_call_site_table ();
3988 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3989 #else
3990 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3991 if (USING_SJLJ_EXCEPTIONS)
3992 sjlj_output_call_site_table ();
3993 else
3994 dw2_output_call_site_table ();
3995 #endif
3997 /* ??? Decode and interpret the data for flag_debug_asm. */
3998 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3999 for (i = 0; i < n; ++i)
4000 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
4001 (i ? NULL : "Action record table"));
4003 if (have_tt_data)
4004 assemble_align (tt_format_size * BITS_PER_UNIT);
4006 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
4007 while (i-- > 0)
4009 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
4010 rtx value;
4012 if (type == NULL_TREE)
4013 type = integer_zero_node;
4014 else
4015 type = lookup_type_for_runtime (type);
4017 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4018 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
4019 assemble_integer (value, tt_format_size,
4020 tt_format_size * BITS_PER_UNIT, 1);
4021 else
4022 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
4025 #ifdef HAVE_AS_LEB128
4026 if (have_tt_data)
4027 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
4028 #endif
4030 /* ??? Decode and interpret the data for flag_debug_asm. */
4031 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
4032 for (i = 0; i < n; ++i)
4033 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
4034 (i ? NULL : "Exception specification table"));
4036 function_section (current_function_decl);
4038 if (USING_SJLJ_EXCEPTIONS)
4039 sjlj_funcdef_number += 1;