/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
78 /* Provide defaults for stuff that may not be defined when using
79 sjlj exceptions. */
80 #ifndef EH_RETURN_DATA_REGNO
81 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
82 #endif
85 /* Nonzero means enable synchronous exceptions for non-call instructions. */
86 int flag_non_call_exceptions;
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 tree (*lang_protect_cleanup_actions) (void);
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) (tree a, tree b);
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) (tree);
98 /* A hash table of label to region number. */
100 struct ehl_map_entry GTY(())
102 rtx label;
103 struct eh_region *region;
106 static GTY(()) int call_site_base;
107 static GTY ((param_is (union tree_node)))
108 htab_t type_to_runtime_map;
110 /* Describe the SjLj_Function_Context structure. */
111 static GTY(()) tree sjlj_fc_type_node;
112 static int sjlj_fc_call_site_ofs;
113 static int sjlj_fc_data_ofs;
114 static int sjlj_fc_personality_ofs;
115 static int sjlj_fc_lsda_ofs;
116 static int sjlj_fc_jbuf_ofs;

/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
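
/* Illustration (a hypothetical example, not part of the original code):
   for a C++ fragment of the shape

	try { A a; may_throw (); } catch (E1 &) { } catch (E2 &) { }

   expansion produces an ERT_TRY region for the try body, a chain of two
   ERT_CATCH regions reached from it through u.try.catch and linked via
   u.catch.next_catch/prev_catch, and an ERT_CLEANUP region for the
   destructor of `a' hanging off the try through the inner/next_peer
   links.  The exact shape depends on the front end and expansion order;
   this sketch only shows how the fields above relate.  */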

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static struct eh_region *expand_eh_region_end (void);

static rtx get_exception_filter (struct function *);

static void collect_eh_region_array (void);
static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
						struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled (tree, tree);
static void add_reachable_handler (struct reachable_info *,
				   struct eh_region *, struct eh_region *);
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);

/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
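
/* For orientation only: the record built above mirrors, approximately,
   the following C declaration.  The authoritative layout is the one in
   unwind-sjlj.c; the field types and the size of __jbuf shown here are
   target-dependent approximations.

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *__prev;
	  int __call_site;
	  word __data[4];
	  void *__personality;
	  void *__lsda;
	  void *__jbuf[];
	};

   The sjlj_fc_*_ofs variables cached above are the byte offsets of
   these fields, for use when generating rtl.  */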

void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start (void)
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
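
/* Illustration (hypothetical insn stream): each start/end pair brackets
   the protected insns with a matching pair of numbered notes,

	NOTE_INSN_EH_REGION_BEG 7
	  ... protected insns ...
	NOTE_INSN_EH_REGION_END 7

   which convert_from_eh_region_ranges later lowers into per-insn
   REG_EH_REGION notes on the insns that can actually throw.  */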

/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block
	 in it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}

/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, a list of such
   types, (in the case of Java) an ADDR_EXPR which points to the
   runtime type to match, or null if this is a catch-all
   clause.  Providing a type list makes it possible to associate the
   catch region with several exception types, which is useful e.g. for
   Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}

/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}

/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (void)
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

static void
resolve_one_fixup_region (struct eh_region *fixup)
{
  struct eh_region *cleanup, *real;
  int j, n;

  n = cfun->eh->last_region_number;
  cleanup = 0;

  for (j = 1; j <= n; ++j)
    {
      cleanup = cfun->eh->region_array[j];
      if (cleanup && cleanup->type == ERT_CLEANUP
	  && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	break;
    }
  if (j > n)
    abort ();

  real = cleanup->outer;
  if (real && real->type == ERT_FIXUP)
    {
      if (!real->u.fixup.resolved)
	resolve_one_fixup_region (real);
      real = real->u.fixup.real_region;
    }

  fixup->u.fixup.real_region = real;
  fixup->u.fixup.resolved = true;
}

static void
resolve_fixup_regions (void)
{
  int i, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];

      if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
	continue;

      resolve_one_fixup_region (fixup);
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      reachable[uid_region_num[INSN_UID (insn)]] = true;

      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	for (i = 0; i < 3; i++)
	  {
	    rtx sub = XEXP (PATTERN (insn), i);
	    for (; sub ; sub = NEXT_INSN (sub))
	      reachable[uid_region_num[INSN_UID (sub)]] = true;
	  }
    }

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges (void)
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}
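
/* Worked example (hypothetical insn stream): scanning

	NOTE_INSN_EH_REGION_BEG 1
	  NOTE_INSN_EH_REGION_BEG 2
	    call_insn
	  NOTE_INSN_EH_REGION_END 2
	NOTE_INSN_EH_REGION_END 1

   the walk above pushes 0 then 1 on the stack as the BEG notes are
   seen, so the call receives a REG_EH_REGION note of 2, the innermost
   open region; both note pairs are then removed and the stack unwinds
   back to where it started, which the final assertion checks.  */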

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}

static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}

static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}

static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

/* Emit SEQ into a new basic block just before INSN (which is assumed
   to be the first instruction of some existing BB) and return the
   newly produced block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into a newly created
     landing pad or other EH construct.  */
  for (e = BLOCK_FOR_INSN (insn)->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
  last = emit_insn_before (seq, insn);
  if (GET_CODE (last) == BARRIER)
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (src->succ)
	    remove_edge (src->succ);
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      if (GET_CODE (barrier) != BARRIER)
	abort ();
      delete_insn (barrier);
      delete_insn (region->resume);
    }
}

static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}

struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
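
/* Summarizing the mapping established above (values per the code):

	action_index -2 (must-not-throw)  -> call_site_index 0
	action_index -1 (no action)       -> call_site_index -1
	action_index >= 0                 -> index from add_call_site

   so indices 0 and -1 never name a real call-site table entry.  */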
2059 static void
2060 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2062 int last_call_site = -2;
2063 rtx insn, mem;
2065 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2067 struct eh_region *region;
2068 int this_call_site;
2069 rtx note, before, p;
2071 /* Reset value tracking at extended basic block boundaries. */
2072 if (GET_CODE (insn) == CODE_LABEL)
2073 last_call_site = -2;
2075 if (! INSN_P (insn))
2076 continue;
2078 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2079 if (!note)
2081 /* Calls (and trapping insns) without notes are outside any
2082 exception handling region in this function. Mark them as
2083 no action. */
2084 if (GET_CODE (insn) == CALL_INSN
2085 || (flag_non_call_exceptions
2086 && may_trap_p (PATTERN (insn))))
2087 this_call_site = -1;
2088 else
2089 continue;
2091 else
2093 /* Calls that are known to not throw need not be marked. */
2094 if (INTVAL (XEXP (note, 0)) <= 0)
2095 continue;
2097 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2098 this_call_site = lp_info[region->region_number].call_site_index;
2101 if (this_call_site == last_call_site)
2102 continue;
2104 /* Don't separate a call from its argument loads.  */
2105 before = insn;
2106 if (GET_CODE (insn) == CALL_INSN)
2107 before = find_first_parameter_load (insn, NULL_RTX);
2109 start_sequence ();
2110 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2111 sjlj_fc_call_site_ofs);
2112 emit_move_insn (mem, GEN_INT (this_call_site));
2113 p = get_insns ();
2114 end_sequence ();
2116 emit_insn_before (p, before);
2117 last_call_site = this_call_site;
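/* Illustratively, the effect of the pass above is to bracket each
   potentially throwing insn with a store along these lines:

     fc.call_site = 1;    set before a call in a reachable region
     call foo
     fc.call_site = -1;   set before a call outside any region
     call bar

   where fc is the function context in cfun->eh->sjlj_fc; the stored
   index is what the runtime consults if the callee throws.  */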
2121 /* Construct the SjLj_Function_Context. */
2123 static void
2124 sjlj_emit_function_enter (rtx dispatch_label)
2126 rtx fn_begin, fc, mem, seq;
2128 fc = cfun->eh->sjlj_fc;
2130 start_sequence ();
2132 /* We're storing this libcall's address into memory instead of
2133 calling it directly. Thus, we must call assemble_external_libcall
2134 here, as we cannot depend on emit_library_call to do it for us.  */
2135 assemble_external_libcall (eh_personality_libfunc);
2136 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2137 emit_move_insn (mem, eh_personality_libfunc);
2139 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2140 if (cfun->uses_eh_lsda)
2142 char buf[20];
2143 rtx sym;
2145 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2146 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2147 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2148 emit_move_insn (mem, sym);
2150 else
2151 emit_move_insn (mem, const0_rtx);
2153 #ifdef DONT_USE_BUILTIN_SETJMP
2155 rtx x, note;
2156 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2157 TYPE_MODE (integer_type_node), 1,
2158 plus_constant (XEXP (fc, 0),
2159 sjlj_fc_jbuf_ofs), Pmode);
2161 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
2162 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2164 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2165 TYPE_MODE (integer_type_node), 0, dispatch_label);
2167 #else
2168 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2169 dispatch_label);
2170 #endif
2172 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2173 1, XEXP (fc, 0), Pmode);
2175 seq = get_insns ();
2176 end_sequence ();
2178 /* ??? Instead of doing this at the beginning of the function,
2179 do this in a block that is at loop level 0 and dominates all
2180 can_throw_internal instructions. */
2182 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2183 if (GET_CODE (fn_begin) == NOTE
2184 && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
2185 || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
2186 break;
2187 if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2188 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
2189 else
2191 rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
2192 for (; ; fn_begin = NEXT_INSN (fn_begin))
2193 if ((GET_CODE (fn_begin) == NOTE
2194 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2195 || fn_begin == last)
2196 break;
2197 emit_insn_after (seq, fn_begin);
2201 /* Call back from expand_function_end to know where we should put
2202 the call to unwind_sjlj_unregister_libfunc if needed. */
2204 void
2205 sjlj_emit_function_exit_after (rtx after)
2207 cfun->eh->sjlj_exit_after = after;
2210 static void
2211 sjlj_emit_function_exit (void)
2213 rtx seq;
2214 edge e;
2216 start_sequence ();
2218 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2219 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2221 seq = get_insns ();
2222 end_sequence ();
2224 /* ??? Really this can be done in any block at loop level 0 that
2225 post-dominates all can_throw_internal instructions. This is
2226 the last possible moment. */
2228 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
2229 if (e->flags & EDGE_FALLTHRU)
2230 break;
2231 if (e)
2233 rtx insn;
2235 /* Figure out whether the place where we are supposed to insert the
2236 libcall is inside the last basic block or after it.  In the latter
2237 case we need to emit the sequence on the edge.  */
2238 if (e->src->next_bb != EXIT_BLOCK_PTR)
2239 abort ();
2240 for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
2241 if (insn == cfun->eh->sjlj_exit_after)
2242 break;
2243 if (insn)
2244 insert_insn_on_edge (seq, e);
2245 else
2247 insn = cfun->eh->sjlj_exit_after;
2248 if (GET_CODE (insn) == CODE_LABEL)
2249 insn = NEXT_INSN (insn);
2250 emit_insn_after (seq, insn);
2255 static void
2256 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2258 int i, first_reachable;
2259 rtx mem, dispatch, seq, fc;
2260 rtx before;
2261 basic_block bb;
2262 edge e;
2264 fc = cfun->eh->sjlj_fc;
2266 start_sequence ();
2268 emit_label (dispatch_label);
2270 #ifndef DONT_USE_BUILTIN_SETJMP
2271 expand_builtin_setjmp_receiver (dispatch_label);
2272 #endif
2274 /* Load up dispatch index, exc_ptr and filter values from the
2275 function context. */
2276 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2277 sjlj_fc_call_site_ofs);
2278 dispatch = copy_to_reg (mem);
2280 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2281 if (word_mode != ptr_mode)
2283 #ifdef POINTERS_EXTEND_UNSIGNED
2284 mem = convert_memory_address (ptr_mode, mem);
2285 #else
2286 mem = convert_to_mode (ptr_mode, mem, 0);
2287 #endif
2289 emit_move_insn (cfun->eh->exc_ptr, mem);
2291 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2292 emit_move_insn (cfun->eh->filter, mem);
2294 /* Jump to one of the directly reachable regions. */
2295 /* ??? This really ought to be using a switch statement. */
2297 first_reachable = 0;
2298 for (i = cfun->eh->last_region_number; i > 0; --i)
2300 if (! lp_info[i].directly_reachable)
2301 continue;
2303 if (! first_reachable)
2305 first_reachable = i;
2306 continue;
2309 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2310 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2311 cfun->eh->region_array[i]->post_landing_pad);
2314 seq = get_insns ();
2315 end_sequence ();
2317 before = cfun->eh->region_array[first_reachable]->post_landing_pad;
2319 bb = emit_to_new_bb_before (seq, before);
2320 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2321 e->count = bb->count;
2322 e->probability = REG_BR_PROB_BASE;
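/* The dispatcher emitted above amounts to the following pseudo-source
   (illustrative; it assumes the runtime has replaced fc.call_site with
   the dispatch index recorded in the call-site table):

     dispatch:
       d = fc.call_site;
       exc_ptr = fc.data[0];
       filter  = fc.data[1];
       if (d == 2) goto post_landing_pad_of_region_2;
       if (d == 1) goto post_landing_pad_of_region_1;
       goto post_landing_pad_of_first_reachable_region;
*/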
2325 static void
2326 sjlj_build_landing_pads (void)
2328 struct sjlj_lp_info *lp_info;
2330 lp_info = xcalloc (cfun->eh->last_region_number + 1,
2331 sizeof (struct sjlj_lp_info));
2333 if (sjlj_find_directly_reachable_regions (lp_info))
2335 rtx dispatch_label = gen_label_rtx ();
2337 cfun->eh->sjlj_fc
2338 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2339 int_size_in_bytes (sjlj_fc_type_node),
2340 TYPE_ALIGN (sjlj_fc_type_node));
2342 sjlj_assign_call_site_values (dispatch_label, lp_info);
2343 sjlj_mark_call_sites (lp_info);
2345 sjlj_emit_function_enter (dispatch_label);
2346 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2347 sjlj_emit_function_exit ();
2350 free (lp_info);
2353 void
2354 finish_eh_generation (void)
2356 basic_block bb;
2358 /* Nothing to do if no regions created. */
2359 if (cfun->eh->region_tree == NULL)
2360 return;
2362 /* The object here is to provide find_basic_blocks with detailed
2363 information (via reachable_handlers) on how exception control
2364 flows within the function. In this first pass, we can include
2365 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2366 regions, and hope that it will be useful in deleting unreachable
2367 handlers. Subsequently, we will generate landing pads which will
2368 connect many of the handlers, and then type information will not
2369 be effective. Still, this is a win over previous implementations. */
2371 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2373 /* These registers are used by the landing pads. Make sure they
2374 have been generated. */
2375 get_exception_pointer (cfun);
2376 get_exception_filter (cfun);
2378 /* Construct the landing pads. */
2380 assign_filter_values ();
2381 build_post_landing_pads ();
2382 connect_post_landing_pads ();
2383 if (USING_SJLJ_EXCEPTIONS)
2384 sjlj_build_landing_pads ();
2385 else
2386 dw2_build_landing_pads ();
2388 cfun->eh->built_landing_pads = 1;
2390 /* We've totally changed the CFG. Start over. */
2391 find_exception_handler_labels ();
2392 break_superblocks ();
2393 if (USING_SJLJ_EXCEPTIONS)
2394 commit_edge_insertions ();
2395 FOR_EACH_BB (bb)
2397 edge e, next;
2398 bool eh = false;
2399 for (e = bb->succ; e; e = next)
2401 next = e->succ_next;
2402 if (e->flags & EDGE_EH)
2404 remove_edge (e);
2405 eh = true;
2408 if (eh)
2409 make_eh_edge (NULL, bb, BB_END (bb));
2411 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2414 static hashval_t
2415 ehl_hash (const void *pentry)
2417 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2419 /* 2^32 * ((sqrt(5) - 1) / 2) */
2420 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2421 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
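/* (0x9e3779b9 is 2^32 divided by the golden ratio, i.e. Knuth's
   multiplicative hashing constant; multiplying by it scatters the
   mostly consecutive CODE_LABEL_NUMBER values across the hash space.)  */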
2424 static int
2425 ehl_eq (const void *pentry, const void *pdata)
2427 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2428 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2430 return entry->label == data->label;
2433 /* This section handles removing dead code for flow. */
2435 /* Remove LABEL from exception_handler_label_map. */
2437 static void
2438 remove_exception_handler_label (rtx label)
2440 struct ehl_map_entry **slot, tmp;
2442 /* If exception_handler_label_map was not built yet,
2443 there is nothing to do. */
2444 if (cfun->eh->exception_handler_label_map == NULL)
2445 return;
2447 tmp.label = label;
2448 slot = (struct ehl_map_entry **)
2449 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2450 if (! slot)
2451 abort ();
2453 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2456 /* Splice REGION from the region tree etc. */
2458 static void
2459 remove_eh_handler (struct eh_region *region)
2461 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2462 rtx lab;
2464 /* For the benefit of efficiently handling REG_EH_REGION notes,
2465 replace this region in the region array with its containing
2466 region. Note that previous region deletions may result in
2467 multiple copies of this region in the array, so we have a
2468 list of alternate numbers by which we are known. */
2470 outer = region->outer;
2471 cfun->eh->region_array[region->region_number] = outer;
2472 if (region->aka)
2474 int i;
2475 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2476 { cfun->eh->region_array[i] = outer; });
2479 if (outer)
2481 if (!outer->aka)
2482 outer->aka = BITMAP_GGC_ALLOC ();
2483 if (region->aka)
2484 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2485 bitmap_set_bit (outer->aka, region->region_number);
2488 if (cfun->eh->built_landing_pads)
2489 lab = region->landing_pad;
2490 else
2491 lab = region->label;
2492 if (lab)
2493 remove_exception_handler_label (lab);
2495 if (outer)
2496 pp_start = &outer->inner;
2497 else
2498 pp_start = &cfun->eh->region_tree;
2499 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2500 continue;
2501 *pp = region->next_peer;
2503 inner = region->inner;
2504 if (inner)
2506 for (p = inner; p->next_peer ; p = p->next_peer)
2507 p->outer = outer;
2508 p->outer = outer;
2510 p->next_peer = *pp_start;
2511 *pp_start = inner;
2514 if (region->type == ERT_CATCH)
2516 struct eh_region *try, *next, *prev;
2518 for (try = region->next_peer;
2519 try->type == ERT_CATCH;
2520 try = try->next_peer)
2521 continue;
2522 if (try->type != ERT_TRY)
2523 abort ();
2525 next = region->u.catch.next_catch;
2526 prev = region->u.catch.prev_catch;
2528 if (next)
2529 next->u.catch.prev_catch = prev;
2530 else
2531 try->u.try.last_catch = prev;
2532 if (prev)
2533 prev->u.catch.next_catch = next;
2534 else
2536 try->u.try.catch = next;
2537 if (! next)
2538 remove_eh_handler (try);
2543 /* LABEL heads a basic block that is about to be deleted. If this
2544 label corresponds to an exception region, we may be able to
2545 delete the region. */
2547 void
2548 maybe_remove_eh_handler (rtx label)
2550 struct ehl_map_entry **slot, tmp;
2551 struct eh_region *region;
2553 /* ??? After generating landing pads, it's not so simple to determine
2554 if the region data is completely unused. One must examine the
2555 landing pad and the post landing pad, and whether an inner try block
2556 is referencing the catch handlers directly. */
2557 if (cfun->eh->built_landing_pads)
2558 return;
2560 tmp.label = label;
2561 slot = (struct ehl_map_entry **)
2562 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2563 if (! slot)
2564 return;
2565 region = (*slot)->region;
2566 if (! region)
2567 return;
2569 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2570 because there is no path to the fallback call to terminate.
2571 But the region continues to affect call-site data until there
2572 are no more contained calls, which we don't see here. */
2573 if (region->type == ERT_MUST_NOT_THROW)
2575 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2576 region->label = NULL_RTX;
2578 else
2579 remove_eh_handler (region);
2582 /* Invokes CALLBACK for every exception handler label. Only used by old
2583 loop hackery; should not be used by new code. */
2585 void
2586 for_each_eh_label (void (*callback) (rtx))
2588 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2589 (void *) &callback);
2592 static int
2593 for_each_eh_label_1 (void **pentry, void *data)
2595 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2596 void (*callback) (rtx) = *(void (**) (rtx)) data;
2598 (*callback) (entry->label);
2599 return 1;
2602 /* This section describes CFG exception edges for flow. */
2604 /* For communicating between calls to reachable_next_level. */
2605 struct reachable_info GTY(())
2607 tree types_caught;
2608 tree types_allowed;
2609 rtx handlers;
2612 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2613 base class of TYPE, is in HANDLED. */
2615 static int
2616 check_handled (tree handled, tree type)
2618 tree t;
2620 /* We can check for exact matches without front-end help. */
2621 if (! lang_eh_type_covers)
2623 for (t = handled; t ; t = TREE_CHAIN (t))
2624 if (TREE_VALUE (t) == type)
2625 return 1;
2627 else
2629 for (t = handled; t ; t = TREE_CHAIN (t))
2630 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2631 return 1;
2634 return 0;
2637 /* A subroutine of reachable_next_level. If we are collecting a list
2638 of handlers, add one. After landing pad generation, reference
2639 it instead of the handlers themselves. Further, the handlers are
2640 all wired together, so by referencing one, we've got them all.
2641 Before landing pad generation we reference each handler individually.
2643 LP_REGION contains the landing pad; REGION is the handler. */
2645 static void
2646 add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region, struct eh_region *region)
2648 if (! info)
2649 return;
2651 if (cfun->eh->built_landing_pads)
2653 if (! info->handlers)
2654 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2656 else
2657 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2660 /* Process one level of exception regions for reachability.
2661 If TYPE_THROWN is non-null, then it is the *exact* type being
2662 propagated. If INFO is non-null, then collect handler labels
2663 and caught/allowed type information between invocations. */
2665 static enum reachable_code
2666 reachable_next_level (struct eh_region *region, tree type_thrown,
2667 struct reachable_info *info)
2669 switch (region->type)
2671 case ERT_CLEANUP:
2672 /* Before landing-pad generation, we model control flow
2673 directly to the individual handlers. In this way we can
2674 see that catch handler types may shadow one another. */
2675 add_reachable_handler (info, region, region);
2676 return RNL_MAYBE_CAUGHT;
2678 case ERT_TRY:
2680 struct eh_region *c;
2681 enum reachable_code ret = RNL_NOT_CAUGHT;
2683 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2685 /* A catch-all handler ends the search. */
2686 if (c->u.catch.type_list == NULL)
2688 add_reachable_handler (info, region, c);
2689 return RNL_CAUGHT;
2692 if (type_thrown)
2694 /* If we have at least one type match, end the search. */
2695 tree tp_node = c->u.catch.type_list;
2697 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2699 tree type = TREE_VALUE (tp_node);
2701 if (type == type_thrown
2702 || (lang_eh_type_covers
2703 && (*lang_eh_type_covers) (type, type_thrown)))
2705 add_reachable_handler (info, region, c);
2706 return RNL_CAUGHT;
2710 /* If we have definitive information of a match failure,
2711 the catch won't trigger. */
2712 if (lang_eh_type_covers)
2713 return RNL_NOT_CAUGHT;
2716 /* At this point, we either don't know what type is thrown or
2717 don't have front-end assistance to help deciding if it is
2718 covered by one of the types in the list for this region.
2720 We'd then like to add this region to the list of reachable
2721 handlers since it is indeed potentially reachable based on the
2722 information we have.
2724 Actually, this handler is for sure not reachable if all the
2725 types it matches have already been caught. That is, it is only
2726 potentially reachable if at least one of the types it catches
2727 has not been previously caught. */
2729 if (! info)
2730 ret = RNL_MAYBE_CAUGHT;
2731 else
2733 tree tp_node = c->u.catch.type_list;
2734 bool maybe_reachable = false;
2736 /* Compute the potential reachability of this handler and
2737 update the list of types caught at the same time. */
2738 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2740 tree type = TREE_VALUE (tp_node);
2742 if (! check_handled (info->types_caught, type))
2744 info->types_caught
2745 = tree_cons (NULL, type, info->types_caught);
2747 maybe_reachable = true;
2751 if (maybe_reachable)
2753 add_reachable_handler (info, region, c);
2755 /* ??? If the catch type is a base class of every allowed
2756 type, then we know we can stop the search. */
2757 ret = RNL_MAYBE_CAUGHT;
2762 return ret;
2765 case ERT_ALLOWED_EXCEPTIONS:
2766 /* An empty list of types definitely ends the search. */
2767 if (region->u.allowed.type_list == NULL_TREE)
2769 add_reachable_handler (info, region, region);
2770 return RNL_CAUGHT;
2773 /* Collect a list of lists of allowed types for use in detecting
2774 when a catch may be transformed into a catch-all. */
2775 if (info)
2776 info->types_allowed = tree_cons (NULL_TREE,
2777 region->u.allowed.type_list,
2778 info->types_allowed);
2780 /* If we have definitive information about the type hierarchy,
2781 then we can tell if the thrown type will pass through the
2782 filter. */
2783 if (type_thrown && lang_eh_type_covers)
2785 if (check_handled (region->u.allowed.type_list, type_thrown))
2786 return RNL_NOT_CAUGHT;
2787 else
2789 add_reachable_handler (info, region, region);
2790 return RNL_CAUGHT;
2794 add_reachable_handler (info, region, region);
2795 return RNL_MAYBE_CAUGHT;
2797 case ERT_CATCH:
2798 /* Catch regions are handled by their controlling try region. */
2799 return RNL_NOT_CAUGHT;
2801 case ERT_MUST_NOT_THROW:
2802 /* Here we end our search, since no exceptions may propagate.
2803 If we've touched down at some landing pad previously, then the
2804 explicit function call we generated may be used. Otherwise
2805 the call is made by the runtime. */
2806 if (info && info->handlers)
2808 add_reachable_handler (info, region, region);
2809 return RNL_CAUGHT;
2811 else
2812 return RNL_BLOCKED;
2814 case ERT_THROW:
2815 case ERT_FIXUP:
2816 case ERT_UNKNOWN:
2817 /* Shouldn't see these here. */
2818 break;
2821 abort ();
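/* Example (illustrative): for an insn inside "try { ... } catch (T)",
   the walk outward hits the ERT_TRY region first.  If the exact thrown
   type is known to be T, the try yields RNL_CAUGHT and the walk stops;
   if the thrown type is unknown, it yields RNL_MAYBE_CAUGHT and outer
   regions are examined as well.  */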
2824 /* Retrieve a list of labels of exception handlers which can be
2825 reached by a given insn. */
2827 rtx
2828 reachable_handlers (rtx insn)
2830 struct reachable_info info;
2831 struct eh_region *region;
2832 tree type_thrown;
2833 int region_number;
2835 if (GET_CODE (insn) == JUMP_INSN
2836 && GET_CODE (PATTERN (insn)) == RESX)
2837 region_number = XINT (PATTERN (insn), 0);
2838 else
2840 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2841 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2842 return NULL;
2843 region_number = INTVAL (XEXP (note, 0));
2846 memset (&info, 0, sizeof (info));
2848 region = cfun->eh->region_array[region_number];
2850 type_thrown = NULL_TREE;
2851 if (GET_CODE (insn) == JUMP_INSN
2852 && GET_CODE (PATTERN (insn)) == RESX)
2854 /* A RESX leaves a region instead of entering it. Thus the
2855 region itself may have been deleted out from under us. */
2856 if (region == NULL)
2857 return NULL;
2858 region = region->outer;
2860 else if (region->type == ERT_THROW)
2862 type_thrown = region->u.throw.type;
2863 region = region->outer;
2866 while (region)
2868 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2869 break;
2870 /* If we have processed one cleanup, there is no point in
2871 processing any more of them. Each cleanup will have an edge
2872 to the next outer cleanup region, so the flow graph will be
2873 accurate. */
2874 if (region->type == ERT_CLEANUP)
2875 region = region->u.cleanup.prev_try;
2876 else
2877 region = region->outer;
2880 return info.handlers;
2883 /* Determine if the given INSN can throw an exception that is caught
2884 within the function. */
2886 bool
2887 can_throw_internal (rtx insn)
2889 struct eh_region *region;
2890 tree type_thrown;
2891 rtx note;
2893 if (! INSN_P (insn))
2894 return false;
2896 if (GET_CODE (insn) == INSN
2897 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2898 insn = XVECEXP (PATTERN (insn), 0, 0);
2900 if (GET_CODE (insn) == CALL_INSN
2901 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2903 int i;
2904 for (i = 0; i < 3; ++i)
2906 rtx sub = XEXP (PATTERN (insn), i);
2907 for (; sub ; sub = NEXT_INSN (sub))
2908 if (can_throw_internal (sub))
2909 return true;
2911 return false;
2914 /* Every insn that might throw has an EH_REGION note. */
2915 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2916 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2917 return false;
2919 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2921 type_thrown = NULL_TREE;
2922 if (region->type == ERT_THROW)
2924 type_thrown = region->u.throw.type;
2925 region = region->outer;
2928 /* If this exception is ignored by each and every containing region,
2929 then control passes straight out. The runtime may handle some
2930 regions, which also do not require processing internally. */
2931 for (; region; region = region->outer)
2933 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2934 if (how == RNL_BLOCKED)
2935 return false;
2936 if (how != RNL_NOT_CAUGHT)
2937 return true;
2940 return false;
2943 /* Determine if the given INSN can throw an exception that is
2944 visible outside the function. */
2946 bool
2947 can_throw_external (rtx insn)
2949 struct eh_region *region;
2950 tree type_thrown;
2951 rtx note;
2953 if (! INSN_P (insn))
2954 return false;
2956 if (GET_CODE (insn) == INSN
2957 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2958 insn = XVECEXP (PATTERN (insn), 0, 0);
2960 if (GET_CODE (insn) == CALL_INSN
2961 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2963 int i;
2964 for (i = 0; i < 3; ++i)
2966 rtx sub = XEXP (PATTERN (insn), i);
2967 for (; sub ; sub = NEXT_INSN (sub))
2968 if (can_throw_external (sub))
2969 return true;
2971 return false;
2974 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2975 if (!note)
2977 /* Calls (and trapping insns) without notes are outside any
2978 exception handling region in this function. We have to
2979 assume it might throw. Given that the front end and middle
2980 end mark known NOTHROW functions, this isn't so wildly
2981 inaccurate. */
2982 return (GET_CODE (insn) == CALL_INSN
2983 || (flag_non_call_exceptions
2984 && may_trap_p (PATTERN (insn))));
2986 if (INTVAL (XEXP (note, 0)) <= 0)
2987 return false;
2989 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2991 type_thrown = NULL_TREE;
2992 if (region->type == ERT_THROW)
2994 type_thrown = region->u.throw.type;
2995 region = region->outer;
2998 /* If the exception is caught or blocked by any containing region,
2999 then it is not seen by any calling function. */
3000 for (; region ; region = region->outer)
3001 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
3002 return false;
3004 return true;
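/* The two predicates differ, e.g., for a call wrapped in a catch-all:
   such a call can_throw_internal (the handler is in this function) but
   not can_throw_external (nothing escapes).  A call outside every
   region is the opposite: nothing is caught internally, but callers
   must assume it can throw.  */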
3007 /* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
3009 void
3010 set_nothrow_function_flags (void)
3012 rtx insn;
3014 current_function_nothrow = 1;
3016 /* Assume cfun->all_throwers_are_sibcalls until we encounter
3017 something that can throw an exception. We specifically exempt
3018 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3019 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3020 is optimistic. */
3022 cfun->all_throwers_are_sibcalls = 1;
3024 if (! flag_exceptions)
3025 return;
3027 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3028 if (can_throw_external (insn))
3030 current_function_nothrow = 0;
3032 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
3034 cfun->all_throwers_are_sibcalls = 0;
3035 return;
3039 for (insn = current_function_epilogue_delay_list; insn;
3040 insn = XEXP (insn, 1))
3041 if (can_throw_external (insn))
3043 current_function_nothrow = 0;
3045 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
3047 cfun->all_throwers_are_sibcalls = 0;
3048 return;
3054 /* Various hooks for unwind library. */
3056 /* Do any necessary initialization to access arbitrary stack frames.
3057 On the SPARC, this means flushing the register windows. */
3059 void
3060 expand_builtin_unwind_init (void)
3062 /* Set this so all the registers get saved in our frame; we need to be
3063 able to copy the saved values for any registers from frames we unwind. */
3064 current_function_has_nonlocal_label = 1;
3066 #ifdef SETUP_FRAME_ADDRESSES
3067 SETUP_FRAME_ADDRESSES ();
3068 #endif
3071 rtx
3072 expand_builtin_eh_return_data_regno (tree arglist)
3074 tree which = TREE_VALUE (arglist);
3075 unsigned HOST_WIDE_INT iwhich;
3077 if (TREE_CODE (which) != INTEGER_CST)
3079 error ("argument of `__builtin_eh_return_data_regno' must be constant");
3080 return constm1_rtx;
3083 iwhich = tree_low_cst (which, 1);
3084 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3085 if (iwhich == INVALID_REGNUM)
3086 return constm1_rtx;
3088 #ifdef DWARF_FRAME_REGNUM
3089 iwhich = DWARF_FRAME_REGNUM (iwhich);
3090 #else
3091 iwhich = DBX_REGISTER_NUMBER (iwhich);
3092 #endif
3094 return GEN_INT (iwhich);
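/* For example, __builtin_eh_return_data_regno (0) expands to a
   constant such as 0 on IA-32, where the first EH data word travels
   in %eax; the unwinder library uses the builtin to stay
   target-independent.  */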
3097 /* Given a value extracted from the return address register or stack slot,
3098 return the actual address encoded in that value. */
3100 rtx
3101 expand_builtin_extract_return_addr (tree addr_tree)
3103 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3105 if (GET_MODE (addr) != Pmode
3106 && GET_MODE (addr) != VOIDmode)
3108 #ifdef POINTERS_EXTEND_UNSIGNED
3109 addr = convert_memory_address (Pmode, addr);
3110 #else
3111 addr = convert_to_mode (Pmode, addr, 0);
3112 #endif
3115 /* First mask out any unwanted bits. */
3116 #ifdef MASK_RETURN_ADDR
3117 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3118 #endif
3120 /* Then adjust to find the real return address. */
3121 #if defined (RETURN_ADDR_OFFSET)
3122 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3123 #endif
3125 return addr;
3128 /* Given an actual address in addr_tree, do any necessary encoding
3129 and return the value to be stored in the return address register or
3130 stack slot so the epilogue will return to that address. */
3132 rtx
3133 expand_builtin_frob_return_addr (tree addr_tree)
3135 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3137 addr = convert_memory_address (Pmode, addr);
3139 #ifdef RETURN_ADDR_OFFSET
3140 addr = force_reg (Pmode, addr);
3141 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3142 #endif
3144 return addr;
3147 /* Set up the epilogue with the magic bits we'll need to return to the
3148 exception handler. */
3150 void
3151 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3152 tree handler_tree)
3154 rtx tmp;
3156 #ifdef EH_RETURN_STACKADJ_RTX
3157 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3158 tmp = convert_memory_address (Pmode, tmp);
3159 if (!cfun->eh->ehr_stackadj)
3160 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
3161 else if (tmp != cfun->eh->ehr_stackadj)
3162 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
3163 #endif
3165 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3166 tmp = convert_memory_address (Pmode, tmp);
3167 if (!cfun->eh->ehr_handler)
3168 cfun->eh->ehr_handler = copy_to_reg (tmp);
3169 else if (tmp != cfun->eh->ehr_handler)
3170 emit_move_insn (cfun->eh->ehr_handler, tmp);
3172 if (!cfun->eh->ehr_label)
3173 cfun->eh->ehr_label = gen_label_rtx ();
3174 emit_jump (cfun->eh->ehr_label);
3177 void
3178 expand_eh_return (void)
3180 rtx around_label;
3182 if (! cfun->eh->ehr_label)
3183 return;
3185 current_function_calls_eh_return = 1;
3187 #ifdef EH_RETURN_STACKADJ_RTX
3188 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3189 #endif
3191 around_label = gen_label_rtx ();
3192 emit_jump (around_label);
3194 emit_label (cfun->eh->ehr_label);
3195 clobber_return_register ();
3197 #ifdef EH_RETURN_STACKADJ_RTX
3198 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3199 #endif
3201 #ifdef HAVE_eh_return
3202 if (HAVE_eh_return)
3203 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3204 else
3205 #endif
3207 #ifdef EH_RETURN_HANDLER_RTX
3208 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3209 #else
3210 error ("__builtin_eh_return not supported on this target");
3211 #endif
3214 emit_label (around_label);
3217 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3218 POINTERS_EXTEND_UNSIGNED and return it. */
3220 rtx
3221 expand_builtin_extend_pointer (tree addr_tree)
3223 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3224 int extend;
3226 #ifdef POINTERS_EXTEND_UNSIGNED
3227 extend = POINTERS_EXTEND_UNSIGNED;
3228 #else
3229 /* The previous EH code did an unsigned extend by default, so we do
3230 the same here for consistency.  */
3231 extend = 1;
3232 #endif
3234 return convert_modes (word_mode, ptr_mode, addr, extend);
3237 /* In the following functions, we represent entries in the action table
3238 as 1-based indices. Special cases are:
3240 0: null action record, non-null landing pad; implies cleanups
3241 -1: null action record, null landing pad; implies no action
3242 -2: no call-site entry; implies must_not_throw
3243 -3: we have yet to process outer regions
3245 Further, no special cases apply to the "next" field of the record.
3246 For next, 0 means end of list. */
3248 struct action_record
3250 int offset;
3251 int filter;
3252 int next;
3255 static int
3256 action_record_eq (const void *pentry, const void *pdata)
3258 const struct action_record *entry = (const struct action_record *) pentry;
3259 const struct action_record *data = (const struct action_record *) pdata;
3260 return entry->filter == data->filter && entry->next == data->next;
3263 static hashval_t
3264 action_record_hash (const void *pentry)
3266 const struct action_record *entry = (const struct action_record *) pentry;
3267 return entry->next * 1009 + entry->filter;
3270 static int
3271 add_action_record (htab_t ar_hash, int filter, int next)
3273 struct action_record **slot, *new, tmp;
3275 tmp.filter = filter;
3276 tmp.next = next;
3277 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3279 if ((new = *slot) == NULL)
3281 new = xmalloc (sizeof (*new));
3282 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3283 new->filter = filter;
3284 new->next = next;
3285 *slot = new;
3287 /* The filter value goes in untouched. The link to the next
3288 record is a "self-relative" byte offset, or zero to indicate
3289 that there is no next record.  So convert the absolute 1-based
3290 indices we've been carrying around into a displacement. */
3292 push_sleb128 (&cfun->eh->action_record_data, filter);
3293 if (next)
3294 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3295 push_sleb128 (&cfun->eh->action_record_data, next);
3298 return new->offset;
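/* Worked example (illustrative): adding record A = {filter 0, next 0}
   and then record B = {filter 1, next = A} appends the sleb128 bytes

     00 00    A at offset 1: filter 0, next 0 (end of chain)
     01 7d    B at offset 3: filter 1, next -3 (self-relative link to A)

   since B's absolute next index 1 becomes 1 - (3 + 1) == -3, and the
   sleb128 encoding of -3 is the single byte 0x7d.  */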
3301 static int
3302 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3304 struct eh_region *c;
3305 int next;
3307 /* If we've reached the top of the region chain, then we have
3308 no actions, and require no landing pad. */
3309 if (region == NULL)
3310 return -1;
3312 switch (region->type)
3314 case ERT_CLEANUP:
3315 /* A cleanup adds a zero filter to the beginning of the chain, but
3316 there are special cases to look out for. If there are *only*
3317 cleanups along a path, then it compresses to a zero action.
3318 Further, if there are multiple cleanups along a path, we only
3319 need to represent one of them, as that is enough to trigger
3320 entry to the landing pad at runtime. */
3321 next = collect_one_action_chain (ar_hash, region->outer);
3322 if (next <= 0)
3323 return 0;
3324 for (c = region->outer; c ; c = c->outer)
3325 if (c->type == ERT_CLEANUP)
3326 return next;
3327 return add_action_record (ar_hash, 0, next);
3329 case ERT_TRY:
3330 /* Process the associated catch regions in reverse order.
3331 If there's a catch-all handler, then we don't need to
3332 search outer regions. Use a magic -3 value to record
3333 that we haven't done the outer search. */
3334 next = -3;
3335 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3337 if (c->u.catch.type_list == NULL)
3339 /* Retrieve the filter from the head of the filter list
3340 where we have stored it (see assign_filter_values). */
3341 int filter
3342 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3344 next = add_action_record (ar_hash, filter, 0);
3346 else
3348 /* Once the outer search is done, trigger an action record for
3349 each filter we have. */
3350 tree flt_node;
3352 if (next == -3)
3354 next = collect_one_action_chain (ar_hash, region->outer);
3356 /* If there is no next action, terminate the chain. */
3357 if (next == -1)
3358 next = 0;
3359 /* If all outer actions are cleanups or must_not_throw,
3360 we'll have no action record for it, since we had wanted
3361 to encode these states in the call-site record directly.
3362 Add a cleanup action to the chain to catch these. */
3363 else if (next <= 0)
3364 next = add_action_record (ar_hash, 0, 0);
3367 flt_node = c->u.catch.filter_list;
3368 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3370 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3371 next = add_action_record (ar_hash, filter, next);
3375 return next;
3377 case ERT_ALLOWED_EXCEPTIONS:
3378 /* An exception specification adds its filter to the
3379 beginning of the chain. */
3380 next = collect_one_action_chain (ar_hash, region->outer);
3382 /* If there is no next action, terminate the chain. */
3383 if (next == -1)
3384 next = 0;
3385 /* If all outer actions are cleanups or must_not_throw,
3386 we'll have no action record for it, since we had wanted
3387 to encode these states in the call-site record directly.
3388 Add a cleanup action to the chain to catch these. */
3389 else if (next <= 0)
3390 next = add_action_record (ar_hash, 0, 0);
3392 return add_action_record (ar_hash, region->u.allowed.filter, next);
3394 case ERT_MUST_NOT_THROW:
3395 /* A must-not-throw region with no inner handlers or cleanups
3396 requires no call-site entry. Note that this differs from
3397 the no handler or cleanup case in that we do require an lsda
3398 to be generated. Return a magic -2 value to record this. */
3399 return -2;
3401 case ERT_CATCH:
3402 case ERT_THROW:
3403 /* CATCH regions are handled in TRY above. THROW regions are
3404 for optimization information only and produce no output. */
3405 return collect_one_action_chain (ar_hash, region->outer);
3407 default:
3408 abort ();
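/* Example (illustrative): for a call inside a try catching one type
   (filter 1) that is itself nested in a cleanup, the chain collected
   here is a cleanup record {0, end} referenced by a catch record
   {1, link to cleanup}; these are exactly records A and B of the
   worked example after add_action_record above, and the call site
   records B's offset as its action.  */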
3412 static int
3413 add_call_site (rtx landing_pad, int action)
3415 struct call_site_record *data = cfun->eh->call_site_data;
3416 int used = cfun->eh->call_site_data_used;
3417 int size = cfun->eh->call_site_data_size;
3419 if (used >= size)
3421 size = (size ? size * 2 : 64);
3422 data = ggc_realloc (data, sizeof (*data) * size);
3423 cfun->eh->call_site_data = data;
3424 cfun->eh->call_site_data_size = size;
3427 data[used].landing_pad = landing_pad;
3428 data[used].action = action;
3430 cfun->eh->call_site_data_used = used + 1;
3432 return used + call_site_base;
3435 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3436 The new note numbers will not refer to region numbers, but
3437 instead to call site entries. */
3439 void
3440 convert_to_eh_region_ranges (void)
3442 rtx insn, iter, note;
3443 htab_t ar_hash;
3444 int last_action = -3;
3445 rtx last_action_insn = NULL_RTX;
3446 rtx last_landing_pad = NULL_RTX;
3447 rtx first_no_action_insn = NULL_RTX;
3448 int call_site = 0;
3450 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3451 return;
3453 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3455 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3457 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3458 if (INSN_P (iter))
3460 struct eh_region *region;
3461 int this_action;
3462 rtx this_landing_pad;
3464 insn = iter;
3465 if (GET_CODE (insn) == INSN
3466 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3467 insn = XVECEXP (PATTERN (insn), 0, 0);
3469 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3470 if (!note)
3472 if (! (GET_CODE (insn) == CALL_INSN
3473 || (flag_non_call_exceptions
3474 && may_trap_p (PATTERN (insn)))))
3475 continue;
3476 this_action = -1;
3477 region = NULL;
3479 else
3481 if (INTVAL (XEXP (note, 0)) <= 0)
3482 continue;
3483 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3484 this_action = collect_one_action_chain (ar_hash, region);
3487 /* Existence of catch handlers, or must-not-throw regions
3488 implies that an lsda is needed (even if empty). */
3489 if (this_action != -1)
3490 cfun->uses_eh_lsda = 1;
3492 /* Delay creation of region notes for no-action regions
3493 until we're sure that an lsda will be required. */
3494 else if (last_action == -3)
3496 first_no_action_insn = iter;
3497 last_action = -1;
3500 /* Cleanups and handlers may share action chains but not
3501 landing pads. Collect the landing pad for this region. */
3502 if (this_action >= 0)
3504 struct eh_region *o;
3505 for (o = region; ! o->landing_pad ; o = o->outer)
3506 continue;
3507 this_landing_pad = o->landing_pad;
3509 else
3510 this_landing_pad = NULL_RTX;
3512 /* Differing actions or landing pads implies a change in call-site
3513 info, which implies some EH_REGION note should be emitted. */
3514 if (last_action != this_action
3515 || last_landing_pad != this_landing_pad)
3517 /* If we'd not seen a previous action (-3) or the previous
3518 action was must-not-throw (-2), then we do not need an
3519 end note. */
3520 if (last_action >= -1)
3522 /* If we delayed the creation of the begin, do it now. */
3523 if (first_no_action_insn)
3525 call_site = add_call_site (NULL_RTX, 0);
3526 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3527 first_no_action_insn);
3528 NOTE_EH_HANDLER (note) = call_site;
3529 first_no_action_insn = NULL_RTX;
3532 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3533 last_action_insn);
3534 NOTE_EH_HANDLER (note) = call_site;
3537 /* If the new action is must-not-throw, then no region notes
3538 are created. */
3539 if (this_action >= -1)
3541 call_site = add_call_site (this_landing_pad,
3542 this_action < 0 ? 0 : this_action);
3543 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3544 NOTE_EH_HANDLER (note) = call_site;
3547 last_action = this_action;
3548 last_landing_pad = this_landing_pad;
3550 last_action_insn = iter;
3553 if (last_action >= -1 && ! first_no_action_insn)
3555 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3556 NOTE_EH_HANDLER (note) = call_site;
3559 htab_delete (ar_hash);
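/* Illustrative before/after: a run of insns all carrying a
   REG_EH_REGION note for a region with a real action chain becomes

     NOTE_INSN_EH_REGION_BEG   with NOTE_EH_HANDLER == its call site
       ... the insns ...
     NOTE_INSN_EH_REGION_END   with NOTE_EH_HANDLER == its call site

   while an adjacent must-not-throw run (action -2) produces no notes
   and no call-site entry at all.  */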
3563 static void
3564 push_uleb128 (varray_type *data_area, unsigned int value)
3566 do
3568 unsigned char byte = value & 0x7f;
3569 value >>= 7;
3570 if (value)
3571 byte |= 0x80;
3572 VARRAY_PUSH_UCHAR (*data_area, byte);
3574 while (value);
3577 static void
3578 push_sleb128 (varray_type *data_area, int value)
3580 unsigned char byte;
3581 int more;
3583 do
3585 byte = value & 0x7f;
3586 value >>= 7;
3587 more = ! ((value == 0 && (byte & 0x40) == 0)
3588 || (value == -1 && (byte & 0x40) != 0));
3589 if (more)
3590 byte |= 0x80;
3591 VARRAY_PUSH_UCHAR (*data_area, byte);
3593 while (more);
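/* Spot checks for the two encoders above, using the standard DWARF
   examples: push_uleb128 of 624485 appends 0xe5 0x8e 0x26, and
   push_sleb128 of -2 appends the single byte 0x7e.  The sketch below,
   compiled out and purely illustrative, shows the matching uleb128
   decode loop a consumer of this data would use.  */
#if 0
static unsigned int
read_uleb128_sketch (const unsigned char *p, const unsigned char **endp)
{
  unsigned int result = 0;
  int shift = 0;
  unsigned char byte;

  do
    {
      byte = *p++;
      /* Accumulate 7 payload bits per byte, least significant first.  */
      result |= (unsigned int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);	/* High bit set means more bytes follow.  */

  *endp = p;
  return result;
}
#endif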
3597 #ifndef HAVE_AS_LEB128
3598 static int
3599 dw2_size_of_call_site_table (void)
3601 int n = cfun->eh->call_site_data_used;
3602 int size = n * (4 + 4 + 4);
3603 int i;
3605 for (i = 0; i < n; ++i)
3607 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3608 size += size_of_uleb128 (cs->action);
3611 return size;
3614 static int
3615 sjlj_size_of_call_site_table (void)
3617 int n = cfun->eh->call_site_data_used;
3618 int size = 0;
3619 int i;
3621 for (i = 0; i < n; ++i)
3623 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3624 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3625 size += size_of_uleb128 (cs->action);
3628 return size;
3630 #endif
3632 static void
3633 dw2_output_call_site_table (void)
3635 const char *const function_start_lab
3636 = IDENTIFIER_POINTER (current_function_func_begin_label);
3637 int n = cfun->eh->call_site_data_used;
3638 int i;
3640 for (i = 0; i < n; ++i)
3642 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3643 char reg_start_lab[32];
3644 char reg_end_lab[32];
3645 char landing_pad_lab[32];
3647 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3648 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3650 if (cs->landing_pad)
3651 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3652 CODE_LABEL_NUMBER (cs->landing_pad));
3654 /* ??? Perhaps use insn length scaling if the assembler supports
3655 generic arithmetic. */
3656 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3657 data4 if the function is small enough. */
3658 #ifdef HAVE_AS_LEB128
3659 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3660 "region %d start", i);
3661 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3662 "length");
3663 if (cs->landing_pad)
3664 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3665 "landing pad");
3666 else
3667 dw2_asm_output_data_uleb128 (0, "landing pad");
3668 #else
3669 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3670 "region %d start", i);
3671 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3672 if (cs->landing_pad)
3673 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3674 "landing pad");
3675 else
3676 dw2_asm_output_data (4, 0, "landing pad");
3677 #endif
3678 dw2_asm_output_data_uleb128 (cs->action, "action");
3681 call_site_base += n;
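/* With HAVE_AS_LEB128 the loop above emits assembly along these lines
   (illustrative; the exact label names depend on the target):

     .uleb128 .LEHB0-.LFB1    region 0 start
     .uleb128 .LEHE0-.LEHB0   length
     .uleb128 .L7-.LFB1       landing pad
     .uleb128 0x1             action
*/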
3684 static void
3685 sjlj_output_call_site_table (void)
3687 int n = cfun->eh->call_site_data_used;
3688 int i;
3690 for (i = 0; i < n; ++i)
3692 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3694 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3695 "region %d landing pad", i);
3696 dw2_asm_output_data_uleb128 (cs->action, "action");
3699 call_site_base += n;
3702 /* Tell the assembler to switch to the section for the exception handling
3703 table. */
3705 void
3706 default_exception_section (void)
3708 if (targetm.have_named_sections)
3710 int flags;
3711 #ifdef HAVE_LD_RO_RW_SECTION_MIXING
3712 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3714 flags = (! flag_pic
3715 || ((tt_format & 0x70) != DW_EH_PE_absptr
3716 && (tt_format & 0x70) != DW_EH_PE_aligned))
3717 ? 0 : SECTION_WRITE;
3718 #else
3719 flags = SECTION_WRITE;
3720 #endif
3721 named_section_flags (".gcc_except_table", flags);
3723 else if (flag_pic)
3724 data_section ();
3725 else
3726 readonly_data_section ();
3729 void
3730 output_function_exception_table (void)
3732 int tt_format, cs_format, lp_format, i, n;
3733 #ifdef HAVE_AS_LEB128
3734 char ttype_label[32];
3735 char cs_after_size_label[32];
3736 char cs_end_label[32];
3737 #else
3738 int call_site_len;
3739 #endif
3740 int have_tt_data;
3741 int tt_format_size = 0;
3743 /* Not all functions need anything. */
3744 if (! cfun->uses_eh_lsda)
3745 return;
3747 #ifdef IA64_UNWIND_INFO
3748 fputs ("\t.personality\t", asm_out_file);
3749 output_addr_const (asm_out_file, eh_personality_libfunc);
3750 fputs ("\n\t.handlerdata\n", asm_out_file);
3751 /* Note that varasm still thinks we're in the function's code section.
3752 The ".endp" directive that will immediately follow will take us back. */
3753 #else
3754 targetm.asm_out.exception_section ();
3755 #endif
3757 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3758 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3760 /* Indicate the format of the @TType entries. */
3761 if (! have_tt_data)
3762 tt_format = DW_EH_PE_omit;
3763 else
3765 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3766 #ifdef HAVE_AS_LEB128
3767 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3768 current_function_funcdef_no);
3769 #endif
3770 tt_format_size = size_of_encoded_value (tt_format);
3772 assemble_align (tt_format_size * BITS_PER_UNIT);
3775 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3776 current_function_funcdef_no);
3778 /* The LSDA header. */
3780 /* Indicate the format of the landing pad start pointer. An omitted
3781 field implies @LPStart == @Start. */
3782 /* Currently we always put @LPStart == @Start. This field would
3783 be most useful in moving the landing pads completely out of
3784 line to another section, but it could also be used to minimize
3785 the size of uleb128 landing pad offsets. */
3786 lp_format = DW_EH_PE_omit;
3787 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3788 eh_data_format_name (lp_format));
3790 /* @LPStart pointer would go here. */
3792 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3793 eh_data_format_name (tt_format));
3795 #ifndef HAVE_AS_LEB128
3796 if (USING_SJLJ_EXCEPTIONS)
3797 call_site_len = sjlj_size_of_call_site_table ();
3798 else
3799 call_site_len = dw2_size_of_call_site_table ();
3800 #endif
3802 /* A pc-relative 4-byte displacement to the @TType data. */
3803 if (have_tt_data)
3805 #ifdef HAVE_AS_LEB128
3806 char ttype_after_disp_label[32];
3807 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3808 current_function_funcdef_no);
3809 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3810 "@TType base offset");
3811 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3812 #else
3813 /* Ug. Alignment queers things. */
3814 unsigned int before_disp, after_disp, last_disp, disp;
3816 before_disp = 1 + 1;
3817 after_disp = (1 + size_of_uleb128 (call_site_len)
3818 + call_site_len
3819 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3820 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3821 * tt_format_size));
3823 disp = after_disp;
3824 do
3826 unsigned int disp_size, pad;
3828 last_disp = disp;
3829 disp_size = size_of_uleb128 (disp);
3830 pad = before_disp + disp_size + after_disp;
3831 if (pad % tt_format_size)
3832 pad = tt_format_size - (pad % tt_format_size);
3833 else
3834 pad = 0;
3835 disp = after_disp + pad;
3837 while (disp != last_disp);
3839 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3840 #endif
3843 /* Indicate the format of the call-site offsets. */
3844 #ifdef HAVE_AS_LEB128
3845 cs_format = DW_EH_PE_uleb128;
3846 #else
3847 cs_format = DW_EH_PE_udata4;
3848 #endif
3849 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3850 eh_data_format_name (cs_format));
3852 #ifdef HAVE_AS_LEB128
3853 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3854 current_function_funcdef_no);
3855 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3856 current_function_funcdef_no);
3857 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3858 "Call-site table length");
3859 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3860 if (USING_SJLJ_EXCEPTIONS)
3861 sjlj_output_call_site_table ();
3862 else
3863 dw2_output_call_site_table ();
3864 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3865 #else
3866 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3867 if (USING_SJLJ_EXCEPTIONS)
3868 sjlj_output_call_site_table ();
3869 else
3870 dw2_output_call_site_table ();
3871 #endif
3873 /* ??? Decode and interpret the data for flag_debug_asm. */
3874 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3875 for (i = 0; i < n; ++i)
3876 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3877 (i ? NULL : "Action record table"));
3879 if (have_tt_data)
3880 assemble_align (tt_format_size * BITS_PER_UNIT);
3882 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3883 while (i-- > 0)
3885 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3886 rtx value;
3888 if (type == NULL_TREE)
3889 value = const0_rtx;
3890 else
3892 struct cgraph_varpool_node *node;
3894 type = lookup_type_for_runtime (type);
3895 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3897 /* Let cgraph know that the rtti decl is used. Not all of the
3898 paths below go through assemble_integer, which would take
3899 care of this for us. */
3900 if (TREE_CODE (type) == ADDR_EXPR)
3902 type = TREE_OPERAND (type, 0);
3903 node = cgraph_varpool_node (type);
3904 if (node)
3905 cgraph_varpool_mark_needed_node (node);
3907 else if (TREE_CODE (type) != INTEGER_CST)
3908 abort ();
3911 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3912 assemble_integer (value, tt_format_size,
3913 tt_format_size * BITS_PER_UNIT, 1);
3914 else
3915 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3918 #ifdef HAVE_AS_LEB128
3919 if (have_tt_data)
3920 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3921 #endif
3923 /* ??? Decode and interpret the data for flag_debug_asm. */
3924 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3925 for (i = 0; i < n; ++i)
3926 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3927 (i ? NULL : "Exception specification table"));
3929 function_section (current_function_decl);
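/* Putting the pieces together, the emitted LSDA has this shape for a
   function with @TType data (illustrative; 0xff is DW_EH_PE_omit,
   0x01 is DW_EH_PE_uleb128, and the @TType format byte varies by
   target, e.g. 0x9b for indirect pc-relative sdata4):

     .LLSDA42:
       .byte 0xff                        @LPStart format (omit)
       .byte 0x9b                        @TType format
       .uleb128 .LLSDATT42-.LLSDATTD42   @TType base offset
       .byte 0x01                        call-site format (uleb128)
       .uleb128 length of call-site table
       ... call-site records ...
       ... action record table ...
       ... type (@TType) table ...  */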
3932 #include "gt-except.h"