2012-05-01 François Dumont <fdumont@gcc.gnu.org>
[official-gcc.git] / gcc / except.c
blob254dd8c32aefb07613e24424614eefdb2bb2d223
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011, 2012 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* An exception is an event that can be "thrown" from within a
25 function. This event can then be "caught" by the callers of
26 the function.
28 The representation of exceptions changes several times during
29 the compilation process:
31 In the beginning, in the front end, we have the GENERIC trees
32 TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
33 CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
35 During initial gimplification (gimplify.c) these are lowered
36 to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
37 The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
38 into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
39 conversion.
41 During pass_lower_eh (tree-eh.c) we record the nested structure
42 of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
43 We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
44 regions at this time. We can then flatten the statements within
45 the TRY nodes to straight-line code. Statements that had been within
46 TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
47 so that we may remember what action is supposed to be taken if
48 a given statement does throw. During this lowering process,
49 we create an EH_LANDING_PAD node for each EH_REGION that has
50 some code within the function that needs to be executed if a
51 throw does happen. We also create RESX statements that are
52 used to transfer control from an inner EH_REGION to an outer
53 EH_REGION. We also create EH_DISPATCH statements as placeholders
54 for a runtime type comparison that should be made in order to
55 select the action to perform among different CATCH and EH_FILTER
56 regions.
58 During pass_lower_eh_dispatch (tree-eh.c), which is run after
59 all inlining is complete, we are able to run assign_filter_values,
60 which allows us to map the set of types manipulated by all of the
61 CATCH and EH_FILTER regions to a set of integers. This set of integers
62 will be how the exception runtime communicates with the code generated
63 within the function. We then expand the GIMPLE_EH_DISPATCH statements
64 to a switch or conditional branches that use the argument provided by
65 the runtime (__builtin_eh_filter) and the set of integers we computed
66 in assign_filter_values.
68 During pass_lower_resx (tree-eh.c), which is run near the end
69 of optimization, we expand RESX statements. If the eh region
70 that is outer to the RESX statement is a MUST_NOT_THROW, then
71 the RESX expands to some form of abort statement. If the eh
72 region that is outer to the RESX statement is within the current
73 function, then the RESX expands to a bookkeeping call
74 (__builtin_eh_copy_values) and a goto. Otherwise, the next
75 handler for the exception must be within a function somewhere
76 up the call chain, so we call back into the exception runtime
77 (__builtin_unwind_resume).
79 During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
80 that create an rtl to eh_region mapping that corresponds to the
81 gimple to eh_region mapping that had been recorded in the
82 THROW_STMT_TABLE.
84 Then, via finish_eh_generation, we generate the real landing pads
85 to which the runtime will actually transfer control. These new
86 landing pads perform whatever bookkeeping is needed by the target
87 backend in order to resume execution within the current function.
88 Each of these new landing pads falls through into the post_landing_pad
89 label which had been used within the CFG up to this point. All
90 exception edges within the CFG are redirected to the new landing pads.
91 If the target uses setjmp to implement exceptions, the various extra
92 calls into the runtime to register and unregister the current stack
93 frame are emitted at this time.
95 During pass_convert_to_eh_region_ranges (except.c), we transform
96 the REG_EH_REGION notes attached to individual insns into
97 non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
98 and NOTE_INSN_EH_REGION_END. Each insn within such ranges has the
99 same associated action within the exception region tree, meaning
100 that (1) the exception is caught by the same landing pad within the
101 current function, (2) the exception is blocked by the runtime with
102 a MUST_NOT_THROW region, or (3) the exception is not handled at all
103 within the current function.
105 Finally, during assembly generation, we call
106 output_function_exception_table (except.c) to emit the tables with
107 which the exception runtime can determine if a given stack frame
108 handles a given exception, and if so what filter value to provide
109 to the function when the non-local control transfer is effected.
110 If the target uses dwarf2 unwinding to implement exceptions, then
111 output_call_frame_info (dwarf2out.c) emits the required unwind data. */
114 #include "config.h"
115 #include "system.h"
116 #include "coretypes.h"
117 #include "tm.h"
118 #include "rtl.h"
119 #include "tree.h"
120 #include "flags.h"
121 #include "function.h"
122 #include "expr.h"
123 #include "libfuncs.h"
124 #include "insn-config.h"
125 #include "except.h"
126 #include "integrate.h"
127 #include "hard-reg-set.h"
128 #include "basic-block.h"
129 #include "output.h"
130 #include "dwarf2asm.h"
131 #include "dwarf2out.h"
132 #include "dwarf2.h"
133 #include "toplev.h"
134 #include "hashtab.h"
135 #include "intl.h"
136 #include "ggc.h"
137 #include "tm_p.h"
138 #include "target.h"
139 #include "common/common-target.h"
140 #include "langhooks.h"
141 #include "cgraph.h"
142 #include "diagnostic.h"
143 #include "tree-pretty-print.h"
144 #include "tree-pass.h"
145 #include "timevar.h"
146 #include "tree-flow.h"
147 #include "cfgloop.h"
149 /* Provide defaults for stuff that may not be defined when using
150 sjlj exceptions. */
151 #ifndef EH_RETURN_DATA_REGNO
152 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
153 #endif
155 static GTY(()) int call_site_base;
156 static GTY ((param_is (union tree_node)))
157 htab_t type_to_runtime_map;
159 /* Describe the SjLj_Function_Context structure. */
160 static GTY(()) tree sjlj_fc_type_node;
161 static int sjlj_fc_call_site_ofs;
162 static int sjlj_fc_data_ofs;
163 static int sjlj_fc_personality_ofs;
164 static int sjlj_fc_lsda_ofs;
165 static int sjlj_fc_jbuf_ofs;
168 struct GTY(()) call_site_record_d
170 rtx landing_pad;
171 int action;
174 static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
175 eh_landing_pad *);
177 static int t2r_eq (const void *, const void *);
178 static hashval_t t2r_hash (const void *);
180 static int ttypes_filter_eq (const void *, const void *);
181 static hashval_t ttypes_filter_hash (const void *);
182 static int ehspec_filter_eq (const void *, const void *);
183 static hashval_t ehspec_filter_hash (const void *);
184 static int add_ttypes_entry (htab_t, tree);
185 static int add_ehspec_entry (htab_t, htab_t, tree);
186 static void dw2_build_landing_pads (void);
188 static int action_record_eq (const void *, const void *);
189 static hashval_t action_record_hash (const void *);
190 static int add_action_record (htab_t, int, int);
191 static int collect_one_action_chain (htab_t, eh_region);
192 static int add_call_site (rtx, int, int);
194 static void push_uleb128 (VEC (uchar, gc) **, unsigned int);
195 static void push_sleb128 (VEC (uchar, gc) **, int);
196 #ifndef HAVE_AS_LEB128
197 static int dw2_size_of_call_site_table (int);
198 static int sjlj_size_of_call_site_table (void);
199 #endif
200 static void dw2_output_call_site_table (int, int);
201 static void sjlj_output_call_site_table (void);
/* One-time initialization of global exception-handling state.  A no-op
   unless -fexceptions is enabled.  For SjLj targets this also builds
   the tree type describing struct SjLj_Function_Context and caches the
   byte offsets of its fields for later RTL generation.  */

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  /* Lazy map from source types to runtime type descriptors; populated
     by add_type_for_runtime.  */
  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      /* __prev: link to the caller's context on the chain.  */
      f_prev = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* __call_site: index recorded before each throwing insn.  */
      f_cs = build_decl (BUILTINS_LOCATION,
                         FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* __data: array of 4 unwind words of exception data.  */
      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* __personality: the personality routine for this frame.  */
      f_per = build_decl (BUILTINS_LOCATION,
                          FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* __lsda: pointer to the language-specific data area.  */
      f_lsda = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

      /* __jbuf: the setjmp buffer; its size depends on whether the
         builtin setjmp or the runtime's jmp_buf is used.  */
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems, a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = size_int (5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields in declaration order and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
303 void
304 init_eh_for_function (void)
306 cfun->eh = ggc_alloc_cleared_eh_status ();
308 /* Make sure zero'th entries are used. */
309 VEC_safe_push (eh_region, gc, cfun->eh->region_array, NULL);
310 VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, NULL);
313 /* Routines to generate the exception tree somewhat directly.
314 These are used from tree-eh.c when processing exception related
315 nodes during tree optimization. */
317 static eh_region
318 gen_eh_region (enum eh_region_type type, eh_region outer)
320 eh_region new_eh;
322 /* Insert a new blank region as a leaf in the tree. */
323 new_eh = ggc_alloc_cleared_eh_region_d ();
324 new_eh->type = type;
325 new_eh->outer = outer;
326 if (outer)
328 new_eh->next_peer = outer->inner;
329 outer->inner = new_eh;
331 else
333 new_eh->next_peer = cfun->eh->region_tree;
334 cfun->eh->region_tree = new_eh;
337 new_eh->index = VEC_length (eh_region, cfun->eh->region_array);
338 VEC_safe_push (eh_region, gc, cfun->eh->region_array, new_eh);
340 /* Copy the language's notion of whether to use __cxa_end_cleanup. */
341 if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
342 new_eh->use_cxa_end_cleanup = true;
344 return new_eh;
347 eh_region
348 gen_eh_region_cleanup (eh_region outer)
350 return gen_eh_region (ERT_CLEANUP, outer);
353 eh_region
354 gen_eh_region_try (eh_region outer)
356 return gen_eh_region (ERT_TRY, outer);
359 eh_catch
360 gen_eh_region_catch (eh_region t, tree type_or_list)
362 eh_catch c, l;
363 tree type_list, type_node;
365 gcc_assert (t->type == ERT_TRY);
367 /* Ensure to always end up with a type list to normalize further
368 processing, then register each type against the runtime types map. */
369 type_list = type_or_list;
370 if (type_or_list)
372 if (TREE_CODE (type_or_list) != TREE_LIST)
373 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
375 type_node = type_list;
376 for (; type_node; type_node = TREE_CHAIN (type_node))
377 add_type_for_runtime (TREE_VALUE (type_node));
380 c = ggc_alloc_cleared_eh_catch_d ();
381 c->type_list = type_list;
382 l = t->u.eh_try.last_catch;
383 c->prev_catch = l;
384 if (l)
385 l->next_catch = c;
386 else
387 t->u.eh_try.first_catch = c;
388 t->u.eh_try.last_catch = c;
390 return c;
393 eh_region
394 gen_eh_region_allowed (eh_region outer, tree allowed)
396 eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
397 region->u.allowed.type_list = allowed;
399 for (; allowed ; allowed = TREE_CHAIN (allowed))
400 add_type_for_runtime (TREE_VALUE (allowed));
402 return region;
405 eh_region
406 gen_eh_region_must_not_throw (eh_region outer)
408 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
411 eh_landing_pad
412 gen_eh_landing_pad (eh_region region)
414 eh_landing_pad lp = ggc_alloc_cleared_eh_landing_pad_d ();
416 lp->next_lp = region->landing_pads;
417 lp->region = region;
418 lp->index = VEC_length (eh_landing_pad, cfun->eh->lp_array);
419 region->landing_pads = lp;
421 VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, lp);
423 return lp;
426 eh_region
427 get_eh_region_from_number_fn (struct function *ifun, int i)
429 return VEC_index (eh_region, ifun->eh->region_array, i);
432 eh_region
433 get_eh_region_from_number (int i)
435 return get_eh_region_from_number_fn (cfun, i);
438 eh_landing_pad
439 get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
441 return VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
444 eh_landing_pad
445 get_eh_landing_pad_from_number (int i)
447 return get_eh_landing_pad_from_number_fn (cfun, i);
450 eh_region
451 get_eh_region_from_lp_number_fn (struct function *ifun, int i)
453 if (i < 0)
454 return VEC_index (eh_region, ifun->eh->region_array, -i);
455 else if (i == 0)
456 return NULL;
457 else
459 eh_landing_pad lp;
460 lp = VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
461 return lp->region;
465 eh_region
466 get_eh_region_from_lp_number (int i)
468 return get_eh_region_from_lp_number_fn (cfun, i);
471 /* Returns true if the current function has exception handling regions. */
473 bool
474 current_function_has_exception_handlers (void)
476 return cfun->eh->region_tree != NULL;
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  /* Callback used to remap labels from the source function.  */
  duplicate_eh_regions_map label_map;
  /* Opaque cookie passed through to LABEL_MAP.  */
  void *label_map_data;
  /* Map from old eh_region/eh_landing_pad to its copy.  */
  struct pointer_map_t *eh_map;
};
/* A subroutine of duplicate_eh_regions.  Copy region OLD_R — and,
   recursively, everything nested inside it — into the current
   function, rooting the copy under OUTER.  Old->new mappings for both
   regions and landing pads are recorded in DATA->EH_MAP; labels are
   remapped through DATA->LABEL_MAP.  */

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
                        eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;
  void **slot;

  new_r = gen_eh_region (old_r->type, outer);
  /* Each old region must be copied exactly once.  */
  slot = pointer_map_insert (data->eh_map, (void *)old_r);
  gcc_assert (*slot == NULL);
  *slot = (void *)new_r;

  /* Copy the kind-specific payload.  */
  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
        eh_catch oc, nc;
        for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
          {
            /* We should be doing all our region duplication before and
               during inlining, which is before filter lists are created.  */
            gcc_assert (oc->filter_list == NULL);
            nc = gen_eh_region_catch (new_r, oc->type_list);
            nc->label = data->label_map (oc->label, data->label_map_data);
          }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
        new_r->u.allowed.label
          = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
        new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw = old_r->u.must_not_throw;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
        continue;

      new_lp = gen_eh_landing_pad (new_r);
      slot = pointer_map_insert (data->eh_map, (void *)old_lp);
      gcc_assert (*slot == NULL);
      *slot = (void *)new_lp;

      new_lp->post_landing_pad
        = data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  /* Recurse: each child of OLD_R becomes a child of NEW_R.  */
  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}
/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  The caller owns (and must destroy) the map.  */

struct pointer_map_t *
duplicate_eh_regions (struct function *ifun,
                      eh_region copy_region, int outer_lp,
                      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = pointer_map_create ();

  /* OUTER_LP is a landing-pad number in the *current* function;
     resolve it to the region the copies will hang beneath.  */
  outer_region = get_eh_region_from_lp_number (outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
        duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return data.eh_map;
}
/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
                     eh_region region_b)
{
  sbitmap b_outer;

  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  /* Mark REGION_B and every ancestor of it in a bitmap, then walk up
     from REGION_A until we hit a marked region: that is the common
     enclosing region.  */
  b_outer = sbitmap_alloc (VEC_length (eh_region, ifun->eh->region_array));
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (TEST_BIT (b_outer, region_a->index))
        break;
      region_a = region_a->outer;
    }
  while (region_a);

  sbitmap_free (b_outer);
  /* NULL if the two regions share no ancestor at all.  */
  return region_a;
}
633 static int
634 t2r_eq (const void *pentry, const void *pdata)
636 const_tree const entry = (const_tree) pentry;
637 const_tree const data = (const_tree) pdata;
639 return TREE_PURPOSE (entry) == data;
642 static hashval_t
643 t2r_hash (const void *pentry)
645 const_tree const entry = (const_tree) pentry;
646 return TREE_HASH (TREE_PURPOSE (entry));
649 void
650 add_type_for_runtime (tree type)
652 tree *slot;
654 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
655 if (TREE_CODE (type) == NOP_EXPR)
656 return;
658 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
659 TREE_HASH (type), INSERT);
660 if (*slot == NULL)
662 tree runtime = lang_hooks.eh_runtime_type (type);
663 *slot = tree_cons (type, runtime, NULL_TREE);
667 tree
668 lookup_type_for_runtime (tree type)
670 tree *slot;
672 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
673 if (TREE_CODE (type) == NOP_EXPR)
674 return type;
676 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
677 TREE_HASH (type), NO_INSERT);
679 /* We should have always inserted the data earlier. */
680 return TREE_VALUE (*slot);
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  /* The caught type for a ttypes entry, or the specification TREE_LIST
     for an ehspec entry.  */
  tree t;
  /* Assigned filter value: 1-based table index for ttypes entries,
     -1-based byte index into the uleb128 buffer for ehspec entries
     (see add_ttypes_entry / add_ehspec_entry).  */
  int filter;
};
691 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
692 (a tree) for a @TTypes type node we are thinking about adding. */
694 static int
695 ttypes_filter_eq (const void *pentry, const void *pdata)
697 const struct ttypes_filter *const entry
698 = (const struct ttypes_filter *) pentry;
699 const_tree const data = (const_tree) pdata;
701 return entry->t == data;
704 static hashval_t
705 ttypes_filter_hash (const void *pentry)
707 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
708 return TREE_HASH (entry->t);
711 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
712 exception specification list we are thinking about adding. */
713 /* ??? Currently we use the type lists in the order given. Someone
714 should put these in some canonical order. */
716 static int
717 ehspec_filter_eq (const void *pentry, const void *pdata)
719 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
720 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
722 return type_list_equal (entry->t, data->t);
/* Hash function for exception specification lists.  Combines the
   hashes of all listed types with a rotate-and-add so that list
   order matters (see the ??? note on ehspec_filter_eq).  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    /* 5-bit left rotate of H, then add the next type's hash.  */
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* First time we see TYPE: assign it the next free filter and
         append it to the @TTypes table for later output.  */
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* Probe with a stack dummy so no allocation happens on a hit.  */
  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      /* The new entry starts at the current end of the buffer;
         ARM EABI stores types directly, others a uleb128 encoding.  */
      if (targetm.arm_eabi_unwinder)
        len = VEC_length (tree, cfun->eh->ehspec_data.arm_eabi);
      else
        len = VEC_length (uchar, cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi,
                           TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&cfun->eh->ehspec_data.other,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      /* Emit the terminating zero entry.  */
      if (targetm.arm_eabi_unwinder)
        VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
        VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, 0);
    }

  return n->filter;
}
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;
  eh_region r;
  eh_catch c;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    cfun->eh->ehspec_data.arm_eabi = VEC_alloc (tree, gc, 64);
  else
    cfun->eh->ehspec_data.other = VEC_alloc (uchar, gc, 64);

  /* Temporary hash tables to deduplicate filter assignments; freed
     below once the per-region filter lists are built.  */
  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  /* Slot 0 of the region array is unused; start at 1.  */
  for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
    {
      if (r == NULL)
        continue;

      switch (r->type)
        {
        case ERT_TRY:
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              /* Whatever type_list is (NULL or true list), we build a list
                 of filters for the region.  */
              c->filter_list = NULL_TREE;

              if (c->type_list != NULL)
                {
                  /* Get a filter value for each of the types caught and store
                     them in the region's dedicated list.  */
                  tree tp_node = c->type_list;

                  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
                    {
                      int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                      tree flt_node = build_int_cst (integer_type_node, flt);

                      c->filter_list
                        = tree_cons (NULL_TREE, flt_node, c->filter_list);
                    }
                }
              else
                {
                  /* Get a filter value for the NULL list also since it
                     will need an action record anyway.  */
                  int flt = add_ttypes_entry (ttypes, NULL);
                  tree flt_node = build_int_cst (integer_type_node, flt);

                  c->filter_list
                    = tree_cons (NULL_TREE, flt_node, NULL);
                }
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
/* Emit SEQ into basic block just before INSN (that is assumed to be
   first instruction of some existing BB and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb, prev_bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);

  last = emit_insn_before (seq, insn);
  /* Don't include a trailing barrier inside the new block.  */
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  prev_bb = BLOCK_FOR_INSN (insn)->prev_bb;
  bb = create_basic_block (seq, last, prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
  /* Emit the target's receiver pattern if it has one; otherwise fall
     back to the nonlocal-goto receiver, or emit nothing at all.  The
     empty braces keep the dangling else chain well-formed when both
     patterns exist.  */
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  /* Copy the exception pointer and filter value out of the EH return
     data registers into the pseudos the region expects, when the
     region actually uses them.  */
  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
                    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
                    gen_rtx_REG (targetm.eh_return_filter_mode (),
                                 EH_RETURN_DATA_REGNO (1)));
}
/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    {
      basic_block bb;
      rtx seq;
      edge e;

      /* Skip deleted entries and pads no code ever reaches.  */
      if (lp == NULL || lp->post_landing_pad == NULL)
        continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      /* Put the pad in its own block, falling through to the
         post-landing-pad label that the CFG already uses.  */
      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
}
/* Indexed by landing-pad index: the call-site value assigned to that
   landing pad by sjlj_assign_call_site_values.  */
static VEC (int, heap) *sjlj_lp_call_site_index;
/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  Returns the number of dispatch
   indices handed out.  */

static int
sjlj_assign_call_site_values (void)
{
  htab_t ar_hash;
  int i, disp_index;
  eh_landing_pad lp;

  crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
  /* Temporary table deduplicating action records; freed below.  */
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        int action, call_site;

        /* First: build the action table.  */
        action = collect_one_action_chain (ar_hash, lp->region);

        /* Next: assign call-site values.  If dwarf2 terms, this would be
           the region number assigned by convert_to_eh_region_ranges, but
           handles no-action and must-not-throw differently.  */
        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          call_site = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          call_site = -1;
        /* Otherwise, look it up in the table.  */
        else
          call_site = add_call_site (GEN_INT (disp_index), action, 0);
        VEC_replace (int, sjlj_lp_call_site_index, i, call_site);

        disp_index++;
      }

  htab_delete (ar_hash);

  return disp_index;
}
/* Emit code to record the current call-site index before every
   insn that can throw, storing it into the __call_site field of the
   function's SjLj_Function_Context.  Redundant stores within an
   extended basic block are elided.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      if (! INSN_P (insn))
        continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
        continue;
      if (lp)
        this_call_site = VEC_index (int, sjlj_lp_call_site_index, lp->index);
      else if (r == NULL)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          this_call_site = -1;
        }
      else
        {
          gcc_assert (r->type == ERT_MUST_NOT_THROW);
          this_call_site = 0;
        }

      if (this_call_site != -1)
        crtl->uses_eh_lsda = 1;

      /* Skip the store if the value is unchanged since the last one
         emitted in this extended basic block.  */
      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from it's argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context: store the personality routine
   and LSDA address into the context, set up the jump buffer (when a
   dispatch label exists), and register the context with the runtime.
   The emitted sequence is placed at function entry.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      /* Point the context at this function's LSDA label.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x, last;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				   TYPE_MODE (integer_type_node), 1,
				   plus_constant (XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label);
      last = get_last_insn ();
      if (JUMP_P (last) && any_condjump_p (last))
	{
	  /* The branch to the dispatcher (the "longjmp taken" path) is
	     very unlikely; annotate it as 1% probable.  */
	  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	  add_reg_note (last, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE / 100));
	}
#else
      expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs),
				   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
1203 /* Call back from expand_function_end to know where we should put
1204 the call to unwind_sjlj_unregister_libfunc if needed. */
1206 void
1207 sjlj_emit_function_exit_after (rtx after)
1209 crtl->eh.sjlj_exit_after = after;
1212 static void
1213 sjlj_emit_function_exit (void)
1215 rtx seq, insn;
1217 start_sequence ();
1219 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1220 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
1222 seq = get_insns ();
1223 end_sequence ();
1225 /* ??? Really this can be done in any block at loop level 0 that
1226 post-dominates all can_throw_internal instructions. This is
1227 the last possible moment. */
1229 insn = crtl->eh.sjlj_exit_after;
1230 if (LABEL_P (insn))
1231 insn = NEXT_INSN (insn);
1233 emit_insn_after (seq, insn);
/* Emit the SjLj dispatcher: the code at DISPATCH_LABEL that receives
   control after a longjmp, loads the exception pointer and filter out
   of the function context, and jumps to the landing pad selected by
   the recorded call-site index.  NUM_DISPATCH is the number of active
   landing pads (a switch is emitted only when it exceeds one).  */

static void
sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
{
  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, seq, fc, before, exc_ptr_reg, filter_reg;
  rtx first_reachable_label;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  gimple switch_stmt;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_EXPR_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  switch_stmt = NULL;
  if (num_dispatch > 1)
    {
      tree disp;

      /* Dispatch on the call-site index stored in the context.  */
      mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      disp = make_tree (integer_type_node, mem);

      switch_stmt = gimple_build_switch_nlabels (num_dispatch, disp, NULL);
    }

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx seq2, label;

	start_sequence ();

	/* All landing pads share the single dispatcher entry point.  */
	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    gimple_switch_set_label (switch_stmt, disp_index, case_elt);
	    label = label_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	/* Copy the dispatcher's exc_ptr/filter into the pseudos the
	   region's handler code reads.  */
	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	e->count = bb->count;
	e->probability = REG_BR_PROB_BASE;
	if (current_loops)
	  {
	    struct loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ??? For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  {
		    loop->header = NULL;
		    loop->latch = NULL;
		  }
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      /* The trap is unreachable: every recorded call-site index has a
	 case label, but expand_case needs a default.  */
      expand_case (switch_stmt);
      expand_builtin_trap ();
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at function begin simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}
/* Top-level driver for setjmp/longjmp exception handling: assign
   call-site values, allocate the function-context stack slot, and emit
   the enter/dispatch/exit code.  If there are no landing pads but the
   function still needs a personality (must-not-throw regions), only
   register an empty LSDA.  */

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = VEC_length (eh_landing_pad, cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  VEC_safe_grow (int, heap, sjlj_lp_call_site_index, num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL_RTX);
      sjlj_emit_function_exit ();
    }

  VEC_free (int, heap, sjlj_lp_call_site_index);
}
/* After initial rtl generation, call back to finish generating
   exception support code.  Builds the landing pads (dwarf2 or SjLj
   flavor), commits any pending edge insertions, and redirects each EH
   edge from the post-landing-pad block to the real landing pad.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB (bb)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }
}
1509 /* This section handles removing dead code for flow. */
1511 void
1512 remove_eh_landing_pad (eh_landing_pad lp)
1514 eh_landing_pad *pp;
1516 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1517 continue;
1518 *pp = lp->next_lp;
1520 if (lp->post_landing_pad)
1521 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1522 VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
/* Splice REGION from the region tree: its children are re-parented to
   REGION's parent, its landing pads are deregistered, and its slot in
   the region array is cleared.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;
  eh_landing_pad lp;

  /* Deregister every landing pad belonging to REGION.  */
  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
    }

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  /* Find the peer link that points at REGION.  */
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  if (region->inner)
    {
      /* Splice REGION's children into its place, fixing up each
	 child's outer pointer; PP ends at the last child's peer link.  */
      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  *pp = region->next_peer;

  VEC_replace (eh_region, cfun->eh->region_array, region->index, NULL);
}
1563 /* Invokes CALLBACK for every exception handler landing pad label.
1564 Only used by reload hackery; should not be used by new code. */
1566 void
1567 for_each_eh_label (void (*callback) (rtx))
1569 eh_landing_pad lp;
1570 int i;
1572 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
1574 if (lp)
1576 rtx lab = lp->landing_pad;
1577 if (lab && LABEL_P (lab))
1578 (*callback) (lab);
/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

  ??? This difference probably ought to be avoided.  We could stand
  to record nothrow for arbitrary gimple statements, and so avoid
  some moderately complex lookups in stmt_could_throw_p.  Perhaps
  NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
  no-nonlocal-goto property should be recorded elsewhere as a bit
  on the call_insn directly.  Perhaps we should make more use of
  attaching the trees to call_insns (reachable via symbol_ref in
  direct call cases) and just pull the data out of the trees.  */

void
make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
{
  rtx value;

  /* A nothrow call gets an explicit 0; otherwise record the landing
     pad number, or emit no note at all for "outside any EH region".  */
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}
1620 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1621 nor perform a non-local goto. Replace the region note if it
1622 already exists. */
1624 void
1625 make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
1627 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1628 rtx intmin = GEN_INT (INT_MIN);
1630 if (note != 0)
1631 XEXP (note, 0) = intmin;
1632 else
1633 add_reg_note (insn, REG_EH_REGION, intmin);
1636 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1637 to the contrary. */
1639 bool
1640 insn_could_throw_p (const_rtx insn)
1642 if (!flag_exceptions)
1643 return false;
1644 if (CALL_P (insn))
1645 return true;
1646 if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1647 return may_trap_p (PATTERN (insn));
1648 return false;
1651 /* Copy an REG_EH_REGION note to each insn that might throw beginning
1652 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1653 to look for a note, or the note itself. */
1655 void
1656 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
1658 rtx insn, note = note_or_insn;
1660 if (INSN_P (note_or_insn))
1662 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1663 if (note == NULL)
1664 return;
1666 note = XEXP (note, 0);
1668 for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1669 if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1670 && insn_could_throw_p (insn))
1671 add_reg_note (insn, REG_EH_REGION, note);
1674 /* Likewise, but iterate backward. */
1676 void
1677 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
1679 rtx insn, note = note_or_insn;
1681 if (INSN_P (note_or_insn))
1683 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1684 if (note == NULL)
1685 return;
1687 note = XEXP (note, 0);
1689 for (insn = last; insn != first; insn = PREV_INSN (insn))
1690 if (insn_could_throw_p (insn))
1691 add_reg_note (insn, REG_EH_REGION, note);
/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  On return *PR is the EH region (or NULL) and
   *PLP the landing pad (or NULL) governing INSN.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
			       eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  /* For a SEQUENCE (e.g. delay-slot group) the note is on the first
     member insn.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* No note: nothrow exactly when the insn could not throw.  */
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      /* Explicit nothrow marking (see make_reg_eh_region_note).  */
      ret = true;
      goto egress;
    }

  if (lp_nr < 0)
    /* Negative values denote a MUST_NOT_THROW region.  */
    r = VEC_index (eh_region, cfun->eh->region_array, -lp_nr);
  else
    {
      lp = VEC_index (eh_landing_pad, cfun->eh->lp_array, lp_nr);
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}
1743 /* Return the landing pad to which INSN may go, or NULL if it does not
1744 have a reachable landing pad within this function. */
1746 eh_landing_pad
1747 get_eh_landing_pad_from_rtx (const_rtx insn)
1749 eh_landing_pad lp;
1750 eh_region r;
1752 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1753 return lp;
1756 /* Return the region to which INSN may go, or NULL if it does not
1757 have a reachable region within this function. */
1759 eh_region
1760 get_eh_region_from_rtx (const_rtx insn)
1762 eh_landing_pad lp;
1763 eh_region r;
1765 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1766 return r;
1769 /* Return true if INSN throws and is caught by something in this function. */
1771 bool
1772 can_throw_internal (const_rtx insn)
1774 return get_eh_landing_pad_from_rtx (insn) != NULL;
/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx seq = PATTERN (insn);
      int i, n = XVECLEN (seq, 0);

      /* A sequence throws externally if any member does.  */
      for (i = 0; i < n; i++)
	if (can_throw_external (XVECEXP (seq, 0, i)))
	  return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}
/* Return true if INSN cannot throw at all.  */

bool
insn_nothrow_p (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  if (! INSN_P (insn))
    return true;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx seq = PATTERN (insn);
      int i, n = XVECLEN (seq, 0);

      /* A sequence is nothrow only if every member is.  */
      for (i = 0; i < n; i++)
	if (!insn_nothrow_p (XVECEXP (seq, 0, i)))
	  return false;

      return true;
    }

  /* The helper's return value is exactly the nothrow property.  */
  return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
}
1849 /* Return true if INSN can perform a non-local goto. */
1850 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1852 bool
1853 can_nonlocal_goto (const_rtx insn)
1855 if (nonlocal_goto_handler_labels && CALL_P (insn))
1857 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1858 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1859 return true;
1861 return false;
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  Scans every
   insn (including epilogue delay slots) for externally-visible throws;
   if none are found and the body is known to be the one callers will
   use, propagates nothrow to the cgraph node and its callers.  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  /* Also check insns queued for epilogue delay slots.  */
  for (insn = crtl->epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }
  if (crtl->nothrow
      && (cgraph_function_body_availability (cgraph_get_node
					     (current_function_decl))
	  >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_get_node (current_function_decl);
      struct cgraph_edge *e;
      /* Callers of a nothrow function cannot throw through this call.  */
      for (e = node->callers; e; e = e->next_caller)
	e->can_throw_external = false;
      cgraph_set_nothrow_flag (node, true);

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}
/* RTL pass descriptor for the "nothrow" pass: runs
   set_nothrow_function_flags unconditionally (no gate) and requests
   no TODO actions.  */
struct rtl_opt_pass pass_set_nothrow_function_flags =
{
 {
  RTL_PASS,
  "nothrow",                            /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
/* Various hooks for unwind library.  */

/* Expand the EH support builtin functions:
   __builtin_eh_pointer and __builtin_eh_filter.  */

/* Shared argument handling: REGION_NR_T is the constant region-number
   argument of one of the EH builtins; return the corresponding (still
   live) EH region.  */

static eh_region
expand_builtin_eh_common (tree region_nr_t)
{
  HOST_WIDE_INT region_nr;
  eh_region region;

  gcc_assert (host_integerp (region_nr_t, 0));
  region_nr = tree_low_cst (region_nr_t, 0);

  region = VEC_index (eh_region, cfun->eh->region_array, region_nr);

  /* ??? We shouldn't have been able to delete a eh region without
     deleting all the code that depended on it.  */
  gcc_assert (region != NULL);

  return region;
}
1975 /* Expand to the exc_ptr value from the given eh region. */
1978 expand_builtin_eh_pointer (tree exp)
1980 eh_region region
1981 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1982 if (region->exc_ptr_reg == NULL)
1983 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1984 return region->exc_ptr_reg;
1987 /* Expand to the filter value from the given eh region. */
1990 expand_builtin_eh_filter (tree exp)
1992 eh_region region
1993 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1994 if (region->filter_reg == NULL)
1995 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
1996 return region->filter_reg;
/* Copy the exc_ptr and filter values from one landing pad's registers
   to another.  This is used to inline the resx statement.  Always
   returns const0_rtx (the builtin has no useful value).  */

rtx
expand_builtin_eh_copy_values (tree exp)
{
  eh_region dst
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
  eh_region src
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
  enum machine_mode fmode = targetm.eh_return_filter_mode ();

  /* Materialize any pseudo that has not been created yet.  */
  if (dst->exc_ptr_reg == NULL)
    dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
  if (src->exc_ptr_reg == NULL)
    src->exc_ptr_reg = gen_reg_rtx (ptr_mode);

  if (dst->filter_reg == NULL)
    dst->filter_reg = gen_reg_rtx (fmode);
  if (src->filter_reg == NULL)
    src->filter_reg = gen_reg_rtx (fmode);

  emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
  emit_move_insn (dst->filter_reg, src->filter_reg);

  return const0_rtx;
}
2027 /* Do any necessary initialization to access arbitrary stack frames.
2028 On the SPARC, this means flushing the register windows. */
2030 void
2031 expand_builtin_unwind_init (void)
2033 /* Set this so all the registers get saved in our frame; we need to be
2034 able to copy the saved values for any registers from frames we unwind. */
2035 crtl->saves_all_registers = 1;
2037 #ifdef SETUP_FRAME_ADDRESSES
2038 SETUP_FRAME_ADDRESSES ();
2039 #endif
/* Map a non-negative number to an eh return data register number; expands
   to -1 if no return data register is associated with the input number.
   At least the inputs 0 and 1 must be mapped; the target may provide more.  */

rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering the
     debug/unwind info uses.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
      /* VOIDmode means a constant, which needs no conversion.  */
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2103 /* Given an actual address in addr_tree, do any necessary encoding
2104 and return the value to be stored in the return address register or
2105 stack slot so the epilogue will return to that address. */
2108 expand_builtin_frob_return_addr (tree addr_tree)
2110 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2112 addr = convert_memory_address (Pmode, addr);
2114 #ifdef RETURN_ADDR_OFFSET
2115 addr = force_reg (Pmode, addr);
2116 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2117 #endif
2119 return addr;
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  Records the stack adjustment and handler address
   in pseudos read by expand_eh_return, then jumps to the shared
   eh-return label.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  /* Reuse the pseudo across multiple __builtin_eh_return calls.  */
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  /* Likewise for the handler-address pseudo.  */
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
/* Expand __builtin_eh_return.  This exit path from the function loads up
   the eh return data registers, adjusts the stack, and branches to a
   given PC other than the normal return address.  */

void
expand_eh_return (void)
{
  rtx around_label;

  /* Nothing to do unless __builtin_eh_return was actually expanded.  */
  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* The normal return path skips over the eh-return code.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (crtl->eh.ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  The result is widened to
   the target's unwind word mode.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;	/* 1-based byte offset of this record in
		   crtl->eh.action_record_data.  */
  int filter;	/* Type filter value; 0 denotes a cleanup.  */
  int next;	/* 1-based index of the next record, or 0 for none.  */
};
2236 static int
2237 action_record_eq (const void *pentry, const void *pdata)
2239 const struct action_record *entry = (const struct action_record *) pentry;
2240 const struct action_record *data = (const struct action_record *) pdata;
2241 return entry->filter == data->filter && entry->next == data->next;
2244 static hashval_t
2245 action_record_hash (const void *pentry)
2247 const struct action_record *entry = (const struct action_record *) pentry;
2248 return entry->next * 1009 + entry->filter;
2251 static int
2252 add_action_record (htab_t ar_hash, int filter, int next)
2254 struct action_record **slot, *new_ar, tmp;
2256 tmp.filter = filter;
2257 tmp.next = next;
2258 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
2260 if ((new_ar = *slot) == NULL)
2262 new_ar = XNEW (struct action_record);
2263 new_ar->offset = VEC_length (uchar, crtl->eh.action_record_data) + 1;
2264 new_ar->filter = filter;
2265 new_ar->next = next;
2266 *slot = new_ar;
2268 /* The filter value goes in untouched. The link to the next
2269 record is a "self-relative" byte offset, or zero to indicate
2270 that there is no next record. So convert the absolute 1 based
2271 indices we've been carrying around into a displacement. */
2273 push_sleb128 (&crtl->eh.action_record_data, filter);
2274 if (next)
2275 next -= VEC_length (uchar, crtl->eh.action_record_data) + 1;
2276 push_sleb128 (&crtl->eh.action_record_data, next);
2279 return new_ar->offset;
/* Compute the chain of action records for REGION and its outer regions,
   interning records via AR_HASH.  Returns a positive 1-based action
   record index, or one of the magic values documented above:
   0 (cleanup only), -1 (no action), -2 (must-not-throw).  */
2282 static int
2283 collect_one_action_chain (htab_t ar_hash, eh_region region)
2285 int next;
2287 /* If we've reached the top of the region chain, then we have
2288 no actions, and require no landing pad. */
2289 if (region == NULL)
2290 return -1;
2292 switch (region->type)
2294 case ERT_CLEANUP:
2296 eh_region r;
2297 /* A cleanup adds a zero filter to the beginning of the chain, but
2298 there are special cases to look out for. If there are *only*
2299 cleanups along a path, then it compresses to a zero action.
2300 Further, if there are multiple cleanups along a path, we only
2301 need to represent one of them, as that is enough to trigger
2302 entry to the landing pad at runtime. */
2303 next = collect_one_action_chain (ar_hash, region->outer);
2304 if (next <= 0)
2305 return 0;
/* An outer cleanup already represents us; reuse its chain.  */
2306 for (r = region->outer; r ; r = r->outer)
2307 if (r->type == ERT_CLEANUP)
2308 return next;
2309 return add_action_record (ar_hash, 0, next);
2312 case ERT_TRY:
2314 eh_catch c;
2316 /* Process the associated catch regions in reverse order.
2317 If there's a catch-all handler, then we don't need to
2318 search outer regions. Use a magic -3 value to record
2319 that we haven't done the outer search. */
2320 next = -3;
2321 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2323 if (c->type_list == NULL)
2325 /* Retrieve the filter from the head of the filter list
2326 where we have stored it (see assign_filter_values). */
2327 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2328 next = add_action_record (ar_hash, filter, 0);
2330 else
2332 /* Once the outer search is done, trigger an action record for
2333 each filter we have. */
2334 tree flt_node;
2336 if (next == -3)
2338 next = collect_one_action_chain (ar_hash, region->outer);
2340 /* If there is no next action, terminate the chain. */
2341 if (next == -1)
2342 next = 0;
2343 /* If all outer actions are cleanups or must_not_throw,
2344 we'll have no action record for it, since we had wanted
2345 to encode these states in the call-site record directly.
2346 Add a cleanup action to the chain to catch these. */
2347 else if (next <= 0)
2348 next = add_action_record (ar_hash, 0, 0);
2351 flt_node = c->filter_list;
2352 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2354 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2355 next = add_action_record (ar_hash, filter, next);
2359 return next;
2362 case ERT_ALLOWED_EXCEPTIONS:
2363 /* An exception specification adds its filter to the
2364 beginning of the chain. */
2365 next = collect_one_action_chain (ar_hash, region->outer);
2367 /* If there is no next action, terminate the chain. */
2368 if (next == -1)
2369 next = 0;
2370 /* If all outer actions are cleanups or must_not_throw,
2371 we'll have no action record for it, since we had wanted
2372 to encode these states in the call-site record directly.
2373 Add a cleanup action to the chain to catch these. */
2374 else if (next <= 0)
2375 next = add_action_record (ar_hash, 0, 0);
2377 return add_action_record (ar_hash, region->u.allowed.filter, next);
2379 case ERT_MUST_NOT_THROW:
2380 /* A must-not-throw region with no inner handlers or cleanups
2381 requires no call-site entry. Note that this differs from
2382 the no handler or cleanup case in that we do require an lsda
2383 to be generated. Return a magic -2 value to record this. */
2384 return -2;
2387 gcc_unreachable ();
2390 static int
2391 add_call_site (rtx landing_pad, int action, int section)
2393 call_site_record record;
2395 record = ggc_alloc_call_site_record_d ();
2396 record->landing_pad = landing_pad;
2397 record->action = action;
2399 VEC_safe_push (call_site_record, gc,
2400 crtl->eh.call_site_record[section], record);
2402 return call_site_base + VEC_length (call_site_record,
2403 crtl->eh.call_site_record[section]) - 1;
2406 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2407 The new note numbers will not refer to region numbers, but
2408 instead to call site entries. */
2410 static unsigned int
2411 convert_to_eh_region_ranges (void)
/* last_action uses the same encoding that collect_one_action_chain
   returns: >= 0 action record, -1 no action, -2 must-not-throw,
   -3 nothing seen yet.  */
2413 rtx insn, iter, note;
2414 htab_t ar_hash;
2415 int last_action = -3;
2416 rtx last_action_insn = NULL_RTX;
2417 rtx last_landing_pad = NULL_RTX;
2418 rtx first_no_action_insn = NULL_RTX;
2419 int call_site = 0;
2420 int cur_sec = 0;
2421 rtx section_switch_note = NULL_RTX;
2422 rtx first_no_action_insn_before_switch = NULL_RTX;
2423 rtx last_no_action_insn_before_switch = NULL_RTX;
2424 int saved_call_site_base = call_site_base;
2426 crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
2428 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2430 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2431 if (INSN_P (iter))
2433 eh_landing_pad lp;
2434 eh_region region;
2435 bool nothrow;
2436 int this_action;
2437 rtx this_landing_pad;
2439 insn = iter;
/* Within a SEQUENCE, query the first insn for the EH region info.  */
2440 if (NONJUMP_INSN_P (insn)
2441 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2442 insn = XVECEXP (PATTERN (insn), 0, 0);
2444 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2445 if (nothrow)
2446 continue;
2447 if (region)
2448 this_action = collect_one_action_chain (ar_hash, region);
2449 else
2450 this_action = -1;
2452 /* Existence of catch handlers, or must-not-throw regions
2453 implies that an lsda is needed (even if empty). */
2454 if (this_action != -1)
2455 crtl->uses_eh_lsda = 1;
2457 /* Delay creation of region notes for no-action regions
2458 until we're sure that an lsda will be required. */
2459 else if (last_action == -3)
2461 first_no_action_insn = iter;
2462 last_action = -1;
2465 if (this_action >= 0)
2466 this_landing_pad = lp->landing_pad;
2467 else
2468 this_landing_pad = NULL_RTX;
2470 /* Differing actions or landing pads implies a change in call-site
2471 info, which implies some EH_REGION note should be emitted. */
2472 if (last_action != this_action
2473 || last_landing_pad != this_landing_pad)
2475 /* If there is a queued no-action region in the other section
2476 with hot/cold partitioning, emit it now. */
2477 if (first_no_action_insn_before_switch)
2479 gcc_assert (this_action != -1
2480 && last_action == (first_no_action_insn
2481 ? -1 : -3));
2482 call_site = add_call_site (NULL_RTX, 0, 0);
2483 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2484 first_no_action_insn_before_switch);
2485 NOTE_EH_HANDLER (note) = call_site;
2486 note = emit_note_after (NOTE_INSN_EH_REGION_END,
2487 last_no_action_insn_before_switch);
2488 NOTE_EH_HANDLER (note) = call_site;
2489 gcc_assert (last_action != -3
2490 || (last_action_insn
2491 == last_no_action_insn_before_switch));
2492 first_no_action_insn_before_switch = NULL_RTX;
2493 last_no_action_insn_before_switch = NULL_RTX;
2494 call_site_base++;
2496 /* If we'd not seen a previous action (-3) or the previous
2497 action was must-not-throw (-2), then we do not need an
2498 end note. */
2499 if (last_action >= -1)
2501 /* If we delayed the creation of the begin, do it now. */
2502 if (first_no_action_insn)
2504 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2505 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2506 first_no_action_insn);
2507 NOTE_EH_HANDLER (note) = call_site;
2508 first_no_action_insn = NULL_RTX;
2511 note = emit_note_after (NOTE_INSN_EH_REGION_END,
2512 last_action_insn);
2513 NOTE_EH_HANDLER (note) = call_site;
2516 /* If the new action is must-not-throw, then no region notes
2517 are created. */
2518 if (this_action >= -1)
2520 call_site = add_call_site (this_landing_pad,
2521 this_action < 0 ? 0 : this_action,
2522 cur_sec);
2523 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2524 NOTE_EH_HANDLER (note) = call_site;
2527 last_action = this_action;
2528 last_landing_pad = this_landing_pad;
2530 last_action_insn = iter;
/* A section switch note: close the current section's call-site table
   and start a fresh one for the other partition.  */
2532 else if (NOTE_P (iter)
2533 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2535 gcc_assert (section_switch_note == NULL_RTX);
2536 gcc_assert (flag_reorder_blocks_and_partition);
2537 section_switch_note = iter;
2538 if (first_no_action_insn)
2540 first_no_action_insn_before_switch = first_no_action_insn;
2541 last_no_action_insn_before_switch = last_action_insn;
2542 first_no_action_insn = NULL_RTX;
2543 gcc_assert (last_action == -1);
2544 last_action = -3;
2546 /* Force closing of current EH region before section switch and
2547 opening a new one afterwards. */
2548 else if (last_action != -3)
2549 last_landing_pad = pc_rtx;
2550 call_site_base += VEC_length (call_site_record,
2551 crtl->eh.call_site_record[cur_sec]);
2552 cur_sec++;
2553 gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL);
2554 crtl->eh.call_site_record[cur_sec]
2555 = VEC_alloc (call_site_record, gc, 10);
2558 if (last_action >= -1 && ! first_no_action_insn)
2560 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
2561 NOTE_EH_HANDLER (note) = call_site;
2564 call_site_base = saved_call_site_base;
2566 htab_delete (ar_hash);
2567 return 0;
2570 static bool
2571 gate_convert_to_eh_region_ranges (void)
2573 /* Nothing to do for SJLJ exceptions or if no regions created. */
2574 if (cfun->eh->region_tree == NULL)
2575 return false;
2576 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2577 return false;
2578 return true;
/* Pass descriptor for the RTL pass that rewrites REG_EH_REGION notes
   into call-site based NOTE_INSN_EH_REGION notes.  */
2581 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
2584 RTL_PASS,
2585 "eh_ranges", /* name */
2586 gate_convert_to_eh_region_ranges, /* gate */
2587 convert_to_eh_region_ranges, /* execute */
2588 NULL, /* sub */
2589 NULL, /* next */
2590 0, /* static_pass_number */
2591 TV_NONE, /* tv_id */
2592 0, /* properties_required */
2593 0, /* properties_provided */
2594 0, /* properties_destroyed */
2595 0, /* todo_flags_start */
2596 0 /* todo_flags_finish */
2600 static void
2601 push_uleb128 (VEC (uchar, gc) **data_area, unsigned int value)
2605 unsigned char byte = value & 0x7f;
2606 value >>= 7;
2607 if (value)
2608 byte |= 0x80;
2609 VEC_safe_push (uchar, gc, *data_area, byte);
2611 while (value);
2614 static void
2615 push_sleb128 (VEC (uchar, gc) **data_area, int value)
2617 unsigned char byte;
2618 int more;
2622 byte = value & 0x7f;
2623 value >>= 7;
2624 more = ! ((value == 0 && (byte & 0x40) == 0)
2625 || (value == -1 && (byte & 0x40) != 0));
2626 if (more)
2627 byte |= 0x80;
2628 VEC_safe_push (uchar, gc, *data_area, byte);
2630 while (more);
2634 #ifndef HAVE_AS_LEB128
2635 static int
2636 dw2_size_of_call_site_table (int section)
2638 int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
2639 int size = n * (4 + 4 + 4);
2640 int i;
2642 for (i = 0; i < n; ++i)
2644 struct call_site_record_d *cs =
2645 VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
2646 size += size_of_uleb128 (cs->action);
2649 return size;
2652 static int
2653 sjlj_size_of_call_site_table (void)
2655 int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
2656 int size = 0;
2657 int i;
2659 for (i = 0; i < n; ++i)
2661 struct call_site_record_d *cs =
2662 VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
2663 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2664 size += size_of_uleb128 (cs->action);
2667 return size;
2669 #endif
/* Emit the DWARF2 call-site table for text SECTION, using CS_FORMAT
   (DW_EH_PE_uleb128 or udata4) for the region offsets.  All offsets are
   relative to the start label of the section being emitted.  */
2671 static void
2672 dw2_output_call_site_table (int cs_format, int section)
2674 int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
2675 int i;
2676 const char *begin;
/* Pick the base label: the function start for the primary section,
   otherwise whichever partition label starts the second section.  */
2678 if (section == 0)
2679 begin = current_function_func_begin_label;
2680 else if (first_function_block_is_cold)
2681 begin = crtl->subsections.hot_section_label;
2682 else
2683 begin = crtl->subsections.cold_section_label;
2685 for (i = 0; i < n; ++i)
2687 struct call_site_record_d *cs =
2688 VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
2689 char reg_start_lab[32];
2690 char reg_end_lab[32];
2691 char landing_pad_lab[32];
2693 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2694 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2696 if (cs->landing_pad)
2697 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2698 CODE_LABEL_NUMBER (cs->landing_pad));
2700 /* ??? Perhaps use insn length scaling if the assembler supports
2701 generic arithmetic. */
2702 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2703 data4 if the function is small enough. */
2704 if (cs_format == DW_EH_PE_uleb128)
2706 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2707 "region %d start", i);
2708 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2709 "length");
2710 if (cs->landing_pad)
2711 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2712 "landing pad");
2713 else
2714 dw2_asm_output_data_uleb128 (0, "landing pad");
2716 else
2718 dw2_asm_output_delta (4, reg_start_lab, begin,
2719 "region %d start", i);
2720 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2721 if (cs->landing_pad)
2722 dw2_asm_output_delta (4, landing_pad_lab, begin,
2723 "landing pad");
2724 else
2725 dw2_asm_output_data (4, 0, "landing pad");
2727 dw2_asm_output_data_uleb128 (cs->action, "action");
2730 call_site_base += n;
2733 static void
2734 sjlj_output_call_site_table (void)
2736 int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
2737 int i;
2739 for (i = 0; i < n; ++i)
2741 struct call_site_record_d *cs =
2742 VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
2744 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2745 "region %d landing pad", i);
2746 dw2_asm_output_data_uleb128 (cs->action, "action");
2749 call_site_base += n;
2752 /* Switch to the section that should be used for exception tables. */
2754 static void
2755 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2757 section *s;
/* Reuse the cached section when we have already computed one.  */
2759 if (exception_section)
2760 s = exception_section;
2761 else
2763 /* Compute the section and cache it into exception_section,
2764 unless it depends on the function name. */
2765 if (targetm_common.have_named_sections)
2767 int flags;
/* The table can be read-only unless PIC forces runtime relocation of
   absolute or aligned type pointers.  */
2769 if (EH_TABLES_CAN_BE_READ_ONLY)
2771 int tt_format =
2772 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2773 flags = ((! flag_pic
2774 || ((tt_format & 0x70) != DW_EH_PE_absptr
2775 && (tt_format & 0x70) != DW_EH_PE_aligned))
2776 ? 0 : SECTION_WRITE);
2778 else
2779 flags = SECTION_WRITE;
/* With -ffunction-sections, place each table in a per-function
   section named after the function (HAVE_LD_EH_GC_SECTIONS); such a
   section is not cached since its name varies.  */
2781 #ifdef HAVE_LD_EH_GC_SECTIONS
2782 if (flag_function_sections)
2784 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2785 sprintf (section_name, ".gcc_except_table.%s", fnname);
2786 s = get_section (section_name, flags, NULL);
2787 free (section_name);
2789 else
2790 #endif
2791 exception_section
2792 = s = get_section (".gcc_except_table", flags, NULL);
2794 else
2795 exception_section
2796 = s = flag_pic ? data_section : readonly_data_section;
2799 switch_to_section (s);
2803 /* Output a reference from an exception table to the type_info object TYPE.
2804 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2805 the value. */
2807 static void
2808 output_ttype (tree type, int tt_format, int tt_format_size)
2810 rtx value;
2811 bool is_public = true;
2813 if (type == NULL_TREE)
2814 value = const0_rtx;
2815 else
2817 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2818 runtime types so TYPE should already be a runtime type
2819 reference. When pass_ipa_free_lang data is made a default
2820 pass, we can then remove the call to lookup_type_for_runtime
2821 below. */
2822 if (TYPE_P (type))
2823 type = lookup_type_for_runtime (type);
2825 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2827 /* Let cgraph know that the rtti decl is used. Not all of the
2828 paths below go through assemble_integer, which would take
2829 care of this for us. */
2830 STRIP_NOPS (type);
2831 if (TREE_CODE (type) == ADDR_EXPR)
2833 type = TREE_OPERAND (type, 0);
2834 if (TREE_CODE (type) == VAR_DECL)
2835 is_public = TREE_PUBLIC (type);
2837 else
2838 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2841 /* Allow the target to override the type table entry format. */
2842 if (targetm.asm_out.ttype (value))
2843 return;
2845 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2846 assemble_integer (value, tt_format_size,
2847 tt_format_size * BITS_PER_UNIT, 1);
2848 else
2849 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
/* Emit the LSDA for the current function into text SECTION (0 = primary
   section, 1 = second/cold partition): the header, the call-site table,
   the action record table, the type table and the exception
   specification data.  */
2852 static void
2853 output_one_function_exception_table (int section)
2855 int tt_format, cs_format, lp_format, i;
2856 #ifdef HAVE_AS_LEB128
2857 char ttype_label[32];
2858 char cs_after_size_label[32];
2859 char cs_end_label[32];
2860 #else
2861 int call_site_len;
2862 #endif
2863 int have_tt_data;
2864 int tt_format_size = 0;
/* Type data exists if there are ttype entries or (per-unwinder) any
   exception specification data.  */
2866 have_tt_data = (VEC_length (tree, cfun->eh->ttype_data)
2867 || (targetm.arm_eabi_unwinder
2868 ? VEC_length (tree, cfun->eh->ehspec_data.arm_eabi)
2869 : VEC_length (uchar, cfun->eh->ehspec_data.other)));
2871 /* Indicate the format of the @TType entries. */
2872 if (! have_tt_data)
2873 tt_format = DW_EH_PE_omit;
2874 else
2876 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2877 #ifdef HAVE_AS_LEB128
2878 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2879 section ? "LLSDATTC" : "LLSDATT",
2880 current_function_funcdef_no);
2881 #endif
2882 tt_format_size = size_of_encoded_value (tt_format);
2884 assemble_align (tt_format_size * BITS_PER_UNIT);
2887 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2888 current_function_funcdef_no);
2890 /* The LSDA header. */
2892 /* Indicate the format of the landing pad start pointer. An omitted
2893 field implies @LPStart == @Start. */
2894 /* Currently we always put @LPStart == @Start. This field would
2895 be most useful in moving the landing pads completely out of
2896 line to another section, but it could also be used to minimize
2897 the size of uleb128 landing pad offsets. */
2898 lp_format = DW_EH_PE_omit;
2899 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
2900 eh_data_format_name (lp_format));
2902 /* @LPStart pointer would go here. */
2904 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
2905 eh_data_format_name (tt_format));
2907 #ifndef HAVE_AS_LEB128
2908 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2909 call_site_len = sjlj_size_of_call_site_table ();
2910 else
2911 call_site_len = dw2_size_of_call_site_table (section);
2912 #endif
2914 /* A pc-relative 4-byte displacement to the @TType data. */
2915 if (have_tt_data)
2917 #ifdef HAVE_AS_LEB128
2918 char ttype_after_disp_label[32];
2919 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
2920 section ? "LLSDATTDC" : "LLSDATTD",
2921 current_function_funcdef_no);
2922 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
2923 "@TType base offset");
2924 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
2925 #else
2926 /* Ug. Alignment queers things. */
2927 unsigned int before_disp, after_disp, last_disp, disp;
2929 before_disp = 1 + 1;
2930 after_disp = (1 + size_of_uleb128 (call_site_len)
2931 + call_site_len
2932 + VEC_length (uchar, crtl->eh.action_record_data)
2933 + (VEC_length (tree, cfun->eh->ttype_data)
2934 * tt_format_size));
/* Iterate to a fixed point: the alignment padding depends on the
   uleb128 size of the displacement, which depends on the padding.  */
2936 disp = after_disp;
2939 unsigned int disp_size, pad;
2941 last_disp = disp;
2942 disp_size = size_of_uleb128 (disp);
2943 pad = before_disp + disp_size + after_disp;
2944 if (pad % tt_format_size)
2945 pad = tt_format_size - (pad % tt_format_size);
2946 else
2947 pad = 0;
2948 disp = after_disp + pad;
2950 while (disp != last_disp);
2952 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
2953 #endif
2956 /* Indicate the format of the call-site offsets. */
2957 #ifdef HAVE_AS_LEB128
2958 cs_format = DW_EH_PE_uleb128;
2959 #else
2960 cs_format = DW_EH_PE_udata4;
2961 #endif
2962 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
2963 eh_data_format_name (cs_format));
2965 #ifdef HAVE_AS_LEB128
2966 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
2967 section ? "LLSDACSBC" : "LLSDACSB",
2968 current_function_funcdef_no);
2969 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
2970 section ? "LLSDACSEC" : "LLSDACSE",
2971 current_function_funcdef_no);
2972 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
2973 "Call-site table length");
2974 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
2975 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2976 sjlj_output_call_site_table ();
2977 else
2978 dw2_output_call_site_table (cs_format, section);
2979 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
2980 #else
2981 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
2982 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2983 sjlj_output_call_site_table ();
2984 else
2985 dw2_output_call_site_table (cs_format, section);
2986 #endif
2988 /* ??? Decode and interpret the data for flag_debug_asm. */
2990 uchar uc;
2991 FOR_EACH_VEC_ELT (uchar, crtl->eh.action_record_data, i, uc)
2992 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
2995 if (have_tt_data)
2996 assemble_align (tt_format_size * BITS_PER_UNIT);
/* The type table is emitted in reverse: entry N is found at negative
   offsets from the @TType base.  */
2998 i = VEC_length (tree, cfun->eh->ttype_data);
2999 while (i-- > 0)
3001 tree type = VEC_index (tree, cfun->eh->ttype_data, i);
3002 output_ttype (type, tt_format, tt_format_size);
3005 #ifdef HAVE_AS_LEB128
3006 if (have_tt_data)
3007 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3008 #endif
3010 /* ??? Decode and interpret the data for flag_debug_asm. */
3011 if (targetm.arm_eabi_unwinder)
3013 tree type;
3014 for (i = 0;
3015 VEC_iterate (tree, cfun->eh->ehspec_data.arm_eabi, i, type); ++i)
3016 output_ttype (type, tt_format, tt_format_size);
3018 else
3020 uchar uc;
3021 for (i = 0;
3022 VEC_iterate (uchar, cfun->eh->ehspec_data.other, i, uc); ++i)
3023 dw2_asm_output_data (1, uc,
3024 i ? NULL : "Exception specification table");
3028 void
3029 output_function_exception_table (const char *fnname)
3031 rtx personality = get_personality_function (current_function_decl);
3033 /* Not all functions need anything. */
3034 if (! crtl->uses_eh_lsda)
3035 return;
3037 if (personality)
3039 assemble_external_libcall (personality);
3041 if (targetm.asm_out.emit_except_personality)
3042 targetm.asm_out.emit_except_personality (personality);
3045 switch_to_exception_section (fnname);
3047 /* If the target wants a label to begin the table, emit it here. */
3048 targetm.asm_out.emit_except_table_label (asm_out_file);
3050 output_one_function_exception_table (0);
3051 if (crtl->eh.call_site_record[1] != NULL)
3052 output_one_function_exception_table (1);
3054 switch_to_section (current_function_section ());
3057 void
3058 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3060 fun->eh->throw_stmt_table = table;
3063 htab_t
3064 get_eh_throw_stmt_table (struct function *fun)
3066 return fun->eh->throw_stmt_table;
3069 /* Determine if the function needs an EH personality function. */
3071 enum eh_personality_kind
3072 function_needs_eh_personality (struct function *fn)
3074 enum eh_personality_kind kind = eh_personality_none;
3075 eh_region i;
3077 FOR_ALL_EH_REGION_FN (i, fn)
3079 switch (i->type)
3081 case ERT_CLEANUP:
3082 /* Can do with any personality including the generic C one. */
3083 kind = eh_personality_any;
3084 break;
3086 case ERT_TRY:
3087 case ERT_ALLOWED_EXCEPTIONS:
3088 /* Always needs a EH personality function. The generic C
3089 personality doesn't handle these even for empty type lists. */
3090 return eh_personality_lang;
3092 case ERT_MUST_NOT_THROW:
3093 /* Always needs a EH personality function. The language may specify
3094 what abort routine that must be used, e.g. std::terminate. */
3095 return eh_personality_lang;
3099 return kind;
3102 /* Dump EH information to OUT. */
3104 void
3105 dump_eh_tree (FILE * out, struct function *fun)
3107 eh_region i;
3108 int depth = 0;
3109 static const char *const type_name[] = {
3110 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3113 i = fun->eh->region_tree;
3114 if (!i)
3115 return;
3117 fprintf (out, "Eh tree:\n");
/* Depth-first walk of the region tree; DEPTH drives indentation.  */
3118 while (1)
3120 fprintf (out, " %*s %i %s", depth * 2, "",
3121 i->index, type_name[(int) i->type]);
3123 if (i->landing_pads)
3125 eh_landing_pad lp;
3127 fprintf (out, " land:");
/* Landing pads are shown differently for GIMPLE and RTL IR.  */
3128 if (current_ir_type () == IR_GIMPLE)
3130 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3132 fprintf (out, "{%i,", lp->index);
3133 print_generic_expr (out, lp->post_landing_pad, 0);
3134 fputc ('}', out);
3135 if (lp->next_lp)
3136 fputc (',', out);
3139 else
3141 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3143 fprintf (out, "{%i,", lp->index);
3144 if (lp->landing_pad)
3145 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3146 NOTE_P (lp->landing_pad) ? "(del)" : "");
3147 else
3148 fprintf (out, "(nil),");
3149 if (lp->post_landing_pad)
3151 rtx lab = label_rtx (lp->post_landing_pad);
3152 fprintf (out, "%i%s}", INSN_UID (lab),
3153 NOTE_P (lab) ? "(del)" : "");
3155 else
3156 fprintf (out, "(nil)}");
3157 if (lp->next_lp)
3158 fputc (',', out);
/* Region-type specific details.  */
3163 switch (i->type)
3165 case ERT_CLEANUP:
3166 case ERT_MUST_NOT_THROW:
3167 break;
3169 case ERT_TRY:
3171 eh_catch c;
3172 fprintf (out, " catch:");
3173 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3175 fputc ('{', out);
3176 if (c->label)
3178 fprintf (out, "lab:");
3179 print_generic_expr (out, c->label, 0);
3180 fputc (';', out);
3182 print_generic_expr (out, c->type_list, 0);
3183 fputc ('}', out);
3184 if (c->next_catch)
3185 fputc (',', out);
3188 break;
3190 case ERT_ALLOWED_EXCEPTIONS:
3191 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3192 print_generic_expr (out, i->u.allowed.type_list, 0);
3193 break;
3195 fputc ('\n', out);
3197 /* If there are sub-regions, process them. */
3198 if (i->inner)
3199 i = i->inner, depth++;
3200 /* If there are peers, process them. */
3201 else if (i->next_peer)
3202 i = i->next_peer;
3203 /* Otherwise, step back up the tree to the next peer. */
3204 else
3208 i = i->outer;
3209 depth--;
3210 if (i == NULL)
3211 return;
3213 while (i->next_peer == NULL);
3214 i = i->next_peer;
3219 /* Dump the EH tree for FN on stderr. */
3221 DEBUG_FUNCTION void
3222 debug_eh_tree (struct function *fn)
3224 dump_eh_tree (stderr, fn);
3227 /* Verify invariants on EH datastructures. */
3229 DEBUG_FUNCTION void
3230 verify_eh_tree (struct function *fun)
3232 eh_region r, outer;
3233 int nvisited_lp, nvisited_r;
3234 int count_lp, count_r, depth, i;
3235 eh_landing_pad lp;
3236 bool err = false;
3238 if (!fun->eh->region_tree)
3239 return;
/* Count the regions recorded in region_array, verifying that each
   entry's stored index matches its array slot.  */
3241 count_r = 0;
3242 for (i = 1; VEC_iterate (eh_region, fun->eh->region_array, i, r); ++i)
3243 if (r)
3245 if (r->index == i)
3246 count_r++;
3247 else
3249 error ("region_array is corrupted for region %i", r->index);
3250 err = true;
/* Likewise for the landing pad array.  */
3254 count_lp = 0;
3255 for (i = 1; VEC_iterate (eh_landing_pad, fun->eh->lp_array, i, lp); ++i)
3256 if (lp)
3258 if (lp->index == i)
3259 count_lp++;
3260 else
3262 error ("lp_array is corrupted for lp %i", lp->index);
3263 err = true;
/* Walk the region tree itself, checking outer back-links, nesting
   depth, and that every visited region and landing pad is the one the
   arrays point at; count what we visit for cross-checking below.  */
3267 depth = nvisited_lp = nvisited_r = 0;
3268 outer = NULL;
3269 r = fun->eh->region_tree;
3270 while (1)
3272 if (VEC_index (eh_region, fun->eh->region_array, r->index) != r)
3274 error ("region_array is corrupted for region %i", r->index);
3275 err = true;
3277 if (r->outer != outer)
3279 error ("outer block of region %i is wrong", r->index);
3280 err = true;
3282 if (depth < 0)
3284 error ("negative nesting depth of region %i", r->index);
3285 err = true;
3287 nvisited_r++;
3289 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3291 if (VEC_index (eh_landing_pad, fun->eh->lp_array, lp->index) != lp)
3293 error ("lp_array is corrupted for lp %i", lp->index);
3294 err = true;
3296 if (lp->region != r)
3298 error ("region of lp %i is wrong", lp->index);
3299 err = true;
3301 nvisited_lp++;
3304 if (r->inner)
3305 outer = r, r = r->inner, depth++;
3306 else if (r->next_peer)
3307 r = r->next_peer;
3308 else
3312 r = r->outer;
3313 if (r == NULL)
3314 goto region_done;
3315 depth--;
3316 outer = r->outer;
3318 while (r->next_peer == NULL);
3319 r = r->next_peer;
/* Finally verify that the visit counts agree with the arrays.  */
3322 region_done:
3323 if (depth != 0)
3325 error ("tree list ends on depth %i", depth);
3326 err = true;
3328 if (count_r != nvisited_r)
3330 error ("region_array does not match region_tree");
3331 err = true;
3333 if (count_lp != nvisited_lp)
3335 error ("lp_array does not match region_tree");
3336 err = true;
3339 if (err)
3341 dump_eh_tree (stderr, fun);
3342 internal_error ("verify_eh_tree failed");
3346 #include "gt-except.h"