/* Implements exception handling.
   Copyright (C) 1989-2014 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
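
/* As a rough illustration of the pipeline above (a hand-worked sketch,
   not actual compiler output), consider:

     void f () { try { g (); } catch (int) { h (); } }

   After gimplification the call to g() sits inside a GIMPLE_TRY whose
   cleanup is a GIMPLE_CATCH for "int".  pass_lower_eh replaces the TRY
   with an ERT_TRY region, records the call in THROW_STMT_TABLE, and
   creates a landing pad plus an EH_DISPATCH to select the catch clause.
   pass_lower_eh_dispatch then assigns "int" a small integer filter and
   turns the dispatch into a comparison against __builtin_eh_filter;
   finally the rtl passes emit the actual landing pad and the LSDA
   tables described above.  */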
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "hard-reg-set.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hash-table.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "tree-pass.h"
#include "cfgloop.h"
#include "builtins.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};
/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};
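
/* For illustration (a hand-worked sketch, not normative): if a call
   site's innermost handler uses filter 1 and an enclosing handler uses
   filter 3, collect_one_action_chain builds the record for filter 3
   first and then a record for filter 1 whose "next" field refers back
   to it.  Each record is stored in crtl->eh.action_record_data as two
   sleb128 values -- the filter and the self-relative offset of the
   next record -- so shared chain tails are emitted only once.  */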
/* Hashtable helpers.  */

struct action_record_hasher : typed_free_remove <action_record>
{
  typedef action_record value_type;
  typedef action_record compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

inline hashval_t
action_record_hasher::hash (const value_type *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const value_type *entry, const compare_type *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
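  /* Roughly (paraphrasing the libgcc definition; the authoritative
     layout lives in libgcc/unwind-sjlj.c and may vary per target):

       struct SjLj_Function_Context
       {
	 struct SjLj_Function_Context *prev;
	 int call_site;
	 _Unwind_Word data[4];
	 _Unwind_Personality_Fn personality;
	 void *lsda;
	 void *jbuf[];   -- setjmp buffer, target-dependent size
       };

     The fields built below mirror this shape field for field.  */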
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif
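
      /* Worked example of the arithmetic above: with 64-bit words and
	 32-bit pointers, the else branch yields size_int (5*64/32 - 1),
	 i.e. an index type of 0..9 and thus room for ten pointers --
	 the same number of bytes as five full words.  */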
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;
      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}
eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Make sure we always end up with a type list, to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}
eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree
   rooted at OLD_R into the current function, rooting the copy at
   OUTER and remapping labels through DATA->label_map.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};
static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	  = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc
	= LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl
	= old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}
/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number (outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return data.eh_map;
}
/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
		     eh_region region_b)
{
  sbitmap b_outer;

  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
	break;
      region_a = region_a->outer;
    }
  while (region_a);

  sbitmap_free (b_outer);
  return region_a;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}
void
add_type_for_runtime (tree type)
{
  tree *slot;

  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = lang_hooks.eh_runtime_type (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter value_type;
  typedef tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const value_type *entry, const compare_type *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const value_type *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter value_type;
  typedef ttypes_filter compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const value_type *entry, const compare_type *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const value_type *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
					   INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
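
/* To illustrate the -1 based indexing (a hand-worked example, not
   normative): the first spec list added starts at byte 0 of
   ehspec_data and so gets filter -1; if its uleb128 encoding occupies
   three bytes (two small type filters plus the terminating 0), the
   next distinct spec list starts at byte 3 and gets filter -4.  */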
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
			= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}
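
/* For instance (an informal sketch): in a function whose only handlers
   are "catch (A)" followed by "catch (B)", the first call to
   add_ttypes_entry gives A filter 1 and the second gives B filter 2;
   a catch-all clause (NULL type_list) still receives its own positive
   filter, while an allowed-exceptions specification gets a negative
   filter from add_ehspec_entry as described above.  */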
/* Emit SEQ into basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}
/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}
static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   this handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}
/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				   TYPE_MODE (integer_type_node), 1,
				   plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs),
				   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}
static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, before, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  rtx first_reachable_label;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_EXPR_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx label;

	start_sequence ();

	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = label_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	e->count = bb->count;
	e->probability = REG_BR_PROB_BASE;
	if (current_loops)
	  {
	    struct loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ??? For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  {
		    loop->header = NULL;
		    loop->latch = NULL;
		  }
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at function begin simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}
static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}
/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }
}
/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}
/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}
/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}
/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
	remove_eh_handler_splicer (pp);
      else
	pp = &region->next_peer;
    }
}

/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splicing out regions
   that are not marked.  By removing regions from the leaves, we avoid
   costly searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}
/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      if (lp)
	{
	  rtx lab = lp->landing_pad;
	  if (lab && LABEL_P (lab))
	    (*callback) (lab);
	}
    }
}
/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

   ??? This difference probably ought to be avoided.  We could stand
   to record nothrow for arbitrary gimple statements, and so avoid
   some moderately complex lookups in stmt_could_throw_p.  Perhaps
   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
   no-nonlocal-goto property should be recorded elsewhere as a bit
   on the call_insn directly.  Perhaps we should make more use of
   attaching the trees to call_insns (reachable via symbol_ref in
   direct call cases) and just pull the data out of the trees.  */
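
/* So, informally (the exact dump syntax may differ): a call insn
   carrying (expr_list:REG_EH_REGION (const_int 2)) may transfer to
   landing pad 2, one carrying (const_int -2) sits inside
   MUST_NOT_THROW region 2, and one with no note at all throws past
   this function entirely.  */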
void
make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
{
  rtx value;
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}
/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
   nor perform a non-local goto.  Replace the region note if it
   already exists.  */

void
make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  rtx intmin = GEN_INT (INT_MIN);

  if (note != 0)
    XEXP (note, 0) = intmin;
  else
    add_reg_note (insn, REG_EH_REGION, intmin);
}
/* Return true if INSN could throw, assuming no REG_EH_REGION note
   to the contrary.  */

bool
insn_could_throw_p (const_rtx insn)
{
  if (!flag_exceptions)
    return false;
  if (CALL_P (insn))
    return true;
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}
/* Copy a REG_EH_REGION note to each insn that might throw beginning
   at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
   to look for a note, or the note itself.  */

void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
{
  rtx insn, note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  note = XEXP (note, 0);

  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	&& insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}

/* Likewise, but iterate backward.  */

void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
{
  rtx insn, note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  note = XEXP (note, 0);

  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}
/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
			       eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  if (lp_nr < 0)
    r = (*cfun->eh->region_array)[-lp_nr];
  else
    {
      lp = (*cfun->eh->lp_array)[lp_nr];
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}
/* Return the landing pad to which INSN may go, or NULL if it does not
   have a reachable landing pad within this function.  */

eh_landing_pad
get_eh_landing_pad_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return lp;
}

/* Return the region to which INSN may go, or NULL if it does not
   have a reachable region within this function.  */

eh_region
get_eh_region_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return r;
}
/* Return true if INSN throws and is caught by something in this function.  */

bool
can_throw_internal (const_rtx insn)
{
  return get_eh_landing_pad_from_rtx (insn) != NULL;
}

/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (can_throw_external (seq->element (i)))
	  return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}
/* Return true if INSN cannot throw at all.  */

bool
insn_nothrow_p (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  if (! INSN_P (insn))
    return true;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (!insn_nothrow_p (seq->element (i)))
	  return false;

      return true;
    }

  return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
}
/* Return true if INSN can perform a non-local goto.  */
/* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */

bool
can_nonlocal_goto (const_rtx insn)
{
  if (nonlocal_goto_handler_labels && CALL_P (insn))
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
	return true;
    }
  return false;
}
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx_insn *insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  if (crtl->nothrow
      && (cgraph_node::get (current_function_decl)->get_availability ()
	  >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_node::get (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
	e->can_throw_external = false;
      node->set_nothrow_flag (true);

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}
2007 namespace {
2009 const pass_data pass_data_set_nothrow_function_flags =
2011 RTL_PASS, /* type */
2012 "nothrow", /* name */
2013 OPTGROUP_NONE, /* optinfo_flags */
2014 TV_NONE, /* tv_id */
2015 0, /* properties_required */
2016 0, /* properties_provided */
2017 0, /* properties_destroyed */
2018 0, /* todo_flags_start */
2019 0, /* todo_flags_finish */
2022 class pass_set_nothrow_function_flags : public rtl_opt_pass
2024 public:
2025 pass_set_nothrow_function_flags (gcc::context *ctxt)
2026 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2029 /* opt_pass methods: */
2030 virtual unsigned int execute (function *)
2032 return set_nothrow_function_flags ();
2035 }; // class pass_set_nothrow_function_flags
2037 } // anon namespace
2039 rtl_opt_pass *
2040 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2042 return new pass_set_nothrow_function_flags (ctxt);
2046 /* Various hooks for unwind library. */
2048 /* Expand the EH support builtin functions:
2049 __builtin_eh_pointer and __builtin_eh_filter. */
2051 static eh_region
2052 expand_builtin_eh_common (tree region_nr_t)
2054 HOST_WIDE_INT region_nr;
2055 eh_region region;
2057 gcc_assert (tree_fits_shwi_p (region_nr_t));
2058 region_nr = tree_to_shwi (region_nr_t);
2060 region = (*cfun->eh->region_array)[region_nr];
2062 /* ??? We shouldn't have been able to delete an eh region without
2063 deleting all the code that depended on it. */
2064 gcc_assert (region != NULL);
2066 return region;
2069 /* Expand to the exc_ptr value from the given eh region. */
2071 rtx
2072 expand_builtin_eh_pointer (tree exp)
2074 eh_region region
2075 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2076 if (region->exc_ptr_reg == NULL)
2077 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2078 return region->exc_ptr_reg;
2081 /* Expand to the filter value from the given eh region. */
2083 rtx
2084 expand_builtin_eh_filter (tree exp)
2086 eh_region region
2087 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2088 if (region->filter_reg == NULL)
2089 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2090 return region->filter_reg;
2093 /* Copy the exc_ptr and filter values from one landing pad's registers
2094 to another. This is used to inline the resx statement. */
2096 rtx
2097 expand_builtin_eh_copy_values (tree exp)
2099 eh_region dst
2100 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2101 eh_region src
2102 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2103 enum machine_mode fmode = targetm.eh_return_filter_mode ();
2105 if (dst->exc_ptr_reg == NULL)
2106 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2107 if (src->exc_ptr_reg == NULL)
2108 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2110 if (dst->filter_reg == NULL)
2111 dst->filter_reg = gen_reg_rtx (fmode);
2112 if (src->filter_reg == NULL)
2113 src->filter_reg = gen_reg_rtx (fmode);
2115 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2116 emit_move_insn (dst->filter_reg, src->filter_reg);
2118 return const0_rtx;
2121 /* Do any necessary initialization to access arbitrary stack frames.
2122 On the SPARC, this means flushing the register windows. */
2124 void
2125 expand_builtin_unwind_init (void)
2127 /* Set this so all the registers get saved in our frame; we need to be
2128 able to copy the saved values for any registers from frames we unwind. */
2129 crtl->saves_all_registers = 1;
2131 #ifdef SETUP_FRAME_ADDRESSES
2132 SETUP_FRAME_ADDRESSES ();
2133 #endif
2136 /* Map a non-negative number to an eh return data register number; expands
2137 to -1 if no return data register is associated with the input number.
2138 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2140 rtx
2141 expand_builtin_eh_return_data_regno (tree exp)
2143 tree which = CALL_EXPR_ARG (exp, 0);
2144 unsigned HOST_WIDE_INT iwhich;
2146 if (TREE_CODE (which) != INTEGER_CST)
2148 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2149 return constm1_rtx;
2152 iwhich = tree_to_uhwi (which);
2153 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2154 if (iwhich == INVALID_REGNUM)
2155 return constm1_rtx;
2157 #ifdef DWARF_FRAME_REGNUM
2158 iwhich = DWARF_FRAME_REGNUM (iwhich);
2159 #else
2160 iwhich = DBX_REGISTER_NUMBER (iwhich);
2161 #endif
2163 return GEN_INT (iwhich);
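/* As a usage example, the runtime personality routines (e.g. libgcc's
   unwind-c.c) call __builtin_eh_return_data_regno (0) and
   __builtin_eh_return_data_regno (1) to discover the registers in which
   the exception object pointer and the filter value must be passed to a
   landing pad. */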
2166 /* Given a value extracted from the return address register or stack slot,
2167 return the actual address encoded in that value. */
2169 rtx
2170 expand_builtin_extract_return_addr (tree addr_tree)
2172 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2174 if (GET_MODE (addr) != Pmode
2175 && GET_MODE (addr) != VOIDmode)
2177 #ifdef POINTERS_EXTEND_UNSIGNED
2178 addr = convert_memory_address (Pmode, addr);
2179 #else
2180 addr = convert_to_mode (Pmode, addr, 0);
2181 #endif
2184 /* First mask out any unwanted bits. */
2185 #ifdef MASK_RETURN_ADDR
2186 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2187 #endif
2189 /* Then adjust to find the real return address. */
2190 #if defined (RETURN_ADDR_OFFSET)
2191 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2192 #endif
2194 return addr;
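/* To illustrate with a hypothetical target (not any particular port): a
   machine that keeps a mode flag in bit 0 of saved return addresses would
   define MASK_RETURN_ADDR so that the expand_and above computes
   "addr & ~1", while a machine whose calls record the address of the call
   insn itself would define RETURN_ADDR_OFFSET so that the plus_constant
   above yields the address the epilogue actually returns to. */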
2197 /* Given an actual address in addr_tree, do any necessary encoding
2198 and return the value to be stored in the return address register or
2199 stack slot so the epilogue will return to that address. */
2201 rtx
2202 expand_builtin_frob_return_addr (tree addr_tree)
2204 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2206 addr = convert_memory_address (Pmode, addr);
2208 #ifdef RETURN_ADDR_OFFSET
2209 addr = force_reg (Pmode, addr);
2210 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2211 #endif
2213 return addr;
2216 /* Set up the epilogue with the magic bits we'll need to return to the
2217 exception handler. */
2219 void
2220 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2221 tree handler_tree)
2223 rtx tmp;
2225 #ifdef EH_RETURN_STACKADJ_RTX
2226 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2227 VOIDmode, EXPAND_NORMAL);
2228 tmp = convert_memory_address (Pmode, tmp);
2229 if (!crtl->eh.ehr_stackadj)
2230 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2231 else if (tmp != crtl->eh.ehr_stackadj)
2232 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2233 #endif
2235 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2236 VOIDmode, EXPAND_NORMAL);
2237 tmp = convert_memory_address (Pmode, tmp);
2238 if (!crtl->eh.ehr_handler)
2239 crtl->eh.ehr_handler = copy_to_reg (tmp);
2240 else if (tmp != crtl->eh.ehr_handler)
2241 emit_move_insn (crtl->eh.ehr_handler, tmp);
2243 if (!crtl->eh.ehr_label)
2244 crtl->eh.ehr_label = gen_label_rtx ();
2245 emit_jump (crtl->eh.ehr_label);
2248 /* Expand __builtin_eh_return. This exit path from the function loads up
2249 the eh return data registers, adjusts the stack, and branches to a
2250 given PC other than the normal return address. */
2252 void
2253 expand_eh_return (void)
2255 rtx_code_label *around_label;
2257 if (! crtl->eh.ehr_label)
2258 return;
2260 crtl->calls_eh_return = 1;
2262 #ifdef EH_RETURN_STACKADJ_RTX
2263 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2264 #endif
2266 around_label = gen_label_rtx ();
2267 emit_jump (around_label);
2269 emit_label (crtl->eh.ehr_label);
2270 clobber_return_register ();
2272 #ifdef EH_RETURN_STACKADJ_RTX
2273 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2274 #endif
2276 #ifdef HAVE_eh_return
2277 if (HAVE_eh_return)
2278 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
2279 else
2280 #endif
2282 #ifdef EH_RETURN_HANDLER_RTX
2283 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2284 #else
2285 error ("__builtin_eh_return not supported on this target");
2286 #endif
2289 emit_label (around_label);
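/* Schematically, the insns emitted by the two functions above are
   (illustrative pseudo-code, not actual RTL):

       EH_RETURN_STACKADJ_RTX = 0      // normal path: no adjustment
       jump .Laround
     .Lehr:                            // crtl->eh.ehr_label
       clobber return-value registers
       EH_RETURN_STACKADJ_RTX = ehr_stackadj
       eh_return (ehr_handler)         // or EH_RETURN_HANDLER_RTX = ehr_handler
     .Laround:
       // normal epilogue follows  */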
2292 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2293 POINTERS_EXTEND_UNSIGNED and return it. */
2295 rtx
2296 expand_builtin_extend_pointer (tree addr_tree)
2298 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2299 int extend;
2301 #ifdef POINTERS_EXTEND_UNSIGNED
2302 extend = POINTERS_EXTEND_UNSIGNED;
2303 #else
2304 /* The previous EH code did an unsigned extend by default, so we do the
2305 same here for consistency. */
2306 extend = 1;
2307 #endif
2309 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2312 static int
2313 add_action_record (action_hash_type *ar_hash, int filter, int next)
2315 struct action_record **slot, *new_ar, tmp;
2317 tmp.filter = filter;
2318 tmp.next = next;
2319 slot = ar_hash->find_slot (&tmp, INSERT);
2321 if ((new_ar = *slot) == NULL)
2323 new_ar = XNEW (struct action_record);
2324 new_ar->offset = crtl->eh.action_record_data->length () + 1;
2325 new_ar->filter = filter;
2326 new_ar->next = next;
2327 *slot = new_ar;
2329 /* The filter value goes in untouched. The link to the next
2330 record is a "self-relative" byte offset, or zero to indicate
2331 that there is no next record. So convert the absolute 1-based
2332 indices we've been carrying around into a displacement. */
2334 push_sleb128 (&crtl->eh.action_record_data, filter);
2335 if (next)
2336 next -= crtl->eh.action_record_data->length () + 1;
2337 push_sleb128 (&crtl->eh.action_record_data, next);
2340 return new_ar->offset;
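/* A worked example of the encoding above, with made-up values: starting
   from an empty table, adding a record with filter 3 and no next record
   gives it offset 1 and pushes the bytes [3, 0]. Adding a second record
   with filter 5 chained to the first gives it offset 3; pushing its
   filter byte brings the table length to 3, so the link is stored as
   1 - (3 + 1) = -3, a self-relative displacement pointing back to the
   record at offset 1. */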
2343 static int
2344 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2346 int next;
2348 /* If we've reached the top of the region chain, then we have
2349 no actions, and require no landing pad. */
2350 if (region == NULL)
2351 return -1;
2353 switch (region->type)
2355 case ERT_CLEANUP:
2357 eh_region r;
2358 /* A cleanup adds a zero filter to the beginning of the chain, but
2359 there are special cases to look out for. If there are *only*
2360 cleanups along a path, then it compresses to a zero action.
2361 Further, if there are multiple cleanups along a path, we only
2362 need to represent one of them, as that is enough to trigger
2363 entry to the landing pad at runtime. */
2364 next = collect_one_action_chain (ar_hash, region->outer);
2365 if (next <= 0)
2366 return 0;
2367 for (r = region->outer; r ; r = r->outer)
2368 if (r->type == ERT_CLEANUP)
2369 return next;
2370 return add_action_record (ar_hash, 0, next);
2373 case ERT_TRY:
2375 eh_catch c;
2377 /* Process the associated catch regions in reverse order.
2378 If there's a catch-all handler, then we don't need to
2379 search outer regions. Use a magic -3 value to record
2380 that we haven't done the outer search. */
2381 next = -3;
2382 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2384 if (c->type_list == NULL)
2386 /* Retrieve the filter from the head of the filter list
2387 where we have stored it (see assign_filter_values). */
2388 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2389 next = add_action_record (ar_hash, filter, 0);
2391 else
2393 /* Once the outer search is done, trigger an action record for
2394 each filter we have. */
2395 tree flt_node;
2397 if (next == -3)
2399 next = collect_one_action_chain (ar_hash, region->outer);
2401 /* If there is no next action, terminate the chain. */
2402 if (next == -1)
2403 next = 0;
2404 /* If all outer actions are cleanups or must_not_throw,
2405 we'll have no action record for it, since we wanted
2406 to encode these states in the call-site record directly.
2407 Add a cleanup action to the chain to catch these. */
2408 else if (next <= 0)
2409 next = add_action_record (ar_hash, 0, 0);
2412 flt_node = c->filter_list;
2413 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2415 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2416 next = add_action_record (ar_hash, filter, next);
2420 return next;
2423 case ERT_ALLOWED_EXCEPTIONS:
2424 /* An exception specification adds its filter to the
2425 beginning of the chain. */
2426 next = collect_one_action_chain (ar_hash, region->outer);
2428 /* If there is no next action, terminate the chain. */
2429 if (next == -1)
2430 next = 0;
2431 /* If all outer actions are cleanups or must_not_throw,
2432 we'll have no action record for it, since we wanted
2433 to encode these states in the call-site record directly.
2434 Add a cleanup action to the chain to catch these. */
2435 else if (next <= 0)
2436 next = add_action_record (ar_hash, 0, 0);
2438 return add_action_record (ar_hash, region->u.allowed.filter, next);
2440 case ERT_MUST_NOT_THROW:
2441 /* A must-not-throw region with no inner handlers or cleanups
2442 requires no call-site entry. Note that this differs from
2443 the no handler or cleanup case in that we do require an lsda
2444 to be generated. Return a magic -2 value to record this. */
2445 return -2;
2448 gcc_unreachable ();
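/* To summarize the magic values that collect_one_action_chain returns
   and that convert_to_eh_region_ranges below consumes:
   -3 no action computed yet (initial state only),
   -2 must-not-throw: an LSDA is required but no call-site entry,
   -1 no action and no landing pad required,
   0 frame unwinding (cleanups) only,
   >0 offset of the first action record in the chain. */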
2451 static int
2452 add_call_site (rtx landing_pad, int action, int section)
2454 call_site_record record;
2456 record = ggc_alloc<call_site_record_d> ();
2457 record->landing_pad = landing_pad;
2458 record->action = action;
2460 vec_safe_push (crtl->eh.call_site_record_v[section], record);
2462 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2465 static rtx_note *
2466 emit_note_eh_region_end (rtx insn)
2468 rtx_insn *next = NEXT_INSN (insn);
2470 /* Make sure we do not split a call and its corresponding
2471 CALL_ARG_LOCATION note. */
2472 if (next && NOTE_P (next)
2473 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2474 insn = next;
2476 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2479 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2480 The new note numbers will not refer to region numbers, but
2481 instead to call site entries. */
2483 static unsigned int
2484 convert_to_eh_region_ranges (void)
2486 rtx insn;
2487 rtx_insn *iter;
2488 rtx_note *note;
2489 action_hash_type ar_hash (31);
2490 int last_action = -3;
2491 rtx_insn *last_action_insn = NULL;
2492 rtx last_landing_pad = NULL_RTX;
2493 rtx_insn *first_no_action_insn = NULL;
2494 int call_site = 0;
2495 int cur_sec = 0;
2496 rtx section_switch_note = NULL_RTX;
2497 rtx_insn *first_no_action_insn_before_switch = NULL;
2498 rtx_insn *last_no_action_insn_before_switch = NULL;
2499 int saved_call_site_base = call_site_base;
2501 vec_alloc (crtl->eh.action_record_data, 64);
2503 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2504 if (INSN_P (iter))
2506 eh_landing_pad lp;
2507 eh_region region;
2508 bool nothrow;
2509 int this_action;
2510 rtx this_landing_pad;
2512 insn = iter;
2513 if (NONJUMP_INSN_P (insn)
2514 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2515 insn = XVECEXP (PATTERN (insn), 0, 0);
2517 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2518 if (nothrow)
2519 continue;
2520 if (region)
2521 this_action = collect_one_action_chain (&ar_hash, region);
2522 else
2523 this_action = -1;
2525 /* Existence of catch handlers, or must-not-throw regions
2526 implies that an lsda is needed (even if empty). */
2527 if (this_action != -1)
2528 crtl->uses_eh_lsda = 1;
2530 /* Delay creation of region notes for no-action regions
2531 until we're sure that an lsda will be required. */
2532 else if (last_action == -3)
2534 first_no_action_insn = iter;
2535 last_action = -1;
2538 if (this_action >= 0)
2539 this_landing_pad = lp->landing_pad;
2540 else
2541 this_landing_pad = NULL_RTX;
2543 /* Differing actions or landing pads implies a change in call-site
2544 info, which implies some EH_REGION note should be emitted. */
2545 if (last_action != this_action
2546 || last_landing_pad != this_landing_pad)
2548 /* If there is a queued no-action region in the other section
2549 with hot/cold partitioning, emit it now. */
2550 if (first_no_action_insn_before_switch)
2552 gcc_assert (this_action != -1
2553 && last_action == (first_no_action_insn
2554 ? -1 : -3));
2555 call_site = add_call_site (NULL_RTX, 0, 0);
2556 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2557 first_no_action_insn_before_switch);
2558 NOTE_EH_HANDLER (note) = call_site;
2559 note
2560 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2561 NOTE_EH_HANDLER (note) = call_site;
2562 gcc_assert (last_action != -3
2563 || (last_action_insn
2564 == last_no_action_insn_before_switch));
2565 first_no_action_insn_before_switch = NULL;
2566 last_no_action_insn_before_switch = NULL;
2567 call_site_base++;
2569 /* If we'd not seen a previous action (-3) or the previous
2570 action was must-not-throw (-2), then we do not need an
2571 end note. */
2572 if (last_action >= -1)
2574 /* If we delayed the creation of the begin, do it now. */
2575 if (first_no_action_insn)
2577 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2578 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2579 first_no_action_insn);
2580 NOTE_EH_HANDLER (note) = call_site;
2581 first_no_action_insn = NULL;
2584 note = emit_note_eh_region_end (last_action_insn);
2585 NOTE_EH_HANDLER (note) = call_site;
2588 /* If the new action is must-not-throw, then no region notes
2589 are created. */
2590 if (this_action >= -1)
2592 call_site = add_call_site (this_landing_pad,
2593 this_action < 0 ? 0 : this_action,
2594 cur_sec);
2595 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2596 NOTE_EH_HANDLER (note) = call_site;
2599 last_action = this_action;
2600 last_landing_pad = this_landing_pad;
2602 last_action_insn = iter;
2604 else if (NOTE_P (iter)
2605 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2607 gcc_assert (section_switch_note == NULL_RTX);
2608 gcc_assert (flag_reorder_blocks_and_partition);
2609 section_switch_note = iter;
2610 if (first_no_action_insn)
2612 first_no_action_insn_before_switch = first_no_action_insn;
2613 last_no_action_insn_before_switch = last_action_insn;
2614 first_no_action_insn = NULL;
2615 gcc_assert (last_action == -1);
2616 last_action = -3;
2618 /* Force closing of current EH region before section switch and
2619 opening a new one afterwards. */
2620 else if (last_action != -3)
2621 last_landing_pad = pc_rtx;
2622 if (crtl->eh.call_site_record_v[cur_sec])
2623 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2624 cur_sec++;
2625 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2626 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2629 if (last_action >= -1 && ! first_no_action_insn)
2631 note = emit_note_eh_region_end (last_action_insn);
2632 NOTE_EH_HANDLER (note) = call_site;
2635 call_site_base = saved_call_site_base;
2637 return 0;
2640 namespace {
2642 const pass_data pass_data_convert_to_eh_region_ranges =
2644 RTL_PASS, /* type */
2645 "eh_ranges", /* name */
2646 OPTGROUP_NONE, /* optinfo_flags */
2647 TV_NONE, /* tv_id */
2648 0, /* properties_required */
2649 0, /* properties_provided */
2650 0, /* properties_destroyed */
2651 0, /* todo_flags_start */
2652 0, /* todo_flags_finish */
2655 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2657 public:
2658 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2659 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2662 /* opt_pass methods: */
2663 virtual bool gate (function *);
2664 virtual unsigned int execute (function *)
2666 return convert_to_eh_region_ranges ();
2669 }; // class pass_convert_to_eh_region_ranges
2671 bool
2672 pass_convert_to_eh_region_ranges::gate (function *)
2674 /* Nothing to do for SJLJ exceptions or if no regions created. */
2675 if (cfun->eh->region_tree == NULL)
2676 return false;
2677 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2678 return false;
2679 return true;
2682 } // anon namespace
2684 rtl_opt_pass *
2685 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2687 return new pass_convert_to_eh_region_ranges (ctxt);
2690 static void
2691 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2693 do
2695 unsigned char byte = value & 0x7f;
2696 value >>= 7;
2697 if (value)
2698 byte |= 0x80;
2699 vec_safe_push (*data_area, byte);
2701 while (value);
2704 static void
2705 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2707 unsigned char byte;
2708 int more;
2710 do
2712 byte = value & 0x7f;
2713 value >>= 7;
2714 more = ! ((value == 0 && (byte & 0x40) == 0)
2715 || (value == -1 && (byte & 0x40) != 0));
2716 if (more)
2717 byte |= 0x80;
2718 vec_safe_push (*data_area, byte);
2720 while (more);
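/* For reference, push_uleb128 encodes 624485 as the bytes 0xe5 0x8e 0x26,
   and push_sleb128 encodes -129 as 0xff 0x7e. A minimal matching decoder
   would look like the sketch below; this is illustrative only, as the
   real readers live in the libgcc unwinder, not in this file. */
#if 0
static unsigned int
read_uleb128 (const unsigned char *p, unsigned int *out)
{
  unsigned int shift = 0, result = 0, len = 0;
  unsigned char byte;

  /* Accumulate 7 payload bits per byte; the high bit marks
     continuation. */
  do
    {
      byte = p[len++];
      result |= (unsigned int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  *out = result;
  return len; /* Number of bytes consumed. */
}
#endif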
2724 #ifndef HAVE_AS_LEB128
2725 static int
2726 dw2_size_of_call_site_table (int section)
2728 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2729 int size = n * (4 + 4 + 4);
2730 int i;
2732 for (i = 0; i < n; ++i)
2734 struct call_site_record_d *cs =
2735 (*crtl->eh.call_site_record_v[section])[i];
2736 size += size_of_uleb128 (cs->action);
2739 return size;
2742 static int
2743 sjlj_size_of_call_site_table (void)
2745 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2746 int size = 0;
2747 int i;
2749 for (i = 0; i < n; ++i)
2751 struct call_site_record_d *cs =
2752 (*crtl->eh.call_site_record_v[0])[i];
2753 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2754 size += size_of_uleb128 (cs->action);
2757 return size;
2759 #endif
2761 static void
2762 dw2_output_call_site_table (int cs_format, int section)
2764 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2765 int i;
2766 const char *begin;
2768 if (section == 0)
2769 begin = current_function_func_begin_label;
2770 else if (first_function_block_is_cold)
2771 begin = crtl->subsections.hot_section_label;
2772 else
2773 begin = crtl->subsections.cold_section_label;
2775 for (i = 0; i < n; ++i)
2777 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2778 char reg_start_lab[32];
2779 char reg_end_lab[32];
2780 char landing_pad_lab[32];
2782 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2783 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2785 if (cs->landing_pad)
2786 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2787 CODE_LABEL_NUMBER (cs->landing_pad));
2789 /* ??? Perhaps use insn length scaling if the assembler supports
2790 generic arithmetic. */
2791 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2792 data4 if the function is small enough. */
2793 if (cs_format == DW_EH_PE_uleb128)
2795 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2796 "region %d start", i);
2797 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2798 "length");
2799 if (cs->landing_pad)
2800 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2801 "landing pad");
2802 else
2803 dw2_asm_output_data_uleb128 (0, "landing pad");
2805 else
2807 dw2_asm_output_delta (4, reg_start_lab, begin,
2808 "region %d start", i);
2809 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2810 if (cs->landing_pad)
2811 dw2_asm_output_delta (4, landing_pad_lab, begin,
2812 "landing pad");
2813 else
2814 dw2_asm_output_data (4, 0, "landing pad");
2816 dw2_asm_output_data_uleb128 (cs->action, "action");
2819 call_site_base += n;
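/* With cs_format == DW_EH_PE_uleb128, one entry of the table above
   assembles to something like this (hypothetical label names):

       .uleb128 .LEHB0-.LFB0     // region 0 start
       .uleb128 .LEHE0-.LEHB0    // length
       .uleb128 .L7-.LFB0        // landing pad, or 0 if none
       .uleb128 0x3              // action, 0 if none

   where .LFB0 stands for the begin label selected above. */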
2822 static void
2823 sjlj_output_call_site_table (void)
2825 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2826 int i;
2828 for (i = 0; i < n; ++i)
2830 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2832 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2833 "region %d landing pad", i);
2834 dw2_asm_output_data_uleb128 (cs->action, "action");
2837 call_site_base += n;
2840 /* Switch to the section that should be used for exception tables. */
2842 static void
2843 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2845 section *s;
2847 if (exception_section)
2848 s = exception_section;
2849 else
2851 /* Compute the section and cache it into exception_section,
2852 unless it depends on the function name. */
2853 if (targetm_common.have_named_sections)
2855 int flags;
2857 if (EH_TABLES_CAN_BE_READ_ONLY)
2859 int tt_format =
2860 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2861 flags = ((! flag_pic
2862 || ((tt_format & 0x70) != DW_EH_PE_absptr
2863 && (tt_format & 0x70) != DW_EH_PE_aligned))
2864 ? 0 : SECTION_WRITE);
2866 else
2867 flags = SECTION_WRITE;
2869 #ifdef HAVE_LD_EH_GC_SECTIONS
2870 if (flag_function_sections
2871 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2873 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2874 /* The EH table must match the code section, so only mark
2875 it linkonce if we have COMDAT groups to tie them together. */
2876 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2877 flags |= SECTION_LINKONCE;
2878 sprintf (section_name, ".gcc_except_table.%s", fnname);
2879 s = get_section (section_name, flags, current_function_decl);
2880 free (section_name);
2882 else
2883 #endif
2884 exception_section
2885 = s = get_section (".gcc_except_table", flags, NULL);
2887 else
2888 exception_section
2889 = s = flag_pic ? data_section : readonly_data_section;
2892 switch_to_section (s);
2896 /* Output a reference from an exception table to the type_info object TYPE.
2897 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2898 the value. */
2900 static void
2901 output_ttype (tree type, int tt_format, int tt_format_size)
2903 rtx value;
2904 bool is_public = true;
2906 if (type == NULL_TREE)
2907 value = const0_rtx;
2908 else
2910 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2911 runtime types so TYPE should already be a runtime type
2912 reference. When pass_ipa_free_lang_data is made a default
2913 pass, we can then remove the call to lookup_type_for_runtime
2914 below. */
2915 if (TYPE_P (type))
2916 type = lookup_type_for_runtime (type);
2918 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2920 /* Let cgraph know that the rtti decl is used. Not all of the
2921 paths below go through assemble_integer, which would take
2922 care of this for us. */
2923 STRIP_NOPS (type);
2924 if (TREE_CODE (type) == ADDR_EXPR)
2926 type = TREE_OPERAND (type, 0);
2927 if (TREE_CODE (type) == VAR_DECL)
2928 is_public = TREE_PUBLIC (type);
2930 else
2931 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2934 /* Allow the target to override the type table entry format. */
2935 if (targetm.asm_out.ttype (value))
2936 return;
2938 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2939 assemble_integer (value, tt_format_size,
2940 tt_format_size * BITS_PER_UNIT, 1);
2941 else
2942 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2945 static void
2946 output_one_function_exception_table (int section)
2948 int tt_format, cs_format, lp_format, i;
2949 #ifdef HAVE_AS_LEB128
2950 char ttype_label[32];
2951 char cs_after_size_label[32];
2952 char cs_end_label[32];
2953 #else
2954 int call_site_len;
2955 #endif
2956 int have_tt_data;
2957 int tt_format_size = 0;
2959 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2960 || (targetm.arm_eabi_unwinder
2961 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2962 : vec_safe_length (cfun->eh->ehspec_data.other)));
2964 /* Indicate the format of the @TType entries. */
2965 if (! have_tt_data)
2966 tt_format = DW_EH_PE_omit;
2967 else
2969 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2970 #ifdef HAVE_AS_LEB128
2971 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2972 section ? "LLSDATTC" : "LLSDATT",
2973 current_function_funcdef_no);
2974 #endif
2975 tt_format_size = size_of_encoded_value (tt_format);
2977 assemble_align (tt_format_size * BITS_PER_UNIT);
2980 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2981 current_function_funcdef_no);
2983 /* The LSDA header. */
2985 /* Indicate the format of the landing pad start pointer. An omitted
2986 field implies @LPStart == @Start. */
2987 /* Currently we always put @LPStart == @Start. This field would
2988 be most useful in moving the landing pads completely out of
2989 line to another section, but it could also be used to minimize
2990 the size of uleb128 landing pad offsets. */
2991 lp_format = DW_EH_PE_omit;
2992 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
2993 eh_data_format_name (lp_format));
2995 /* @LPStart pointer would go here. */
2997 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
2998 eh_data_format_name (tt_format));
3000 #ifndef HAVE_AS_LEB128
3001 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3002 call_site_len = sjlj_size_of_call_site_table ();
3003 else
3004 call_site_len = dw2_size_of_call_site_table (section);
3005 #endif
3007 /* A pc-relative 4-byte displacement to the @TType data. */
3008 if (have_tt_data)
3010 #ifdef HAVE_AS_LEB128
3011 char ttype_after_disp_label[32];
3012 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3013 section ? "LLSDATTDC" : "LLSDATTD",
3014 current_function_funcdef_no);
3015 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3016 "@TType base offset");
3017 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3018 #else
3019 /* Ugh. Alignment complicates things. */
3020 unsigned int before_disp, after_disp, last_disp, disp;
3022 before_disp = 1 + 1;
3023 after_disp = (1 + size_of_uleb128 (call_site_len)
3024 + call_site_len
3025 + vec_safe_length (crtl->eh.action_record_data)
3026 + (vec_safe_length (cfun->eh->ttype_data)
3027 * tt_format_size));
3029 disp = after_disp;
3030 do
3032 unsigned int disp_size, pad;
3034 last_disp = disp;
3035 disp_size = size_of_uleb128 (disp);
3036 pad = before_disp + disp_size + after_disp;
3037 if (pad % tt_format_size)
3038 pad = tt_format_size - (pad % tt_format_size);
3039 else
3040 pad = 0;
3041 disp = after_disp + pad;
3043 while (disp != last_disp);
3045 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3046 #endif
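/* A worked example of the fixed-point iteration above: suppose after_disp
   is 130 bytes and tt_format_size is 4. On the first pass,
   size_of_uleb128 (130) is 2, so the unpadded total 2 + 2 + 130 = 134
   leaves 134 % 4 == 2 bytes over, giving pad = 2 and disp = 132. On the
   second pass size_of_uleb128 (132) is still 2, so disp remains 132 and
   the loop exits. The iteration is needed because a disp that crosses a
   uleb128 size boundary changes the padding, which in turn changes
   disp. */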
3049 /* Indicate the format of the call-site offsets. */
3050 #ifdef HAVE_AS_LEB128
3051 cs_format = DW_EH_PE_uleb128;
3052 #else
3053 cs_format = DW_EH_PE_udata4;
3054 #endif
3055 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3056 eh_data_format_name (cs_format));
3058 #ifdef HAVE_AS_LEB128
3059 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3060 section ? "LLSDACSBC" : "LLSDACSB",
3061 current_function_funcdef_no);
3062 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3063 section ? "LLSDACSEC" : "LLSDACSE",
3064 current_function_funcdef_no);
3065 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3066 "Call-site table length");
3067 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3068 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3069 sjlj_output_call_site_table ();
3070 else
3071 dw2_output_call_site_table (cs_format, section);
3072 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3073 #else
3074 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3075 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3076 sjlj_output_call_site_table ();
3077 else
3078 dw2_output_call_site_table (cs_format, section);
3079 #endif
3081 /* ??? Decode and interpret the data for flag_debug_asm. */
3083 uchar uc;
3084 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3085 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3088 if (have_tt_data)
3089 assemble_align (tt_format_size * BITS_PER_UNIT);
3091 i = vec_safe_length (cfun->eh->ttype_data);
3092 while (i-- > 0)
3094 tree type = (*cfun->eh->ttype_data)[i];
3095 output_ttype (type, tt_format, tt_format_size);
3098 #ifdef HAVE_AS_LEB128
3099 if (have_tt_data)
3100 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3101 #endif
3103 /* ??? Decode and interpret the data for flag_debug_asm. */
3104 if (targetm.arm_eabi_unwinder)
3106 tree type;
3107 for (i = 0;
3108 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3109 output_ttype (type, tt_format, tt_format_size);
3111 else
3113 uchar uc;
3114 for (i = 0;
3115 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3116 dw2_asm_output_data (1, uc,
3117 i ? NULL : "Exception specification table");
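/* The LSDA emitted by the function above therefore has this layout:

       @LPStart format           1 byte, DW_EH_PE_omit here
       @TType format             1 byte
       @TType base offset        uleb128, only if there is type data
       call-site format          1 byte
       call-site table length    uleb128
       call-site table
       action record table
       type table                indexed backwards from its end
       exception specification table  */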
3121 void
3122 output_function_exception_table (const char *fnname)
3124 rtx personality = get_personality_function (current_function_decl);
3126 /* Not all functions need anything. */
3127 if (! crtl->uses_eh_lsda)
3128 return;
3130 if (personality)
3132 assemble_external_libcall (personality);
3134 if (targetm.asm_out.emit_except_personality)
3135 targetm.asm_out.emit_except_personality (personality);
3138 switch_to_exception_section (fnname);
3140 /* If the target wants a label to begin the table, emit it here. */
3141 targetm.asm_out.emit_except_table_label (asm_out_file);
3143 output_one_function_exception_table (0);
3144 if (crtl->eh.call_site_record_v[1])
3145 output_one_function_exception_table (1);
3147 switch_to_section (current_function_section ());
3150 void
3151 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3153 fun->eh->throw_stmt_table = table;
3156 htab_t
3157 get_eh_throw_stmt_table (struct function *fun)
3159 return fun->eh->throw_stmt_table;
3162 /* Determine if the function needs an EH personality function. */
3164 enum eh_personality_kind
3165 function_needs_eh_personality (struct function *fn)
3167 enum eh_personality_kind kind = eh_personality_none;
3168 eh_region i;
3170 FOR_ALL_EH_REGION_FN (i, fn)
3172 switch (i->type)
3174 case ERT_CLEANUP:
3175 /* Can do with any personality including the generic C one. */
3176 kind = eh_personality_any;
3177 break;
3179 case ERT_TRY:
3180 case ERT_ALLOWED_EXCEPTIONS:
3181 /* Always needs an EH personality function. The generic C
3182 personality doesn't handle these even for empty type lists. */
3183 return eh_personality_lang;
3185 case ERT_MUST_NOT_THROW:
3186 /* Always needs an EH personality function. The language may specify
3187 which abort routine must be used, e.g. std::terminate. */
3188 return eh_personality_lang;
3192 return kind;
3195 /* Dump EH information to OUT. */
3197 void
3198 dump_eh_tree (FILE * out, struct function *fun)
3200 eh_region i;
3201 int depth = 0;
3202 static const char *const type_name[] = {
3203 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3206 i = fun->eh->region_tree;
3207 if (!i)
3208 return;
3210 fprintf (out, "Eh tree:\n");
3211 while (1)
3213 fprintf (out, " %*s %i %s", depth * 2, "",
3214 i->index, type_name[(int) i->type]);
3216 if (i->landing_pads)
3218 eh_landing_pad lp;
3220 fprintf (out, " land:");
3221 if (current_ir_type () == IR_GIMPLE)
3223 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3225 fprintf (out, "{%i,", lp->index);
3226 print_generic_expr (out, lp->post_landing_pad, 0);
3227 fputc ('}', out);
3228 if (lp->next_lp)
3229 fputc (',', out);
3232 else
3234 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3236 fprintf (out, "{%i,", lp->index);
3237 if (lp->landing_pad)
3238 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3239 NOTE_P (lp->landing_pad) ? "(del)" : "");
3240 else
3241 fprintf (out, "(nil),");
3242 if (lp->post_landing_pad)
3244 rtx lab = label_rtx (lp->post_landing_pad);
3245 fprintf (out, "%i%s}", INSN_UID (lab),
3246 NOTE_P (lab) ? "(del)" : "");
3248 else
3249 fprintf (out, "(nil)}");
3250 if (lp->next_lp)
3251 fputc (',', out);
3256 switch (i->type)
3258 case ERT_CLEANUP:
3259 case ERT_MUST_NOT_THROW:
3260 break;
3262 case ERT_TRY:
3264 eh_catch c;
3265 fprintf (out, " catch:");
3266 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3268 fputc ('{', out);
3269 if (c->label)
3271 fprintf (out, "lab:");
3272 print_generic_expr (out, c->label, 0);
3273 fputc (';', out);
3275 print_generic_expr (out, c->type_list, 0);
3276 fputc ('}', out);
3277 if (c->next_catch)
3278 fputc (',', out);
3281 break;
3283 case ERT_ALLOWED_EXCEPTIONS:
3284 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3285 print_generic_expr (out, i->u.allowed.type_list, 0);
3286 break;
3288 fputc ('\n', out);
3290 /* If there are sub-regions, process them. */
3291 if (i->inner)
3292 i = i->inner, depth++;
3293 /* If there are peers, process them. */
3294 else if (i->next_peer)
3295 i = i->next_peer;
3296 /* Otherwise, step back up the tree to the next peer. */
3297 else
3299 do
3301 i = i->outer;
3302 depth--;
3303 if (i == NULL)
3304 return;
3306 while (i->next_peer == NULL);
3307 i = i->next_peer;
3312 /* Dump the EH tree for FN on stderr. */
3314 DEBUG_FUNCTION void
3315 debug_eh_tree (struct function *fn)
3317 dump_eh_tree (stderr, fn);
3320 /* Verify invariants on EH data structures. */
3322 DEBUG_FUNCTION void
3323 verify_eh_tree (struct function *fun)
3325 eh_region r, outer;
3326 int nvisited_lp, nvisited_r;
3327 int count_lp, count_r, depth, i;
3328 eh_landing_pad lp;
3329 bool err = false;
3331 if (!fun->eh->region_tree)
3332 return;
3334 count_r = 0;
3335 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3336 if (r)
3338 if (r->index == i)
3339 count_r++;
3340 else
3342 error ("region_array is corrupted for region %i", r->index);
3343 err = true;
3347 count_lp = 0;
3348 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3349 if (lp)
3351 if (lp->index == i)
3352 count_lp++;
3353 else
3355 error ("lp_array is corrupted for lp %i", lp->index);
3356 err = true;
3360 depth = nvisited_lp = nvisited_r = 0;
3361 outer = NULL;
3362 r = fun->eh->region_tree;
3363 while (1)
3365 if ((*fun->eh->region_array)[r->index] != r)
3367 error ("region_array is corrupted for region %i", r->index);
3368 err = true;
3370 if (r->outer != outer)
3372 error ("outer block of region %i is wrong", r->index);
3373 err = true;
3375 if (depth < 0)
3377 error ("negative nesting depth of region %i", r->index);
3378 err = true;
3380 nvisited_r++;
3382 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3384 if ((*fun->eh->lp_array)[lp->index] != lp)
3386 error ("lp_array is corrupted for lp %i", lp->index);
3387 err = true;
3389 if (lp->region != r)
3391 error ("region of lp %i is wrong", lp->index);
3392 err = true;
3394 nvisited_lp++;
3397 if (r->inner)
3398 outer = r, r = r->inner, depth++;
3399 else if (r->next_peer)
3400 r = r->next_peer;
3401 else
3403 do
3405 r = r->outer;
3406 if (r == NULL)
3407 goto region_done;
3408 depth--;
3409 outer = r->outer;
3411 while (r->next_peer == NULL);
3412 r = r->next_peer;
3415 region_done:
3416 if (depth != 0)
3418 error ("tree list ends on depth %i", depth);
3419 err = true;
3421 if (count_r != nvisited_r)
3423 error ("region_array does not match region_tree");
3424 err = true;
3426 if (count_lp != nvisited_lp)
3428 error ("lp_array does not match region_tree");
3429 err = true;
3432 if (err)
3434 dump_eh_tree (stderr, fun);
3435 internal_error ("verify_eh_tree failed");
3439 #include "gt-except.h"