[official-gcc.git] / gcc / except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
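/* As a rough illustration (a sketch of typical front-end usage, not
   a contract of this file), a C++ fragment such as

	try { may_throw (); }
	catch (int) { recover (); }

   is expanded by calling expand_eh_region_start before the try body,
   expand_start_all_catch after it, expand_start_catch and
   expand_end_catch around the handler, and expand_end_all_catch to
   finish the construct.  */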
50 #include "config.h"
51 #include "system.h"
52 #include "rtl.h"
53 #include "tree.h"
54 #include "flags.h"
55 #include "function.h"
56 #include "expr.h"
57 #include "insn-config.h"
58 #include "except.h"
59 #include "integrate.h"
60 #include "hard-reg-set.h"
61 #include "basic-block.h"
62 #include "output.h"
63 #include "dwarf2asm.h"
64 #include "dwarf2out.h"
65 #include "dwarf2.h"
66 #include "toplev.h"
67 #include "hashtab.h"
68 #include "intl.h"
69 #include "ggc.h"
70 #include "tm_p.h"
73 /* Provide defaults for stuff that may not be defined when using
74 sjlj exceptions. */
75 #ifndef EH_RETURN_STACKADJ_RTX
76 #define EH_RETURN_STACKADJ_RTX 0
77 #endif
78 #ifndef EH_RETURN_HANDLER_RTX
79 #define EH_RETURN_HANDLER_RTX 0
80 #endif
81 #ifndef EH_RETURN_DATA_REGNO
82 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
83 #endif
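/* (With these zero/INVALID_REGNUM defaults, the corresponding code
   paths below simply disable themselves; targets that support eh
   return provide real definitions in their target headers.)  */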
86 /* Nonzero means enable synchronous exceptions for non-call instructions. */
87 int flag_non_call_exceptions;
89 /* Protect cleanup actions with must-not-throw regions, with a call
90 to the given failure handler. */
91 tree protect_cleanup_actions;
93 /* Return true if type A catches type B. */
94 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
96 /* Map a type to a runtime object to match type. */
97 tree (*lang_eh_runtime_type) PARAMS ((tree));
99 /* A list of labels used for exception handlers. */
100 rtx exception_handler_labels;
102 static int call_site_base;
103 static int sjlj_funcdef_number;
104 static htab_t type_to_runtime_map;
106 /* Describe the SjLj_Function_Context structure. */
107 static tree sjlj_fc_type_node;
108 static int sjlj_fc_call_site_ofs;
109 static int sjlj_fc_data_ofs;
110 static int sjlj_fc_personality_ofs;
111 static int sjlj_fc_lsda_ofs;
112 static int sjlj_fc_jbuf_ofs;
114 /* Describes one exception region. */
115 struct eh_region
117 /* The immediately surrounding region. */
118 struct eh_region *outer;
120 /* The list of immediately contained regions. */
121 struct eh_region *inner;
122 struct eh_region *next_peer;
124 /* An identifier for this region. */
125 int region_number;
127 /* Each region does exactly one thing. */
128 enum eh_region_type
130 ERT_CLEANUP = 1,
131 ERT_TRY,
132 ERT_CATCH,
133 ERT_ALLOWED_EXCEPTIONS,
134 ERT_MUST_NOT_THROW,
135 ERT_THROW,
136 ERT_FIXUP
137 } type;
139 /* Holds the action to perform based on the preceding type. */
140 union {
141 /* A list of catch blocks, a surrounding try block,
142 and the label for continuing after a catch. */
143 struct {
144 struct eh_region *catch;
145 struct eh_region *last_catch;
146 struct eh_region *prev_try;
147 rtx continue_label;
148 } try;
150 /* The list through the catch handlers, the type object
151 matched, and a pointer to the generated code. */
152 struct {
153 struct eh_region *next_catch;
154 struct eh_region *prev_catch;
155 tree type;
156 int filter;
157 } catch;
159 /* A tree_list of allowed types. */
160 struct {
161 tree type_list;
162 int filter;
163 } allowed;
165 /* The type given by a call to "throw foo();", or discovered
166 for a throw. */
167 struct {
168 tree type;
169 } throw;
171 /* Retain the cleanup expression even after expansion so that
172 we can match up fixup regions. */
173 struct {
174 tree exp;
175 } cleanup;
177 /* The real region (by expression and by pointer) that fixup code
178 should live in. */
179 struct {
180 tree cleanup_exp;
181 struct eh_region *real_region;
182 } fixup;
183 } u;
185 /* Entry point for this region's handler before landing pads are built. */
186 rtx label;
188 /* Entry point for this region's handler from the runtime eh library. */
189 rtx landing_pad;
191 /* Entry point for this region's handler from an inner region. */
192 rtx post_landing_pad;
194 /* The RESX insn for handing off control to the next outermost handler,
195 if appropriate. */
196 rtx resume;
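/* To illustrate (a sketch, not normative), for

	try { T t; may_throw (); }	// ERT_CLEANUP region for t's destructor
	catch (int) { }			// ERT_CATCH region

   the cleanup region nests inside the ERT_TRY region, and the catch
   region is reached from the try through u.try.catch and
   u.catch.next_catch.  At run time, control from the unwinder enters
   at landing_pad, falls through to post_landing_pad where the filter
   value is dispatched, and finally reaches the handler's label.  */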
199 /* Used to save exception status for each function. */
200 struct eh_status
202 /* The tree of all regions for this function. */
203 struct eh_region *region_tree;
205 /* The same information as an indexable array. */
206 struct eh_region **region_array;
208 /* The most recently open region. */
209 struct eh_region *cur_region;
211 /* This is the region for which we are processing catch blocks. */
212 struct eh_region *try_region;
214 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
215 node is itself a TREE_CHAINed list of handlers for regions that
216 are not yet closed. The TREE_VALUE of each entry contains the
217 handler for the corresponding entry on the ehstack. */
218 tree protect_list;
220 rtx filter;
221 rtx exc_ptr;
223 int built_landing_pads;
224 int last_region_number;
226 varray_type ttype_data;
227 varray_type ehspec_data;
228 varray_type action_record_data;
230 struct call_site_record
232 rtx landing_pad;
233 int action;
234 } *call_site_data;
235 int call_site_data_used;
236 int call_site_data_size;
238 rtx ehr_stackadj;
239 rtx ehr_handler;
240 rtx ehr_label;
242 rtx sjlj_fc;
243 rtx sjlj_exit_after;
247 static void mark_eh_region PARAMS ((struct eh_region *));
249 static int t2r_eq PARAMS ((const PTR,
250 const PTR));
251 static hashval_t t2r_hash PARAMS ((const PTR));
252 static int t2r_mark_1 PARAMS ((PTR *, PTR));
253 static void t2r_mark PARAMS ((PTR));
254 static void add_type_for_runtime PARAMS ((tree));
255 static tree lookup_type_for_runtime PARAMS ((tree));
257 static struct eh_region *expand_eh_region_end PARAMS ((void));
259 static rtx get_exception_filter PARAMS ((void));
261 static void collect_eh_region_array PARAMS ((void));
262 static void resolve_fixup_regions PARAMS ((void));
263 static void remove_fixup_regions PARAMS ((void));
264 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
266 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
267 struct inline_remap *));
268 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
269 struct eh_region **));
270 static int ttypes_filter_eq PARAMS ((const PTR,
271 const PTR));
272 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
273 static int ehspec_filter_eq PARAMS ((const PTR,
274 const PTR));
275 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
276 static int add_ttypes_entry PARAMS ((htab_t, tree));
277 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
278 tree));
279 static void assign_filter_values PARAMS ((void));
280 static void build_post_landing_pads PARAMS ((void));
281 static void connect_post_landing_pads PARAMS ((void));
282 static void dw2_build_landing_pads PARAMS ((void));
284 struct sjlj_lp_info;
285 static bool sjlj_find_directly_reachable_regions
286 PARAMS ((struct sjlj_lp_info *));
287 static void sjlj_assign_call_site_values
288 PARAMS ((rtx, struct sjlj_lp_info *));
289 static void sjlj_mark_call_sites
290 PARAMS ((struct sjlj_lp_info *));
291 static void sjlj_emit_function_enter PARAMS ((rtx));
292 static void sjlj_emit_function_exit PARAMS ((void));
293 static void sjlj_emit_dispatch_table
294 PARAMS ((rtx, struct sjlj_lp_info *));
295 static void sjlj_build_landing_pads PARAMS ((void));
297 static void remove_exception_handler_label PARAMS ((rtx));
298 static void remove_eh_handler PARAMS ((struct eh_region *));
300 struct reachable_info;
302 /* The return value of reachable_next_level. */
303 enum reachable_code
305 /* The given exception is not processed by the given region. */
306 RNL_NOT_CAUGHT,
307 /* The given exception may need processing by the given region. */
308 RNL_MAYBE_CAUGHT,
309 /* The given exception is completely processed by the given region. */
310 RNL_CAUGHT,
311 /* The given exception is completely processed by the runtime. */
312 RNL_BLOCKED
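/* For intuition (inferred from the definitions above, not a
   guarantee): a cleanup region typically yields RNL_MAYBE_CAUGHT, a
   matching or catch-all handler RNL_CAUGHT, a non-matching handler
   RNL_NOT_CAUGHT, and a must-not-throw region RNL_BLOCKED.  */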
315 static int check_handled PARAMS ((tree, tree));
316 static void add_reachable_handler
317 PARAMS ((struct reachable_info *, struct eh_region *,
318 struct eh_region *));
319 static enum reachable_code reachable_next_level
320 PARAMS ((struct eh_region *, tree, struct reachable_info *));
322 static int action_record_eq PARAMS ((const PTR,
323 const PTR));
324 static hashval_t action_record_hash PARAMS ((const PTR));
325 static int add_action_record PARAMS ((htab_t, int, int));
326 static int collect_one_action_chain PARAMS ((htab_t,
327 struct eh_region *));
328 static int add_call_site PARAMS ((rtx, int));
330 static void push_uleb128 PARAMS ((varray_type *,
331 unsigned int));
332 static void push_sleb128 PARAMS ((varray_type *, int));
333 static const char *eh_data_format_name PARAMS ((int));
334 #ifndef HAVE_AS_LEB128
335 static int dw2_size_of_call_site_table PARAMS ((void));
336 static int sjlj_size_of_call_site_table PARAMS ((void));
337 #endif
338 static void dw2_output_call_site_table PARAMS ((void));
339 static void sjlj_output_call_site_table PARAMS ((void));
342 /* Routine to see if exception handling is turned on.
343 DO_WARN is non-zero if we want to inform the user that exception
344 handling is turned off.
346 This is used to ensure that -fexceptions has been specified if the
347 compiler tries to use any exception-specific functions. */
350 doing_eh (do_warn)
351 int do_warn;
353 if (! flag_exceptions)
355 static int warned = 0;
356 if (! warned && do_warn)
358 error ("exception handling disabled, use -fexceptions to enable");
359 warned = 1;
361 return 0;
363 return 1;
367 void
368 init_eh ()
370 ggc_add_rtx_root (&exception_handler_labels, 1);
371 ggc_add_tree_root (&protect_cleanup_actions, 1);
373 if (! flag_exceptions)
374 return;
376 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
377 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
379 /* Create the SjLj_Function_Context structure. This should match
380 the definition in unwind-sjlj.c. */
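/* That structure is roughly (a paraphrase; unwind-sjlj.c is
	 authoritative):

	    struct SjLj_Function_Context {
	      struct SjLj_Function_Context *prev;
	      int call_site;
	      _Unwind_Word data[4];
	      _Unwind_Personality_Fn personality;
	      void *lsda;
	      ... jbuf ...;		// setjmp buffer; size and type vary
	    };  */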
381 if (USING_SJLJ_EXCEPTIONS)
383 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
385 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
386 ggc_add_tree_root (&sjlj_fc_type_node, 1);
388 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
389 build_pointer_type (sjlj_fc_type_node));
390 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
392 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
393 integer_type_node);
394 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
396 tmp = build_index_type (build_int_2 (4 - 1, 0));
397 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
398 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
399 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
401 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
402 ptr_type_node);
403 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
405 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
406 ptr_type_node);
407 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
409 #ifdef DONT_USE_BUILTIN_SETJMP
410 #ifdef JMP_BUF_SIZE
411 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
412 #else
413 /* Should be large enough for most systems; if it is not,
414 JMP_BUF_SIZE should be defined with the proper value. It will
415 also tend to be larger than necessary for most systems; a more
416 optimal port will define JMP_BUF_SIZE. */
417 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
418 #endif
419 #else
420 /* This is 2 for builtin_setjmp, plus whatever the target requires
421 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
422 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
423 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
424 #endif
425 tmp = build_index_type (tmp);
426 tmp = build_array_type (ptr_type_node, tmp);
427 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
428 #ifdef DONT_USE_BUILTIN_SETJMP
429 /* We don't know what alignment requirements the
430 runtime's jmp_buf has. Overestimate. */
431 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
432 DECL_USER_ALIGN (f_jbuf) = 1;
433 #endif
434 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
436 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
437 TREE_CHAIN (f_prev) = f_cs;
438 TREE_CHAIN (f_cs) = f_data;
439 TREE_CHAIN (f_data) = f_per;
440 TREE_CHAIN (f_per) = f_lsda;
441 TREE_CHAIN (f_lsda) = f_jbuf;
443 layout_type (sjlj_fc_type_node);
445 /* Cache the interesting field offsets so that we have
446 easy access from rtl. */
447 sjlj_fc_call_site_ofs
448 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
449 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
450 sjlj_fc_data_ofs
451 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
452 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
453 sjlj_fc_personality_ofs
454 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
455 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
456 sjlj_fc_lsda_ofs
457 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
458 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
459 sjlj_fc_jbuf_ofs
460 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
461 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
465 void
466 init_eh_for_function ()
468 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
471 /* Mark EH for GC. */
473 static void
474 mark_eh_region (region)
475 struct eh_region *region;
477 if (! region)
478 return;
480 switch (region->type)
482 case ERT_CLEANUP:
483 ggc_mark_tree (region->u.cleanup.exp);
484 break;
485 case ERT_TRY:
486 ggc_mark_rtx (region->u.try.continue_label);
487 break;
488 case ERT_CATCH:
489 ggc_mark_tree (region->u.catch.type);
490 break;
491 case ERT_ALLOWED_EXCEPTIONS:
492 ggc_mark_tree (region->u.allowed.type_list);
493 break;
494 case ERT_MUST_NOT_THROW:
495 break;
496 case ERT_THROW:
497 ggc_mark_tree (region->u.throw.type);
498 break;
499 case ERT_FIXUP:
500 ggc_mark_tree (region->u.fixup.cleanup_exp);
501 break;
502 default:
503 abort ();
506 ggc_mark_rtx (region->label);
507 ggc_mark_rtx (region->resume);
508 ggc_mark_rtx (region->landing_pad);
509 ggc_mark_rtx (region->post_landing_pad);
512 void
513 mark_eh_status (eh)
514 struct eh_status *eh;
516 int i;
518 if (eh == 0)
519 return;
521 /* If we've called collect_eh_region_array, use it. Otherwise walk
522 the tree non-recursively. */
523 if (eh->region_array)
525 for (i = eh->last_region_number; i > 0; --i)
527 struct eh_region *r = eh->region_array[i];
528 if (r && r->region_number == i)
529 mark_eh_region (r);
532 else if (eh->region_tree)
534 struct eh_region *r = eh->region_tree;
535 while (1)
537 mark_eh_region (r);
538 if (r->inner)
539 r = r->inner;
540 else if (r->next_peer)
541 r = r->next_peer;
542 else
544 do {
545 r = r->outer;
546 if (r == NULL)
547 goto tree_done;
548 } while (r->next_peer == NULL);
549 r = r->next_peer;
552 tree_done:;
555 ggc_mark_tree (eh->protect_list);
556 ggc_mark_rtx (eh->filter);
557 ggc_mark_rtx (eh->exc_ptr);
558 ggc_mark_tree_varray (eh->ttype_data);
560 if (eh->call_site_data)
562 for (i = eh->call_site_data_used - 1; i >= 0; --i)
563 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
566 ggc_mark_rtx (eh->ehr_stackadj);
567 ggc_mark_rtx (eh->ehr_handler);
568 ggc_mark_rtx (eh->ehr_label);
570 ggc_mark_rtx (eh->sjlj_fc);
571 ggc_mark_rtx (eh->sjlj_exit_after);
574 void
575 free_eh_status (f)
576 struct function *f;
578 struct eh_status *eh = f->eh;
580 if (eh->region_array)
582 int i;
583 for (i = eh->last_region_number; i > 0; --i)
585 struct eh_region *r = eh->region_array[i];
586 /* Mind we don't free a region struct more than once. */
587 if (r && r->region_number == i)
588 free (r);
590 free (eh->region_array);
592 else if (eh->region_tree)
594 struct eh_region *next, *r = eh->region_tree;
595 while (1)
597 if (r->inner)
598 r = r->inner;
599 else if (r->next_peer)
601 next = r->next_peer;
602 free (r);
603 r = next;
605 else
607 do {
608 next = r->outer;
609 free (r);
610 r = next;
611 if (r == NULL)
612 goto tree_done;
613 } while (r->next_peer == NULL);
614 next = r->next_peer;
615 free (r);
616 r = next;
619 tree_done:;
622 VARRAY_FREE (eh->ttype_data);
623 VARRAY_FREE (eh->ehspec_data);
624 VARRAY_FREE (eh->action_record_data);
625 if (eh->call_site_data)
626 free (eh->call_site_data);
628 free (eh);
629 f->eh = NULL;
633 /* Start an exception handling region. All instructions emitted
634 after this point are considered to be part of the region until
635 expand_eh_region_end is invoked. */
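/* The expected calling pattern, in sketch form:

	expand_eh_region_start ();
	... emit insns for the protected body ...
	expand_eh_region_end_cleanup (cleanup);	// or another _end_* variant

   Regions nest, so each start must be closed exactly once.  */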
637 void
638 expand_eh_region_start ()
640 struct eh_region *new_region;
641 struct eh_region *cur_region;
642 rtx note;
644 if (! doing_eh (0))
645 return;
647 /* Insert a new blank region as a leaf in the tree. */
648 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
649 cur_region = cfun->eh->cur_region;
650 new_region->outer = cur_region;
651 if (cur_region)
653 new_region->next_peer = cur_region->inner;
654 cur_region->inner = new_region;
656 else
658 new_region->next_peer = cfun->eh->region_tree;
659 cfun->eh->region_tree = new_region;
661 cfun->eh->cur_region = new_region;
663 /* Create a note marking the start of this region. */
664 new_region->region_number = ++cfun->eh->last_region_number;
665 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
666 NOTE_EH_HANDLER (note) = new_region->region_number;
669 /* Common code to end a region. Returns the region just ended. */
671 static struct eh_region *
672 expand_eh_region_end ()
674 struct eh_region *cur_region = cfun->eh->cur_region;
675 rtx note;
677 /* Create a note marking the end of this region. */
678 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
679 NOTE_EH_HANDLER (note) = cur_region->region_number;
681 /* Pop. */
682 cfun->eh->cur_region = cur_region->outer;
684 return cur_region;
687 /* End an exception handling region for a cleanup. HANDLER is an
688 expression to expand for the cleanup. */
690 void
691 expand_eh_region_end_cleanup (handler)
692 tree handler;
694 struct eh_region *region;
695 rtx around_label;
696 rtx data_save[2];
698 if (! doing_eh (0))
699 return;
701 region = expand_eh_region_end ();
702 region->type = ERT_CLEANUP;
703 region->label = gen_label_rtx ();
704 region->u.cleanup.exp = handler;
706 around_label = gen_label_rtx ();
707 emit_jump (around_label);
709 emit_label (region->label);
711 if (protect_cleanup_actions)
712 expand_eh_region_start ();
714 /* In case this cleanup involves an inline destructor with a try block in
715 it, we need to save the EH return data registers around it. */
716 data_save[0] = gen_reg_rtx (Pmode);
717 emit_move_insn (data_save[0], get_exception_pointer ());
718 data_save[1] = gen_reg_rtx (word_mode);
719 emit_move_insn (data_save[1], get_exception_filter ());
721 expand_expr (handler, const0_rtx, VOIDmode, 0);
723 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
724 emit_move_insn (cfun->eh->filter, data_save[1]);
726 if (protect_cleanup_actions)
727 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
729 /* We need any stack adjustment complete before the around_label. */
730 do_pending_stack_adjust ();
732 /* We delay the generation of the _Unwind_Resume until we generate
733 landing pads. We emit a marker here so as to get good control
734 flow data in the meantime. */
735 region->resume
736 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
737 emit_barrier ();
739 emit_label (around_label);
742 /* End an exception handling region for a try block, and prepare
743 for subsequent calls to expand_start_catch. */
745 void
746 expand_start_all_catch ()
748 struct eh_region *region;
750 if (! doing_eh (1))
751 return;
753 region = expand_eh_region_end ();
754 region->type = ERT_TRY;
755 region->u.try.prev_try = cfun->eh->try_region;
756 region->u.try.continue_label = gen_label_rtx ();
758 cfun->eh->try_region = region;
760 emit_jump (region->u.try.continue_label);
763 /* Begin a catch clause. TYPE is the type caught, or null if this is
764 a catch-all clause. */
766 void
767 expand_start_catch (type)
768 tree type;
770 struct eh_region *t, *c, *l;
772 if (! doing_eh (0))
773 return;
775 if (type)
776 add_type_for_runtime (type);
777 expand_eh_region_start ();
779 t = cfun->eh->try_region;
780 c = cfun->eh->cur_region;
781 c->type = ERT_CATCH;
782 c->u.catch.type = type;
783 c->label = gen_label_rtx ();
785 l = t->u.try.last_catch;
786 c->u.catch.prev_catch = l;
787 if (l)
788 l->u.catch.next_catch = c;
789 else
790 t->u.try.catch = c;
791 t->u.try.last_catch = c;
793 emit_label (c->label);
796 /* End a catch clause. Control will resume after the try/catch block. */
798 void
799 expand_end_catch ()
801 struct eh_region *try_region, *catch_region;
803 if (! doing_eh (0))
804 return;
806 catch_region = expand_eh_region_end ();
807 try_region = cfun->eh->try_region;
809 emit_jump (try_region->u.try.continue_label);
812 /* End a sequence of catch handlers for a try block. */
814 void
815 expand_end_all_catch ()
817 struct eh_region *try_region;
819 if (! doing_eh (0))
820 return;
822 try_region = cfun->eh->try_region;
823 cfun->eh->try_region = try_region->u.try.prev_try;
825 emit_label (try_region->u.try.continue_label);
828 /* End an exception region for an exception type filter. ALLOWED is a
829 TREE_LIST of types to be matched by the runtime. FAILURE is an
830 expression to invoke if a mismatch occurs. */
832 void
833 expand_eh_region_end_allowed (allowed, failure)
834 tree allowed, failure;
836 struct eh_region *region;
837 rtx around_label;
839 if (! doing_eh (0))
840 return;
842 region = expand_eh_region_end ();
843 region->type = ERT_ALLOWED_EXCEPTIONS;
844 region->u.allowed.type_list = allowed;
845 region->label = gen_label_rtx ();
847 for (; allowed ; allowed = TREE_CHAIN (allowed))
848 add_type_for_runtime (TREE_VALUE (allowed));
850 /* We must emit the call to FAILURE here, so that if this function
851 throws a different exception, it will be processed by the
852 correct region. */
854 around_label = gen_label_rtx ();
855 emit_jump (around_label);
857 emit_label (region->label);
858 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
860 emit_label (around_label);
863 /* End an exception region for a must-not-throw filter. FAILURE is an
864 expression to invoke if an uncaught exception propagates this far.
866 This is conceptually identical to expand_eh_region_end_allowed with
867 an empty allowed list (if you passed "std::terminate" instead of
868 "__cxa_call_unexpected"), but they are represented differently in
869 the C++ LSDA. */
871 void
872 expand_eh_region_end_must_not_throw (failure)
873 tree failure;
875 struct eh_region *region;
876 rtx around_label;
878 if (! doing_eh (0))
879 return;
881 region = expand_eh_region_end ();
882 region->type = ERT_MUST_NOT_THROW;
883 region->label = gen_label_rtx ();
885 /* We must emit the call to FAILURE here, so that if this function
886 throws a different exception, it will be processed by the
887 correct region. */
889 around_label = gen_label_rtx ();
890 emit_jump (around_label);
892 emit_label (region->label);
893 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
895 emit_label (around_label);
898 /* End an exception region for a throw. No handling goes on here,
899 but it's the easiest way for the front-end to indicate what type
900 is being thrown. */
902 void
903 expand_eh_region_end_throw (type)
904 tree type;
906 struct eh_region *region;
908 if (! doing_eh (0))
909 return;
911 region = expand_eh_region_end ();
912 region->type = ERT_THROW;
913 region->u.throw.type = type;
916 /* End a fixup region. Within this region the cleanups for the immediately
917 enclosing region are _not_ run. This is used for goto cleanup to avoid
918 destroying an object twice.
920 This would be an extraordinarily simple prospect, were it not for the
921 fact that we don't actually know what the immediately enclosing region
922 is. This surprising fact arises because expand_cleanups is currently
923 generating a sequence that it will insert somewhere else. We collect
924 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
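/* An illustrative example (a sketch): given

	{ C c; ... goto done; ... }
	done:;

   the goto must run c's destructor on the way out.  The expanded
   destructor call sits in a fixup region so that, should it throw,
   the enclosing cleanup region for c is not entered again and the
   object is not destroyed twice.  */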
926 void
927 expand_eh_region_end_fixup (handler)
928 tree handler;
930 struct eh_region *fixup;
932 if (! doing_eh (0))
933 return;
935 fixup = expand_eh_region_end ();
936 fixup->type = ERT_FIXUP;
937 fixup->u.fixup.cleanup_exp = handler;
940 /* Return an rtl expression for a pointer to the exception object
941 within a handler. */
944 get_exception_pointer ()
946 rtx exc_ptr = cfun->eh->exc_ptr;
947 if (! exc_ptr)
949 exc_ptr = gen_reg_rtx (Pmode);
950 cfun->eh->exc_ptr = exc_ptr;
952 return exc_ptr;
955 /* Return an rtl expression for the exception dispatch filter
956 within a handler. */
958 static rtx
959 get_exception_filter ()
961 rtx filter = cfun->eh->filter;
962 if (! filter)
964 filter = gen_reg_rtx (word_mode);
965 cfun->eh->filter = filter;
967 return filter;
970 /* Begin a region that will contain entries created with
971 add_partial_entry. */
973 void
974 begin_protect_partials ()
976 /* Push room for a new list. */
977 cfun->eh->protect_list
978 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
981 /* Start a new exception region for a region of code that has a
982 cleanup action and push the HANDLER for the region onto
983 protect_list. All of the regions created with add_partial_entry
984 will be ended when end_protect_partials is invoked. */
986 void
987 add_partial_entry (handler)
988 tree handler;
990 expand_eh_region_start ();
992 /* ??? This comment was old before the most recent rewrite. We
993 really ought to fix the callers at some point. */
994 /* For backwards compatibility, we allow callers to omit calls to
995 begin_protect_partials for the outermost region. So, we must
996 explicitly do so here. */
997 if (!cfun->eh->protect_list)
998 begin_protect_partials ();
1000 /* Add this entry to the front of the list. */
1001 TREE_VALUE (cfun->eh->protect_list)
1002 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1005 /* End all the pending exception regions on protect_list. */
1007 void
1008 end_protect_partials ()
1010 tree t;
1012 /* ??? This comment was old before the most recent rewrite. We
1013 really ought to fix the callers at some point. */
1014 /* For backwards compatibility, we allow callers to omit the call to
1015 begin_protect_partials for the outermost region. So,
1016 PROTECT_LIST may be NULL. */
1017 if (!cfun->eh->protect_list)
1018 return;
1020 /* Pop the topmost entry. */
1021 t = TREE_VALUE (cfun->eh->protect_list);
1022 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1024 /* End all the exception regions. */
1025 for (; t; t = TREE_CHAIN (t))
1026 expand_eh_region_end_cleanup (TREE_VALUE (t));
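/* Taken together, the protect_partials interface is used like this
   (a sketch of front-end usage):

	begin_protect_partials ();
	add_partial_entry (cleanup_for_a);	// opens a region, records handler
	add_partial_entry (cleanup_for_b);
	end_protect_partials ();		// closes both regions, LIFO

   where each recorded handler is expanded through
   expand_eh_region_end_cleanup above.  */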
1030 /* This section is for the exception handling specific optimization pass. */
1032 /* Random access the exception region tree. It's just as simple to
1033 collect the regions this way as in expand_eh_region_start, but
1034 without having to realloc memory. */
1036 static void
1037 collect_eh_region_array ()
1039 struct eh_region **array, *i;
1041 i = cfun->eh->region_tree;
1042 if (! i)
1043 return;
1045 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1046 cfun->eh->region_array = array;
1048 while (1)
1050 array[i->region_number] = i;
1052 /* If there are sub-regions, process them. */
1053 if (i->inner)
1054 i = i->inner;
1055 /* If there are peers, process them. */
1056 else if (i->next_peer)
1057 i = i->next_peer;
1058 /* Otherwise, step back up the tree to the next peer. */
1059 else
1061 do {
1062 i = i->outer;
1063 if (i == NULL)
1064 return;
1065 } while (i->next_peer == NULL);
1066 i = i->next_peer;
1071 static void
1072 resolve_fixup_regions ()
1074 int i, j, n = cfun->eh->last_region_number;
1076 for (i = 1; i <= n; ++i)
1078 struct eh_region *fixup = cfun->eh->region_array[i];
1079 struct eh_region *cleanup;
1081 if (! fixup || fixup->type != ERT_FIXUP)
1082 continue;
1084 for (j = 1; j <= n; ++j)
1086 cleanup = cfun->eh->region_array[j];
1087 if (cleanup->type == ERT_CLEANUP
1088 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1089 break;
1091 if (j > n)
1092 abort ();
1094 fixup->u.fixup.real_region = cleanup->outer;
1098 /* Now that we've discovered what region actually encloses a fixup,
1099 we can shuffle pointers and remove them from the tree. */
1101 static void
1102 remove_fixup_regions ()
1104 int i;
1105 rtx insn, note;
1106 struct eh_region *fixup;
1108 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1109 for instructions referencing fixup regions. This is only
1110 strictly necessary for fixup regions with no parent, but
1111 doesn't hurt to do it for all regions. */
1112 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1113 if (INSN_P (insn)
1114 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1115 && INTVAL (XEXP (note, 0)) > 0
1116 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1117 && fixup->type == ERT_FIXUP)
1119 if (fixup->u.fixup.real_region)
1120 XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
1121 else
1122 remove_note (insn, note);
1125 /* Remove the fixup regions from the tree. */
1126 for (i = cfun->eh->last_region_number; i > 0; --i)
1128 fixup = cfun->eh->region_array[i];
1129 if (! fixup)
1130 continue;
1132 /* Allow GC to maybe free some memory. */
1133 if (fixup->type == ERT_CLEANUP)
1134 fixup->u.cleanup.exp = NULL_TREE;
1136 if (fixup->type != ERT_FIXUP)
1137 continue;
1139 if (fixup->inner)
1141 struct eh_region *parent, *p, **pp;
1143 parent = fixup->u.fixup.real_region;
1145 /* Fix up the children's parent pointers; find the end of
1146 the list. */
1147 for (p = fixup->inner; ; p = p->next_peer)
1149 p->outer = parent;
1150 if (! p->next_peer)
1151 break;
1154 /* In the tree of cleanups, only outer-inner ordering matters.
1155 So link the children back in anywhere at the correct level. */
1156 if (parent)
1157 pp = &parent->inner;
1158 else
1159 pp = &cfun->eh->region_tree;
1160 p->next_peer = *pp;
1161 *pp = fixup->inner;
1162 fixup->inner = NULL;
1165 remove_eh_handler (fixup);
1169 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1170 can_throw instruction in the region. */
1172 static void
1173 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1174 rtx *pinsns;
1175 int *orig_sp;
1176 int cur;
1178 int *sp = orig_sp;
1179 rtx insn, next;
1181 for (insn = *pinsns; insn ; insn = next)
1183 next = NEXT_INSN (insn);
1184 if (GET_CODE (insn) == NOTE)
1186 int kind = NOTE_LINE_NUMBER (insn);
1187 if (kind == NOTE_INSN_EH_REGION_BEG
1188 || kind == NOTE_INSN_EH_REGION_END)
1190 if (kind == NOTE_INSN_EH_REGION_BEG)
1192 struct eh_region *r;
1194 *sp++ = cur;
1195 cur = NOTE_EH_HANDLER (insn);
1197 r = cfun->eh->region_array[cur];
1198 if (r->type == ERT_FIXUP)
1200 r = r->u.fixup.real_region;
1201 cur = r ? r->region_number : 0;
1203 else if (r->type == ERT_CATCH)
1205 r = r->outer;
1206 cur = r ? r->region_number : 0;
1209 else
1210 cur = *--sp;
1212 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1213 requires extra care to adjust sequence start. */
1214 if (insn == *pinsns)
1215 *pinsns = next;
1216 remove_insn (insn);
1217 continue;
1220 else if (INSN_P (insn))
1222 if (cur > 0
1223 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1224 /* Calls can always potentially throw exceptions, unless
1225 they have a REG_EH_REGION note with a value of 0 or less.
1226 Which should be the only possible kind so far. */
1227 && (GET_CODE (insn) == CALL_INSN
1228 /* If we wanted exceptions for non-call insns, then
1229 any may_trap_p instruction could throw. */
1230 || (flag_non_call_exceptions
1231 && may_trap_p (PATTERN (insn)))))
1233 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1234 REG_NOTES (insn));
1237 if (GET_CODE (insn) == CALL_INSN
1238 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1240 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1241 sp, cur);
1242 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1243 sp, cur);
1244 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1245 sp, cur);
1250 if (sp != orig_sp)
1251 abort ();
1254 void
1255 convert_from_eh_region_ranges ()
1257 int *stack;
1258 rtx insns;
1260 collect_eh_region_array ();
1261 resolve_fixup_regions ();
1263 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1264 insns = get_insns ();
1265 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1266 free (stack);
1268 remove_fixup_regions ();
1271 void
1272 find_exception_handler_labels ()
1274 rtx list = NULL_RTX;
1275 int i;
1277 free_EXPR_LIST_list (&exception_handler_labels);
1279 if (cfun->eh->region_tree == NULL)
1280 return;
1282 for (i = cfun->eh->last_region_number; i > 0; --i)
1284 struct eh_region *region = cfun->eh->region_array[i];
1285 rtx lab;
1287 if (! region)
1288 continue;
1289 if (cfun->eh->built_landing_pads)
1290 lab = region->landing_pad;
1291 else
1292 lab = region->label;
1294 if (lab)
1295 list = alloc_EXPR_LIST (0, lab, list);
1298 /* For sjlj exceptions, we need the return label to remain live until
1299 after landing pad generation. */
1300 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1301 list = alloc_EXPR_LIST (0, return_label, list);
1303 exception_handler_labels = list;
1307 static struct eh_region *
1308 duplicate_eh_region_1 (o, map)
1309 struct eh_region *o;
1310 struct inline_remap *map;
1312 struct eh_region *n
1313 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1315 n->region_number = o->region_number + cfun->eh->last_region_number;
1316 n->type = o->type;
1318 switch (n->type)
1320 case ERT_CLEANUP:
1321 case ERT_MUST_NOT_THROW:
1322 break;
1324 case ERT_TRY:
1325 if (o->u.try.continue_label)
1326 n->u.try.continue_label
1327 = get_label_from_map (map,
1328 CODE_LABEL_NUMBER (o->u.try.continue_label));
1329 break;
1331 case ERT_CATCH:
1332 n->u.catch.type = o->u.catch.type;
1333 break;
1335 case ERT_ALLOWED_EXCEPTIONS:
1336 n->u.allowed.type_list = o->u.allowed.type_list;
1337 break;
1339 case ERT_THROW:
1340 n->u.throw.type = o->u.throw.type;
1342 default:
1343 abort ();
1346 if (o->label)
1347 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1348 if (o->resume)
1350 n->resume = map->insn_map[INSN_UID (o->resume)];
1351 if (n->resume == NULL)
1352 abort ();
1355 return n;
1358 static void
1359 duplicate_eh_region_2 (o, n_array)
1360 struct eh_region *o;
1361 struct eh_region **n_array;
1363 struct eh_region *n = n_array[o->region_number];
1365 switch (n->type)
1367 case ERT_TRY:
1368 n->u.try.catch = n_array[o->u.try.catch->region_number];
1369 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1370 break;
1372 case ERT_CATCH:
1373 if (o->u.catch.next_catch)
1374 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1375 if (o->u.catch.prev_catch)
1376 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1377 break;
1379 default:
1380 break;
1383 if (o->outer)
1384 n->outer = n_array[o->outer->region_number];
1385 if (o->inner)
1386 n->inner = n_array[o->inner->region_number];
1387 if (o->next_peer)
1388 n->next_peer = n_array[o->next_peer->region_number];
1392 duplicate_eh_regions (ifun, map)
1393 struct function *ifun;
1394 struct inline_remap *map;
1396 int ifun_last_region_number = ifun->eh->last_region_number;
1397 struct eh_region **n_array, *root, *cur;
1398 int i;
1400 if (ifun_last_region_number == 0)
1401 return 0;
1403 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1405 for (i = 1; i <= ifun_last_region_number; ++i)
1407 cur = ifun->eh->region_array[i];
1408 if (!cur || cur->region_number != i)
1409 continue;
1410 n_array[i] = duplicate_eh_region_1 (cur, map);
1412 for (i = 1; i <= ifun_last_region_number; ++i)
1414 cur = ifun->eh->region_array[i];
1415 if (!cur || cur->region_number != i)
1416 continue;
1417 duplicate_eh_region_2 (cur, n_array);
1420 root = n_array[ifun->eh->region_tree->region_number];
1421 cur = cfun->eh->cur_region;
1422 if (cur)
1424 struct eh_region *p = cur->inner;
1425 if (p)
1427 while (p->next_peer)
1428 p = p->next_peer;
1429 p->next_peer = root;
1431 else
1432 cur->inner = root;
1434 for (i = 1; i <= ifun_last_region_number; ++i)
1435 if (n_array[i]->outer == NULL)
1436 n_array[i]->outer = cur;
1438 else
1440 struct eh_region *p = cfun->eh->region_tree;
1441 if (p)
1443 while (p->next_peer)
1444 p = p->next_peer;
1445 p->next_peer = root;
1447 else
1448 cfun->eh->region_tree = root;
1451 free (n_array);
1453 i = cfun->eh->last_region_number;
1454 cfun->eh->last_region_number = i + ifun_last_region_number;
1455 return i;
1459 /* ??? Move from tree.c to tree.h. */
1460 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
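/* (That is, the low-order bits of the TYPE pointer itself serve as
   the hash value; 0777777 is an octal mask.)  */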
1462 static int
1463 t2r_eq (pentry, pdata)
1464 const PTR pentry;
1465 const PTR pdata;
1467 tree entry = (tree) pentry;
1468 tree data = (tree) pdata;
1470 return TREE_PURPOSE (entry) == data;
1473 static hashval_t
1474 t2r_hash (pentry)
1475 const PTR pentry;
1477 tree entry = (tree) pentry;
1478 return TYPE_HASH (TREE_PURPOSE (entry));
1481 static int
1482 t2r_mark_1 (slot, data)
1483 PTR *slot;
1484 PTR data ATTRIBUTE_UNUSED;
1486 tree contents = (tree) *slot;
1487 ggc_mark_tree (contents);
1488 return 1;
1491 static void
1492 t2r_mark (addr)
1493 PTR addr;
1495 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1498 static void
1499 add_type_for_runtime (type)
1500 tree type;
1502 tree *slot;
1504 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1505 TYPE_HASH (type), INSERT);
1506 if (*slot == NULL)
1508 tree runtime = (*lang_eh_runtime_type) (type);
1509 *slot = tree_cons (type, runtime, NULL_TREE);
1513 static tree
1514 lookup_type_for_runtime (type)
1515 tree type;
1517 tree *slot;
1519 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1520 TYPE_HASH (type), NO_INSERT);
1522 /* We should always have inserted the data earlier. */
1523 return TREE_VALUE (*slot);
1527 /* Represent an entry in @TTypes for either catch actions
1528 or exception filter actions. */
1529 struct ttypes_filter
1531 tree t;
1532 int filter;
1535 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1536 (a tree) for a @TTypes type node we are thinking about adding. */
1538 static int
1539 ttypes_filter_eq (pentry, pdata)
1540 const PTR pentry;
1541 const PTR pdata;
1543 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1544 tree data = (tree) pdata;
1546 return entry->t == data;
1549 static hashval_t
1550 ttypes_filter_hash (pentry)
1551 const PTR pentry;
1553 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1554 return TYPE_HASH (entry->t);
1557 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1558 exception specification list we are thinking about adding. */
1559 /* ??? Currently we use the type lists in the order given. Someone
1560 should put these in some canonical order. */
1562 static int
1563 ehspec_filter_eq (pentry, pdata)
1564 const PTR pentry;
1565 const PTR pdata;
1567 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1568 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1570 return type_list_equal (entry->t, data->t);
1573 /* Hash function for exception specification lists. */
1575 static hashval_t
1576 ehspec_filter_hash (pentry)
1577 const PTR pentry;
1579 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1580 hashval_t h = 0;
1581 tree list;
1583 for (list = entry->t; list ; list = TREE_CHAIN (list))
1584 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1585 return h;
1588 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1589 up the search. Return the filter value to be used. */
1591 static int
1592 add_ttypes_entry (ttypes_hash, type)
1593 htab_t ttypes_hash;
1594 tree type;
1596 struct ttypes_filter **slot, *n;
1598 slot = (struct ttypes_filter **)
1599 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1601 if ((n = *slot) == NULL)
1603 /* Filter value is a 1 based table index. */
1605 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1606 n->t = type;
1607 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1608 *slot = n;
1610 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1613 return n->filter;
1616 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1617 to speed up the search. Return the filter value to be used. */
1619 static int
1620 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1621 htab_t ehspec_hash;
1622 htab_t ttypes_hash;
1623 tree list;
1625 struct ttypes_filter **slot, *n;
1626 struct ttypes_filter dummy;
1628 dummy.t = list;
1629 slot = (struct ttypes_filter **)
1630 htab_find_slot (ehspec_hash, &dummy, INSERT);
1632 if ((n = *slot) == NULL)
1634 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1636 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1637 n->t = list;
1638 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1639 *slot = n;
1641 /* Look up each type in the list and encode its filter
1642 value as a uleb128. Terminate the list with 0. */
1643 for (; list ; list = TREE_CHAIN (list))
1644 push_uleb128 (&cfun->eh->ehspec_data,
1645 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1646 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1649 return n->filter;
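/* As a reminder of the uleb128 encoding used above (standard LEB128,
   as in the DWARF specification): seven value bits per byte, least
   significant group first, the high bit set on all bytes but the
   last.  For example, 624485 encodes as the bytes 0xe5 0x8e 0x26.  */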
1652 /* Generate the action filter values to be used for CATCH and
1653 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1654 we use lots of landing pads, and so every type or list can share
1655 the same filter value, which saves table space. */
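/* For instance (illustrative): if several handlers in a function all
   name the same type, say "catch (int)", each ERT_CATCH region gets
   the same filter value and the @TTypes table carries one entry.  */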
1657 static void
1658 assign_filter_values ()
1660 int i;
1661 htab_t ttypes, ehspec;
1663 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1664 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1666 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1667 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1669 for (i = cfun->eh->last_region_number; i > 0; --i)
1671 struct eh_region *r = cfun->eh->region_array[i];
1673 /* Mind we don't process a region more than once. */
1674 if (!r || r->region_number != i)
1675 continue;
1677 switch (r->type)
1679 case ERT_CATCH:
1680 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1681 break;
1683 case ERT_ALLOWED_EXCEPTIONS:
1684 r->u.allowed.filter
1685 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1686 break;
1688 default:
1689 break;
1693 htab_delete (ttypes);
1694 htab_delete (ehspec);
1697 static void
1698 build_post_landing_pads ()
1700 int i;
1702 for (i = cfun->eh->last_region_number; i > 0; --i)
1704 struct eh_region *region = cfun->eh->region_array[i];
1705 rtx seq;
1707 /* Mind we don't process a region more than once. */
1708 if (!region || region->region_number != i)
1709 continue;
1711 switch (region->type)
1713 case ERT_TRY:
1714 /* ??? Collect the set of all non-overlapping catch handlers
1715 all the way up the chain until blocked by a cleanup. */
1716 /* ??? Outer try regions can share landing pads with inner
1717 try regions if the types are completely non-overlapping,
1718 and there are no intervening cleanups. */
1720 region->post_landing_pad = gen_label_rtx ();
1722 start_sequence ();
1724 emit_label (region->post_landing_pad);
1726 /* ??? It is mighty inconvenient to call back into the
1727 switch statement generation code in expand_end_case.
1728 Rapid prototyping sez a sequence of ifs. */
1730 struct eh_region *c;
1731 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1733 /* ??? _Unwind_ForcedUnwind wants no match here. */
1734 if (c->u.catch.type == NULL)
1735 emit_jump (c->label);
1736 else
1737 emit_cmp_and_jump_insns (cfun->eh->filter,
1738 GEN_INT (c->u.catch.filter),
1739 EQ, NULL_RTX, word_mode,
1740 0, 0, c->label);
1744 /* We delay the generation of the _Unwind_Resume until we generate
1745 landing pads. We emit a marker here so as to get good control
1746 flow data in the meantime. */
1747 region->resume
1748 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1749 emit_barrier ();
1751 seq = get_insns ();
1752 end_sequence ();
1754 emit_insns_before (seq, region->u.try.catch->label);
1755 break;
1757 case ERT_ALLOWED_EXCEPTIONS:
1758 region->post_landing_pad = gen_label_rtx ();
1760 start_sequence ();
1762 emit_label (region->post_landing_pad);
1764 emit_cmp_and_jump_insns (cfun->eh->filter,
1765 GEN_INT (region->u.allowed.filter),
1766 EQ, NULL_RTX, word_mode, 0, 0,
1767 region->label);
1769 /* We delay the generation of the _Unwind_Resume until we generate
1770 landing pads. We emit a marker here so as to get good control
1771 flow data in the meantime. */
1772 region->resume
1773 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1774 emit_barrier ();
1776 seq = get_insns ();
1777 end_sequence ();
1779 emit_insns_before (seq, region->label);
1780 break;
1782 case ERT_CLEANUP:
1783 case ERT_MUST_NOT_THROW:
1784 region->post_landing_pad = region->label;
1785 break;
1787 case ERT_CATCH:
1788 case ERT_THROW:
1789 /* Nothing to do. */
1790 break;
1792 default:
1793 abort ();
1798 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1799 _Unwind_Resume otherwise. */
1801 static void
1802 connect_post_landing_pads ()
1804 int i;
1806 for (i = cfun->eh->last_region_number; i > 0; --i)
1808 struct eh_region *region = cfun->eh->region_array[i];
1809 struct eh_region *outer;
1810 rtx seq;
1812 /* Mind we don't process a region more than once. */
1813 if (!region || region->region_number != i)
1814 continue;
1816 /* If there is no RESX, or it has been deleted by flow, there's
1817 nothing to fix up. */
1818 if (! region->resume || INSN_DELETED_P (region->resume))
1819 continue;
1821 /* Search for another landing pad in this function. */
1822 for (outer = region->outer; outer ; outer = outer->outer)
1823 if (outer->post_landing_pad)
1824 break;
1826 start_sequence ();
1828 if (outer)
1829 emit_jump (outer->post_landing_pad);
1830 else
1831 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1832 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1834 seq = get_insns ();
1835 end_sequence ();
1836 emit_insns_before (seq, region->resume);
1838 /* Leave the RESX to be deleted by flow. */
1843 static void
1844 dw2_build_landing_pads ()
1846 int i, j;
1848 for (i = cfun->eh->last_region_number; i > 0; --i)
1850 struct eh_region *region = cfun->eh->region_array[i];
1851 rtx seq;
1853 /* Mind we don't process a region more than once. */
1854 if (!region || region->region_number != i)
1855 continue;
1857 if (region->type != ERT_CLEANUP
1858 && region->type != ERT_TRY
1859 && region->type != ERT_ALLOWED_EXCEPTIONS)
1860 continue;
1862 start_sequence ();
1864 region->landing_pad = gen_label_rtx ();
1865 emit_label (region->landing_pad);
1867 #ifdef HAVE_exception_receiver
1868 if (HAVE_exception_receiver)
1869 emit_insn (gen_exception_receiver ());
1870 else
1871 #endif
1872 #ifdef HAVE_nonlocal_goto_receiver
1873 if (HAVE_nonlocal_goto_receiver)
1874 emit_insn (gen_nonlocal_goto_receiver ());
1875 else
1876 #endif
1877 { /* Nothing */ }
1879 /* If the eh_return data registers are call-saved, then we
1880 won't have considered them clobbered from the call that
1881 threw. Kill them now. */
1882 for (j = 0; ; ++j)
1884 unsigned r = EH_RETURN_DATA_REGNO (j);
1885 if (r == INVALID_REGNUM)
1886 break;
1887 if (! call_used_regs[r])
1888 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1891 emit_move_insn (cfun->eh->exc_ptr,
1892 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1893 emit_move_insn (cfun->eh->filter,
1894 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (1)));
1896 seq = get_insns ();
1897 end_sequence ();
1899 emit_insns_before (seq, region->post_landing_pad);
1904 struct sjlj_lp_info
1906 int directly_reachable;
1907 int action_index;
1908 int dispatch_index;
1909 int call_site_index;
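/* (Sketchy field notes: directly_reachable is set when some insn's
   REG_EH_REGION note resolves to this region; action_index indexes
   action_record_data, with -1 for no action and -2 for must-not-throw;
   dispatch_index numbers the cases of the common dispatch code; and
   call_site_index is the value stored in the function context before
   each call.)  */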
1912 static bool
1913 sjlj_find_directly_reachable_regions (lp_info)
1914 struct sjlj_lp_info *lp_info;
1916 rtx insn;
1917 bool found_one = false;
1919 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1921 struct eh_region *region;
1922 tree type_thrown;
1923 rtx note;
1925 if (! INSN_P (insn))
1926 continue;
1928 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1929 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1930 continue;
1932 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1934 type_thrown = NULL_TREE;
1935 if (region->type == ERT_THROW)
1937 type_thrown = region->u.throw.type;
1938 region = region->outer;
1941 /* Find the first containing region that might handle the exception.
1942 That's the landing pad to which we will transfer control. */
1943 for (; region; region = region->outer)
1944 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1945 break;
1947 if (region)
1949 lp_info[region->region_number].directly_reachable = 1;
1950 found_one = true;
1954 return found_one;
1957 static void
1958 sjlj_assign_call_site_values (dispatch_label, lp_info)
1959 rtx dispatch_label;
1960 struct sjlj_lp_info *lp_info;
1962 htab_t ar_hash;
1963 int i, index;
1965 /* First task: build the action table. */
1967 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1968 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1970 for (i = cfun->eh->last_region_number; i > 0; --i)
1971 if (lp_info[i].directly_reachable)
1973 struct eh_region *r = cfun->eh->region_array[i];
1974 r->landing_pad = dispatch_label;
1975 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1976 if (lp_info[i].action_index != -1)
1977 cfun->uses_eh_lsda = 1;
1980 htab_delete (ar_hash);
1982 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1983 landing pad label for the region. For sjlj though, there is one
1984 common landing pad from which we dispatch to the post-landing pads.
1986 A region receives a dispatch index if it is directly reachable
1987 and requires in-function processing. Regions that share post-landing
1988 pads may share dispatch indices. */
1989 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1990 (see build_post_landing_pads) so we don't bother checking for it. */
1992 index = 0;
1993 for (i = cfun->eh->last_region_number; i > 0; --i)
1994 if (lp_info[i].directly_reachable
1995 && lp_info[i].action_index >= 0)
1996 lp_info[i].dispatch_index = index++;
1998 /* Finally: assign call-site values. In dwarf2 terms, this would be
1999 the region number assigned by convert_to_eh_region_ranges, but
2000 handles no-action and must-not-throw differently. */
2002 call_site_base = 1;
2003 for (i = cfun->eh->last_region_number; i > 0; --i)
2004 if (lp_info[i].directly_reachable)
2006 int action = lp_info[i].action_index;
2008 /* Map must-not-throw to otherwise unused call-site index 0. */
2009 if (action == -2)
2010 index = 0;
2011 /* Map no-action to otherwise unused call-site index -1. */
2012 else if (action == -1)
2013 index = -1;
2014 /* Otherwise, look it up in the table. */
2015 else
2016 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2018 lp_info[i].call_site_index = index;
2022 static void
2023 sjlj_mark_call_sites (lp_info)
2024 struct sjlj_lp_info *lp_info;
2026 int last_call_site = -2;
2027 rtx insn, mem;
2029 mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2030 plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
2031 sjlj_fc_call_site_ofs));
2033 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2035 struct eh_region *region;
2036 int this_call_site;
2037 rtx note, before, p;
2039 /* Reset value tracking at extended basic block boundaries. */
2040 if (GET_CODE (insn) == CODE_LABEL)
2041 last_call_site = -2;
2043 if (! INSN_P (insn))
2044 continue;
2046 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2047 if (!note)
2049 /* Calls (and trapping insns) without notes are outside any
2050 exception handling region in this function. Mark them as
2051 no action. */
2052 if (GET_CODE (insn) == CALL_INSN
2053 || (flag_non_call_exceptions
2054 && may_trap_p (PATTERN (insn))))
2055 this_call_site = -1;
2056 else
2057 continue;
2059 else
2061 /* Calls that are known to not throw need not be marked. */
2062 if (INTVAL (XEXP (note, 0)) <= 0)
2063 continue;
2065 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2066 this_call_site = lp_info[region->region_number].call_site_index;
2069 if (this_call_site == last_call_site)
2070 continue;
2072 /* Don't separate a call from its argument loads. */
2073 before = insn;
2074 if (GET_CODE (insn) == CALL_INSN)
2076 HARD_REG_SET parm_regs;
2077 int nparm_regs;
2079 /* Since different machines initialize their parameter registers
2080 in different orders, assume nothing. Collect the set of all
2081 parameter registers. */
2082 CLEAR_HARD_REG_SET (parm_regs);
2083 nparm_regs = 0;
2084 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
2085 if (GET_CODE (XEXP (p, 0)) == USE
2086 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
2088 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
2089 abort ();
2091 /* We only care about registers which can hold function
2092 arguments. */
2093 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
2094 continue;
2096 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
2097 nparm_regs++;
2100 /* Search backward for the first set of a register in this set. */
2101 while (nparm_regs)
2103 before = PREV_INSN (before);
2105 /* Given that we've done no other optimizations yet,
2106 the arguments should be immediately available. */
2107 if (GET_CODE (before) == CODE_LABEL)
2108 abort ();
2110 p = single_set (before);
2111 if (p && GET_CODE (SET_DEST (p)) == REG
2112 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
2113 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
2115 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
2116 nparm_regs--;
2121 start_sequence ();
2122 emit_move_insn (mem, GEN_INT (this_call_site));
2123 p = get_insns ();
2124 end_sequence ();
2126 emit_insns_before (p, before);
2127 last_call_site = this_call_site;
2131 /* Construct the SjLj_Function_Context. */
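/* In outline, the entry code emitted below amounts to (a sketch; the
   exact libcalls and the setjmp flavor are target-dependent):

	fc.personality = <personality routine>;
	fc.lsda = &LLSDAnnn;		// if this function has an LSDA
	if (setjmp (fc.jbuf))		// or __builtin_setjmp
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);  */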
2133 static void
2134 sjlj_emit_function_enter (dispatch_label)
2135 rtx dispatch_label;
2137 rtx fn_begin, fc, mem, seq;
2139 fc = cfun->eh->sjlj_fc;
2141 start_sequence ();
2143 mem = change_address (fc, Pmode,
2144 plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
2145 emit_move_insn (mem, eh_personality_libfunc);
2147 mem = change_address (fc, Pmode,
2148 plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
2149 if (cfun->uses_eh_lsda)
2151 char buf[20];
2152 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2153 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2155 else
2156 emit_move_insn (mem, const0_rtx);
2158 #ifdef DONT_USE_BUILTIN_SETJMP
2160 rtx x, note;
2161 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2162 TYPE_MODE (integer_type_node), 1,
2163 plus_constant (XEXP (fc, 0),
2164 sjlj_fc_jbuf_ofs), Pmode);
2166 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2167 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2169 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2170 TYPE_MODE (integer_type_node), 0, 0,
2171 dispatch_label);
2173 #else
2174 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2175 dispatch_label);
2176 #endif
2178 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2179 1, XEXP (fc, 0), Pmode);
2181 seq = get_insns ();
2182 end_sequence ();
2184 /* ??? Instead of doing this at the beginning of the function,
2185 do this in a block that is at loop level 0 and dominates all
2186 can_throw_internal instructions. */
2188 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2189 if (GET_CODE (fn_begin) == NOTE
2190 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2191 break;
2192 emit_insns_after (seq, fn_begin);
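/* Roughly, the sequence built above corresponds to this sketch
   (illustrative pseudo-C, not the original source; shown for the
   DONT_USE_BUILTIN_SETJMP case):

       fc.personality = personality routine (e.g. __gxx_personality_sj0);
       fc.lsda = &LLSDAn;		(or 0 if no lsda is used)
       if (_setjmp (fc.jbuf))
         goto dispatch_label;
       _Unwind_SjLj_Register (&fc);

   placed just after NOTE_INSN_FUNCTION_BEG. */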
2195 /* Call back from expand_function_end to know where we should put
2196 the call to unwind_sjlj_unregister_libfunc if needed. */
2198 void
2199 sjlj_emit_function_exit_after (after)
2200 rtx after;
2202 cfun->eh->sjlj_exit_after = after;
2205 static void
2206 sjlj_emit_function_exit ()
2208 rtx seq;
2210 start_sequence ();
2212 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2213 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2215 seq = get_insns ();
2216 end_sequence ();
2218 /* ??? Really this can be done in any block at loop level 0 that
2219 post-dominates all can_throw_internal instructions. This is
2220 the last possible moment. */
2222 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2225 static void
2226 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2227 rtx dispatch_label;
2228 struct sjlj_lp_info *lp_info;
2230 int i, first_reachable;
2231 rtx mem, dispatch, seq, fc;
2233 fc = cfun->eh->sjlj_fc;
2235 start_sequence ();
2237 emit_label (dispatch_label);
2239 #ifndef DONT_USE_BUILTIN_SETJMP
2240 expand_builtin_setjmp_receiver (dispatch_label);
2241 #endif
2243 /* Load up dispatch index, exc_ptr and filter values from the
2244 function context. */
2245 mem = change_address (fc, TYPE_MODE (integer_type_node),
2246 plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
2247 dispatch = copy_to_reg (mem);
2249 mem = change_address (fc, word_mode,
2250 plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
2251 if (word_mode != Pmode)
2253 #ifdef POINTERS_EXTEND_UNSIGNED
2254 mem = convert_memory_address (Pmode, mem);
2255 #else
2256 mem = convert_to_mode (Pmode, mem, 0);
2257 #endif
2259 emit_move_insn (cfun->eh->exc_ptr, mem);
2261 mem = change_address (fc, word_mode,
2262 plus_constant (XEXP (fc, 0),
2263 sjlj_fc_data_ofs + UNITS_PER_WORD));
2264 emit_move_insn (cfun->eh->filter, mem);
2266 /* Jump to one of the directly reachable regions. */
2267 /* ??? This really ought to be using a switch statement. */
2269 first_reachable = 0;
2270 for (i = cfun->eh->last_region_number; i > 0; --i)
2272 if (! lp_info[i].directly_reachable
2273 || lp_info[i].action_index < 0)
2274 continue;
2276 if (! first_reachable)
2278 first_reachable = i;
2279 continue;
2282 emit_cmp_and_jump_insns (dispatch,
2283 GEN_INT (lp_info[i].dispatch_index), EQ,
2284 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2285 cfun->eh->region_array[i]->post_landing_pad);
2288 seq = get_insns ();
2289 end_sequence ();
2291 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2292 ->post_landing_pad));
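/* Illustrative sketch of the dispatch code built above (pseudo-C, not
   the original source):

       dispatch_label:
         dispatch = fc.call_site;
         exc_ptr = fc.data[0];
         filter = fc.data[1];
         if (dispatch == 2) goto post_landing_pad_2;
         if (dispatch == 3) goto post_landing_pad_3;
         goto post_landing_pad_1;	(first_reachable falls through)

   The runtime stores the selected dispatch value back into fc.call_site
   before longjmp'ing here, so one comparison per reachable region
   (minus the first) suffices. */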
2295 static void
2296 sjlj_build_landing_pads ()
2298 struct sjlj_lp_info *lp_info;
2300 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2301 sizeof (struct sjlj_lp_info));
2303 if (sjlj_find_directly_reachable_regions (lp_info))
2305 rtx dispatch_label = gen_label_rtx ();
2307 cfun->eh->sjlj_fc
2308 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2309 int_size_in_bytes (sjlj_fc_type_node),
2310 TYPE_ALIGN (sjlj_fc_type_node));
2312 sjlj_assign_call_site_values (dispatch_label, lp_info);
2313 sjlj_mark_call_sites (lp_info);
2315 sjlj_emit_function_enter (dispatch_label);
2316 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2317 sjlj_emit_function_exit ();
2320 free (lp_info);
2323 void
2324 finish_eh_generation ()
2326 /* Nothing to do if no regions created. */
2327 if (cfun->eh->region_tree == NULL)
2328 return;
2330 /* The object here is to provide find_basic_blocks with detailed
2331 information (via reachable_handlers) on how exception control
2332 flows within the function. In this first pass, we can include
2333 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2334 regions, and hope that it will be useful in deleting unreachable
2335 handlers. Subsequently, we will generate landing pads which will
2336 connect many of the handlers, and then type information will not
2337 be effective. Still, this is a win over previous implementations. */
2339 jump_optimize_minimal (get_insns ());
2340 find_basic_blocks (get_insns (), max_reg_num (), 0);
2341 cleanup_cfg ();
2343 /* These registers are used by the landing pads. Make sure they
2344 have been generated. */
2345 get_exception_pointer ();
2346 get_exception_filter ();
2348 /* Construct the landing pads. */
2350 assign_filter_values ();
2351 build_post_landing_pads ();
2352 connect_post_landing_pads ();
2353 if (USING_SJLJ_EXCEPTIONS)
2354 sjlj_build_landing_pads ();
2355 else
2356 dw2_build_landing_pads ();
2358 cfun->eh->built_landing_pads = 1;
2360 /* We've totally changed the CFG. Start over. */
2361 find_exception_handler_labels ();
2362 jump_optimize_minimal (get_insns ());
2363 find_basic_blocks (get_insns (), max_reg_num (), 0);
2364 cleanup_cfg ();
2367 /* This section handles removing dead code for flow. */
2369 /* Remove LABEL from the exception_handler_labels list. */
2371 static void
2372 remove_exception_handler_label (label)
2373 rtx label;
2375 rtx *pl, l;
2377 for (pl = &exception_handler_labels, l = *pl;
2378 XEXP (l, 0) != label;
2379 pl = &XEXP (l, 1), l = *pl)
2380 continue;
2382 *pl = XEXP (l, 1);
2383 free_EXPR_LIST_node (l);
2386 /* Splice REGION from the region tree etc. */
2388 static void
2389 remove_eh_handler (region)
2390 struct eh_region *region;
2392 struct eh_region **pp, *p;
2393 rtx lab;
2394 int i;
2396 /* For the benefit of efficiently handling REG_EH_REGION notes,
2397 replace this region in the region array with its containing
2398 region. Note that previous region deletions may result in
2399 multiple copies of this region in the array, so we have to
2400 search the whole thing. */
2401 for (i = cfun->eh->last_region_number; i > 0; --i)
2402 if (cfun->eh->region_array[i] == region)
2403 cfun->eh->region_array[i] = region->outer;
2405 if (cfun->eh->built_landing_pads)
2406 lab = region->landing_pad;
2407 else
2408 lab = region->label;
2409 if (lab)
2410 remove_exception_handler_label (lab);
2412 if (region->outer)
2413 pp = &region->outer->inner;
2414 else
2415 pp = &cfun->eh->region_tree;
2416 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2417 continue;
2419 if (region->inner)
2421 for (p = region->inner; p->next_peer ; p = p->next_peer)
2422 p->outer = region->outer;
2423 p->next_peer = region->next_peer;
2424 p->outer = region->outer;
2425 *pp = region->inner;
2427 else
2428 *pp = region->next_peer;
2430 if (region->type == ERT_CATCH)
2432 struct eh_region *try, *next, *prev;
2434 for (try = region->next_peer;
2435 try->type == ERT_CATCH;
2436 try = try->next_peer)
2437 continue;
2438 if (try->type != ERT_TRY)
2439 abort ();
2441 next = region->u.catch.next_catch;
2442 prev = region->u.catch.prev_catch;
2444 if (next)
2445 next->u.catch.prev_catch = prev;
2446 else
2447 try->u.try.last_catch = prev;
2448 if (prev)
2449 prev->u.catch.next_catch = next;
2450 else
2452 try->u.try.catch = next;
2453 if (! next)
2454 remove_eh_handler (try);
2458 free (region);
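/* For illustration (not in the original source), splicing region R out
   of the tree promotes its children to R's parent P:

       P                        P
       |                        |
       R -- S        ==>        C1 -- C2 -- S
       |
       C1 -- C2

   where "--" links next_peer chains, S is R's next peer, and each
   Ci->outer is rewritten to P. */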
2461 /* LABEL heads a basic block that is about to be deleted. If this
2462 label corresponds to an exception region, we may be able to
2463 delete the region. */
2465 void
2466 maybe_remove_eh_handler (label)
2467 rtx label;
2469 int i;
2471 /* ??? After generating landing pads, it's not so simple to determine
2472 if the region data is completely unused. One must examine the
2473 landing pad and the post landing pad, and whether an inner try block
2474 is referencing the catch handlers directly. */
2475 if (cfun->eh->built_landing_pads)
2476 return;
2478 for (i = cfun->eh->last_region_number; i > 0; --i)
2480 struct eh_region *region = cfun->eh->region_array[i];
2481 if (region && region->label == label)
2483 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2484 because there is no path to the fallback call to terminate.
2485 But the region continues to affect call-site data until there
2486 are no more contained calls, which we don't see here. */
2487 if (region->type == ERT_MUST_NOT_THROW)
2489 remove_exception_handler_label (region->label);
2490 region->label = NULL_RTX;
2492 else
2493 remove_eh_handler (region);
2494 break;
2500 /* This section describes CFG exception edges for flow. */
2502 /* For communicating between calls to reachable_next_level. */
2503 struct reachable_info
2505 tree types_caught;
2506 tree types_allowed;
2507 rtx handlers;
2510 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2511 base class of TYPE, is in HANDLED. */
2513 static int
2514 check_handled (handled, type)
2515 tree handled, type;
2517 tree t;
2519 /* We can check for exact matches without front-end help. */
2520 if (! lang_eh_type_covers)
2522 for (t = handled; t ; t = TREE_CHAIN (t))
2523 if (TREE_VALUE (t) == type)
2524 return 1;
2526 else
2528 for (t = handled; t ; t = TREE_CHAIN (t))
2529 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2530 return 1;
2533 return 0;
2536 /* A subroutine of reachable_next_level. If we are collecting a list
2537 of handlers, add one. After landing pad generation, reference
2538 the landing pad instead of the handlers themselves. Further, the handlers are
2539 all wired together, so by referencing one, we've got them all.
2540 Before landing pad generation we reference each handler individually.
2542 LP_REGION contains the landing pad; REGION is the handler. */
2544 static void
2545 add_reachable_handler (info, lp_region, region)
2546 struct reachable_info *info;
2547 struct eh_region *lp_region;
2548 struct eh_region *region;
2550 if (! info)
2551 return;
2553 if (cfun->eh->built_landing_pads)
2555 if (! info->handlers)
2556 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2558 else
2559 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2562 /* Process one level of exception regions for reachability.
2563 If TYPE_THROWN is non-null, then it is the *exact* type being
2564 propagated. If INFO is non-null, then collect handler labels
2565 and caught/allowed type information between invocations. */
2567 static enum reachable_code
2568 reachable_next_level (region, type_thrown, info)
2569 struct eh_region *region;
2570 tree type_thrown;
2571 struct reachable_info *info;
2573 switch (region->type)
2575 case ERT_CLEANUP:
2576 /* Before landing-pad generation, we model control flow
2577 directly to the individual handlers. In this way we can
2578 see that catch handler types may shadow one another. */
2579 add_reachable_handler (info, region, region);
2580 return RNL_MAYBE_CAUGHT;
2582 case ERT_TRY:
2584 struct eh_region *c;
2585 enum reachable_code ret = RNL_NOT_CAUGHT;
2587 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2589 /* A catch-all handler ends the search. */
2590 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2591 to be run as well. */
2592 if (c->u.catch.type == NULL)
2594 add_reachable_handler (info, region, c);
2595 return RNL_CAUGHT;
2598 if (type_thrown)
2600 /* If we have a type match, end the search. */
2601 if (c->u.catch.type == type_thrown
2602 || (lang_eh_type_covers
2603 && (*lang_eh_type_covers) (c->u.catch.type,
2604 type_thrown)))
2606 add_reachable_handler (info, region, c);
2607 return RNL_CAUGHT;
2610 /* If we have definitive information about a match failure,
2611 the catch won't trigger. */
2612 if (lang_eh_type_covers)
2613 return RNL_NOT_CAUGHT;
2616 if (! info)
2617 ret = RNL_MAYBE_CAUGHT;
2619 /* A type must not have been previously caught. */
2620 else if (! check_handled (info->types_caught, c->u.catch.type))
2622 add_reachable_handler (info, region, c);
2623 info->types_caught = tree_cons (NULL, c->u.catch.type,
2624 info->types_caught);
2626 /* ??? If the catch type is a base class of every allowed
2627 type, then we know we can stop the search. */
2628 ret = RNL_MAYBE_CAUGHT;
2632 return ret;
2635 case ERT_ALLOWED_EXCEPTIONS:
2636 /* An empty list of types definitely ends the search. */
2637 if (region->u.allowed.type_list == NULL_TREE)
2639 add_reachable_handler (info, region, region);
2640 return RNL_CAUGHT;
2643 /* Collect a list of lists of allowed types for use in detecting
2644 when a catch may be transformed into a catch-all. */
2645 if (info)
2646 info->types_allowed = tree_cons (NULL_TREE,
2647 region->u.allowed.type_list,
2648 info->types_allowed);
2650 /* If we have definitive information about the type hierarchy,
2651 then we can tell if the thrown type will pass through the
2652 filter. */
2653 if (type_thrown && lang_eh_type_covers)
2655 if (check_handled (region->u.allowed.type_list, type_thrown))
2656 return RNL_NOT_CAUGHT;
2657 else
2659 add_reachable_handler (info, region, region);
2660 return RNL_CAUGHT;
2664 add_reachable_handler (info, region, region);
2665 return RNL_MAYBE_CAUGHT;
2667 case ERT_CATCH:
2668 /* Catch regions are handled by their controlling try region. */
2669 return RNL_NOT_CAUGHT;
2671 case ERT_MUST_NOT_THROW:
2672 /* Here we end our search, since no exceptions may propagate.
2673 If we've touched down at some landing pad previously, then the
2674 explicit function call we generated may be used. Otherwise
2675 the call is made by the runtime. */
2676 if (info && info->handlers)
2678 add_reachable_handler (info, region, region);
2679 return RNL_CAUGHT;
2681 else
2682 return RNL_BLOCKED;
2684 case ERT_THROW:
2685 case ERT_FIXUP:
2686 /* Shouldn't see these here. */
2687 break;
2690 abort ();
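/* Worked example (illustrative, not in the original source): for

       try { throw A(); } catch (A) { }

   with the thrown type known exactly, the walk outward from the THROW
   region reaches the TRY, the catch (A) filter matches, the handler is
   recorded, and RNL_CAUGHT ends the search. For a call whose thrown
   type is unknown, the same catch yields RNL_MAYBE_CAUGHT and the walk
   continues to outer regions. */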
2693 /* Retrieve a list of labels of exception handlers which can be
2694 reached by a given insn. */
2696 rtx
2697 reachable_handlers (insn)
2698 rtx insn;
2700 struct reachable_info info;
2701 struct eh_region *region;
2702 tree type_thrown;
2703 int region_number;
2705 if (GET_CODE (insn) == JUMP_INSN
2706 && GET_CODE (PATTERN (insn)) == RESX)
2707 region_number = XINT (PATTERN (insn), 0);
2708 else
2710 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2711 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2712 return NULL;
2713 region_number = INTVAL (XEXP (note, 0));
2716 memset (&info, 0, sizeof (info));
2718 region = cfun->eh->region_array[region_number];
2720 type_thrown = NULL_TREE;
2721 if (region->type == ERT_THROW)
2723 type_thrown = region->u.throw.type;
2724 region = region->outer;
2726 else if (GET_CODE (insn) == JUMP_INSN
2727 && GET_CODE (PATTERN (insn)) == RESX)
2728 region = region->outer;
2730 for (; region; region = region->outer)
2731 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2732 break;
2734 return info.handlers;
2737 /* Determine if the given INSN can throw an exception that is caught
2738 within the function. */
2740 bool
2741 can_throw_internal (insn)
2742 rtx insn;
2744 struct eh_region *region;
2745 tree type_thrown;
2746 rtx note;
2748 if (! INSN_P (insn))
2749 return false;
2751 if (GET_CODE (insn) == INSN
2752 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2753 insn = XVECEXP (PATTERN (insn), 0, 0);
2755 if (GET_CODE (insn) == CALL_INSN
2756 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2758 int i;
2759 for (i = 0; i < 3; ++i)
2761 rtx sub = XEXP (PATTERN (insn), i);
2762 for (; sub ; sub = NEXT_INSN (sub))
2763 if (can_throw_internal (sub))
2764 return true;
2766 return false;
2769 /* Every insn that might throw has an EH_REGION note. */
2770 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2771 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2772 return false;
2774 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2776 type_thrown = NULL_TREE;
2777 if (region->type == ERT_THROW)
2779 type_thrown = region->u.throw.type;
2780 region = region->outer;
2783 /* If this exception is ignored by each and every containing region,
2784 then control passes straight out. The runtime may handle some
2785 regions, which also do not require processing internally. */
2786 for (; region; region = region->outer)
2788 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2789 if (how == RNL_BLOCKED)
2790 return false;
2791 if (how != RNL_NOT_CAUGHT)
2792 return true;
2795 return false;
2798 /* Determine if the given INSN can throw an exception that is
2799 visible outside the function. */
2801 bool
2802 can_throw_external (insn)
2803 rtx insn;
2805 struct eh_region *region;
2806 tree type_thrown;
2807 rtx note;
2809 if (! INSN_P (insn))
2810 return false;
2812 if (GET_CODE (insn) == INSN
2813 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2814 insn = XVECEXP (PATTERN (insn), 0, 0);
2816 if (GET_CODE (insn) == CALL_INSN
2817 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2819 int i;
2820 for (i = 0; i < 3; ++i)
2822 rtx sub = XEXP (PATTERN (insn), i);
2823 for (; sub ; sub = NEXT_INSN (sub))
2824 if (can_throw_external (sub))
2825 return true;
2827 return false;
2830 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2831 if (!note)
2833 /* Calls (and trapping insns) without notes are outside any
2834 exception handling region in this function. We have to
2835 assume it might throw. Given that the front and middle
2836 ends mark known NOTHROW functions, this isn't so wildly
2837 inaccurate. */
2838 return (GET_CODE (insn) == CALL_INSN
2839 || (flag_non_call_exceptions
2840 && may_trap_p (PATTERN (insn))));
2842 if (INTVAL (XEXP (note, 0)) <= 0)
2843 return false;
2845 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2847 type_thrown = NULL_TREE;
2848 if (region->type == ERT_THROW)
2850 type_thrown = region->u.throw.type;
2851 region = region->outer;
2854 /* If the exception is caught or blocked by any containing region,
2855 then it is not seen by any calling function. */
2856 for (; region ; region = region->outer)
2857 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2858 return false;
2860 return true;
2863 /* True if nothing in this function can throw outside this function. */
2865 bool
2866 nothrow_function_p ()
2868 rtx insn;
2870 if (! flag_exceptions)
2871 return true;
2873 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2874 if (can_throw_external (insn))
2875 return false;
2876 for (insn = current_function_epilogue_delay_list; insn;
2877 insn = XEXP (insn, 1))
2878 if (can_throw_external (insn))
2879 return false;
2881 return true;
2885 /* Various hooks for unwind library. */
2887 /* Do any necessary initialization to access arbitrary stack frames.
2888 On the SPARC, this means flushing the register windows. */
2890 void
2891 expand_builtin_unwind_init ()
2893 /* Set this so all the registers get saved in our frame; we need to be
2894 able to copy the saved values for any registers from frames we unwind. */
2895 current_function_has_nonlocal_label = 1;
2897 #ifdef SETUP_FRAME_ADDRESSES
2898 SETUP_FRAME_ADDRESSES ();
2899 #endif
2902 rtx
2903 expand_builtin_eh_return_data_regno (arglist)
2904 tree arglist;
2906 tree which = TREE_VALUE (arglist);
2907 unsigned HOST_WIDE_INT iwhich;
2909 if (TREE_CODE (which) != INTEGER_CST)
2911 error ("argument of `__builtin_eh_return_regno' must be constant");
2912 return constm1_rtx;
2915 iwhich = tree_low_cst (which, 1);
2916 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2917 if (iwhich == INVALID_REGNUM)
2918 return constm1_rtx;
2920 #ifdef DWARF_FRAME_REGNUM
2921 iwhich = DWARF_FRAME_REGNUM (iwhich);
2922 #else
2923 iwhich = DBX_REGISTER_NUMBER (iwhich);
2924 #endif
2926 return GEN_INT (iwhich);
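/* Illustrative use (not in the original source): in code such as the
   unwinder, __builtin_eh_return_data_regno (0) folds at compile time
   into the DWARF (or DBX) number of the first EH data register, or -1
   if the target defines none. */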
2929 /* Given a value extracted from the return address register or stack slot,
2930 return the actual address encoded in that value. */
2932 rtx
2933 expand_builtin_extract_return_addr (addr_tree)
2934 tree addr_tree;
2936 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2938 /* First mask out any unwanted bits. */
2939 #ifdef MASK_RETURN_ADDR
2940 expand_and (addr, MASK_RETURN_ADDR, addr);
2941 #endif
2943 /* Then adjust to find the real return address. */
2944 #if defined (RETURN_ADDR_OFFSET)
2945 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2946 #endif
2948 return addr;
2951 /* Given an actual address in addr_tree, do any necessary encoding
2952 and return the value to be stored in the return address register or
2953 stack slot so the epilogue will return to that address. */
2955 rtx
2956 expand_builtin_frob_return_addr (addr_tree)
2957 tree addr_tree;
2959 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2961 #ifdef RETURN_ADDR_OFFSET
2962 addr = force_reg (Pmode, addr);
2963 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2964 #endif
2966 return addr;
2969 /* Set up the epilogue with the magic bits we'll need to return to the
2970 exception handler. */
2972 void
2973 expand_builtin_eh_return (stackadj_tree, handler_tree)
2974 tree stackadj_tree, handler_tree;
2976 rtx stackadj, handler;
2978 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2979 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2981 if (! cfun->eh->ehr_label)
2983 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
2984 cfun->eh->ehr_handler = copy_to_reg (handler);
2985 cfun->eh->ehr_label = gen_label_rtx ();
2987 else
2989 if (stackadj != cfun->eh->ehr_stackadj)
2990 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
2991 if (handler != cfun->eh->ehr_handler)
2992 emit_move_insn (cfun->eh->ehr_handler, handler);
2995 emit_jump (cfun->eh->ehr_label);
2998 void
2999 expand_eh_return ()
3001 rtx sa, ra, around_label;
3003 if (! cfun->eh->ehr_label)
3004 return;
3006 sa = EH_RETURN_STACKADJ_RTX;
3007 if (! sa)
3009 error ("__builtin_eh_return not supported on this target");
3010 return;
3013 current_function_calls_eh_return = 1;
3015 around_label = gen_label_rtx ();
3016 emit_move_insn (sa, const0_rtx);
3017 emit_jump (around_label);
3019 emit_label (cfun->eh->ehr_label);
3020 clobber_return_register ();
3022 #ifdef HAVE_eh_return
3023 if (HAVE_eh_return)
3024 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3025 else
3026 #endif
3028 ra = EH_RETURN_HANDLER_RTX;
3029 if (! ra)
3031 error ("__builtin_eh_return not supported on this target");
3032 ra = gen_reg_rtx (Pmode);
3035 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3036 emit_move_insn (ra, cfun->eh->ehr_handler);
3039 emit_label (around_label);
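/* Illustrative sketch of what the above emits (pseudo-C, not the
   original source; shown for a target without an eh_return pattern):

       sa = 0;
       goto around;
     ehr_label:
       sa = ehr_stackadj; ra = ehr_handler;
     around:
       ... epilogue, which honors sa and ra ...

   On the normal path the stack adjustment is zero; the EH return path
   joins at ehr_label only via __builtin_eh_return. */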
3042 struct action_record
3044 int offset;
3045 int filter;
3046 int next;
3049 static int
3050 action_record_eq (pentry, pdata)
3051 const PTR pentry;
3052 const PTR pdata;
3054 const struct action_record *entry = (const struct action_record *) pentry;
3055 const struct action_record *data = (const struct action_record *) pdata;
3056 return entry->filter == data->filter && entry->next == data->next;
3059 static hashval_t
3060 action_record_hash (pentry)
3061 const PTR pentry;
3063 const struct action_record *entry = (const struct action_record *) pentry;
3064 return entry->next * 1009 + entry->filter;
3067 static int
3068 add_action_record (ar_hash, filter, next)
3069 htab_t ar_hash;
3070 int filter, next;
3072 struct action_record **slot, *new, tmp;
3074 tmp.filter = filter;
3075 tmp.next = next;
3076 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3078 if ((new = *slot) == NULL)
3080 new = (struct action_record *) xmalloc (sizeof (*new));
3081 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3082 new->filter = filter;
3083 new->next = next;
3084 *slot = new;
3086 /* The filter value goes in untouched. The link to the next
3087 record is a "self-relative" byte offset, or zero to indicate
3088 that there is no next record. So convert the absolute 1-based
3089 indices we've been carrying around into a displacement; see the worked example below. */
3091 push_sleb128 (&cfun->eh->action_record_data, filter);
3092 if (next)
3093 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3094 push_sleb128 (&cfun->eh->action_record_data, next);
3097 return new->offset;
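/* Worked example (illustrative, not in the original source). With an
   empty table, add_action_record (ar_hash, 1, 0) appends

       offset 1: sleb128 1	(filter)
       offset 2: sleb128 0	(no next record)

   and returns 1. A subsequent add_action_record (ar_hash, 2, 1) starts
   at offset 3; its next field lands at offset 4, so the stored
   displacement is 1 - 4 = -3:

       offset 3: sleb128 2	(filter)
       offset 4: sleb128 -3	(self-relative link back to offset 1)

   and that call returns 3. */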
3100 static int
3101 collect_one_action_chain (ar_hash, region)
3102 htab_t ar_hash;
3103 struct eh_region *region;
3105 struct eh_region *c;
3106 int next;
3108 /* If we've reached the top of the region chain, then we have
3109 no actions, and require no landing pad. */
3110 if (region == NULL)
3111 return -1;
3113 switch (region->type)
3115 case ERT_CLEANUP:
3116 /* A cleanup adds a zero filter to the beginning of the chain, but
3117 there are special cases to look out for. If there are *only*
3118 cleanups along a path, then it compresses to a zero action.
3119 Further, if there are multiple cleanups along a path, we only
3120 need to represent one of them, as that is enough to trigger
3121 entry to the landing pad at runtime. */
3122 next = collect_one_action_chain (ar_hash, region->outer);
3123 if (next <= 0)
3124 return 0;
3125 for (c = region->outer; c ; c = c->outer)
3126 if (c->type == ERT_CLEANUP)
3127 return next;
3128 return add_action_record (ar_hash, 0, next);
3130 case ERT_TRY:
3131 /* Process the associated catch regions in reverse order.
3132 If there's a catch-all handler, then we don't need to
3133 search outer regions. Use a magic -3 value to record
3134 that we haven't done the outer search. */
3135 next = -3;
3136 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3138 if (c->u.catch.type == NULL)
3139 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3140 else
3142 if (next == -3)
3144 next = collect_one_action_chain (ar_hash, region->outer);
3145 if (next < 0)
3146 next = 0;
3148 next = add_action_record (ar_hash, c->u.catch.filter, next);
3151 return next;
3153 case ERT_ALLOWED_EXCEPTIONS:
3154 /* An exception specification adds its filter to the
3155 beginning of the chain. */
3156 next = collect_one_action_chain (ar_hash, region->outer);
3157 return add_action_record (ar_hash, region->u.allowed.filter,
3158 next < 0 ? 0 : next);
3160 case ERT_MUST_NOT_THROW:
3161 /* A must-not-throw region with no inner handlers or cleanups
3162 requires no call-site entry. Note that this differs from
3163 the no handler or cleanup case in that we do require an lsda
3164 to be generated. Return a magic -2 value to record this. */
3165 return -2;
3167 case ERT_CATCH:
3168 case ERT_THROW:
3169 /* CATCH regions are handled in TRY above. THROW regions are
3170 for optimization information only and produce no output. */
3171 return collect_one_action_chain (ar_hash, region->outer);
3173 default:
3174 abort ();
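/* Worked examples (illustrative, not in the original source). For a
   call wrapped only in cleanups, each level finds next <= 0 and the
   whole chain compresses to the zero action. For

       try/catch (A)		(contributes action record 1, say)
         cleanup C1
           cleanup C2
             call

   C1 yields add_action_record (ar_hash, 0, 1); C2 then notices the
   outer cleanup C1 and reuses that record rather than adding a second
   zero filter. */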
3178 static int
3179 add_call_site (landing_pad, action)
3180 rtx landing_pad;
3181 int action;
3183 struct call_site_record *data = cfun->eh->call_site_data;
3184 int used = cfun->eh->call_site_data_used;
3185 int size = cfun->eh->call_site_data_size;
3187 if (used >= size)
3189 size = (size ? size * 2 : 64);
3190 data = (struct call_site_record *)
3191 xrealloc (data, sizeof (*data) * size);
3192 cfun->eh->call_site_data = data;
3193 cfun->eh->call_site_data_size = size;
3196 data[used].landing_pad = landing_pad;
3197 data[used].action = action;
3199 cfun->eh->call_site_data_used = used + 1;
3201 return used + call_site_base;
3204 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3205 The new note numbers will not refer to region numbers, but
3206 instead to call site entries. */
3208 void
3209 convert_to_eh_region_ranges ()
3211 rtx insn, iter, note;
3212 htab_t ar_hash;
3213 int last_action = -3;
3214 rtx last_action_insn = NULL_RTX;
3215 rtx last_landing_pad = NULL_RTX;
3216 rtx first_no_action_insn = NULL_RTX;
3217 int call_site;
3219 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3220 return;
3222 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3224 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3226 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3227 if (INSN_P (iter))
3229 struct eh_region *region;
3230 int this_action;
3231 rtx this_landing_pad;
3233 insn = iter;
3234 if (GET_CODE (insn) == INSN
3235 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3236 insn = XVECEXP (PATTERN (insn), 0, 0);
3238 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3239 if (!note)
3241 if (! (GET_CODE (insn) == CALL_INSN
3242 || (flag_non_call_exceptions
3243 && may_trap_p (PATTERN (insn)))))
3244 continue;
3245 this_action = -1;
3246 region = NULL;
3248 else
3250 if (INTVAL (XEXP (note, 0)) <= 0)
3251 continue;
3252 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3253 this_action = collect_one_action_chain (ar_hash, region);
3256 /* Existence of catch handlers or must-not-throw regions
3257 implies that an lsda is needed (even if empty). */
3258 if (this_action != -1)
3259 cfun->uses_eh_lsda = 1;
3261 /* Delay creation of region notes for no-action regions
3262 until we're sure that an lsda will be required. */
3263 else if (last_action == -3)
3265 first_no_action_insn = iter;
3266 last_action = -1;
3269 /* Cleanups and handlers may share action chains but not
3270 landing pads. Collect the landing pad for this region. */
3271 if (this_action >= 0)
3273 struct eh_region *o;
3274 for (o = region; ! o->landing_pad ; o = o->outer)
3275 continue;
3276 this_landing_pad = o->landing_pad;
3278 else
3279 this_landing_pad = NULL_RTX;
3281 /* Differing actions or landing pads implies a change in call-site
3282 info, which implies some EH_REGION note should be emitted. */
3283 if (last_action != this_action
3284 || last_landing_pad != this_landing_pad)
3286 /* If we'd not seen a previous action (-3) or the previous
3287 action was must-not-throw (-2), then we do not need an
3288 end note. */
3289 if (last_action >= -1)
3291 /* If we delayed the creation of the begin, do it now. */
3292 if (first_no_action_insn)
3294 call_site = add_call_site (NULL_RTX, 0);
3295 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3296 first_no_action_insn);
3297 NOTE_EH_HANDLER (note) = call_site;
3298 first_no_action_insn = NULL_RTX;
3301 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3302 last_action_insn);
3303 NOTE_EH_HANDLER (note) = call_site;
3306 /* If the new action is must-not-throw, then no region notes
3307 are created. */
3308 if (this_action >= -1)
3310 call_site = add_call_site (this_landing_pad,
3311 this_action < 0 ? 0 : this_action);
3312 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3313 NOTE_EH_HANDLER (note) = call_site;
3316 last_action = this_action;
3317 last_landing_pad = this_landing_pad;
3319 last_action_insn = iter;
3322 if (last_action >= -1 && ! first_no_action_insn)
3324 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3325 NOTE_EH_HANDLER (note) = call_site;
3328 htab_delete (ar_hash);
3332 static void
3333 push_uleb128 (data_area, value)
3334 varray_type *data_area;
3335 unsigned int value;
3339 unsigned char byte = value & 0x7f;
3340 value >>= 7;
3341 if (value)
3342 byte |= 0x80;
3343 VARRAY_PUSH_UCHAR (*data_area, byte);
3345 while (value);
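/* Worked example (illustrative, not in the original source):
   VALUE == 624485 == 0x98765 pushes the three bytes

       0xe5 0x8e 0x26

   i.e. 7 bits at a time, least significant group first, with the high
   bit of each byte set while more bits remain. */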
3348 static void
3349 push_sleb128 (data_area, value)
3350 varray_type *data_area;
3351 int value;
3353 unsigned char byte;
3354 int more;
3358 byte = value & 0x7f;
3359 value >>= 7;
3360 more = ! ((value == 0 && (byte & 0x40) == 0)
3361 || (value == -1 && (byte & 0x40) != 0));
3362 if (more)
3363 byte |= 0x80;
3364 VARRAY_PUSH_UCHAR (*data_area, byte);
3366 while (more);
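/* Worked example (illustrative, not in the original source):
   VALUE == -123456 pushes the three bytes

       0xc0 0xbb 0x78

   The encoding stops once the remaining value is pure sign bits and
   the sign bit (0x40) of the last byte agrees with it. */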
3370 static const char *
3371 eh_data_format_name (format)
3372 int format;
3374 switch (format)
3376 case DW_EH_PE_absptr: return "absolute";
3377 case DW_EH_PE_omit: return "omit";
3379 case DW_EH_PE_uleb128: return "uleb128";
3380 case DW_EH_PE_udata2: return "udata2";
3381 case DW_EH_PE_udata4: return "udata4";
3382 case DW_EH_PE_udata8: return "udata8";
3383 case DW_EH_PE_sleb128: return "sleb128";
3384 case DW_EH_PE_sdata2: return "sdata2";
3385 case DW_EH_PE_sdata4: return "sdata4";
3386 case DW_EH_PE_sdata8: return "sdata8";
3388 case DW_EH_PE_uleb128 | DW_EH_PE_pcrel: return "pcrel uleb128";
3389 case DW_EH_PE_udata2 | DW_EH_PE_pcrel: return "pcrel udata2";
3390 case DW_EH_PE_udata4 | DW_EH_PE_pcrel: return "pcrel udata4";
3391 case DW_EH_PE_udata8 | DW_EH_PE_pcrel: return "pcrel udata8";
3392 case DW_EH_PE_sleb128 | DW_EH_PE_pcrel: return "pcrel sleb128";
3393 case DW_EH_PE_sdata2 | DW_EH_PE_pcrel: return "pcrel sdata2";
3394 case DW_EH_PE_sdata4 | DW_EH_PE_pcrel: return "pcrel sdata4";
3395 case DW_EH_PE_sdata8 | DW_EH_PE_pcrel: return "pcrel sdata8";
3397 case DW_EH_PE_uleb128 | DW_EH_PE_textrel: return "textrel uleb128";
3398 case DW_EH_PE_udata2 | DW_EH_PE_textrel: return "textrel udata2";
3399 case DW_EH_PE_udata4 | DW_EH_PE_textrel: return "textrel udata4";
3400 case DW_EH_PE_udata8 | DW_EH_PE_textrel: return "textrel udata8";
3401 case DW_EH_PE_sleb128 | DW_EH_PE_textrel: return "textrel sleb128";
3402 case DW_EH_PE_sdata2 | DW_EH_PE_textrel: return "textrel sdata2";
3403 case DW_EH_PE_sdata4 | DW_EH_PE_textrel: return "textrel sdata4";
3404 case DW_EH_PE_sdata8 | DW_EH_PE_textrel: return "textrel sdata8";
3406 case DW_EH_PE_uleb128 | DW_EH_PE_datarel: return "datarel uleb128";
3407 case DW_EH_PE_udata2 | DW_EH_PE_datarel: return "datarel udata2";
3408 case DW_EH_PE_udata4 | DW_EH_PE_datarel: return "datarel udata4";
3409 case DW_EH_PE_udata8 | DW_EH_PE_datarel: return "datarel udata8";
3410 case DW_EH_PE_sleb128 | DW_EH_PE_datarel: return "datarel sleb128";
3411 case DW_EH_PE_sdata2 | DW_EH_PE_datarel: return "datarel sdata2";
3412 case DW_EH_PE_sdata4 | DW_EH_PE_datarel: return "datarel sdata4";
3413 case DW_EH_PE_sdata8 | DW_EH_PE_datarel: return "datarel sdata8";
3415 case DW_EH_PE_uleb128 | DW_EH_PE_funcrel: return "funcrel uleb128";
3416 case DW_EH_PE_udata2 | DW_EH_PE_funcrel: return "funcrel udata2";
3417 case DW_EH_PE_udata4 | DW_EH_PE_funcrel: return "funcrel udata4";
3418 case DW_EH_PE_udata8 | DW_EH_PE_funcrel: return "funcrel udata8";
3419 case DW_EH_PE_sleb128 | DW_EH_PE_funcrel: return "funcrel sleb128";
3420 case DW_EH_PE_sdata2 | DW_EH_PE_funcrel: return "funcrel sdata2";
3421 case DW_EH_PE_sdata4 | DW_EH_PE_funcrel: return "funcrel sdata4";
3422 case DW_EH_PE_sdata8 | DW_EH_PE_funcrel: return "funcrel sdata8";
3424 case DW_EH_PE_indirect | DW_EH_PE_uleb128 | DW_EH_PE_pcrel:
3425 return "indirect pcrel uleb128";
3426 case DW_EH_PE_indirect | DW_EH_PE_udata2 | DW_EH_PE_pcrel:
3427 return "indirect pcrel udata2";
3428 case DW_EH_PE_indirect | DW_EH_PE_udata4 | DW_EH_PE_pcrel:
3429 return "indirect pcrel udata4";
3430 case DW_EH_PE_indirect | DW_EH_PE_udata8 | DW_EH_PE_pcrel:
3431 return "indirect pcrel udata8";
3432 case DW_EH_PE_indirect | DW_EH_PE_sleb128 | DW_EH_PE_pcrel:
3433 return "indirect pcrel sleb128";
3434 case DW_EH_PE_indirect | DW_EH_PE_sdata2 | DW_EH_PE_pcrel:
3435 return "indirect pcrel sdata2";
3436 case DW_EH_PE_indirect | DW_EH_PE_sdata4 | DW_EH_PE_pcrel:
3437 return "indirect pcrel sdata4";
3438 case DW_EH_PE_indirect | DW_EH_PE_sdata8 | DW_EH_PE_pcrel:
3439 return "indirect pcrel sdata8";
3441 case DW_EH_PE_indirect | DW_EH_PE_uleb128 | DW_EH_PE_textrel:
3442 return "indirect textrel uleb128";
3443 case DW_EH_PE_indirect | DW_EH_PE_udata2 | DW_EH_PE_textrel:
3444 return "indirect textrel udata2";
3445 case DW_EH_PE_indirect | DW_EH_PE_udata4 | DW_EH_PE_textrel:
3446 return "indirect textrel udata4";
3447 case DW_EH_PE_indirect | DW_EH_PE_udata8 | DW_EH_PE_textrel:
3448 return "indirect textrel udata8";
3449 case DW_EH_PE_indirect | DW_EH_PE_sleb128 | DW_EH_PE_textrel:
3450 return "indirect textrel sleb128";
3451 case DW_EH_PE_indirect | DW_EH_PE_sdata2 | DW_EH_PE_textrel:
3452 return "indirect textrel sdata2";
3453 case DW_EH_PE_indirect | DW_EH_PE_sdata4 | DW_EH_PE_textrel:
3454 return "indirect textrel sdata4";
3455 case DW_EH_PE_indirect | DW_EH_PE_sdata8 | DW_EH_PE_textrel:
3456 return "indirect textrel sdata8";
3458 case DW_EH_PE_indirect | DW_EH_PE_uleb128 | DW_EH_PE_datarel:
3459 return "indirect datarel uleb128";
3460 case DW_EH_PE_indirect | DW_EH_PE_udata2 | DW_EH_PE_datarel:
3461 return "indirect datarel udata2";
3462 case DW_EH_PE_indirect | DW_EH_PE_udata4 | DW_EH_PE_datarel:
3463 return "indirect datarel udata4";
3464 case DW_EH_PE_indirect | DW_EH_PE_udata8 | DW_EH_PE_datarel:
3465 return "indirect datarel udata8";
3466 case DW_EH_PE_indirect | DW_EH_PE_sleb128 | DW_EH_PE_datarel:
3467 return "indirect datarel sleb128";
3468 case DW_EH_PE_indirect | DW_EH_PE_sdata2 | DW_EH_PE_datarel:
3469 return "indirect datarel sdata2";
3470 case DW_EH_PE_indirect | DW_EH_PE_sdata4 | DW_EH_PE_datarel:
3471 return "indirect datarel sdata4";
3472 case DW_EH_PE_indirect | DW_EH_PE_sdata8 | DW_EH_PE_datarel:
3473 return "indirect datarel sdata8";
3475 case DW_EH_PE_indirect | DW_EH_PE_uleb128 | DW_EH_PE_funcrel:
3476 return "indirect funcrel uleb128";
3477 case DW_EH_PE_indirect | DW_EH_PE_udata2 | DW_EH_PE_funcrel:
3478 return "indirect funcrel udata2";
3479 case DW_EH_PE_indirect | DW_EH_PE_udata4 | DW_EH_PE_funcrel:
3480 return "indirect funcrel udata4";
3481 case DW_EH_PE_indirect | DW_EH_PE_udata8 | DW_EH_PE_funcrel:
3482 return "indirect funcrel udata8";
3483 case DW_EH_PE_indirect | DW_EH_PE_sleb128 | DW_EH_PE_funcrel:
3484 return "indirect funcrel sleb128";
3485 case DW_EH_PE_indirect | DW_EH_PE_sdata2 | DW_EH_PE_funcrel:
3486 return "indirect funcrel sdata2";
3487 case DW_EH_PE_indirect | DW_EH_PE_sdata4 | DW_EH_PE_funcrel:
3488 return "indirect funcrel sdata4";
3489 case DW_EH_PE_indirect | DW_EH_PE_sdata8 | DW_EH_PE_funcrel:
3490 return "indirect funcrel sdata8";
3492 default:
3493 abort ();
3497 #ifndef HAVE_AS_LEB128
3498 static int
3499 dw2_size_of_call_site_table ()
3501 int n = cfun->eh->call_site_data_used;
3502 int size = n * (4 + 4 + 4);
3503 int i;
3505 for (i = 0; i < n; ++i)
3507 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3508 size += size_of_uleb128 (cs->action);
3511 return size;
3514 static int
3515 sjlj_size_of_call_site_table ()
3517 int n = cfun->eh->call_site_data_used;
3518 int size = 0;
3519 int i;
3521 for (i = 0; i < n; ++i)
3523 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3524 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3525 size += size_of_uleb128 (cs->action);
3528 return size;
3530 #endif
3532 static void
3533 dw2_output_call_site_table ()
3535 const char *function_start_lab
3536 = IDENTIFIER_POINTER (current_function_func_begin_label);
3537 int n = cfun->eh->call_site_data_used;
3538 int i;
3540 for (i = 0; i < n; ++i)
3542 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3543 char reg_start_lab[32];
3544 char reg_end_lab[32];
3545 char landing_pad_lab[32];
3547 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3548 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3550 if (cs->landing_pad)
3551 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3552 CODE_LABEL_NUMBER (cs->landing_pad));
3554 /* ??? Perhaps use insn length scaling if the assembler supports
3555 generic arithmetic. */
3556 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3557 data4 if the function is small enough. */
3558 #ifdef HAVE_AS_LEB128
3559 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3560 "region %d start", i);
3561 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3562 "length");
3563 if (cs->landing_pad)
3564 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3565 "landing pad");
3566 else
3567 dw2_asm_output_data_uleb128 (0, "landing pad");
3568 #else
3569 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3570 "region %d start", i);
3571 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3572 if (cs->landing_pad)
3573 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3574 "landing pad");
3575 else
3576 dw2_asm_output_data (4, 0, "landing pad");
3577 #endif
3578 dw2_asm_output_data_uleb128 (cs->action, "action");
3581 call_site_base += n;
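/* For a sense of the output (illustrative; the label names here are
   hypothetical), one record in the HAVE_AS_LEB128 case looks like

       .uleb128 .LEHB0-.LFB1	(region 0 start)
       .uleb128 .LEHE0-.LEHB0	(length)
       .uleb128 .L7-.LFB1	(landing pad)
       .uleb128 0x1		(action)

   where .LFB1 stands for the function begin label recorded in
   current_function_func_begin_label. */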
3584 static void
3585 sjlj_output_call_site_table ()
3587 int n = cfun->eh->call_site_data_used;
3588 int i;
3590 for (i = 0; i < n; ++i)
3592 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3594 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3595 "region %d landing pad", i);
3596 dw2_asm_output_data_uleb128 (cs->action, "action");
3599 call_site_base += n;
3602 void
3603 output_function_exception_table ()
3605 int tt_format, cs_format, lp_format, i, n;
3606 #ifdef HAVE_AS_LEB128
3607 char ttype_label[32];
3608 char cs_after_size_label[32];
3609 char cs_end_label[32];
3610 #else
3611 int call_site_len;
3612 #endif
3613 int have_tt_data;
3614 int funcdef_number;
3615 int tt_format_size;
3617 /* Not all functions need anything. */
3618 if (! cfun->uses_eh_lsda)
3619 return;
3621 funcdef_number = (USING_SJLJ_EXCEPTIONS
3622 ? sjlj_funcdef_number
3623 : current_funcdef_number);
3625 #ifdef IA64_UNWIND_INFO
3626 fputs ("\t.personality\t", asm_out_file);
3627 output_addr_const (asm_out_file, eh_personality_libfunc);
3628 fputs ("\n\t.handlerdata\n", asm_out_file);
3629 /* Note that varasm still thinks we're in the function's code section.
3630 The ".endp" directive that will immediately follow will take us back. */
3631 #else
3632 exception_section ();
3633 #endif
3635 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3636 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3638 /* Indicate the format of the @TType entries. */
3639 if (! have_tt_data)
3640 tt_format = DW_EH_PE_omit;
3641 else
3643 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3644 #ifdef HAVE_AS_LEB128
3645 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3646 #endif
3647 tt_format_size = size_of_encoded_value (tt_format);
3649 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3652 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3654 /* The LSDA header. */
3656 /* Indicate the format of the landing pad start pointer. An omitted
3657 field implies @LPStart == @Start. */
3658 /* Currently we always put @LPStart == @Start. This field would
3659 be most useful in moving the landing pads completely out of
3660 line to another section, but it could also be used to minimize
3661 the size of uleb128 landing pad offsets. */
3662 lp_format = DW_EH_PE_omit;
3663 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3664 eh_data_format_name (lp_format));
3666 /* @LPStart pointer would go here. */
3668 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3669 eh_data_format_name (tt_format));
3671 #ifndef HAVE_AS_LEB128
3672 if (USING_SJLJ_EXCEPTIONS)
3673 call_site_len = sjlj_size_of_call_site_table ();
3674 else
3675 call_site_len = dw2_size_of_call_site_table ();
3676 #endif
3678 /* A pc-relative 4-byte displacement to the @TType data. */
3679 if (have_tt_data)
3681 #ifdef HAVE_AS_LEB128
3682 char ttype_after_disp_label[32];
3683 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3684 funcdef_number);
3685 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3686 "@TType base offset");
3687 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3688 #else
3689 /* Ugh. Alignment complicates things. */
3690 unsigned int before_disp, after_disp, last_disp, disp;
3692 before_disp = 1 + 1;
3693 after_disp = (1 + size_of_uleb128 (call_site_len)
3694 + call_site_len
3695 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3696 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3697 * tt_format_size));
3699 disp = after_disp;
3702 unsigned int disp_size, pad;
3704 last_disp = disp;
3705 disp_size = size_of_uleb128 (disp);
3706 pad = before_disp + disp_size + after_disp;
3707 if (pad % tt_format_size)
3708 pad = tt_format_size - (pad % tt_format_size);
3709 else
3710 pad = 0;
3711 disp = after_disp + pad;
3713 while (disp != last_disp);
3715 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3716 #endif
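/* Worked example of the loop above (illustrative, not in the original
   source): with tt_format_size == 4 and after_disp == 12, the first
   guess disp == 12 needs one uleb128 byte, so 2 + 1 + 12 == 15 bytes
   precede the aligned data and pad == 1; the next guess disp == 13
   still fits in one byte, so the iteration has converged and 13 is
   emitted. */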
3719 /* Indicate the format of the call-site offsets. */
3720 #ifdef HAVE_AS_LEB128
3721 cs_format = DW_EH_PE_uleb128;
3722 #else
3723 cs_format = DW_EH_PE_udata4;
3724 #endif
3725 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3726 eh_data_format_name (cs_format));
3728 #ifdef HAVE_AS_LEB128
3729 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3730 funcdef_number);
3731 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3732 funcdef_number);
3733 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3734 "Call-site table length");
3735 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3736 if (USING_SJLJ_EXCEPTIONS)
3737 sjlj_output_call_site_table ();
3738 else
3739 dw2_output_call_site_table ();
3740 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3741 #else
3742 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3743 if (USING_SJLJ_EXCEPTIONS)
3744 sjlj_output_call_site_table ();
3745 else
3746 dw2_output_call_site_table ();
3747 #endif
3749 /* ??? Decode and interpret the data for flag_debug_asm. */
3750 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3751 for (i = 0; i < n; ++i)
3752 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3753 (i ? NULL : "Action record table"));
3755 if (have_tt_data)
3756 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3758 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3759 while (i-- > 0)
3761 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3763 if (type == NULL_TREE)
3764 type = integer_zero_node;
3765 else
3766 type = lookup_type_for_runtime (type);
3768 dw2_asm_output_encoded_addr_rtx (tt_format,
3769 expand_expr (type, NULL_RTX, VOIDmode,
3770 EXPAND_INITIALIZER));
3773 #ifdef HAVE_AS_LEB128
3774 if (have_tt_data)
3775 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3776 #endif
3778 /* ??? Decode and interpret the data for flag_debug_asm. */
3779 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3780 for (i = 0; i < n; ++i)
3781 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3782 (i ? NULL : "Exception specification table"));
3784 function_section (current_function_decl);
3786 if (USING_SJLJ_EXCEPTIONS)
3787 sjlj_funcdef_number += 1;