/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
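
/* Illustration (not part of the original file): for `if (cond) stmt;'
   a front end might emit, in order,

     expand_start_cond (cond, 0);  -- emit the test of COND
     ...expand RTL for STMT...     -- the then-clause
     expand_end_cond ();           -- emit the label ending the construct

   The 0 exitflag argument is an assumption here; see the definitions
   of these functions for the actual interface.  */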
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
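
/* Illustration (not part of the original file): a GNU C switch with a
   case range,

     switch (x)
       {
       case 1: ...        -- one case_node with low == high == 1
       case 5 ... 8: ...  -- one case_node with low == 5, high == 8
       }

   produces one case_node per label.  A dense set of values becomes a
   branch table; a sparse one becomes the balanced tree of compares
   described above.  */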
struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
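
/* Illustrative life cycle of one of these objects (an assumption about
   the exact arguments; the functions appear later in this file):

     expand_start_loop (1);  -- push a nesting object; 1 => visible to break
     ...expand the loop body; `break' calls expand_exit_loop (0), which
	jumps to the exit_label of the innermost visible loop...
     expand_end_loop ();     -- emit the exit label and pop the object
*/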
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)						\
do { struct nesting *target = STACK;				\
     struct nesting *this;					\
     do { this = nesting_stack;					\
	  if (loop_stack == this)				\
	    loop_stack = loop_stack->next;			\
	  if (cond_stack == this)				\
	    cond_stack = cond_stack->next;			\
	  if (block_stack == this)				\
	    block_stack = block_stack->next;			\
	  if (stack_block_stack == this)			\
	    stack_block_stack = stack_block_stack->next;	\
	  if (case_stack == this)				\
	    case_stack = case_stack->next;			\
	  nesting_depth = nesting_stack->depth - 1;		\
	  nesting_stack = this->all;				\
	  obstack_free (&stmt_obstack, this); }			\
     while (this != target); } while (0)
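
/* Typical use (illustration only): each expand_end_WHATEVER pops its
   own construct, e.g. POPSTACK (loop_stack) or POPSTACK (block_stack).
   Since POPSTACK walks the `all' chain down to TARGET, it may pop
   objects of several different types along the way, which is why it
   must test every typed stack.  */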

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
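
/* For instance (illustration only):

     {
       goto out;       -- target not yet defined: expand_fixup records it
       {
	 int v[n];     -- this block has a stack level to restore
	 ...
       }               -- exiting here checks the pending fixup
     out:;             -- once defined, the fixup can be finalized
     }

   The forward goto cannot know at expansion time how to reset the
   stack pointer, so the jump is patched afterwards.  */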
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Character strings, each containing a single decimal digit.  */
static char *digit_strings[10];

static int n_occurrences PARAMS ((int, const char *));
static void expand_goto_internal PARAMS ((tree, rtx, rtx));
static int expand_fixup PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver PARAMS ((void));
static void expand_nl_goto_receivers PARAMS ((struct nesting *));
static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
				 rtx, int));
static void expand_null_return_1 PARAMS ((rtx, int));
static void expand_value_return PARAMS ((rtx));
static int tail_recursion_args PARAMS ((tree, tree));
static void expand_cleanups PARAMS ((tree, tree, int, int));
static void check_seenlabel PARAMS ((void));
static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs PARAMS ((case_node_ptr));
static void group_case_nodes PARAMS ((case_node_ptr));
static void balance_case_nodes PARAMS ((case_node_ptr *,
					case_node_ptr));
static int node_has_low_bound PARAMS ((case_node_ptr, tree));
static int node_has_high_bound PARAMS ((case_node_ptr, tree));
static int node_is_bounded PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable PARAMS ((rtx));
static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting PARAMS ((struct nesting *));
static void mark_loop_nesting PARAMS ((struct nesting *));
static void mark_block_nesting PARAMS ((struct nesting *));
static void mark_case_nesting PARAMS ((struct nesting *));
static void mark_case_node PARAMS ((struct case_node *));
static void mark_goto_fixup PARAMS ((struct goto_fixup *));
static void free_case_nodes PARAMS ((case_node_ptr));

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}

/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	{
	  ggc_mark (l);
	  ggc_mark_tree (l->label);
	}

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      mark_case_node (n->data.case_stmt.case_list);
      n = n->next;
    }
}

/* Mark C for GC.  */

static void
mark_case_node (c)
     struct case_node *c;
{
  if (c != 0)
    {
      ggc_mark_tree (c->low);
      ggc_mark_tree (c->high);
      ggc_mark_tree (c->code_label);

      mark_case_node (c->right);
      mark_case_node (c->left);
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark (g);
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}

/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}

void
init_stmt ()
{
  int i;

  gcc_obstack_init (&stmt_obstack);

  for (i = 0; i < 10; i++)
    {
      digit_strings[i] = ggc_alloc_string (NULL, 1);
      digit_strings[i][0] = '0' + i;
    }
  ggc_add_string_root (digit_strings, 10);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}

/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
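
/* Illustration (not part of the original file): in GNU C a nested
   function may jump to a label of its containing function if the label
   is declared with `__label__':

     void f (void)
     {
       __label__ done;               -- handled by declare_nonlocal_label
       void g (void) { goto done; }  -- expanded by expand_goto below
       g ();
     done:;
     }

   The handler slot allocated above is what such a goto jumps through.  */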

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  insert_block (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
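
/* Used below to count constraint alternatives: a constraint string
   such as "r,m" contains one comma, so it describes
   n_occurrences (',', "r,m") + 1 == 2 alternatives.  */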

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
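
/* For example, a basic asm with no operands,

     asm ("nop");

   is expanded by this function into a bare ASM_INPUT rtx.  */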

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
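
/* Illustration (not part of the original file): a statement such as

     asm volatile ("addl %1,%0" : "=r" (sum) : "g" (b), "0" (sum) : "cc");

   arrives here with STRING == "addl %1,%0", one element in OUTPUTS
   (lvalue `sum', constraint "=r"), two in INPUTS (the second with a
   matching "0" constraint), one clobber ("cc"), and VOL nonzero
   because of `volatile'.  */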

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     const char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */
      switch (c_len)
	{
	default:
	  if ((p = strchr (constraint, '=')) != NULL)
	    break;
	  if ((p = strchr (constraint, '+')) != NULL)
	    break;
	case 0:
	  error ("output operand constraint lacks `='");
	  return;
	}

      if (p != constraint)
	{
	  j = *p;
	  bcopy (constraint, constraint+1, p-constraint);
	  *constraint = j;

	  warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
	}

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':  case '#':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p':  case 'r':
	    allows_reg = 1;
	    break;

	  default:
	    if (! ISALPHA (constraint[j]))
	      {
		error ("invalid punctuation `%c' in constraint",
		       constraint[j]);
		return;
	      }
	    if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
	      allows_reg = 1;
#ifdef EXTRA_CONSTRAINT
	    else
	      {
		/* Otherwise we can't assume anything about the nature of
		   the constraint except that it isn't purely registers.
		   Treat it like "g" and hope for the best.  */
		allows_reg = 1;
		allows_mem = 1;
	      }
#endif
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	      || GET_CODE (output_rtx[i]) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 1);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       TREE_STRING_POINTER (string),
			       empty_string, 0, argvec, constraints,
			       filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      /* ??? Can this happen, and does the error message make any sense?  */
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':  case '=':  case '&':
	    if (constraint == orig_constraint)
	      {
		error ("input operand constraint contains `%c'",
		       constraint[j]);
		return;
	      }
	    break;

	  case '%':
	    if (constraint == orig_constraint
		&& i + 1 == ninputs - ninout)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	  case '?':  case '!':  case '*':  case '#':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (constraint[j] >= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* Try and find the real constraint for this dup.  */
	    if ((j == 0 && c_len == 1)
		|| (j == 1 && c_len == 2 && constraint[0] == '%'))
	      {
		tree o = outputs;

		for (j = constraint[j] - '0'; j > 0; --j)
		  o = TREE_CHAIN (o);

		c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (o)));
		constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
		j = 0;
		break;
	      }

	    /* Fall through.  */

	  case 'p':  case 'r':
	    allows_reg = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  default:
	    if (! ISALPHA (constraint[j]))
	      {
		error ("invalid punctuation `%c' in constraint",
		       constraint[j]);
		return;
	      }
	    if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
	      allows_reg = 1;
#ifdef EXTRA_CONSTRAINT
	    else
	      {
		/* Otherwise we can't assume anything about the nature of
		   the constraint except that it isn't purely registers.
		   Treat it like "g" and hope for the best.  */
		allows_reg = 1;
		allows_mem = 1;
	      }
#endif
	    break;
	  }

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      /* Never pass a CONCAT to an ASM.  */
      generating_concat_p = 0;
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints", i);
	  else if (CONSTANT_P (op))
	    op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				  op);
	  else if (GET_CODE (op) == REG
		   || GET_CODE (op) == SUBREG
		   || GET_CODE (op) == CONCAT)
	    {
	      tree type = TREE_TYPE (TREE_VALUE (tail));
	      rtx memloc = assign_temp (type, 1, 1, 1);

	      emit_move_insn (memloc, op);
	      op = memloc;
	    }

	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
	    /* We won't recognize volatile memory as a valid
	       memory_operand at this point.  Ignore it.  */
	    ;
	  else if (queued_subexp_p (op))
	    ;
	  else
	    /* ??? Leave this only until we have experience with what
	       happens in combine and elsewhere when constraints are
	       not satisfied.  */
	    warning ("asm operand %d probably doesn't match constraints", i);
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     orig_constraint);
      i++;
    }

  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  for (i = 0; i < ninputs - ninout; i++)
    ASM_OPERANDS_INPUT (body, i)
      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
    }

  generating_concat_p = old_generating_concat_p;
1809 /* Now, for each output, construct an rtx
1810 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1811 ARGVEC CONSTRAINTS))
1812 If there is more than one, put them inside a PARALLEL. */
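/* Schematically, the three shapes built below are:

   (set (reg A) (asm_operands ...))  -- one output, no clobbers

   (asm_operands ...)  -- no outputs, no clobbers

   (parallel [(set (reg A) (asm_operands ...))
   (set (reg B) (asm_operands ...))
   (clobber (reg C))])  -- the general case  */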
1814 if (noutputs == 1 && nclobbers == 0)
1816 ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
1817 = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1818 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1821 else if (noutputs == 0 && nclobbers == 0)
1823 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1824 insn = emit_insn (body);
1827 else
1829 rtx obody = body;
1830 int num = noutputs;
1832 if (num == 0)
1833 num = 1;
1835 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1837 /* For each output operand, store a SET. */
1838 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1840 XVECEXP (body, 0, i)
1841 = gen_rtx_SET (VOIDmode,
1842 output_rtx[i],
1843 gen_rtx_ASM_OPERANDS
1844 (GET_MODE (output_rtx[i]),
1845 TREE_STRING_POINTER (string),
1846 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1847 i, argvec, constraints,
1848 filename, line));
1850 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1853 /* If there are no outputs (but there are some clobbers)
1854 store the bare ASM_OPERANDS into the PARALLEL. */
1856 if (i == 0)
1857 XVECEXP (body, 0, i++) = obody;
1859 /* Store (clobber REG) for each clobbered register specified. */
1861 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1863 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1864 int j = decode_reg_name (regname);
1866 if (j < 0)
1868 if (j == -3) /* `cc', which is not a register */
1869 continue;
1871 if (j == -4) /* `memory', don't cache memory across asm */
1873 XVECEXP (body, 0, i++)
1874 = gen_rtx_CLOBBER (VOIDmode,
1875 gen_rtx_MEM
1876 (BLKmode,
1877 gen_rtx_SCRATCH (VOIDmode)));
1878 continue;
1881 /* Ignore unknown register, error already signaled. */
1882 continue;
1885 /* Use QImode since that's guaranteed to clobber just one reg. */
1886 XVECEXP (body, 0, i++)
1887 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1890 insn = emit_insn (body);
1893 /* For any outputs that needed reloading into registers, spill them
1894 back to where they belong. */
1895 for (i = 0; i < noutputs; ++i)
1896 if (real_output_rtx[i])
1897 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1899 free_temp_slots ();
1902 /* Generate RTL to evaluate the expression EXP
1903 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1905 void
1906 expand_expr_stmt (exp)
1907 tree exp;
1909 /* If -W, warn about statements with no side effects,
1910 except for an explicit cast to void (e.g. for assert()), and
1911 except inside a ({...}) where they may be useful. */
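/* For instance, under -W a statement expanded from `x + 1;' draws the
   warning below, while `(void) (x + 1);' and the final value of a
   ({...}) construct do not. */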
1912 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1914 if (! TREE_SIDE_EFFECTS (exp))
1916 if ((extra_warnings || warn_unused_value)
1917 && !(TREE_CODE (exp) == CONVERT_EXPR
1918 && VOID_TYPE_P (TREE_TYPE (exp))))
1919 warning_with_file_and_line (emit_filename, emit_lineno,
1920 "statement with no effect");
1922 else if (warn_unused_value)
1923 warn_if_unused_value (exp);
1926 /* If EXP is of function type and we are expanding statements for
1927 value, convert it to pointer-to-function. */
1928 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1929 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1931 /* The call to `expand_expr' could cause last_expr_type and
1932 last_expr_value to get reset. Therefore, we set last_expr_value
1933 and last_expr_type *after* calling expand_expr. */
1934 last_expr_value = expand_expr (exp,
1935 (expr_stmts_for_value
1936 ? NULL_RTX : const0_rtx),
1937 VOIDmode, 0);
1938 last_expr_type = TREE_TYPE (exp);
1940 /* If all we do is reference a volatile value in memory,
1941 copy it to a register to be sure it is actually touched. */
1942 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1943 && TREE_THIS_VOLATILE (exp))
1945 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1947 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1948 copy_to_reg (last_expr_value);
1949 else
1951 rtx lab = gen_label_rtx ();
1953 /* Compare the value with itself to reference it. */
1954 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1955 expand_expr (TYPE_SIZE (last_expr_type),
1956 NULL_RTX, VOIDmode, 0),
1957 BLKmode, 0,
1958 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1959 lab);
1960 emit_label (lab);
1964 /* If this expression is part of a ({...}) and is in memory, we may have
1965 to preserve temporaries. */
1966 preserve_temp_slots (last_expr_value);
1968 /* Free any temporaries used to evaluate this expression. Any temporary
1969 used as a result of this expression will already have been preserved
1970 above. */
1971 free_temp_slots ();
1973 emit_queue ();
1976 /* Warn if EXP contains any computations whose results are not used.
1977 Return 1 if a warning is printed; 0 otherwise. */
1980 warn_if_unused_value (exp)
1981 tree exp;
1983 if (TREE_USED (exp))
1984 return 0;
1986 /* Don't warn about void constructs. This includes casting to void,
1987 void function calls, and statement expressions with a final cast
1988 to void. */
1989 if (VOID_TYPE_P (TREE_TYPE (exp)))
1990 return 0;
1992 switch (TREE_CODE (exp))
1994 case PREINCREMENT_EXPR:
1995 case POSTINCREMENT_EXPR:
1996 case PREDECREMENT_EXPR:
1997 case POSTDECREMENT_EXPR:
1998 case MODIFY_EXPR:
1999 case INIT_EXPR:
2000 case TARGET_EXPR:
2001 case CALL_EXPR:
2002 case METHOD_CALL_EXPR:
2003 case RTL_EXPR:
2004 case TRY_CATCH_EXPR:
2005 case WITH_CLEANUP_EXPR:
2006 case EXIT_EXPR:
2007 return 0;
2009 case BIND_EXPR:
2010 /* For a binding, warn if no side effect within it. */
2011 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2013 case SAVE_EXPR:
2014 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2016 case TRUTH_ORIF_EXPR:
2017 case TRUTH_ANDIF_EXPR:
2018 /* In && or ||, warn if 2nd operand has no side effect. */
2019 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2021 case COMPOUND_EXPR:
2022 if (TREE_NO_UNUSED_WARNING (exp))
2023 return 0;
2024 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2025 return 1;
2026 /* Let people do `(foo (), 0)' without a warning. */
2027 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2028 return 0;
2029 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2031 case NOP_EXPR:
2032 case CONVERT_EXPR:
2033 case NON_LVALUE_EXPR:
2034 /* Don't warn about conversions not explicit in the user's program. */
2035 if (TREE_NO_UNUSED_WARNING (exp))
2036 return 0;
2037 /* Assignment to a cast usually results in a cast of a modify.
2038 Don't complain about that. There can be an arbitrary number of
2039 casts before the modify, so we must loop until we find the first
2040 non-cast expression and then test to see if that is a modify. */
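/* E.g., `(char) (c = getchar ());' arrives here as a cast whose
   first non-cast operand is the MODIFY_EXPR, so no warning is given. */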
2042 tree tem = TREE_OPERAND (exp, 0);
2044 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2045 tem = TREE_OPERAND (tem, 0);
2047 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2048 || TREE_CODE (tem) == CALL_EXPR)
2049 return 0;
2051 goto warn;
2053 case INDIRECT_REF:
2054 /* Don't warn about automatic dereferencing of references, since
2055 the user cannot control it. */
2056 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2057 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2058 /* Fall through. */
2060 default:
2061 /* Referencing a volatile value is a side effect, so don't warn. */
2062 if ((DECL_P (exp)
2063 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2064 && TREE_THIS_VOLATILE (exp))
2065 return 0;
2067 /* If this is an expression with side effects, don't warn. */
2068 if (TREE_SIDE_EFFECTS (exp))
2069 return 0;
2071 /* If this is an expression which has no operands, there is no value
2072 to be unused. There are no such language-independent codes,
2073 but front ends may define such. */
2074 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2075 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2076 return 0;
2078 warn:
2079 warning_with_file_and_line (emit_filename, emit_lineno,
2080 "value computed is not used");
2081 return 1;
2085 /* Clear out the memory of the last expression evaluated. */
2087 void
2088 clear_last_expr ()
2090 last_expr_type = 0;
2093 /* Begin a statement which will return a value.
2094 Return the RTL_EXPR for this statement expr.
2095 The caller must save that value and pass it to expand_end_stmt_expr. */
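/* A front end might use these entry points roughly as follows
   (a sketch, not taken from any particular front end):

   tree t = expand_start_stmt_expr ();
   ... expand the statements of the ({...}) body ...
   t = expand_end_stmt_expr (t);

   after which T carries the type and the RTL value of the construct. */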
2097 tree
2098 expand_start_stmt_expr ()
2100 tree t;
2102 /* Make the RTL_EXPR node temporary, not momentary,
2103 so that rtl_expr_chain doesn't become garbage. */
2104 t = make_node (RTL_EXPR);
2105 do_pending_stack_adjust ();
2106 start_sequence_for_rtl_expr (t);
2107 NO_DEFER_POP;
2108 expr_stmts_for_value++;
2109 return t;
2112 /* Restore the previous state at the end of a statement that returns a value.
2113 Returns a tree node representing the statement's value and the
2114 insns to compute the value.
2116 The nodes of that expression have been freed by now, so we cannot use them.
2117 But we don't want to do that anyway; the expression has already been
2118 evaluated and now we just want to use the value. So generate a RTL_EXPR
2119 with the proper type and RTL value.
2121 If the last substatement was not an expression,
2122 return something with type `void'. */
2124 tree
2125 expand_end_stmt_expr (t)
2126 tree t;
2128 OK_DEFER_POP;
2130 if (last_expr_type == 0)
2132 last_expr_type = void_type_node;
2133 last_expr_value = const0_rtx;
2135 else if (last_expr_value == 0)
2136 /* There are some cases where this can happen, such as when the
2137 statement is of void type. */
2138 last_expr_value = const0_rtx;
2139 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2140 /* Remove any possible QUEUED. */
2141 last_expr_value = protect_from_queue (last_expr_value, 0);
2143 emit_queue ();
2145 TREE_TYPE (t) = last_expr_type;
2146 RTL_EXPR_RTL (t) = last_expr_value;
2147 RTL_EXPR_SEQUENCE (t) = get_insns ();
2149 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2151 end_sequence ();
2153 /* Don't consider deleting this expr or containing exprs at tree level. */
2154 TREE_SIDE_EFFECTS (t) = 1;
2155 /* Propagate volatility of the actual RTL expr. */
2156 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2158 last_expr_type = 0;
2159 expr_stmts_for_value--;
2161 return t;
2164 /* Generate RTL for the start of an if-then. COND is the expression
2165 whose truth should be tested.
2167 If EXITFLAG is nonzero, this conditional is visible to
2168 `exit_something'. */
2170 void
2171 expand_start_cond (cond, exitflag)
2172 tree cond;
2173 int exitflag;
2175 struct nesting *thiscond = ALLOC_NESTING ();
2177 /* Make an entry on cond_stack for the cond we are entering. */
2179 thiscond->next = cond_stack;
2180 thiscond->all = nesting_stack;
2181 thiscond->depth = ++nesting_depth;
2182 thiscond->data.cond.next_label = gen_label_rtx ();
2183 /* Before we encounter an `else', we don't need a separate exit label
2184 unless there are supposed to be exit statements
2185 to exit this conditional. */
2186 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2187 thiscond->data.cond.endif_label = thiscond->exit_label;
2188 cond_stack = thiscond;
2189 nesting_stack = thiscond;
2191 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
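/* Schematically, a front end expanding

   if (a) s1; else if (b) s2; else s3;

   might call (a sketch):

   expand_start_cond (a, 0);
   ... expand s1 ...
   expand_start_elseif (b);
   ... expand s2 ...
   expand_start_else ();
   ... expand s3 ...
   expand_end_cond ();  */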
2194 /* Generate RTL between the then-clause and the elseif-clause
2195 of an if-then-elseif-.... */
2197 void
2198 expand_start_elseif (cond)
2199 tree cond;
2201 if (cond_stack->data.cond.endif_label == 0)
2202 cond_stack->data.cond.endif_label = gen_label_rtx ();
2203 emit_jump (cond_stack->data.cond.endif_label);
2204 emit_label (cond_stack->data.cond.next_label);
2205 cond_stack->data.cond.next_label = gen_label_rtx ();
2206 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2209 /* Generate RTL between the then-clause and the else-clause
2210 of an if-then-else. */
2212 void
2213 expand_start_else ()
2215 if (cond_stack->data.cond.endif_label == 0)
2216 cond_stack->data.cond.endif_label = gen_label_rtx ();
2218 emit_jump (cond_stack->data.cond.endif_label);
2219 emit_label (cond_stack->data.cond.next_label);
2220 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2223 /* After calling expand_start_else, turn this "else" into an "else if"
2224 by providing another condition. */
2226 void
2227 expand_elseif (cond)
2228 tree cond;
2230 cond_stack->data.cond.next_label = gen_label_rtx ();
2231 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2234 /* Generate RTL for the end of an if-then.
2235 Pop the record for it off of cond_stack. */
2237 void
2238 expand_end_cond ()
2240 struct nesting *thiscond = cond_stack;
2242 do_pending_stack_adjust ();
2243 if (thiscond->data.cond.next_label)
2244 emit_label (thiscond->data.cond.next_label);
2245 if (thiscond->data.cond.endif_label)
2246 emit_label (thiscond->data.cond.endif_label);
2248 POPSTACK (cond_stack);
2249 last_expr_type = 0;
2252 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2253 loop should be exited by `exit_something'. This is a loop for which
2254 `expand_continue' will jump to the top of the loop.
2256 Make an entry on loop_stack to record the labels associated with
2257 this loop. */
2259 struct nesting *
2260 expand_start_loop (exit_flag)
2261 int exit_flag;
2263 register struct nesting *thisloop = ALLOC_NESTING ();
2265 /* Make an entry on loop_stack for the loop we are entering. */
2267 thisloop->next = loop_stack;
2268 thisloop->all = nesting_stack;
2269 thisloop->depth = ++nesting_depth;
2270 thisloop->data.loop.start_label = gen_label_rtx ();
2271 thisloop->data.loop.end_label = gen_label_rtx ();
2272 thisloop->data.loop.alt_end_label = 0;
2273 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2274 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2275 loop_stack = thisloop;
2276 nesting_stack = thisloop;
2278 do_pending_stack_adjust ();
2279 emit_queue ();
2280 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2281 emit_label (thisloop->data.loop.start_label);
2283 return thisloop;
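/* Schematically, a front end expanding `while (c) body;' might call
   (a sketch):

   expand_start_loop (1);
   expand_exit_loop_if_false (0, c);
   ... expand body ...
   expand_end_loop ();

   where the 0 asks for the innermost loop on loop_stack.  */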
2286 /* Like expand_start_loop but for a loop where the continuation point
2287 (for expand_continue_loop) will be specified explicitly. */
2289 struct nesting *
2290 expand_start_loop_continue_elsewhere (exit_flag)
2291 int exit_flag;
2293 struct nesting *thisloop = expand_start_loop (exit_flag);
2294 loop_stack->data.loop.continue_label = gen_label_rtx ();
2295 return thisloop;
2298 /* Begin a null, aka do { } while (0) "loop". But since the contents
2299 of said loop can still contain a break, we must frob the loop nest. */
2301 struct nesting *
2302 expand_start_null_loop ()
2304 register struct nesting *thisloop = ALLOC_NESTING ();
2306 /* Make an entry on loop_stack for the loop we are entering. */
2308 thisloop->next = loop_stack;
2309 thisloop->all = nesting_stack;
2310 thisloop->depth = ++nesting_depth;
2311 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2312 thisloop->data.loop.end_label = gen_label_rtx ();
2313 thisloop->data.loop.alt_end_label = NULL_RTX;
2314 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2315 thisloop->exit_label = thisloop->data.loop.end_label;
2316 loop_stack = thisloop;
2317 nesting_stack = thisloop;
2319 return thisloop;
2322 /* Specify the continuation point for a loop started with
2323 expand_start_loop_continue_elsewhere.
2324 Use this at the point in the code to which a continue statement
2325 should jump. */
2327 void
2328 expand_loop_continue_here ()
2330 do_pending_stack_adjust ();
2331 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2332 emit_label (loop_stack->data.loop.continue_label);
2335 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2336 Pop the block off of loop_stack. */
2338 void
2339 expand_end_loop ()
2341 rtx start_label = loop_stack->data.loop.start_label;
2342 rtx insn = get_last_insn ();
2343 int needs_end_jump = 1;
2345 /* Mark the continue-point at the top of the loop if none elsewhere. */
2346 if (start_label == loop_stack->data.loop.continue_label)
2347 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2349 do_pending_stack_adjust ();
2351 /* If optimizing, perhaps reorder the loop.
2352 First, try to use a condjump near the end.
2353 expand_exit_loop_if_false ends loops with unconditional jumps,
2354 like this:
2356 if (test) goto label;
2357 optional: cleanup
2358 goto loop_stack->data.loop.end_label
2359 barrier
2360 label:
2362 If we find such a pattern, we can end the loop earlier. */
2364 if (optimize
2365 && GET_CODE (insn) == CODE_LABEL
2366 && LABEL_NAME (insn) == NULL
2367 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2369 rtx label = insn;
2370 rtx jump = PREV_INSN (PREV_INSN (label));
2372 if (GET_CODE (jump) == JUMP_INSN
2373 && GET_CODE (PATTERN (jump)) == SET
2374 && SET_DEST (PATTERN (jump)) == pc_rtx
2375 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2376 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2377 == loop_stack->data.loop.end_label))
2379 rtx prev;
2381 /* The test might be complex and reference LABEL multiple times,
2382 like the loop in loop_iterations to set vtop. To handle this,
2383 we move LABEL. */
2384 insn = PREV_INSN (label);
2385 reorder_insns (label, label, start_label);
2387 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2389 /* We ignore line number notes, but if we see any other note,
2390 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2391 NOTE_INSN_LOOP_*, we disable this optimization. */
2392 if (GET_CODE (prev) == NOTE)
2394 if (NOTE_LINE_NUMBER (prev) < 0)
2395 break;
2396 continue;
2398 if (GET_CODE (prev) == CODE_LABEL)
2399 break;
2400 if (GET_CODE (prev) == JUMP_INSN)
2402 if (GET_CODE (PATTERN (prev)) == SET
2403 && SET_DEST (PATTERN (prev)) == pc_rtx
2404 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2405 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2406 == LABEL_REF)
2407 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2409 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2410 = start_label;
2411 emit_note_after (NOTE_INSN_LOOP_END, prev);
2412 needs_end_jump = 0;
2414 break;
2420 /* If the loop starts with a loop exit, roll that to the end where
2421 it will optimize together with the jump back.
2423 We look for the conditional branch to the exit, except that once
2424 we find such a branch, we don't look past 30 instructions.
2426 In more detail, if the loop presently looks like this (in pseudo-C):
2428 start_label:
2429 if (test) goto end_label;
2430 body;
2431 goto start_label;
2432 end_label:
2434 transform it to look like:
2436 goto start_label;
2437 newstart_label:
2438 body;
2439 start_label:
2440 if (test) goto end_label;
2441 goto newstart_label;
2442 end_label:
2444 Here, the `test' may actually consist of some reasonably complex
2445 code, terminating in a test. */
2447 if (optimize
2448 && needs_end_jump
2450 && ! (GET_CODE (insn) == JUMP_INSN
2451 && GET_CODE (PATTERN (insn)) == SET
2452 && SET_DEST (PATTERN (insn)) == pc_rtx
2453 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2455 int eh_regions = 0;
2456 int num_insns = 0;
2457 rtx last_test_insn = NULL_RTX;
2459 /* Scan insns from the top of the loop looking for a qualified
2460 conditional exit. */
2461 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2462 insn = NEXT_INSN (insn))
2464 if (GET_CODE (insn) == NOTE)
2466 if (optimize < 2
2467 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2468 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2469 /* The code that actually moves the exit test will
2470 carefully leave BLOCK notes in their original
2471 location. That means, however, that we can't debug
2472 the exit test itself. So, we refuse to move code
2473 containing BLOCK notes at low optimization levels. */
2474 break;
2476 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2477 ++eh_regions;
2478 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2480 --eh_regions;
2481 if (eh_regions < 0)
2482 /* We've come to the end of an EH region, but
2483 never saw the beginning of that region. That
2484 means that an EH region begins before the top
2485 of the loop, and ends in the middle of it. The
2486 existence of such a situation violates a basic
2487 assumption in this code, since that would imply
2488 that even when EH_REGIONS is zero, we might
2489 move code out of an exception region. */
2490 abort ();
2493 /* We must not walk into a nested loop. */
2494 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2495 break;
2497 /* We already know this INSN is a NOTE, so there's no
2498 point in looking at it to see if it's a JUMP. */
2499 continue;
2502 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2503 num_insns++;
2505 if (last_test_insn && num_insns > 30)
2506 break;
2508 if (eh_regions > 0)
2509 /* We don't want to move a partial EH region. Consider:
2511 while ( ( { try {
2512 if (cond ()) 0;
2513 else {
2514 bar();
2517 } catch (...) {
2519 } )) {
2520 body;
2523 This isn't legal C++, but here's what it's supposed to
2524 mean: if cond() is true, stop looping. Otherwise,
2525 call bar, and keep looping. In addition, if cond
2526 throws an exception, catch it and keep looping. Such
2527 constructs are certainly legal in LISP.
2529 We should not move the `if (cond()) 0' test since then
2530 the EH-region for the try-block would be broken up.
2531 (In this case we would move the EH_BEG note for the `try'
2532 and `if cond()' but not the call to bar() or the
2533 EH_END note.)
2535 So we don't look for tests within an EH region. */
2536 continue;
2538 if (GET_CODE (insn) == JUMP_INSN
2539 && GET_CODE (PATTERN (insn)) == SET
2540 && SET_DEST (PATTERN (insn)) == pc_rtx)
2542 /* This is indeed a jump. */
2543 rtx dest1 = NULL_RTX;
2544 rtx dest2 = NULL_RTX;
2545 rtx potential_last_test;
2546 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2548 /* A conditional jump. */
2549 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2550 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2551 potential_last_test = insn;
2553 else
2555 /* An unconditional jump. */
2556 dest1 = SET_SRC (PATTERN (insn));
2557 /* Include the BARRIER after the JUMP. */
2558 potential_last_test = NEXT_INSN (insn);
2561 do {
2562 if (dest1 && GET_CODE (dest1) == LABEL_REF
2563 && ((XEXP (dest1, 0)
2564 == loop_stack->data.loop.alt_end_label)
2565 || (XEXP (dest1, 0)
2566 == loop_stack->data.loop.end_label)))
2568 last_test_insn = potential_last_test;
2569 break;
2572 /* If this was a conditional jump, there may be
2573 another label at which we should look. */
2574 dest1 = dest2;
2575 dest2 = NULL_RTX;
2576 } while (dest1);
2580 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2582 /* We found one. Move everything from there up
2583 to the end of the loop, and add a jump into the loop
2584 to jump to there. */
2585 register rtx newstart_label = gen_label_rtx ();
2586 register rtx start_move = start_label;
2587 rtx next_insn;
2589 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2590 then we want to move this note also. */
2591 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2592 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2593 == NOTE_INSN_LOOP_CONT))
2594 start_move = PREV_INSN (start_move);
2596 emit_label_after (newstart_label, PREV_INSN (start_move));
2598 /* Actually move the insns. Start at the beginning, and
2599 keep copying insns until we've copied the
2600 last_test_insn. */
2601 for (insn = start_move; insn; insn = next_insn)
2603 /* Figure out which insn comes after this one. We have
2604 to do this before we move INSN. */
2605 if (insn == last_test_insn)
2606 /* We've moved all the insns. */
2607 next_insn = NULL_RTX;
2608 else
2609 next_insn = NEXT_INSN (insn);
2611 if (GET_CODE (insn) == NOTE
2612 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2613 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2614 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2615 NOTE_INSN_BLOCK_ENDs because the correct generation
2616 of debugging information depends on these appearing
2617 in the same order in the RTL and in the tree
2618 structure, where they are represented as BLOCKs.
2619 So, we don't move block notes. Of course, moving
2620 the code inside the block is likely to make it
2621 impossible to debug the instructions in the exit
2622 test, but such is the price of optimization. */
2623 continue;
2625 /* Move the INSN. */
2626 reorder_insns (insn, insn, get_last_insn ());
2629 emit_jump_insn_after (gen_jump (start_label),
2630 PREV_INSN (newstart_label));
2631 emit_barrier_after (PREV_INSN (newstart_label));
2632 start_label = newstart_label;
2636 if (needs_end_jump)
2638 emit_jump (start_label);
2639 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2641 emit_label (loop_stack->data.loop.end_label);
2643 POPSTACK (loop_stack);
2645 last_expr_type = 0;
2648 /* Finish a null loop, aka do { } while (0). */
2650 void
2651 expand_end_null_loop ()
2653 do_pending_stack_adjust ();
2654 emit_label (loop_stack->data.loop.end_label);
2656 POPSTACK (loop_stack);
2658 last_expr_type = 0;
2661 /* Generate a jump to the current loop's continue-point.
2662 This is usually the top of the loop, but may be specified
2663 explicitly elsewhere. If not currently inside a loop,
2664 return 0 and do nothing; caller will print an error message. */
2667 expand_continue_loop (whichloop)
2668 struct nesting *whichloop;
2670 last_expr_type = 0;
2671 if (whichloop == 0)
2672 whichloop = loop_stack;
2673 if (whichloop == 0)
2674 return 0;
2675 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2676 NULL_RTX);
2677 return 1;
2680 /* Generate a jump to exit the current loop. If not currently inside a loop,
2681 return 0 and do nothing; caller will print an error message. */
2684 expand_exit_loop (whichloop)
2685 struct nesting *whichloop;
2687 last_expr_type = 0;
2688 if (whichloop == 0)
2689 whichloop = loop_stack;
2690 if (whichloop == 0)
2691 return 0;
2692 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2693 return 1;
2696 /* Generate a conditional jump to exit the current loop if COND
2697 evaluates to zero. If not currently inside a loop,
2698 return 0 and do nothing; caller will print an error message. */
2701 expand_exit_loop_if_false (whichloop, cond)
2702 struct nesting *whichloop;
2703 tree cond;
2705 rtx label = gen_label_rtx ();
2706 rtx last_insn;
2707 last_expr_type = 0;
2709 if (whichloop == 0)
2710 whichloop = loop_stack;
2711 if (whichloop == 0)
2712 return 0;
2713 /* In order to handle fixups, we actually create a conditional jump
2714 around an unconditional branch to exit the loop. If fixups are
2715 necessary, they go before the unconditional branch. */
2717 do_jump (cond, NULL_RTX, label);
2718 last_insn = get_last_insn ();
2719 if (GET_CODE (last_insn) == CODE_LABEL)
2720 whichloop->data.loop.alt_end_label = last_insn;
2721 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2722 NULL_RTX);
2723 emit_label (label);
2725 return 1;
2728 /* Return nonzero if the loop nest is empty. Else return zero. */
2731 stmt_loop_nest_empty ()
2733 /* cfun->stmt can be NULL if we are building a call to get the
2734 EH context for a setjmp/longjmp EH target and the current
2735 function was a deferred inline function. */
2736 return (cfun->stmt == NULL || loop_stack == NULL);
2739 /* Return non-zero if we should preserve sub-expressions as separate
2740 pseudos. We never do so if we aren't optimizing. We always do so
2741 if -fexpensive-optimizations is enabled.
2743 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2744 the loop may still be a small one. */
2747 preserve_subexpressions_p ()
2749 rtx insn;
2751 if (flag_expensive_optimizations)
2752 return 1;
2754 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2755 return 0;
2757 insn = get_last_insn_anywhere ();
2759 return (insn
2760 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2761 < n_non_fixed_regs * 3));
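/* For example, on a hypothetical target with 24 non-fixed registers,
   an insn whose UID is within 72 of the start label's still counts
   as "early".  */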
2765 /* Generate a jump to exit the current loop, conditional, binding contour
2766 or case statement. Not all such constructs are visible to this function,
2767 only those started with EXIT_FLAG nonzero. Individual languages use
2768 the EXIT_FLAG parameter to control which kinds of constructs you can
2769 exit this way.
2771 If not currently inside anything that can be exited,
2772 return 0 and do nothing; caller will print an error message. */
2775 expand_exit_something ()
2777 struct nesting *n;
2778 last_expr_type = 0;
2779 for (n = nesting_stack; n; n = n->all)
2780 if (n->exit_label != 0)
2782 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2783 return 1;
2786 return 0;
2789 /* Generate RTL to return from the current function, with no value.
2790 (That is, we do not do anything about returning any value.) */
2792 void
2793 expand_null_return ()
2795 struct nesting *block = block_stack;
2796 rtx last_insn = get_last_insn ();
2798 /* If this function was declared to return a value, but we
2799 didn't, clobber the return registers so that they are not
2800 propagated live to the rest of the function. */
2801 clobber_return_register ();
2803 /* Does any pending block have cleanups? */
2804 while (block && block->data.block.cleanups == 0)
2805 block = block->next;
2807 /* If yes, use a goto to return, since that runs cleanups. */
2809 expand_null_return_1 (last_insn, block != 0);
2812 /* Generate RTL to return from the current function, with value VAL. */
2814 static void
2815 expand_value_return (val)
2816 rtx val;
2818 struct nesting *block = block_stack;
2819 rtx last_insn = get_last_insn ();
2820 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2822 /* Copy the value to the return location
2823 unless it's already there. */
2825 if (return_reg != val)
2827 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2828 #ifdef PROMOTE_FUNCTION_RETURN
2829 int unsignedp = TREE_UNSIGNED (type);
2830 enum machine_mode old_mode
2831 = DECL_MODE (DECL_RESULT (current_function_decl));
2832 enum machine_mode mode
2833 = promote_mode (type, old_mode, &unsignedp, 1);
2835 if (mode != old_mode)
2836 val = convert_modes (mode, old_mode, val, unsignedp);
2837 #endif
2838 if (GET_CODE (return_reg) == PARALLEL)
2839 emit_group_load (return_reg, val, int_size_in_bytes (type),
2840 TYPE_ALIGN (type));
2841 else
2842 emit_move_insn (return_reg, val);
2845 /* Does any pending block have cleanups? */
2847 while (block && block->data.block.cleanups == 0)
2848 block = block->next;
2850 /* If yes, use a goto to return, since that runs cleanups.
2851 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2853 expand_null_return_1 (last_insn, block != 0);
2856 /* Output a return with no value. If LAST_INSN is nonzero,
2857 pretend that the return takes place after LAST_INSN.
2858 If USE_GOTO is nonzero then don't use a return instruction;
2859 go to the return label instead. This causes any cleanups
2860 of pending blocks to be executed normally. */
2862 static void
2863 expand_null_return_1 (last_insn, use_goto)
2864 rtx last_insn;
2865 int use_goto;
2867 rtx end_label = cleanup_label ? cleanup_label : return_label;
2869 clear_pending_stack_adjust ();
2870 do_pending_stack_adjust ();
2871 last_expr_type = 0;
2873 /* PCC-struct return always uses an epilogue. */
2874 if (current_function_returns_pcc_struct || use_goto)
2876 if (end_label == 0)
2877 end_label = return_label = gen_label_rtx ();
2878 expand_goto_internal (NULL_TREE, end_label, last_insn);
2879 return;
2882 /* Otherwise output a simple return-insn if one is available,
2883 unless it won't do the job. */
2884 #ifdef HAVE_return
2885 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2887 emit_jump_insn (gen_return ());
2888 emit_barrier ();
2889 return;
2891 #endif
2893 /* Otherwise jump to the epilogue. */
2894 expand_goto_internal (NULL_TREE, end_label, last_insn);
2897 /* Generate RTL to evaluate the expression RETVAL and return it
2898 from the current function. */
2900 void
2901 expand_return (retval)
2902 tree retval;
2904 /* If there are any cleanups to be performed, then they will
2905 be inserted following LAST_INSN. It is desirable
2906 that the last_insn, for such purposes, should be the
2907 last insn before computing the return value. Otherwise, cleanups
2908 which call functions can clobber the return value. */
2909 /* ??? rms: I think that is erroneous, because in C++ it would
2910 run destructors on variables that might be used in the subsequent
2911 computation of the return value. */
2912 rtx last_insn = 0;
2913 rtx result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2914 register rtx val = 0;
2915 tree retval_rhs;
2916 int cleanups;
2918 /* If function wants no value, give it none. */
2919 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2921 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2922 emit_queue ();
2923 expand_null_return ();
2924 return;
2927 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2928 /* This is not sufficient. We also need to watch for cleanups of the
2929 expression we are about to expand. Unfortunately, we cannot know
2930 if it has cleanups until we expand it, and we want to change how we
2931 expand it depending upon whether we need cleanups. We can't win. */
2932 #if 0
2933 cleanups = any_pending_cleanups (1);
2934 #else
2935 cleanups = 1;
2936 #endif
2938 if (retval == error_mark_node)
2939 retval_rhs = NULL_TREE;
2940 else if (TREE_CODE (retval) == RESULT_DECL)
2941 retval_rhs = retval;
2942 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2943 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2944 retval_rhs = TREE_OPERAND (retval, 1);
2945 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2946 /* Recognize tail-recursive call to void function. */
2947 retval_rhs = retval;
2948 else
2949 retval_rhs = NULL_TREE;
2951 /* Only use `last_insn' if there are cleanups which must be run. */
2952 if (cleanups || cleanup_label != 0)
2953 last_insn = get_last_insn ();
2955 /* Distribute return down conditional expr if either of the sides
2956 may involve tail recursion (see test below). This enhances the number
2957 of tail recursions we see. Don't do this always since it can produce
2958 sub-optimal code in some cases and we distribute assignments into
2959 conditional expressions when it would help. */
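/* E.g., `return p ? f (x) : g (x);' is expanded, in effect, as

   if (!p) goto L;
   return f (x);
   L: return g (x);

   so that each arm can be recognized as a tail call by itself.  */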
2961 if (optimize && retval_rhs != 0
2962 && frame_offset == 0
2963 && TREE_CODE (retval_rhs) == COND_EXPR
2964 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2965 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2967 rtx label = gen_label_rtx ();
2968 tree expr;
2970 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2971 start_cleanup_deferral ();
2972 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2973 DECL_RESULT (current_function_decl),
2974 TREE_OPERAND (retval_rhs, 1));
2975 TREE_SIDE_EFFECTS (expr) = 1;
2976 expand_return (expr);
2977 emit_label (label);
2979 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2980 DECL_RESULT (current_function_decl),
2981 TREE_OPERAND (retval_rhs, 2));
2982 TREE_SIDE_EFFECTS (expr) = 1;
2983 expand_return (expr);
2984 end_cleanup_deferral ();
2985 return;
2988 /* If the result is an aggregate that is being returned in one (or more)
2989 registers, load the registers here. The compiler currently can't handle
2990 copying a BLKmode value into registers. We could put this code in a
2991 more general area (for use by everyone instead of just function
2992 call/return), but until this feature is generally usable it is kept here
2993 (and in expand_call). The value must go into a pseudo in case there
2994 are cleanups that will clobber the real return register. */
2996 if (retval_rhs != 0
2997 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2998 && GET_CODE (result_rtl) == REG)
3000 int i;
3001 unsigned HOST_WIDE_INT bitpos, xbitpos;
3002 unsigned HOST_WIDE_INT big_endian_correction = 0;
3003 unsigned HOST_WIDE_INT bytes
3004 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3005 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3006 unsigned int bitsize
3007 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3008 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3009 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3010 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3011 enum machine_mode tmpmode, result_reg_mode;
3013 /* Structures whose size is not a multiple of a word are aligned
3014 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3015 machine, this means we must skip the empty high order bytes when
3016 calculating the bit offset. */
3017 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3018 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3019 * BITS_PER_UNIT));
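/* For example, on a hypothetical 32-bit big-endian target, a 6-byte
   structure has bytes % UNITS_PER_WORD == 2, so the correction is
   32 - 2 * 8 == 16 bits of empty high-order space to skip.  */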
3021 /* Copy the structure BITSIZE bits at a time. */
3022 for (bitpos = 0, xbitpos = big_endian_correction;
3023 bitpos < bytes * BITS_PER_UNIT;
3024 bitpos += bitsize, xbitpos += bitsize)
3026 /* We need a new destination pseudo each time xbitpos is
3027 on a word boundary and when xbitpos == big_endian_correction
3028 (the first time through). */
3029 if (xbitpos % BITS_PER_WORD == 0
3030 || xbitpos == big_endian_correction)
3032 /* Generate an appropriate register. */
3033 dst = gen_reg_rtx (word_mode);
3034 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3036 /* Clobber the destination before we move anything into it. */
3037 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3040 /* We need a new source operand each time bitpos is on a word
3041 boundary. */
3042 if (bitpos % BITS_PER_WORD == 0)
3043 src = operand_subword_force (result_val,
3044 bitpos / BITS_PER_WORD,
3045 BLKmode);
3047 /* Use bitpos for the source extraction (left justified) and
3048 xbitpos for the destination store (right justified). */
3049 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3050 extract_bit_field (src, bitsize,
3051 bitpos % BITS_PER_WORD, 1,
3052 NULL_RTX, word_mode, word_mode,
3053 bitsize, BITS_PER_WORD),
3054 bitsize, BITS_PER_WORD);
3057 /* Find the smallest integer mode large enough to hold the
3058 entire structure and use that mode instead of BLKmode
3059 on the USE insn for the return register. */
3060 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
3061 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3062 tmpmode != VOIDmode;
3063 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3065 /* Have we found a large enough mode? */
3066 if (GET_MODE_SIZE (tmpmode) >= bytes)
3067 break;
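/* Continuing the 6-byte example above: a 4-byte SImode is too narrow,
   so the first mode that fits would be the 8-byte DImode.  */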
3070 /* No suitable mode found. */
3071 if (tmpmode == VOIDmode)
3072 abort ();
3074 PUT_MODE (result_rtl, tmpmode);
3076 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3077 result_reg_mode = word_mode;
3078 else
3079 result_reg_mode = tmpmode;
3080 result_reg = gen_reg_rtx (result_reg_mode);
3082 emit_queue ();
3083 for (i = 0; i < n_regs; i++)
3084 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3085 result_pseudos[i]);
3087 if (tmpmode != result_reg_mode)
3088 result_reg = gen_lowpart (tmpmode, result_reg);
3090 expand_value_return (result_reg);
3092 else if (cleanups
3093 && retval_rhs != 0
3094 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3095 && (GET_CODE (result_rtl) == REG
3096 || (GET_CODE (result_rtl) == PARALLEL)))
3098 /* Calculate the return value into a temporary (usually a pseudo
3099 reg). */
3100 val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
3101 0, 0, 1);
3102 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3103 val = force_not_mem (val);
3104 emit_queue ();
3105 /* Return the calculated value, doing cleanups first. */
3106 expand_value_return (val);
3108 else
3110 /* No cleanups or no hard reg used;
3111 calculate value into hard return reg. */
3112 expand_expr (retval, const0_rtx, VOIDmode, 0);
3113 emit_queue ();
3114 expand_value_return (result_rtl);
3118 /* Return 1 if the end of the generated RTX is not a barrier.
3119 This means code already compiled can drop through. */
3122 drop_through_at_end_p ()
3124 rtx insn = get_last_insn ();
3125 while (insn && GET_CODE (insn) == NOTE)
3126 insn = PREV_INSN (insn);
3127 return insn && GET_CODE (insn) != BARRIER;
3130 /* Attempt to optimize a potential tail recursion call into a goto.
3131 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3132 where to place the jump to the tail recursion label.
3134 Return TRUE if the call was optimized into a goto. */
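/* For example, in a function like

   int f (int n, int acc) { return n == 0 ? acc : f (n - 1, n * acc); }

   the recursive call may become stores to the argument variables
   followed by a jump back to just after tail_recursion_reentry.  */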
3137 optimize_tail_recursion (arguments, last_insn)
3138 tree arguments;
3139 rtx last_insn;
3141 /* Finish checking validity, and if valid emit code to set the
3142 argument variables for the new call. */
3143 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3145 if (tail_recursion_label == 0)
3147 tail_recursion_label = gen_label_rtx ();
3148 emit_label_after (tail_recursion_label,
3149 tail_recursion_reentry);
3151 emit_queue ();
3152 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3153 emit_barrier ();
3154 return 1;
3156 return 0;
3159 /* Emit code to alter this function's formal parms for a tail-recursive call.
3160 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3161 FORMALS is the chain of decls of formals.
3162 Return 1 if this can be done;
3163 otherwise return 0 and do not emit any code. */
3165 static int
3166 tail_recursion_args (actuals, formals)
3167 tree actuals, formals;
3169 register tree a = actuals, f = formals;
3170 register int i;
3171 register rtx *argvec;
3173 /* Check that number and types of actuals are compatible
3174 with the formals. This is not always true in valid C code.
3175 Also check that no formal needs to be addressable
3176 and that all formals are scalars. */
3178 /* Also count the args. */
3180 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3182 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3183 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3184 return 0;
3185 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3186 return 0;
3188 if (a != 0 || f != 0)
3189 return 0;
3191 /* Compute all the actuals. */
3193 argvec = (rtx *) alloca (i * sizeof (rtx));
3195 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3196 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3198 /* Find which actual values refer to current values of previous formals.
3199 Copy each of them now, before any formal is changed. */
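/* For instance, expanding the tail call f (b, a) inside f (a, b):
   the second actual still mentions the register of the first formal,
   which the first store below would clobber, so its value is copied
   to a fresh register first.  */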
3201 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3203 int copy = 0;
3204 register int j;
3205 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3206 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3208 copy = 1;
3209 break;
3211 if (copy)
3212 argvec[i] = copy_to_reg (argvec[i]);
3215 /* Store the values of the actuals into the formals. */
3217 for (f = formals, a = actuals, i = 0; f;
3218 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3220 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3221 emit_move_insn (DECL_RTL (f), argvec[i]);
3222 else
3223 convert_move (DECL_RTL (f), argvec[i],
3224 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3227 free_temp_slots ();
3228 return 1;
3231 /* Generate the RTL code for entering a binding contour.
3232 The variables are declared one by one, by calls to `expand_decl'.
3234 FLAGS is a bitwise or of the following flags:
3236 1 - Nonzero if this construct should be visible to
3237 `exit_something'.
3239 2 - Nonzero if this contour does not require a
3240 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3241 language-independent code should set this flag because they
3242 will not create corresponding BLOCK nodes. (There should be
3243 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3244 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3245 when expand_end_bindings is called.
3247 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3248 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3249 note. */
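/* For example, expand_start_target_temps below passes FLAGS == 2:
   the contour is invisible to `exit_something' and gets a
   NOTE_INSN_DELETED placeholder instead of a NOTE_INSN_BLOCK_BEG.  */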
3251 void
3252 expand_start_bindings_and_block (flags, block)
3253 int flags;
3254 tree block;
3256 struct nesting *thisblock = ALLOC_NESTING ();
3257 rtx note;
3258 int exit_flag = ((flags & 1) != 0);
3259 int block_flag = ((flags & 2) == 0);
3261 /* If a BLOCK is supplied, then the caller should be requesting a
3262 NOTE_INSN_BLOCK_BEG note. */
3263 if (!block_flag && block)
3264 abort ();
3266 /* Create a note to mark the beginning of the block. */
3267 if (block_flag)
3269 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3270 NOTE_BLOCK (note) = block;
3272 else
3273 note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
3275 /* Make an entry on block_stack for the block we are entering. */
3277 thisblock->next = block_stack;
3278 thisblock->all = nesting_stack;
3279 thisblock->depth = ++nesting_depth;
3280 thisblock->data.block.stack_level = 0;
3281 thisblock->data.block.cleanups = 0;
3282 thisblock->data.block.n_function_calls = 0;
3283 thisblock->data.block.exception_region = 0;
3284 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3286 thisblock->data.block.conditional_code = 0;
3287 thisblock->data.block.last_unconditional_cleanup = note;
3288 /* When we insert instructions after the last unconditional cleanup,
3289 we don't adjust last_insn. That means that a later add_insn will
3290 clobber the instructions we've just added. The easiest way to
3291 fix this is to just insert another instruction here, so that the
3292 instructions inserted after the last unconditional cleanup are
3293 never the last instruction. */
3294 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3295 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3297 if (block_stack
3298 && !(block_stack->data.block.cleanups == NULL_TREE
3299 && block_stack->data.block.outer_cleanups == NULL_TREE))
3300 thisblock->data.block.outer_cleanups
3301 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3302 block_stack->data.block.outer_cleanups);
3303 else
3304 thisblock->data.block.outer_cleanups = 0;
3305 thisblock->data.block.label_chain = 0;
3306 thisblock->data.block.innermost_stack_block = stack_block_stack;
3307 thisblock->data.block.first_insn = note;
3308 thisblock->data.block.block_start_count = ++current_block_start_count;
3309 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3310 block_stack = thisblock;
3311 nesting_stack = thisblock;
3313 /* Make a new level for allocating stack slots. */
3314 push_temp_slots ();
3317 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3318 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3319 expand_expr are made. After we end the region, we know that all
3320 space for all temporaries that were created by TARGET_EXPRs will be
3321 destroyed and their space freed for reuse. */
3323 void
3324 expand_start_target_temps ()
3326 /* This is so that even if the result is preserved, the space
3327 allocated will be freed, as we know that it is no longer in use. */
3328 push_temp_slots ();
3330 /* Start a new binding layer that will keep track of all cleanup
3331 actions to be performed. */
3332 expand_start_bindings (2);
3334 target_temp_slot_level = temp_slot_level;
3337 void
3338 expand_end_target_temps ()
3340 expand_end_bindings (NULL_TREE, 0, 0);
3342 /* This is so that even if the result is preserved, the space
3343 allocated will be freed, as we know that it is no longer in use. */
3344 pop_temp_slots ();
3347 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3348 in question represents the outermost pair of curly braces (i.e. the "body
3349 block") of a function or method.
3351 For any BLOCK node representing a "body block" of a function or method, the
3352 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3353 represents the outermost (function) scope for the function or method (i.e.
3354 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3355 *that* node in turn will point to the relevant FUNCTION_DECL node. */
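/* Schematically:

   FUNCTION_DECL
     BLOCK   <- outermost (function) scope, including the formals
       BLOCK <- the "body block" recognized here  */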
3358 is_body_block (stmt)
3359 register tree stmt;
3361 if (TREE_CODE (stmt) == BLOCK)
3363 tree parent = BLOCK_SUPERCONTEXT (stmt);
3365 if (parent && TREE_CODE (parent) == BLOCK)
3367 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3369 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3370 return 1;
3374 return 0;
3377 /* Mark top block of block_stack as an implicit binding for an
3378 exception region. This is used to prevent infinite recursion when
3379 ending a binding with expand_end_bindings. It is only ever called
3380 by expand_eh_region_start, as that is the only way to create a
3381 block stack for an exception region. */
3383 void
3384 mark_block_as_eh_region ()
3386 block_stack->data.block.exception_region = 1;
3387 if (block_stack->next
3388 && block_stack->next->data.block.conditional_code)
3390 block_stack->data.block.conditional_code
3391 = block_stack->next->data.block.conditional_code;
3392 block_stack->data.block.last_unconditional_cleanup
3393 = block_stack->next->data.block.last_unconditional_cleanup;
3394 block_stack->data.block.cleanup_ptr
3395 = block_stack->next->data.block.cleanup_ptr;
3399 /* True if we are currently emitting insns in an area of output code
3400 that is controlled by a conditional expression. This is used by
3401 the cleanup handling code to generate conditional cleanup actions. */
3404 conditional_context ()
3406 return block_stack && block_stack->data.block.conditional_code;
3409 /* Mark top block of block_stack as not for an implicit binding for an
3410 exception region. This is only ever done by expand_eh_region_end
3411 to let expand_end_bindings know that it is being called explicitly
3412 to end just the binding layer associated with the exception
3413 region; otherwise expand_end_bindings would try to end all the
3414 implicit binding layers for exception regions, and then
3415 one normal binding layer. */
3417 void
3418 mark_block_as_not_eh_region ()
3420 block_stack->data.block.exception_region = 0;
3423 /* True if the top block of block_stack was marked as for an exception
3424 region by mark_block_as_eh_region. */
3427 is_eh_region ()
3429 return cfun && block_stack && block_stack->data.block.exception_region;
3432 /* Emit a handler label for a nonlocal goto handler.
3433 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3435 static rtx
3436 expand_nl_handler_label (slot, before_insn)
3437 rtx slot, before_insn;
3439 rtx insns;
3440 rtx handler_label = gen_label_rtx ();
3442 /* Don't let jump_optimize delete the handler. */
3443 LABEL_PRESERVE_P (handler_label) = 1;
3445 start_sequence ();
3446 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3447 insns = get_insns ();
3448 end_sequence ();
3449 emit_insns_before (insns, before_insn);
3451 emit_label (handler_label);
3453 return handler_label;
3456 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3457 handler. */
3458 static void
3459 expand_nl_goto_receiver ()
3461 #ifdef HAVE_nonlocal_goto
3462 if (! HAVE_nonlocal_goto)
3463 #endif
3464 /* First adjust our frame pointer to its actual value. It was
3465 previously set to the start of the virtual area corresponding to
3466 the stacked variables when we branched here and now needs to be
3467 adjusted to the actual hardware fp value.
3469 Assignments to virtual registers are converted by
3470 instantiate_virtual_regs into the corresponding assignment
3471 to the underlying register (fp in this case) that makes
3472 the original assignment true.
3473 So the following insn will actually be
3474 decrementing fp by STARTING_FRAME_OFFSET. */
3475 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3477 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3478 if (fixed_regs[ARG_POINTER_REGNUM])
3480 #ifdef ELIMINABLE_REGS
3481 /* If the argument pointer can be eliminated in favor of the
3482 frame pointer, we don't need to restore it. We assume here
3483 that if such an elimination is present, it can always be used.
3484 This is the case on all known machines; if we don't make this
3485 assumption, we do unnecessary saving on many machines. */
3486 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3487 size_t i;
3489 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3490 if (elim_regs[i].from == ARG_POINTER_REGNUM
3491 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3492 break;
3494 if (i == ARRAY_SIZE (elim_regs))
3495 #endif
3497 /* Now restore our arg pointer from the address at which it
3498 was saved in our stack frame.
3499 If space hasn't been allocated for it yet, make
3500 some now. */
3501 if (arg_pointer_save_area == 0)
3502 arg_pointer_save_area
3503 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3504 emit_move_insn (virtual_incoming_args_rtx,
3505 /* We need a pseudo here, or else
3506 instantiate_virtual_regs_1 complains. */
3507 copy_to_reg (arg_pointer_save_area));
3510 #endif
3512 #ifdef HAVE_nonlocal_goto_receiver
3513 if (HAVE_nonlocal_goto_receiver)
3514 emit_insn (gen_nonlocal_goto_receiver ());
3515 #endif
3518 /* Make handlers for nonlocal gotos taking place in the function calls in
3519 block THISBLOCK. */
3521 static void
3522 expand_nl_goto_receivers (thisblock)
3523 struct nesting *thisblock;
3525 tree link;
3526 rtx afterward = gen_label_rtx ();
3527 rtx insns, slot;
3528 rtx label_list;
3529 int any_invalid;
3531 /* Record the handler address in the stack slot for that purpose,
3532 during this block, saving and restoring the outer value. */
3533 if (thisblock->next != 0)
3534 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3536 rtx save_receiver = gen_reg_rtx (Pmode);
3537 emit_move_insn (XEXP (slot, 0), save_receiver);
3539 start_sequence ();
3540 emit_move_insn (save_receiver, XEXP (slot, 0));
3541 insns = get_insns ();
3542 end_sequence ();
3543 emit_insns_before (insns, thisblock->data.block.first_insn);
3546 /* Jump around the handlers; they run only when specially invoked. */
3547 emit_jump (afterward);
3549 /* Make a separate handler for each label. */
3550 link = nonlocal_labels;
3551 slot = nonlocal_goto_handler_slots;
3552 label_list = NULL_RTX;
3553 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3554 /* Skip any labels we shouldn't be able to jump to from here;
3555 we generate one special handler for all of them below, which just calls
3556 abort. */
3557 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3559 rtx lab;
3560 lab = expand_nl_handler_label (XEXP (slot, 0),
3561 thisblock->data.block.first_insn);
3562 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3564 expand_nl_goto_receiver ();
3566 /* Jump to the "real" nonlocal label. */
3567 expand_goto (TREE_VALUE (link));
3570 /* A second pass over all nonlocal labels; this time we handle those
3571 we should not be able to jump to at this point. */
3572 link = nonlocal_labels;
3573 slot = nonlocal_goto_handler_slots;
3574 any_invalid = 0;
3575 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3576 if (DECL_TOO_LATE (TREE_VALUE (link)))
3578 rtx lab;
3579 lab = expand_nl_handler_label (XEXP (slot, 0),
3580 thisblock->data.block.first_insn);
3581 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3582 any_invalid = 1;
3585 if (any_invalid)
3587 expand_nl_goto_receiver ();
3588 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3589 VOIDmode, 0);
3590 emit_barrier ();
3593 nonlocal_goto_handler_labels = label_list;
3594 emit_label (afterward);
3597 /* Warn about any unused VARS (which may contain nodes other than
3598 VAR_DECLs, but such nodes are ignored). The nodes are connected
3599 via the TREE_CHAIN field. */
3601 void
3602 warn_about_unused_variables (vars)
3603 tree vars;
3605 tree decl;
3607 if (warn_unused_variable)
3608 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3609 if (TREE_CODE (decl) == VAR_DECL
3610 && ! TREE_USED (decl)
3611 && ! DECL_IN_SYSTEM_HEADER (decl)
3612 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3613 warning_with_decl (decl, "unused variable `%s'");
/* Generate RTL code to terminate a binding contour.

   VARS is the chain of VAR_DECL nodes for the variables bound in this
   contour.  There may actually be other nodes in this chain, but any
   nodes other than VAR_DECLS are ignored.

   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */

void
expand_end_bindings (vars, mark_ends, dont_jump_in)
     tree vars;
     int mark_ends;
     int dont_jump_in;
{
  register struct nesting *thisblock;

  while (block_stack->data.block.exception_region)
    {
      /* Because we don't need or want a new temporary level and
         because we didn't create one in expand_eh_region_start,
         create a fake one now to avoid removing one in
         expand_end_bindings.  */
      push_temp_slots ();

      block_stack->data.block.exception_region = 0;

      expand_end_bindings (NULL_TREE, 0, 0);
    }

  /* Since expand_eh_region_start does an expand_start_bindings, we
     have to first end all the bindings that were created by
     expand_eh_region_start.  */

  thisblock = block_stack;

  /* If any of the variables in this scope were not used, warn the
     user.  */
  warn_about_unused_variables (vars);

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  /* If necessary, make handlers for nonlocal gotos taking
     place in the function calls in this block.  */
  if (function_call_count != thisblock->data.block.n_function_calls
      && nonlocal_labels
      /* Make handler for outermost block
         if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
          /* Make handler for inner block if it has something
             special to do when you jump out of it.  */
          : (thisblock->data.block.cleanups != 0
             || thisblock->data.block.stack_level != 0)))
    expand_nl_goto_receivers (thisblock);

  /* Don't allow jumping into a block that has a stack level.
     Cleanups are allowed, though.  */
  if (dont_jump_in
      || thisblock->data.block.stack_level != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
         Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
        {
          DECL_TOO_LATE (chain->label) = 1;
          /* If any goto without a fixup came to this label,
             that must be an error, because gotos without fixups
             come from outside all saved stack-levels.  */
          if (TREE_ADDRESSABLE (chain->label))
            error_with_decl (chain->label,
                             "label `%s' used before containing binding contour");
        }
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      int reachable;
      rtx insn;

      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Only clean up here if this point can actually be reached.  */
      insn = get_last_insn ();
      if (GET_CODE (insn) == NOTE)
        insn = prev_nonnote_insn (insn);
      reachable = (! insn || GET_CODE (insn) != BARRIER);

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
      if (reachable)
        do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (reachable && thisblock->data.block.stack_level != 0)
        {
          emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
                              thisblock->data.block.stack_level, NULL_RTX);
          if (nonlocal_goto_handler_slots != 0)
            emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
                             NULL_RTX);
        }

      /* Any gotos out of this block must also do these things.
         Also report any gotos with fixups that came to labels in this
         level.  */
      fixup_gotos (thisblock,
                   thisblock->data.block.stack_level,
                   thisblock->data.block.cleanups,
                   thisblock->data.block.first_insn,
                   dont_jump_in);
    }

  /* Mark the beginning and end of the scope if requested.
     We do this now, after running cleanups on the variables
     just going out of scope, so they are in scope for their cleanups.  */

  if (mark_ends)
    {
      rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
      NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
    }
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* Restore the temporary level of TARGET_EXPRs.  */
  target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
  pop_temp_slots ();
}

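/* Illustrative sketch, not part of the original source: roughly how a
   front end drives the binding-contour entry points in this file for a
   braced block such as `{ int x = 5; ... }'.  The exact start entry
   point (expand_start_bindings) and its argument, and the getdecls
   call, are assumptions about the front-end side and may differ by
   revision.  */
#if 0
  expand_start_bindings (0);            /* open the contour (assumed API) */
  expand_decl (decl);                   /* make RTL for `x' */
  expand_decl_init (decl);              /* store the initializer */
  /* ... expand the statements of the block ... */
  expand_end_bindings (getdecls (), 1, 0); /* warn, run cleanups, close */
#endif
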
/* Generate code to save the stack pointer at the start of the current block
   and set up to restore it on exit.  */

void
save_stack_pointer ()
{
  struct nesting *thisblock = block_stack;

  if (thisblock->data.block.stack_level == 0)
    {
      emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
                       &thisblock->data.block.stack_level,
                       thisblock->data.block.first_insn);
      stack_block_stack = thisblock;
    }
}

/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (decl)
     register tree decl;
{
  struct nesting *thisblock;
  tree type;

  type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return;
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  thisblock = block_stack;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
        /* Error message was already done; now avoid a crash.  */
        DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
      else
        /* An initializer is going to decide the size of this array.
           Until we know the size, represent its address with a reg.  */
        DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (DECL_RTL (decl), decl, 1);
    }
  else if (DECL_MODE (decl) != BLKmode
           /* If -ffloat-store, don't put explicit float vars
              into regs.  */
           && !(flag_float_store
                && TREE_CODE (type) == REAL_TYPE)
           && ! TREE_THIS_VOLATILE (decl)
           && (DECL_REGISTER (decl) || optimize)
           /* If -fcheck-memory-usage, check all variables.  */
           && ! current_function_check_memory_usage)
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode reg_mode
        = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      DECL_RTL (decl) = gen_reg_rtx (reg_mode);
      mark_user_reg (DECL_RTL (decl));

      if (POINTER_TYPE_P (type))
        mark_reg_pointer (DECL_RTL (decl),
                          TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));

      maybe_set_unchanging (DECL_RTL (decl), decl);

      /* If something wants our address, try to use ADDRESSOF.  */
      if (TREE_ADDRESSABLE (decl))
        put_var_into_stack (decl);
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
           && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
                 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
                                          STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;

      /* If we previously made RTL for this decl, it must be an array
         whose size was determined by the initializer.
         The old address was a register; set that register now
         to the proper address.  */
      if (DECL_RTL (decl) != 0)
        {
          if (GET_CODE (DECL_RTL (decl)) != MEM
              || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
            abort ();
          oldaddr = XEXP (DECL_RTL (decl), 0);
        }

      DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
                           : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      if (oldaddr)
        {
          addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
          if (addr != oldaddr)
            emit_move_insn (oldaddr, addr);
        }
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size;

      /* Record the stack pointer on entry to the block, if we have
         not already done so.  */
      do_pending_stack_adjust ();
      save_stack_pointer ();

      /* In function-at-a-time mode, variable_size doesn't expand this,
         so do it now.  */
      if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
        expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
                     const0_rtx, VOIDmode, 0);

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
         DECL_ALIGN says how the variable is to be aligned and we
         cannot use it to conclude anything about the alignment of
         the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
                                              TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);

      set_mem_attributes (DECL_RTL (decl), decl, 1);

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}

/* Emit code to perform the initialization of a declaration DECL.  */

void
expand_decl_init (decl)
     tree decl;
{
  int was_used = TREE_USED (decl);

  /* If this is a CONST_DECL, we don't have to generate any code, but
     if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
     to be set while in the obstack containing the constant.  If we don't
     do this, we can lose if we have functions nested three deep and the middle
     function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
     the innermost function is the first to expand that STRING_CST.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
        expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
                     EXPAND_INITIALIZER);
      return;
    }

  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
          || code == POINTER_TYPE || code == REFERENCE_TYPE)
        expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
                           0, 0);
      emit_queue ();
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
      emit_queue ();
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
}

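/* Illustrative note, not part of the original source: for an optimized
   `int x = 5;', expand_decl above gives X a pseudo register
   (DECL_RTL = (reg:SI n)) and expand_decl_init then emits roughly
   (set (reg:SI n) (const_int 5)) by way of expand_assignment.  */
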
/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
   CLEANUP multiple times, and have the correct semantics.  This
   happens in exception handling, and for gotos, returns, and breaks
   that leave the current scope.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

int
expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  struct nesting *thisblock;

  /* Error if we are not in any block.  */
  if (cfun == 0 || block_stack == 0)
    return 0;

  thisblock = block_stack;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      tree t;
      rtx seq;
      tree *cleanups = &thisblock->data.block.cleanups;
      int cond_context = conditional_context ();

      if (cond_context)
        {
          rtx flag = gen_reg_rtx (word_mode);
          rtx set_flag_0;
          tree cond;

          start_sequence ();
          emit_move_insn (flag, const0_rtx);
          set_flag_0 = get_insns ();
          end_sequence ();

          thisblock->data.block.last_unconditional_cleanup
            = emit_insns_after (set_flag_0,
                                thisblock->data.block.last_unconditional_cleanup);

          emit_move_insn (flag, const1_rtx);

          cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
          DECL_RTL (cond) = flag;

          /* Conditionalize the cleanup.  */
          cleanup = build (COND_EXPR, void_type_node,
                           truthvalue_conversion (cond),
                           cleanup, integer_zero_node);
          cleanup = fold (cleanup);

          cleanups = thisblock->data.block.cleanup_ptr;
        }

      cleanup = unsave_expr (cleanup);

      t = *cleanups = tree_cons (decl, cleanup, *cleanups);

      if (! cond_context)
        /* If this block has a cleanup, it belongs in stack_block_stack.  */
        stack_block_stack = thisblock;

      if (cond_context)
        {
          start_sequence ();
        }

      /* If this was optimized so that there is no exception region for the
         cleanup, then mark the TREE_LIST node, so that we can later tell
         if we need to call expand_eh_region_end.  */
      if (! using_eh_for_cleanups_p
          || expand_eh_region_start_tree (decl, cleanup))
        TREE_ADDRESSABLE (t) = 1;
      /* If that started a new EH region, we're in a new block.  */
      thisblock = block_stack;

      if (cond_context)
        {
          seq = get_insns ();
          end_sequence ();
          if (seq)
            thisblock->data.block.last_unconditional_cleanup
              = emit_insns_after (seq,
                                  thisblock->data.block.last_unconditional_cleanup);
        }
      else
        {
          thisblock->data.block.last_unconditional_cleanup
            = get_last_insn ();
          /* When we insert instructions after the last unconditional cleanup,
             we don't adjust last_insn.  That means that a later add_insn will
             clobber the instructions we've just added.  The easiest way to
             fix this is to just insert another instruction here, so that the
             instructions inserted after the last unconditional cleanup are
             never the last instruction.  */
          emit_note (NULL_PTR, NOTE_INSN_DELETED);
          thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
        }
    }
  return 1;
}

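/* Illustrative sketch, not part of the original source: a C++-style
   front end would register a destructor call for an automatic object
   roughly like this.  `build_destructor_call' is a hypothetical helper
   standing in for whatever builds the destructor CALL_EXPR.  */
#if 0
  tree dtor = build_destructor_call (decl);   /* hypothetical */
  if (! expand_decl_cleanup (decl, dtor))
    error ("destructor needed but no binding contour is open");
#endif
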
/* Like expand_decl_cleanup, but suppress generating an exception handler
   to perform the cleanup.  */

#if 0
int
expand_decl_cleanup_no_eh (decl, cleanup)
     tree decl, cleanup;
{
  int save_eh = using_eh_for_cleanups_p;
  int result;

  using_eh_for_cleanups_p = 0;
  result = expand_decl_cleanup (decl, cleanup);
  using_eh_for_cleanups_p = save_eh;

  return result;
}
#endif

/* Arrange for the top element of the dynamic cleanup chain to be
   popped if we exit the current binding contour.  DECL is the
   associated declaration, if any, otherwise NULL_TREE.  If the
   current contour is left via an exception, then __sjthrow will pop
   the top element off the dynamic cleanup chain.  The code that
   avoids doing the action we push into the cleanup chain in the
   exceptional case is contained in expand_cleanups.

   This routine is only used by expand_eh_region_start, and that is
   the only way in which an exception region should be started.  This
   routine is only used when using the setjmp/longjmp codegen method
   for exception handling.  */

int
expand_dcc_cleanup (decl)
     tree decl;
{
  struct nesting *thisblock;
  tree cleanup;

  /* Error if we are not in any block.  */
  if (cfun == 0 || block_stack == 0)
    return 0;
  thisblock = block_stack;

  /* Record the cleanup for the dynamic cleanup chain.  */

  cleanup = make_node (POPDCC_EXPR);

  /* Add the cleanup in a manner similar to expand_decl_cleanup.  */
  thisblock->data.block.cleanups
    = tree_cons (decl, cleanup, thisblock->data.block.cleanups);

  /* If this block has a cleanup, it belongs in stack_block_stack.  */
  stack_block_stack = thisblock;
  return 1;
}

/* Arrange for the top element of the dynamic handler chain to be
   popped if we exit the current binding contour.  DECL is the
   associated declaration, if any, otherwise NULL_TREE.  If the current
   contour is left via an exception, then __sjthrow will pop the top
   element off the dynamic handler chain.  The code that avoids doing
   the action we push into the handler chain in the exceptional case
   is contained in expand_cleanups.

   This routine is only used by expand_eh_region_start, and that is
   the only way in which an exception region should be started.  This
   routine is only used when using the setjmp/longjmp codegen method
   for exception handling.  */

int
expand_dhc_cleanup (decl)
     tree decl;
{
  struct nesting *thisblock;
  tree cleanup;

  /* Error if we are not in any block.  */
  if (cfun == 0 || block_stack == 0)
    return 0;
  thisblock = block_stack;

  /* Record the cleanup for the dynamic handler chain.  */

  cleanup = make_node (POPDHC_EXPR);

  /* Add the cleanup in a manner similar to expand_decl_cleanup.  */
  thisblock->data.block.cleanups
    = tree_cons (decl, cleanup, thisblock->data.block.cleanups);

  /* If this block has a cleanup, it belongs in stack_block_stack.  */
  stack_block_stack = thisblock;
  return 1;
}

/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
        TREE_ADDRESSABLE (decl) = 1;
        break;
      }

  expand_decl (decl);
  expand_decl_cleanup (decl, cleanup);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      tree cleanup_elt = TREE_PURPOSE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
        DECL_MODE (decl_elt) = mode
          = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
         instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
        {
          if (mode == GET_MODE (x))
            DECL_RTL (decl_elt) = x;
          else
            {
              DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
              MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
            }
        }
      else if (GET_CODE (x) == REG)
        {
          if (mode == GET_MODE (x))
            DECL_RTL (decl_elt) = x;
          else
            DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
        }
      else
        abort ();

      /* Record the cleanup if there is one.  */

      if (cleanup != 0)
        thisblock->data.block.cleanups
          = tree_cons (decl_elt, cleanup_elt,
                       thisblock->data.block.cleanups);
    }
}

/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.

   If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
   goto and handle protection regions specially in that case.

   If REACHABLE, we emit code, otherwise just inform the exception handling
   code about this finalization.  */

static void
expand_cleanups (list, dont_do, in_fixup, reachable)
     tree list;
     tree dont_do;
     int in_fixup;
     int reachable;
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
        if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
          expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
        else
          {
            if (! in_fixup)
              {
                tree cleanup = TREE_VALUE (tail);

                /* See expand_d{h,c}c_cleanup for why we avoid this.  */
                if (TREE_CODE (cleanup) != POPDHC_EXPR
                    && TREE_CODE (cleanup) != POPDCC_EXPR
                    /* See expand_eh_region_start_tree for this case.  */
                    && ! TREE_ADDRESSABLE (tail))
                  {
                    cleanup = protect_with_terminate (cleanup);
                    expand_eh_region_end (cleanup);
                  }
              }

            if (reachable)
              {
                /* Cleanups may be run multiple times.  For example,
                   when exiting a binding contour, we expand the
                   cleanups associated with that contour.  When a goto
                   within that binding contour has a target outside that
                   contour, it will expand all cleanups from its scope to
                   the target.  Though the cleanups are expanded multiple
                   times, the control paths are non-overlapping so the
                   cleanups will not be executed twice.  */

                /* We may need to protect fixups with rethrow regions.  */
                int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));

                if (protect)
                  expand_fixup_region_start ();

                /* The cleanup might contain try-blocks, so we have to
                   preserve our current queue.  */
                push_ehqueue ();
                expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
                pop_ehqueue ();
                if (protect)
                  expand_fixup_region_end (TREE_VALUE (tail));
                free_temp_slots ();
              }
          }
      }
}

/* Mark the context in which we are emitting RTL as a conditional
   context, so that any cleanup actions we register with
   expand_decl_init will be properly conditionalized when those
   cleanup actions are later performed.  Must be called before any
   expression (tree) is expanded that is within a conditional context.  */

void
start_cleanup_deferral ()
{
  /* block_stack can be NULL if we are inside the parameter list.  It is
     OK to do nothing, because cleanups aren't possible here.  */
  if (block_stack)
    ++block_stack->data.block.conditional_code;
}

/* Mark the end of a conditional region of code.  Because cleanup
   deferrals may be nested, we may still be in a conditional region
   after we end the currently deferred cleanups; only after we end all
   deferred cleanups are we back in unconditional code.  */

void
end_cleanup_deferral ()
{
  /* block_stack can be NULL if we are inside the parameter list.  It is
     OK to do nothing, because cleanups aren't possible here.  */
  if (block_stack)
    --block_stack->data.block.conditional_code;
}

/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

void
move_cleanups_up ()
{
  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
               outer->data.block.cleanups);
  block->data.block.cleanups = 0;
}

tree
last_cleanup_this_contour ()
{
  if (block_stack == 0)
    return 0;

  return block_stack->data.block.cleanups;
}

/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

int
any_pending_cleanups (this_contour)
     int this_contour;
{
  struct nesting *block;

  if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
    return 0;

  if (this_contour && block_stack->data.block.cleanups != NULL)
    return 1;
  if (block_stack->data.block.cleanups == 0
      && block_stack->data.block.outer_cleanups == 0)
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
}

/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

void
expand_start_case (exit_flag, expr, type, printname)
     int exit_flag;
     tree expr;
     tree type;
     const char *printname;
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.line_number_status = force_line_numbers ();
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();

  start_cleanup_deferral ();
}

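/* Illustrative sketch, not part of the original source: the overall
   protocol a front end follows for `switch (expr) { case 1: ... }'
   using the entry points in this file.  The printname string and the
   converter the front end passes are assumptions.  */
#if 0
  expand_start_case (1, expr, TREE_TYPE (expr), "switch statement");
  /* ... then, for each label seen while parsing the body ... */
  pushcase (value, convert, label_decl, &duplicate);
  /* ... expand the statements, then close the construct ... */
  expand_end_case (expr);
#endif
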
/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

void
expand_start_case_dummy ()
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
  start_cleanup_deferral ();
}

/* End a dummy case statement.  */

void
expand_end_case_dummy ()
{
  end_cleanup_deferral ();
  POPSTACK (case_stack);
}

/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

tree
case_index_expr_type ()
{
  if (case_stack)
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
  return 0;
}

static void
check_seenlabel ()
{
  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.line_number_status >= 0)
    {
      rtx insn;

      restore_line_number_status
        (case_stack->data.case_stmt.line_number_status);
      case_stack->data.case_stmt.line_number_status = -1;

      for (insn = case_stack->data.case_stmt.start;
           insn;
           insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == CODE_LABEL)
            break;
          if (GET_CODE (insn) != NOTE
              && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
            {
              do
                insn = PREV_INSN (insn);
              while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));

              /* If insn is zero, then there must have been a syntax error.  */
              if (insn)
                warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
                                            NOTE_LINE_NUMBER (insn),
                                            "unreachable code at beginning of %s",
                                            case_stack->data.case_stmt.printname);
              break;
            }
        }
    }
}

/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

int
pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PARAMS ((tree, tree));
     register tree label;
     tree *duplicate;
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  check_seenlabel ();

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0
      && (TREE_CONSTANT_OVERFLOW (value)
          || ! int_fits_type_p (value, index_type)))
    return 3;

  return add_case_node (value, value, label, duplicate);
}

/* Like pushcase but this case applies to all values between VALUE1 and
   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
   starts at VALUE1 and ends at the highest value of the index type.
   If both are NULL, this case applies to all values.

   The return value is the same as that of pushcase but there is one
   additional error code: 4 means the specified range was empty.  */

int
pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PARAMS ((tree, tree));
     register tree label;
     tree *duplicate;
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  check_seenlabel ();

  /* Convert VALUEs to type in which the comparisons are nominally done
     and replace any unspecified value with the corresponding bound.  */
  if (value1 == 0)
    value1 = TYPE_MIN_VALUE (index_type);
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (index_type);

  /* Fail if the range is empty.  Do this before any conversion since
     we want to allow out-of-range empty ranges.  */
  if (value2 != 0 && tree_int_cst_lt (value2, value1))
    return 4;

  /* If the max was unbounded, use the max of the nominal_type we are
     converting to.  Do this after the < check above to suppress false
     positives.  */
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (nominal_type);

  value1 = (*converter) (nominal_type, value1);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (TREE_CONSTANT_OVERFLOW (value1)
      || ! int_fits_type_p (value1, index_type))
    return 3;

  if (TREE_CONSTANT_OVERFLOW (value2)
      || ! int_fits_type_p (value2, index_type))
    return 3;

  return add_case_node (value1, value2, label, duplicate);
}

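/* Illustrative sketch, not part of the original source: how a front
   end might enter the GNU C extension `case 1 ... 5:' and report the
   error codes documented above.  The exact diagnostic wording is an
   assumption; only the return-code meanings come from the comments
   above.  */
#if 0
  tree duplicate;
  int code = pushcase_range (low, high, convert, label, &duplicate);

  if (code == 1)
    error ("case label not within a switch statement");
  else if (code == 2)
    error ("duplicate (or overlapping) case value");
  else if (code == 3)
    error ("case value out of range");
  else if (code == 4)
    warning ("empty case range");
  else if (code == 5)
    error ("case label within scope of cleanup or variable array");
#endif
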
/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.  */

static int
add_case_node (low, high, label, duplicate)
     tree low, high;
     tree label;
     tree *duplicate;
{
  struct case_node *p, **q, *r;

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.  */
  if (!high)
    high = low;

  /* Handle default labels specially.  */
  if (!high && !low)
    {
      if (case_stack->data.case_stmt.default_label != 0)
        {
          *duplicate = case_stack->data.case_stmt.default_label;
          return 2;
        }
      case_stack->data.case_stmt.default_label = label;
      expand_label (label);
      return 0;
    }

  q = &case_stack->data.case_stmt.case_list;
  p = *q;

  while ((r = *q))
    {
      p = r;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
        q = &p->left;

      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
        q = &p->right;

      else
        {
          /* We have an overlap; this is an error.  */
          *duplicate = p->code_label;
          return 2;
        }
    }

  /* Add this label to the chain, and succeed.
     Copy LOW, HIGH so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  r = (struct case_node *) xmalloc (sizeof (struct case_node));
  r->low = copy_node (low);

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    r->high = copy_node (high);

  r->code_label = label;
  expand_label (label);

  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;

  while (p)
    {
      struct case_node *s;

      if (r == p->left)
        {
          int b;

          if (! (b = p->balance))
            /* Growth propagation from left side.  */
            p->balance = -1;
          else if (b < 0)
            {
              if (r->balance < 0)
                {
                  /* R-Rotation */
                  if ((p->left = s = r->right))
                    s->parent = p;

                  r->right = p;
                  p->balance = 0;
                  r->balance = 0;
                  s = p->parent;
                  p->parent = r;

                  if ((r->parent = s))
                    {
                      if (s->left == p)
                        s->left = r;
                      else
                        s->right = r;
                    }
                  else
                    case_stack->data.case_stmt.case_list = r;
                }
              else
                /* r->balance == +1 */
                {
                  /* LR-Rotation */

                  int b2;
                  struct case_node *t = r->right;

                  if ((p->left = s = t->right))
                    s->parent = p;

                  t->right = p;
                  if ((r->right = s = t->left))
                    s->parent = r;

                  t->left = r;
                  b = t->balance;
                  b2 = b < 0;
                  p->balance = b2;
                  b2 = -b2 - b;
                  r->balance = b2;
                  t->balance = 0;
                  s = p->parent;
                  p->parent = t;
                  r->parent = t;

                  if ((t->parent = s))
                    {
                      if (s->left == p)
                        s->left = t;
                      else
                        s->right = t;
                    }
                  else
                    case_stack->data.case_stmt.case_list = t;
                }
              break;
            }
          else
            {
              /* p->balance == +1; growth of left side balances the node.  */
              p->balance = 0;
              break;
            }
        }
      else
        /* r == p->right */
        {
          int b;

          if (! (b = p->balance))
            /* Growth propagation from right side.  */
            p->balance++;
          else if (b > 0)
            {
              if (r->balance > 0)
                {
                  /* L-Rotation */

                  if ((p->right = s = r->left))
                    s->parent = p;

                  r->left = p;
                  p->balance = 0;
                  r->balance = 0;
                  s = p->parent;
                  p->parent = r;
                  if ((r->parent = s))
                    {
                      if (s->left == p)
                        s->left = r;
                      else
                        s->right = r;
                    }
                  else
                    case_stack->data.case_stmt.case_list = r;
                }
              else
                /* r->balance == -1 */
                {
                  /* RL-Rotation */
                  int b2;
                  struct case_node *t = r->left;

                  if ((p->right = s = t->left))
                    s->parent = p;

                  t->left = p;

                  if ((r->left = s = t->right))
                    s->parent = r;

                  t->right = r;
                  b = t->balance;
                  b2 = b < 0;
                  r->balance = b2;
                  b2 = -b2 - b;
                  p->balance = b2;
                  t->balance = 0;
                  s = p->parent;
                  p->parent = t;
                  r->parent = t;

                  if ((t->parent = s))
                    {
                      if (s->left == p)
                        s->left = t;
                      else
                        s->right = t;
                    }
                  else
                    case_stack->data.case_stmt.case_list = t;
                }
              break;
            }
          else
            {
              /* p->balance == -1; growth of right side balances the node.  */
              p->balance = 0;
              break;
            }
        }

      r = p;
      p = p->parent;
    }

  return 0;
}

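/* Illustrative diagram, not part of the original source: the shape of
   the LR-rotation performed above when the new node lands under the
   right child T of the left-heavy child R.  T is promoted to the root
   of the subtree:

            P                    T
           / \                  / \
          R   D      ==>       R   P
         / \                  /|   |\
        A   T                A B1 B2 D
           / \
          B1  B2

   The RL-rotation in the right-side branch is the mirror image.  */
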
/* Returns the number of possible values of TYPE.
   Returns -1 if the number is unknown, variable, or if the number does not
   fit in a HOST_WIDE_INT.
   Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
   do not increase monotonically (there may be duplicates);
   to 1 if the values increase monotonically, but not always by 1;
   otherwise sets it to 0.  */

HOST_WIDE_INT
all_cases_count (type, sparseness)
     tree type;
     int *sparseness;
{
  tree t;
  HOST_WIDE_INT count, minval, lastval;

  *sparseness = 0;

  switch (TREE_CODE (type))
    {
    case BOOLEAN_TYPE:
      count = 2;
      break;

    case CHAR_TYPE:
      count = 1 << BITS_PER_UNIT;
      break;

    default:
    case INTEGER_TYPE:
      if (TYPE_MAX_VALUE (type) != 0
          && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
                                    TYPE_MIN_VALUE (type))))
          && 0 != (t = fold (build (PLUS_EXPR, type, t,
                                    convert (type, integer_zero_node))))
          && host_integerp (t, 1))
        count = tree_low_cst (t, 1);
      else
        return -1;
      break;

    case ENUMERAL_TYPE:
      /* Don't waste time with enumeral types with huge values.  */
      if (! host_integerp (TYPE_MIN_VALUE (type), 0)
          || TYPE_MAX_VALUE (type) == 0
          || ! host_integerp (TYPE_MAX_VALUE (type), 0))
        return -1;

      lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
      count = 0;

      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
        {
          HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);

          if (*sparseness == 2 || thisval < lastval)
            *sparseness = 2;
          else if (thisval != minval + count)
            *sparseness = 1;

          count++;
        }
    }

  return count;
}

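/* Worked example, not part of the original source: for
   `enum e { A = 0, B = 1, D = 5 };' the values increase monotonically
   but not always by 1 (D != 0 + 2), so the function returns 3 (three
   literals) and sets *sparseness to 1; mark_seen_cases below then
   searches TYPE_VALUES instead of assuming offset == value - minimum.  */
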
#define BITARRAY_TEST(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))

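/* Illustrative sketch, not part of the original source: the macros
   above treat an unsigned char array as a flat bitstring indexed from
   zero.  */
#if 0
  unsigned char seen[2];                /* room for 16 bits */

  memset (seen, 0, sizeof seen);
  BITARRAY_SET (seen, 3);               /* byte 0, bit 3 */
  BITARRAY_SET (seen, 9);               /* byte 1, bit 1 */
  if (BITARRAY_TEST (seen, 3) && ! BITARRAY_TEST (seen, 4))
    /* offset 3 is marked, offset 4 is not */ ;
#endif
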
/* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
   with the case values we have seen, assuming the case expression
   has the given TYPE.
   SPARSENESS is as determined by all_cases_count.

   The time needed is proportional to COUNT, unless
   SPARSENESS is 2, in which case quadratic time is needed.  */

void
mark_seen_cases (type, cases_seen, count, sparseness)
     tree type;
     unsigned char *cases_seen;
     HOST_WIDE_INT count;
     int sparseness;
{
  tree next_node_to_try = NULL_TREE;
  HOST_WIDE_INT next_node_offset = 0;

  register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
  tree val = make_node (INTEGER_CST);

  TREE_TYPE (val) = type;
  if (! root)
    /* Do nothing.  */
    ;
  else if (sparseness == 2)
    {
      tree t;
      unsigned HOST_WIDE_INT xlo;

      /* This less efficient loop is only needed to handle
         duplicate case values (multiple enum constants
         with the same value).  */
      TREE_TYPE (val) = TREE_TYPE (root->low);
      for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
           t = TREE_CHAIN (t), xlo++)
        {
          TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
          TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
          n = root;
          do
            {
              /* Keep going past elements distinctly greater than VAL.  */
              if (tree_int_cst_lt (val, n->low))
                n = n->left;

              /* or distinctly less than VAL.  */
              else if (tree_int_cst_lt (n->high, val))
                n = n->right;

              else
                {
                  /* We have found a matching range.  */
                  BITARRAY_SET (cases_seen, xlo);
                  break;
                }
            }
          while (n);
        }
    }
  else
    {
      if (root->left)
        case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);

      for (n = root; n; n = n->right)
        {
          TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
          TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
          while (! tree_int_cst_lt (n->high, val))
            {
              /* Calculate (into xlo) the "offset" of the integer (val).
                 The element with lowest value has offset 0, the next smallest
                 element has offset 1, etc.  */

              unsigned HOST_WIDE_INT xlo;
              HOST_WIDE_INT xhi;
              tree t;

              if (sparseness && TYPE_VALUES (type) != NULL_TREE)
                {
                  /* The TYPE_VALUES will be in increasing order, so
                     start searching where we last ended.  */
                  t = next_node_to_try;
                  xlo = next_node_offset;
                  xhi = 0;
                  for (;;)
                    {
                      if (t == NULL_TREE)
                        {
                          t = TYPE_VALUES (type);
                          xlo = 0;
                        }
                      if (tree_int_cst_equal (val, TREE_VALUE (t)))
                        {
                          next_node_to_try = TREE_CHAIN (t);
                          next_node_offset = xlo + 1;
                          break;
                        }
                      xlo++;
                      t = TREE_CHAIN (t);
                      if (t == next_node_to_try)
                        {
                          xlo = -1;
                          break;
                        }
                    }
                }
              else
                {
                  t = TYPE_MIN_VALUE (type);
                  if (t)
                    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
                                &xlo, &xhi);
                  else
                    xlo = xhi = 0;
                  add_double (xlo, xhi,
                              TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
                              &xlo, &xhi);
                }

              if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
                BITARRAY_SET (cases_seen, xlo);

              add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
                          1, 0,
                          &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
            }
        }
    }
}

/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */

void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register tree chain;
#if 0  /* variable used by 'if 0'ed code below.  */
  register struct case_node **l;
  int all_values = 1;
#endif

  /* True iff the selector type is a numbered set mode.  */
  int sparseness = 0;

  /* The number of possible selector values.  */
  HOST_WIDE_INT size;

  /* For each possible selector value, a one iff it has been matched
     by a case value alternative.  */
  unsigned char *cases_seen;

  /* The allocated size of cases_seen, in chars.  */
  HOST_WIDE_INT bytes_needed;

  if (! warn_switch)
    return;

  size = all_cases_count (type, &sparseness);
  bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;

  if (size > 0 && size < 600000
      /* We deliberately use calloc here, not xcalloc, so that we can suppress
         this optimization if we don't have enough memory rather than
         aborting, as xmalloc would do.  */
      && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
    {
      HOST_WIDE_INT i;
      tree v = TYPE_VALUES (type);

      /* The time complexity of this code is normally O(N), where
         N is the number of members in the enumerated type.
         However, if type is an ENUMERAL_TYPE whose values do not
         increase monotonically, O(N*log(N)) time may be needed.  */

      mark_seen_cases (type, cases_seen, size, sparseness);

      for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
        if (BITARRAY_TEST (cases_seen, i) == 0)
          warning ("enumeration value `%s' not handled in switch",
                   IDENTIFIER_POINTER (TREE_PURPOSE (v)));

      free (cases_seen);
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  if (case_stack->data.case_stmt.case_list
      && case_stack->data.case_stmt.case_list->left)
    case_stack->data.case_stmt.case_list
      = case_tree2list (case_stack->data.case_stmt.case_list, 0);
  if (warn_switch)
    for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
      {
        for (chain = TYPE_VALUES (type);
             chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
             chain = TREE_CHAIN (chain))
          ;

        if (!chain)
          {
            if (TYPE_NAME (type) == 0)
              warning ("case value `%ld' not in enumerated type",
                       (long) TREE_INT_CST_LOW (n->low));
            else
              warning ("case value `%ld' not in enumerated type `%s'",
                       (long) TREE_INT_CST_LOW (n->low),
                       IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                            == IDENTIFIER_NODE)
                                           ? TYPE_NAME (type)
                                           : DECL_NAME (TYPE_NAME (type))));
          }
        if (!tree_int_cst_equal (n->low, n->high))
          {
            for (chain = TYPE_VALUES (type);
                 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
                 chain = TREE_CHAIN (chain))
              ;

            if (!chain)
              {
                if (TYPE_NAME (type) == 0)
                  warning ("case value `%ld' not in enumerated type",
                           (long) TREE_INT_CST_LOW (n->high));
                else
                  warning ("case value `%ld' not in enumerated type `%s'",
                           (long) TREE_INT_CST_LOW (n->high),
                           IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
                                                == IDENTIFIER_NODE)
                                               ? TYPE_NAME (type)
                                               : DECL_NAME (TYPE_NAME (type))));
              }
          }
      }

#if 0
  /* ??? This optimization is disabled because it causes valid programs to
     fail.  ANSI C does not guarantee that an expression with enum type
     will have a value that is the same as one of the enumeration literals.  */

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
           (*l)->right != 0;
           l = &(*l)->right)
        ;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
      *l = 0;
    }
#endif /* 0 */
}

/* Free CN, and its children.  */

static void
free_case_nodes (cn)
     case_node_ptr cn;
{
  if (cn)
    {
      free_case_nodes (cn->left);
      free_case_nodes (cn->right);
      free (cn);
    }
}

/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
  rtx default_label = 0;
  register struct case_node *n;
  unsigned int count;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  int unsignedp;

  /* Don't crash due to previous errors.  */
  if (thiscase == NULL)
    return;

  table_label = gen_label_rtx ();
  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TREE_UNSIGNED (index_type);

  do_pending_stack_adjust ();

  /* This might get a spurious warning in the presence of a syntax error;
     it could be fixed by moving the call to check_seenlabel after the
     check for error_mark_node, and copying the code of check_seenlabel that
     deals with case_stack->data.case_stmt.line_number_status /
     restore_line_number_status in front of the call to end_cleanup_deferral.
     However, this might miss some useful warnings in the presence of
     non-syntax errors.  */
  check_seenlabel ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
         enumeration literals are covered by the cases.
         No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
          && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
          && TREE_CODE (index_expr) != INTEGER_CST)
        check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If we don't have a default-label, create one here,
         after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
        {
          thiscase->data.case_stmt.default_label
            = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
          expand_label (thiscase->data.case_stmt.default_label);
        }
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      if (thiscase->data.case_stmt.case_list
          && thiscase->data.case_stmt.case_list->left)
        thiscase->data.case_stmt.case_list
          = case_tree2list (thiscase->data.case_stmt.case_list, 0);

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
         Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
        {
          /* Check low and high label values are integers.  */
          if (TREE_CODE (n->low) != INTEGER_CST)
            abort ();
          if (TREE_CODE (n->high) != INTEGER_CST)
            abort ();

          n->low = convert (index_type, n->low);
          n->high = convert (index_type, n->high);

          /* Count the elements and track the largest and smallest
             of them (treating them as signed even if they are not).  */
          if (count++ == 0)
            {
              minval = n->low;
              maxval = n->high;
            }
          else
            {
              if (INT_CST_LT (n->low, minval))
                minval = n->low;
              if (INT_CST_LT (maxval, n->high))
                maxval = n->high;
            }
          /* A range counts double, since it requires two compares.  */
          if (! tree_int_cst_equal (n->low, n->high))
            count++;
        }

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
        range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      end_cleanup_deferral ();

      if (count == 0)
        {
          expand_expr (index_expr, const0_rtx, VOIDmode, 0);
          emit_queue ();
          emit_jump (default_label);
        }

      /* If range of values is much bigger than number of values,
         make a sequence of conditional branches instead of a dispatch.
         If the switch-index is a constant, do it this way
         because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
         bounds, this means extra overhead for dispatch tables
         which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */
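      /* Worked example, not part of the original source: with
         HAVE_casesi (threshold 4), four labels spanning 0..1000 take
         the comparison-tree path below, since the range 1000 exceeds
         10 * 4; fifty dense labels 0..49 instead fall through to the
         jump-table code, since 50 >= 4 and 49 <= 10 * 50.  */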
      else if (count < CASE_VALUES_THRESHOLD
               || compare_tree_int (range, 10 * count) > 0
               /* RANGE may be signed, and really large ranges will show up
                  as negative numbers.  */
               || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
               || flag_pic
#endif
               || TREE_CODE (index_expr) == INTEGER_CST
               /* These will reduce to a constant.  */
               || (TREE_CODE (index_expr) == CALL_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
                   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
                   && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
               || (TREE_CODE (index_expr) == COMPOUND_EXPR
                   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
        {
          index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

          /* If the index is a short or char for which we do not have
             an insn to handle comparisons directly, convert it to
             a full integer now, rather than letting each comparison
             generate the conversion.  */

          if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
              && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
                  == CODE_FOR_nothing))
            {
              enum machine_mode wider_mode;
              for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
                   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
                if (cmp_optab->handlers[(int) wider_mode].insn_code
                    != CODE_FOR_nothing)
                  {
                    index = convert_to_mode (wider_mode, index, unsignedp);
                    break;
                  }
            }

          emit_queue ();
          do_pending_stack_adjust ();

          index = protect_from_queue (index, 0);
          if (GET_CODE (index) == MEM)
            index = copy_to_reg (index);
          if (GET_CODE (index) == CONST_INT
              || TREE_CODE (index_expr) == INTEGER_CST)
            {
              /* Make a tree node with the proper constant value
                 if we don't already have one.  */
              if (TREE_CODE (index_expr) != INTEGER_CST)
                {
                  index_expr
                    = build_int_2 (INTVAL (index),
                                   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
                  index_expr = convert (index_type, index_expr);
                }

              /* For constant index expressions we need only
                 issue an unconditional branch to the appropriate
                 target code.  The job of removing any unreachable
                 code is left to the optimisation phase if the
                 "-O" option is specified.  */
              for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
                if (! tree_int_cst_lt (index_expr, n->low)
                    && ! tree_int_cst_lt (n->high, index_expr))
                  break;

              if (n)
                emit_jump (label_rtx (n->code_label));
              else
                emit_jump (default_label);
            }
          else
            {
              /* If the index expression is not constant we generate
                 a binary decision tree to select the appropriate
                 target code.  This is done as follows:

                 The list of cases is rearranged into a binary tree,
                 nearly optimal assuming equal probability for each case.

                 The tree is transformed into RTL, eliminating
                 redundant test conditions at the same time.

                 If program flow could reach the end of the
                 decision tree an unconditional jump to the
                 default code is emitted.  */

              use_cost_table
                = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
                   && estimate_case_costs (thiscase->data.case_stmt.case_list));
              balance_case_nodes (&thiscase->data.case_stmt.case_list,
                                  NULL_PTR);
              emit_case_nodes (index, thiscase->data.case_stmt.case_list,
                               default_label, index_type);
              emit_jump_if_reachable (default_label);
            }
        }
5527 else
5529 int win = 0;
5530 #ifdef HAVE_casesi
5531 if (HAVE_casesi)
5533 enum machine_mode index_mode = SImode;
5534 int index_bits = GET_MODE_BITSIZE (index_mode);
5535 rtx op1, op2;
5536 enum machine_mode op_mode;
5538 /* Convert the index to SImode. */
5539 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5540 > GET_MODE_BITSIZE (index_mode))
5542 enum machine_mode omode = TYPE_MODE (index_type);
5543 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5545 /* We must handle the endpoints in the original mode. */
5546 index_expr = build (MINUS_EXPR, index_type,
5547 index_expr, minval);
5548 minval = integer_zero_node;
5549 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5550 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5551 omode, 1, 0, default_label);
5552 /* Now we can safely truncate. */
5553 index = convert_to_mode (index_mode, index, 0);
5555 else
5557 if (TYPE_MODE (index_type) != index_mode)
5559 index_expr = convert (type_for_size (index_bits, 0),
5560 index_expr);
5561 index_type = TREE_TYPE (index_expr);
5564 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5566 emit_queue ();
5567 index = protect_from_queue (index, 0);
5568 do_pending_stack_adjust ();
5570 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5571 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
5572 (index, op_mode))
5573 index = copy_to_mode_reg (op_mode, index);
5575 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5577 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5578 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
5579 (op1, op_mode))
5580 op1 = copy_to_mode_reg (op_mode, op1);
5582 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5584 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
5585 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
5586 (op2, op_mode))
5587 op2 = copy_to_mode_reg (op_mode, op2);
5589 emit_jump_insn (gen_casesi (index, op1, op2,
5590 table_label, default_label));
5591 win = 1;
5593 #endif
5594 #ifdef HAVE_tablejump
5595 if (! win && HAVE_tablejump)
5597 index_type = thiscase->data.case_stmt.nominal_type;
5598 index_expr = fold (build (MINUS_EXPR, index_type,
5599 convert (index_type, index_expr),
5600 convert (index_type, minval)));
5601 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5602 emit_queue ();
5603 index = protect_from_queue (index, 0);
5604 do_pending_stack_adjust ();
5606 do_tablejump (index, TYPE_MODE (index_type),
5607 expand_expr (range, NULL_RTX, VOIDmode, 0),
5608 table_label, default_label);
5609 win = 1;
5611 #endif
5612 if (! win)
5613 abort ();
5615 /* Get table of labels to jump to, in order of case index. */
5617 ncases = TREE_INT_CST_LOW (range) + 1;
5618 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5619 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5621 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5623 register HOST_WIDE_INT i
5624 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5626 while (1)
5628 labelvec[i]
5629 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5630 if (i + TREE_INT_CST_LOW (orig_minval)
5631 == TREE_INT_CST_LOW (n->high))
5632 break;
5633 i++;
5637 /* Fill in the gaps with the default. */
5638 for (i = 0; i < ncases; i++)
5639 if (labelvec[i] == 0)
5640 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5642 /* Output the table. */
5643 emit_label (table_label);
5645 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5646 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5647 gen_rtx_LABEL_REF (Pmode, table_label),
5648 gen_rtvec_v (ncases, labelvec),
5649 const0_rtx, const0_rtx));
5650 else
5651 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5652 gen_rtvec_v (ncases, labelvec)));
5654 /* If the case insn drops through the table,
5655 after the table we must jump to the default-label.
5656 Otherwise record no drop-through after the table. */
5657 #ifdef CASE_DROPS_THROUGH
5658 emit_jump (default_label);
5659 #else
5660 emit_barrier ();
5661 #endif
5664 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5665 reorder_insns (before_case, get_last_insn (),
5666 thiscase->data.case_stmt.start);
5668 else
5669 end_cleanup_deferral ();
5671 if (thiscase->exit_label)
5672 emit_label (thiscase->exit_label);
5674 free_case_nodes (case_stack->data.case_stmt.case_list);
5675 POPSTACK (case_stack);
5677 free_temp_slots ();
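
/* Illustrative sketch, not part of stmt.c (the toy_ names are invented):
   the two shapes of switch the code above lowers.  A dense label set
   like the first function below is a candidate for the casesi/tablejump
   branch-table path handled above; a sparse one falls back to the
   balanced compare-and-branch tree built by balance_case_nodes and
   emit_case_nodes below.  A switch whose index folds to a constant is
   emitted as a single unconditional jump instead.  The exact density
   threshold depends on heuristics earlier in this function and on the
   target's HAVE_casesi/HAVE_tablejump patterns.  */

static int
toy_dense (c)
     int c;
{
  switch (c)			/* labels 0..3 are dense: branch table */
    {
    case 0: return 10;
    case 1: return 11;
    case 2: return 12;
    case 3: return 13;
    default: return -1;
    }
}

static int
toy_sparse (c)
     int c;
{
  switch (c)			/* labels far apart: decision tree */
    {
    case 1: return 10;
    case 1000: return 11;
    case 1000000: return 12;
    default: return -1;
    }
}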
5680 /* Convert the tree NODE into a list linked by the right field, with the left
5681 field zeroed. RIGHT is used for recursion; it is a list to be placed
5682 rightmost in the resulting list. */
5684 static struct case_node *
5685 case_tree2list (node, right)
5686 struct case_node *node, *right;
5688 struct case_node *left;
5690 if (node->right)
5691 right = case_tree2list (node->right, right);
5693 node->right = right;
5694 if ((left = node->left))
5696 node->left = 0;
5697 return case_tree2list (left, node);
5700 return node;
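
/* Minimal self-contained sketch of the flattening above, using an
   invented toy_node type in place of struct case_node (which also
   carries case values and labels).  RIGHT is threaded through as the
   tail of the result, so an in-order walk of the tree comes out as a
   sorted list linked by the right fields with every left field
   cleared.  */

struct toy_node { struct toy_node *left, *right; };

static struct toy_node *
toy_tree2list (node, right)
     struct toy_node *node, *right;
{
  struct toy_node *left;

  if (node->right)
    right = toy_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return toy_tree2list (left, node);
    }

  return node;
}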
5703 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5705 static void
5706 do_jump_if_equal (op1, op2, label, unsignedp)
5707 rtx op1, op2, label;
5708 int unsignedp;
5710 if (GET_CODE (op1) == CONST_INT
5711 && GET_CODE (op2) == CONST_INT)
5713 if (INTVAL (op1) == INTVAL (op2))
5714 emit_jump (label);
5716 else
5718 enum machine_mode mode = GET_MODE (op1);
5719 if (mode == VOIDmode)
5720 mode = GET_MODE (op2);
5721 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5722 0, label);
5726 /* Not all case values are encountered equally. This function
5727 uses a heuristic to weight case labels, in cases where that
5728 looks like a reasonable thing to do.
5730 Right now, all we try to guess is text, and we establish the
5731 following weights:
5733 chars above space: 16
5734 digits: 16
5735 default: 12
5736 space, punct: 8
5737 tab: 4
5738 newline: 2
5739 other "\" chars: 1
5740 remaining chars: 0
5742 If we find any cases in the switch that are not either -1 or in the range
5743 of valid ASCII characters, or are control characters other than those
5744 commonly used with "\", don't treat this switch as scanning text.
5746 Return 1 if these nodes are suitable for cost estimation, otherwise
5747 return 0. */
5749 static int
5750 estimate_case_costs (node)
5751 case_node_ptr node;
5753 tree min_ascii = build_int_2 (-1, -1);
5754 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5755 case_node_ptr n;
5756 int i;
5758 /* If we haven't already made the cost table, make it now. Note that the
5759 lower bound of the table is -1, not zero. */
5761 if (cost_table == NULL)
5763 cost_table = cost_table_ + 1;
5765 for (i = 0; i < 128; i++)
5767 if (ISALNUM (i))
5768 cost_table[i] = 16;
5769 else if (ISPUNCT (i))
5770 cost_table[i] = 8;
5771 else if (ISCNTRL (i))
5772 cost_table[i] = -1;
5775 cost_table[' '] = 8;
5776 cost_table['\t'] = 4;
5777 cost_table['\0'] = 4;
5778 cost_table['\n'] = 2;
5779 cost_table['\f'] = 1;
5780 cost_table['\v'] = 1;
5781 cost_table['\b'] = 1;
5784 /* See if all the case expressions look like text. It is text if the
5785 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5786 as signed arithmetic since we don't want to ever access cost_table with a
5787 value less than -1. Also check that none of the constants in a range
5788 are strange control characters. */
5790 for (n = node; n; n = n->right)
5792 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5793 return 0;
5795 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5796 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5797 if (cost_table[i] < 0)
5798 return 0;
5801 /* All interesting values are within the range of interesting
5802 ASCII characters. */
5803 return 1;
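
/* Self-contained sketch of the table built above, shifted one entry so
   that index -1 (the usual EOF label) is addressable, and using
   <ctype.h> in place of GCC's ISALNUM/ISPUNCT/ISCNTRL wrappers.  The
   toy_ names are invented for illustration.  */

#include <ctype.h>

static int toy_cost_storage[129];
static int *toy_cost = toy_cost_storage + 1;	/* toy_cost[-1] is valid */

static void
toy_init_costs ()
{
  int i;

  for (i = 0; i < 128; i++)
    {
      if (isalnum (i))
	toy_cost[i] = 16;	/* letters and digits */
      else if (ispunct (i))
	toy_cost[i] = 8;
      else if (iscntrl (i))
	toy_cost[i] = -1;	/* unusual control char: not "text" */
    }

  toy_cost[' '] = 8;
  toy_cost['\t'] = 4;
  toy_cost['\0'] = 4;
  toy_cost['\n'] = 2;
  toy_cost['\f'] = toy_cost['\v'] = toy_cost['\b'] = 1;
}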
5806 /* Scan an ordered list of case nodes
5807 combining those with consecutive values or ranges.
5809 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5811 static void
5812 group_case_nodes (head)
5813 case_node_ptr head;
5815 case_node_ptr node = head;
5817 while (node)
5819 rtx lb = next_real_insn (label_rtx (node->code_label));
5820 rtx lb2;
5821 case_node_ptr np = node;
5823 /* Try to group the successors of NODE with NODE. */
5824 while (((np = np->right) != 0)
5825 /* Do they jump to the same place? */
5826 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5827 || (lb != 0 && lb2 != 0
5828 && simplejump_p (lb)
5829 && simplejump_p (lb2)
5830 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5831 SET_SRC (PATTERN (lb2)))))
5832 /* Are their ranges consecutive? */
5833 && tree_int_cst_equal (np->low,
5834 fold (build (PLUS_EXPR,
5835 TREE_TYPE (node->high),
5836 node->high,
5837 integer_one_node)))
5838 /* An overflow is not consecutive. */
5839 && tree_int_cst_lt (node->high,
5840 fold (build (PLUS_EXPR,
5841 TREE_TYPE (node->high),
5842 node->high,
5843 integer_one_node))))
5845 node->high = np->high;
5847 /* NP is the first node after NODE which can't be grouped with it.
5848 Delete the nodes in between, and move on to that node. */
5849 node->right = np;
5850 node = np;
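
/* Sketch of the grouping above on plain integers (toy_range and its
   fields are invented): runs whose ranges touch and whose targets
   agree are merged, so 1: 2: 3: with a shared label becomes 1..3.
   The real code also treats two distinct labels as equal when both
   lead to the same simple jump.  */

#include <limits.h>

struct toy_range { int low, high, label; struct toy_range *next; };

static void
toy_group_ranges (head)
     struct toy_range *head;
{
  struct toy_range *node, *np;

  for (node = head; node; node = node->next)
    {
      np = node;
      /* Absorb successors while they jump to the same place and their
	 range begins exactly one past ours, guarding against overflow
	 as the real code does.  */
      while ((np = np->next) != 0
	     && np->label == node->label
	     && node->high != INT_MAX
	     && np->low == node->high + 1)
	node->high = np->high;

      node->next = np;		/* skip the absorbed entries */
    }
}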
5854 /* Take an ordered list of case nodes
5855 and transform them into a near optimal binary tree,
5856 on the assumption that any target code selection value is as
5857 likely as any other.
5859 The transformation is performed by splitting the ordered
5860 list into two equal sections plus a pivot. The parts are
5861 then attached to the pivot as left and right branches. Each
5862 branch is then transformed recursively. */
5864 static void
5865 balance_case_nodes (head, parent)
5866 case_node_ptr *head;
5867 case_node_ptr parent;
5869 register case_node_ptr np;
5871 np = *head;
5872 if (np)
5874 int cost = 0;
5875 int i = 0;
5876 int ranges = 0;
5877 register case_node_ptr *npp;
5878 case_node_ptr left;
5880 /* Count the number of entries on this branch. Also count the ranges. */
5882 while (np)
5884 if (!tree_int_cst_equal (np->low, np->high))
5886 ranges++;
5887 if (use_cost_table)
5888 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5891 if (use_cost_table)
5892 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5894 i++;
5895 np = np->right;
5898 if (i > 2)
5900 /* Split this list if it is long enough for that to help. */
5901 npp = head;
5902 left = *npp;
5903 if (use_cost_table)
5905 /* Find the place in the list that bisects the list's total cost;
5906 here I gets half the total cost. */
5907 int n_moved = 0;
5908 i = (cost + 1) / 2;
5909 while (1)
5911 /* Skip nodes while their cost does not reach that amount. */
5912 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5913 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5914 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5915 if (i <= 0)
5916 break;
5917 npp = &(*npp)->right;
5918 n_moved += 1;
5920 if (n_moved == 0)
5922 /* Leave this branch lopsided, but optimize left-hand
5923 side and fill in `parent' fields for right-hand side. */
5924 np = *head;
5925 np->parent = parent;
5926 balance_case_nodes (&np->left, np);
5927 for (; np->right; np = np->right)
5928 np->right->parent = np;
5929 return;
5932 /* If there are just three nodes, split at the middle one. */
5933 else if (i == 3)
5934 npp = &(*npp)->right;
5935 else
5937 /* Find the place in the list that bisects the list's total cost,
5938 where ranges count as 2.
5939 Here I gets half the total cost. */
5940 i = (i + ranges + 1) / 2;
5941 while (1)
5943 /* Skip nodes while their cost does not reach that amount. */
5944 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5945 i--;
5946 i--;
5947 if (i <= 0)
5948 break;
5949 npp = &(*npp)->right;
5952 *head = np = *npp;
5953 *npp = 0;
5954 np->parent = parent;
5955 np->left = left;
5957 /* Optimize each of the two split parts. */
5958 balance_case_nodes (&np->left, np);
5959 balance_case_nodes (&np->right, np);
5961 else
5963 /* Else leave this branch as one level,
5964 but fill in `parent' fields. */
5965 np = *head;
5966 np->parent = parent;
5967 for (; np->right; np = np->right)
5968 np->right->parent = np;
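
/* Sketch of the balancing step above with the cost table disabled,
   using an invented toy_case type.  It assumes the input is a sorted,
   right-linked list with left and parent fields cleared, as produced
   by case_tree2list: pick the midpoint as the pivot, cut the list
   there, and recurse on the two halves.  (The real code also counts
   ranges twice and can bisect by estimated cost instead of by node
   count.)  */

struct toy_case { struct toy_case *left, *right, *parent; };

static void
toy_balance (head, parent)
     struct toy_case **head;
     struct toy_case *parent;
{
  struct toy_case *np = *head, **npp, *left;
  int i = 0, k;

  if (np == 0)
    return;

  for (; np; np = np->right)
    i++;			/* length of this branch */

  if (i <= 2)
    {
      /* Too short to split: keep the chain, just set parent links.  */
      np = *head;
      np->parent = parent;
      for (; np->right; np = np->right)
	np->right->parent = np;
      return;
    }

  /* Walk halfway down; the prefix becomes the pivot's left branch.  */
  npp = head;
  left = *npp;
  for (k = i / 2; k > 0; k--)
    npp = &(*npp)->right;

  *head = np = *npp;		/* pivot replaces the whole branch */
  *npp = 0;			/* terminate the left-hand prefix */
  np->parent = parent;
  np->left = left;

  toy_balance (&np->left, np);
  toy_balance (&np->right, np);
}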
5973 /* Search the parent sections of the case node tree
5974 to see if a test for the lower bound of NODE would be redundant.
5975 INDEX_TYPE is the type of the index expression.
5977 The instructions to generate the case decision tree are
5978 output in the same order as nodes are processed so it is
5979 known that if a parent node checks the range of the current
5980 node minus one, then the current node is bounded at its lower
5981 span. Thus the test would be redundant. */
5983 static int
5984 node_has_low_bound (node, index_type)
5985 case_node_ptr node;
5986 tree index_type;
5988 tree low_minus_one;
5989 case_node_ptr pnode;
5991 /* If the lower bound of this node is the lowest value in the index type,
5992 we need not test it. */
5994 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5995 return 1;
5997 /* If this node has a left branch, the value at the left must be less
5998 than that at this node, so it cannot be bounded at the bottom and
5999 we need not bother testing any further. */
6001 if (node->left)
6002 return 0;
6004 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6005 node->low, integer_one_node));
6007 /* If the subtraction above overflowed, we can't verify anything.
6008 Otherwise, look for a parent that tests our value - 1. */
6010 if (! tree_int_cst_lt (low_minus_one, node->low))
6011 return 0;
6013 for (pnode = node->parent; pnode; pnode = pnode->parent)
6014 if (tree_int_cst_equal (low_minus_one, pnode->high))
6015 return 1;
6017 return 0;
6020 /* Search the parent sections of the case node tree
6021 to see if a test for the upper bound of NODE would be redundant.
6022 INDEX_TYPE is the type of the index expression.
6024 The instructions to generate the case decision tree are
6025 output in the same order as nodes are processed so it is
6026 known that if a parent node checks the range of the current
6027 node plus one, then the current node is bounded at its upper
6028 span. Thus the test would be redundant. */
6030 static int
6031 node_has_high_bound (node, index_type)
6032 case_node_ptr node;
6033 tree index_type;
6035 tree high_plus_one;
6036 case_node_ptr pnode;
6038 /* If there is no upper bound, obviously no test is needed. */
6040 if (TYPE_MAX_VALUE (index_type) == NULL)
6041 return 1;
6043 /* If the upper bound of this node is the highest value in the type
6044 of the index expression, we need not test against it. */
6046 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6047 return 1;
6049 /* If this node has a right branch, the value at the right must be greater
6050 than that at this node, so it cannot be bounded at the top and
6051 we need not bother testing any further. */
6053 if (node->right)
6054 return 0;
6056 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6057 node->high, integer_one_node));
6059 /* If the addition above overflowed, we can't verify anything.
6060 Otherwise, look for a parent that tests our value + 1. */
6062 if (! tree_int_cst_lt (node->high, high_plus_one))
6063 return 0;
6065 for (pnode = node->parent; pnode; pnode = pnode->parent)
6066 if (tree_int_cst_equal (high_plus_one, pnode->low))
6067 return 1;
6069 return 0;
6072 /* Search the parent sections of the
6073 case node tree to see if both tests for the upper and lower
6074 bounds of NODE would be redundant. */
6076 static int
6077 node_is_bounded (node, index_type)
6078 case_node_ptr node;
6079 tree index_type;
6081 return (node_has_low_bound (node, index_type)
6082 && node_has_high_bound (node, index_type));
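
/* Sketch of the effect of these redundancy checks (toy_ names are
   invented).  For a GNU C range label that covers the whole bottom of
   the index type, e.g.

     unsigned char c;
     switch (c) { case 0 ... 9: ...; default: ...; }

   node_has_low_bound succeeds because 0 is TYPE_MIN_VALUE of the
   index type, so the emitted dispatch needs only the upper test.
   Written back as C, it is equivalent to:  */

static int
toy_range_dispatch (c)
     unsigned char c;
{
  /* No `c >= 0' compare is emitted: the lower bound is implied by the
     type; only the high end of the range is tested.  */
  if (c <= 9)
    return 1;			/* case 0 ... 9 */
  return 0;			/* default */
}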
6085 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6087 static void
6088 emit_jump_if_reachable (label)
6089 rtx label;
6091 if (GET_CODE (get_last_insn ()) != BARRIER)
6092 emit_jump (label);
6095 /* Emit step-by-step code to select a case for the value of INDEX.
6096 The decision tree generated here follows the form of the
6097 case-node binary tree NODE, whose nodes represent test conditions.
6098 INDEX_TYPE is the type of the index of the switch.
6100 Care is taken to prune redundant tests from the decision tree
6101 by detecting any boundary conditions already checked by
6102 emitted rtx. (See node_has_high_bound, node_has_low_bound
6103 and node_is_bounded, above.)
6105 Where the test conditions can be shown to be redundant we emit
6106 an unconditional jump to the target code. As a further
6107 optimization, the subordinates of a tree node are examined to
6108 check for bounded nodes. In this case conditional and/or
6109 unconditional jumps as a result of the boundary check for the
6110 current node are arranged to target the subordinates' associated
6111 code for out-of-bound conditions on the current node.
6113 We can assume that when control reaches the code generated here,
6114 the index value has already been compared with the parents
6115 of this node, and determined to be on the same side of each parent
6116 as this node is. Thus, if this node tests for the value 51,
6117 and a parent tested for 52, we don't need to consider
6118 the possibility of a value greater than 51. If another parent
6119 tests for the value 50, then this node need not test anything. */
6121 static void
6122 emit_case_nodes (index, node, default_label, index_type)
6123 rtx index;
6124 case_node_ptr node;
6125 rtx default_label;
6126 tree index_type;
6128 /* If INDEX has an unsigned type, we must make unsigned branches. */
6129 int unsignedp = TREE_UNSIGNED (index_type);
6130 enum machine_mode mode = GET_MODE (index);
6132 /* See if our parents have already tested everything for us.
6133 If they have, emit an unconditional jump for this node. */
6134 if (node_is_bounded (node, index_type))
6135 emit_jump (label_rtx (node->code_label));
6137 else if (tree_int_cst_equal (node->low, node->high))
6139 /* Node is single valued. First see if the index expression matches
6140 this node and then check our children, if any. */
6142 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6143 label_rtx (node->code_label), unsignedp);
6145 if (node->right != 0 && node->left != 0)
6147 /* This node has children on both sides.
6148 Dispatch to one side or the other
6149 by comparing the index value with this node's value.
6150 If one subtree is bounded, check that one first,
6151 so we can avoid real branches in the tree. */
6153 if (node_is_bounded (node->right, index_type))
6155 emit_cmp_and_jump_insns (index,
6156 expand_expr (node->high, NULL_RTX,
6157 VOIDmode, 0),
6158 GT, NULL_RTX, mode, unsignedp, 0,
6159 label_rtx (node->right->code_label));
6160 emit_case_nodes (index, node->left, default_label, index_type);
6163 else if (node_is_bounded (node->left, index_type))
6165 emit_cmp_and_jump_insns (index,
6166 expand_expr (node->high, NULL_RTX,
6167 VOIDmode, 0),
6168 LT, NULL_RTX, mode, unsignedp, 0,
6169 label_rtx (node->left->code_label));
6170 emit_case_nodes (index, node->right, default_label, index_type);
6173 else
6175 /* Neither node is bounded. First distinguish the two sides;
6176 then emit the code for one side at a time. */
6178 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6180 /* See if the value is on the right. */
6181 emit_cmp_and_jump_insns (index,
6182 expand_expr (node->high, NULL_RTX,
6183 VOIDmode, 0),
6184 GT, NULL_RTX, mode, unsignedp, 0,
6185 label_rtx (test_label));
6187 /* Value must be on the left.
6188 Handle the left-hand subtree. */
6189 emit_case_nodes (index, node->left, default_label, index_type);
6190 /* If left-hand subtree does nothing,
6191 go to default. */
6192 emit_jump_if_reachable (default_label);
6194 /* Code branches here for the right-hand subtree. */
6195 expand_label (test_label);
6196 emit_case_nodes (index, node->right, default_label, index_type);
6200 else if (node->right != 0 && node->left == 0)
6202 /* Here we have a right child but no left, so we issue a conditional
6203 branch to default and process the right child.
6205 Omit the conditional branch to default if all it would avoid is one
6206 right child; it costs too much space to save so little time. */
6208 if (node->right->right || node->right->left
6209 || !tree_int_cst_equal (node->right->low, node->right->high))
6211 if (!node_has_low_bound (node, index_type))
6213 emit_cmp_and_jump_insns (index,
6214 expand_expr (node->high, NULL_RTX,
6215 VOIDmode, 0),
6216 LT, NULL_RTX, mode, unsignedp, 0,
6217 default_label);
6220 emit_case_nodes (index, node->right, default_label, index_type);
6222 else
6223 /* We cannot process node->right normally
6224 since we haven't ruled out the numbers less than
6225 this node's value. So handle node->right explicitly. */
6226 do_jump_if_equal (index,
6227 expand_expr (node->right->low, NULL_RTX,
6228 VOIDmode, 0),
6229 label_rtx (node->right->code_label), unsignedp);
6232 else if (node->right == 0 && node->left != 0)
6234 /* Just one subtree, on the left. */
6236 #if 0 /* The following code and comment were formerly part
6237 of the condition here, but they didn't work
6238 and I don't understand what the idea was. -- rms. */
6239 /* If our "most probable entry" is less probable
6240 than the default label, emit a jump to
6241 the default label using condition codes
6242 already lying around. With no right branch,
6243 a branch-greater-than will get us to the default
6244 label correctly. */
6245 if (use_cost_table
6246 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6248 #endif /* 0 */
6249 if (node->left->left || node->left->right
6250 || !tree_int_cst_equal (node->left->low, node->left->high))
6252 if (!node_has_high_bound (node, index_type))
6254 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6255 NULL_RTX,
6256 VOIDmode, 0),
6257 GT, NULL_RTX, mode, unsignedp, 0,
6258 default_label);
6261 emit_case_nodes (index, node->left, default_label, index_type);
6263 else
6264 /* We cannot process node->left normally
6265 since we haven't ruled out the numbers greater than
6266 this node's value. So handle node->left explicitly. */
6267 do_jump_if_equal (index,
6268 expand_expr (node->left->low, NULL_RTX,
6269 VOIDmode, 0),
6270 label_rtx (node->left->code_label), unsignedp);
6273 else
6275 /* Node is a range. These cases are very similar to those for a single
6276 value, except that we do not start by testing whether this node
6277 is the one to branch to. */
6279 if (node->right != 0 && node->left != 0)
6281 /* Node has subtrees on both sides.
6282 If the right-hand subtree is bounded,
6283 test for it first, since we can go straight there.
6284 Otherwise, we need to make a branch in the control structure,
6285 then handle the two subtrees. */
6286 tree test_label = 0;
6288 if (node_is_bounded (node->right, index_type))
6289 /* Right hand node is fully bounded so we can eliminate any
6290 testing and branch directly to the target code. */
6291 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6292 VOIDmode, 0),
6293 GT, NULL_RTX, mode, unsignedp, 0,
6294 label_rtx (node->right->code_label));
6295 else
6297 /* Right hand node requires testing.
6298 Branch to a label where we will handle it later. */
6300 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6301 emit_cmp_and_jump_insns (index,
6302 expand_expr (node->high, NULL_RTX,
6303 VOIDmode, 0),
6304 GT, NULL_RTX, mode, unsignedp, 0,
6305 label_rtx (test_label));
6308 /* Value belongs to this node or to the left-hand subtree. */
6310 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6311 VOIDmode, 0),
6312 GE, NULL_RTX, mode, unsignedp, 0,
6313 label_rtx (node->code_label));
6315 /* Handle the left-hand subtree. */
6316 emit_case_nodes (index, node->left, default_label, index_type);
6318 /* If right node had to be handled later, do that now. */
6320 if (test_label)
6322 /* If the left-hand subtree fell through,
6323 don't let it fall into the right-hand subtree. */
6324 emit_jump_if_reachable (default_label);
6326 expand_label (test_label);
6327 emit_case_nodes (index, node->right, default_label, index_type);
6331 else if (node->right != 0 && node->left == 0)
6333 /* Deal with values to the left of this node,
6334 if they are possible. */
6335 if (!node_has_low_bound (node, index_type))
6337 emit_cmp_and_jump_insns (index,
6338 expand_expr (node->low, NULL_RTX,
6339 VOIDmode, 0),
6340 LT, NULL_RTX, mode, unsignedp, 0,
6341 default_label);
6344 /* Value belongs to this node or to the right-hand subtree. */
6346 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6347 VOIDmode, 0),
6348 LE, NULL_RTX, mode, unsignedp, 0,
6349 label_rtx (node->code_label));
6351 emit_case_nodes (index, node->right, default_label, index_type);
6354 else if (node->right == 0 && node->left != 0)
6356 /* Deal with values to the right of this node,
6357 if they are possible. */
6358 if (!node_has_high_bound (node, index_type))
6360 emit_cmp_and_jump_insns (index,
6361 expand_expr (node->high, NULL_RTX,
6362 VOIDmode, 0),
6363 GT, NULL_RTX, mode, unsignedp, 0,
6364 default_label);
6367 /* Value belongs to this node or to the left-hand subtree. */
6369 emit_cmp_and_jump_insns (index,
6370 expand_expr (node->low, NULL_RTX,
6371 VOIDmode, 0),
6372 GE, NULL_RTX, mode, unsignedp, 0,
6373 label_rtx (node->code_label));
6375 emit_case_nodes (index, node->left, default_label, index_type);
6378 else
6380 /* Node has no children so we check low and high bounds to remove
6381 redundant tests. Only one of the bounds can exist,
6382 since otherwise this node is bounded--a case tested already. */
6384 if (!node_has_high_bound (node, index_type))
6386 emit_cmp_and_jump_insns (index,
6387 expand_expr (node->high, NULL_RTX,
6388 VOIDmode, 0),
6389 GT, NULL_RTX, mode, unsignedp, 0,
6390 default_label);
6393 if (!node_has_low_bound (node, index_type))
6395 emit_cmp_and_jump_insns (index,
6396 expand_expr (node->low, NULL_RTX,
6397 VOIDmode, 0),
6398 LT, NULL_RTX, mode, unsignedp, 0,
6399 default_label);
6402 emit_jump (label_rtx (node->code_label));
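
/* Sketch of the overall shape emit_case_nodes produces for a sparse
   three-label switch (case 10, case 500, case 90000), written back as
   C with an invented toy_ function.  The balanced tree roots at the
   middle label, so each level of the tree costs one comparison and
   one conditional branch; the jumps to default on fallthrough come
   from emit_jump_if_reachable above.  */

static int
toy_emitted_tree (x)
     int x;
{
  if (x == 500)
    return 1;			/* middle label: root of the tree */
  if (x > 500)
    {
      if (x == 90000)
	return 2;		/* right subtree */
      return -1;		/* default */
    }
  if (x == 10)
    return 0;			/* left subtree */
  return -1;			/* default */
}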