/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
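
/* For example, a front end expanding the C statement `if (x > 0) f ();'
   would make a call sequence along these lines (an illustrative sketch,
   not code from any actual front end; the tree arguments are assumed):

       expand_start_cond (cond_expr, 0);   -- emit test of `x > 0'
       expand_expr_stmt (call_expr);       -- emit body `f ();'
       expand_end_cond ();                 -- emit the join label

   An if-then-else would call `expand_start_else' between the clauses.  */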
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
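
/* As an illustration (hypothetical values, for exposition only), the GNU C
   switch

       switch (i)
	 {
	 case 2:        ...    -- low = 2, high = 2
	 case 4 ... 7:  ...    -- low = 4, high = 7
	 default:       ...    -- recorded separately, not as a case_node
	 }

   yields two case_node entries; after conversion to a list, the node for
   `case 2' precedes the node for the range `4 ... 7'.  */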
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static short *cost_table;
static int use_cost_table;

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
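
/* To illustrate the invariant (an expository sketch, not real front-end
   input): while expanding

       while (...)    -- pushes a loop nesting
	 {            -- pushes a block nesting
	   if (...)   -- pushes a cond nesting

   `nesting_stack' chains cond -> block -> loop through the `all' fields,
   and `cond_stack', `block_stack' and `loop_stack' each point at their
   own entry.  Ending the `if' does POPSTACK (cond_stack), which unwinds
   `nesting_stack' down to and including the cond entry while keeping
   every per-type stack consistent.  */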
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
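
/* A typical case needing a fixup (illustrative GNU C, assuming a target
   where the variable-sized array forces the block to save and restore
   the stack pointer):

       goto out;             -- stack level of `out' unknown yet
       { char buf[n]; ... }
       out: ;

   When expand_goto sees `goto out', the label is undefined, so a
   goto_fixup is queued; when the binding contour of `buf' is exited,
   fixup_gotos patches the stack restore in ahead of the jump.  */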
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences		PARAMS ((int, const char *));
static void expand_goto_internal	PARAMS ((tree, rtx, rtx));
static int expand_fixup			PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label	PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver	PARAMS ((void));
static void expand_nl_goto_receivers	PARAMS ((struct nesting *));
static void fixup_gotos			PARAMS ((struct nesting *, rtx, tree,
						 rtx, int));
static void expand_null_return_1	PARAMS ((rtx, int));
static void expand_value_return		PARAMS ((rtx));
static int tail_recursion_args		PARAMS ((tree, tree));
static void expand_cleanups		PARAMS ((tree, tree, int, int));
static void check_seenlabel		PARAMS ((void));
static void do_jump_if_equal		PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs		PARAMS ((case_node_ptr));
static void group_case_nodes		PARAMS ((case_node_ptr));
static void balance_case_nodes		PARAMS ((case_node_ptr *,
						 case_node_ptr));
static int node_has_low_bound		PARAMS ((case_node_ptr, tree));
static int node_has_high_bound		PARAMS ((case_node_ptr, tree));
static int node_is_bounded		PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable	PARAMS ((rtx));
static void emit_case_nodes		PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting		PARAMS ((struct nesting *));
static void mark_loop_nesting		PARAMS ((struct nesting *));
static void mark_block_nesting		PARAMS ((struct nesting *));
static void mark_case_nesting		PARAMS ((struct nesting *));
static void mark_case_node		PARAMS ((struct case_node *));
static void mark_goto_fixup		PARAMS ((struct goto_fixup *));
static void free_case_nodes		PARAMS ((case_node_ptr));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}

/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	{
	  ggc_mark (l);
	  ggc_mark_tree (l->label);
	}

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      mark_case_node (n->data.case_stmt.case_list);
      n = n->next;
    }
}

/* Mark C for GC.  */

static void
mark_case_node (c)
     struct case_node *c;
{
  if (c != 0)
    {
      ggc_mark_tree (c->low);
      ggc_mark_tree (c->high);
      ggc_mark_tree (c->code_label);

      mark_case_node (c->right);
      mark_case_node (c->left);
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark (g);
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}

/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}

/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}
/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
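
/* The GNU C computed-goto extension is the usual source of such jumps
   (an expository example; the front end hands us only the pointer
   expression as a tree):

       void *labels[] = { &&a, &&b };
       goto *labels[i];    -- expands to an indirect jump on labels[i]
     a: ...
     b: ...
*/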
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn ();
	   GET_CODE (insn) != JUMP_INSN;
	   insn = PREV_INSN (insn))
	continue;
      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					  REG_NOTES (insn));
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
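
/* The nonlocal case arises from GNU C nested functions, e.g. (an
   expository sketch in the style of the GCC manual's example):

       void f ()
       {
	 __label__ done;
	 void g () { goto done; }   -- nonlocal goto, handled above
	 bar (g);
       done: ;
       }

   The goto in `g' must restore f's frame and stack pointers before
   jumping, which is exactly what the nonlocal path emits.  */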
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  insert_block (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}

      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such insns can be added, by things like
	     put_var_into_stack, and have INSN_UIDs that are out of the
	     range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
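
/* For instance, n_occurrences (',', "r,m") is 1, so the constraint string
   "r,m" describes two alternatives; this is how the number of alternatives
   is counted and cross-checked in expand_asm_operands below.  */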
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
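
/* This handles only basic asm, e.g. (an expository example)

       asm ("nop");

   which becomes a bare ASM_INPUT rtx holding the template string.
   Extended asm with operands goes through expand_asm_operands below.  */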
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
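
/* For example (expository GNU C, not front-end code), the extended asm

       asm volatile ("addl %1,%0" : "=r" (x) : "g" (y) : "cc");

   arrives here with STRING the template, OUTPUTS a one-element list whose
   TREE_PURPOSE is "=r" and TREE_VALUE is `x', INPUTS likewise for "g"/`y',
   CLOBBERS the list ("cc"), and VOL set.  */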
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     const char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      c_len = strlen (constraint);

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */
      switch (c_len)
	{
	default:
	  if ((p = strchr (constraint, '=')) != NULL)
	    break;
	  if ((p = strchr (constraint, '+')) != NULL)
	    break;
	case 0:
	  error ("output operand constraint lacks `='");
	  return;
	}
      j = p - constraint;
      is_inout = *p == '+';

      if (j || is_inout)
	{
	  /* Have to throw away this constraint string and get a new one.  */
	  char *buf = alloca (c_len + 1);
	  buf[0] = '=';
	  if (j)
	    memcpy (buf + 1, constraint, j);
	  memcpy (buf + 1 + j, p + 1, c_len - j);  /* not -j-1 - copy null */
	  constraint = ggc_alloc_string (buf, c_len);

	  if (j)
	    warning (
		"output constraint `%c' for operand %d is not at the beginning",
		*p, i);
	}

      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':  case '#':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p':  case 'r':
	    allows_reg = 1;
	    break;

	  default:
	    if (! ISALPHA (constraint[j]))
	      {
		error ("invalid punctuation `%c' in constraint",
		       constraint[j]);
		return;
	      }
	    if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
	      allows_reg = 1;
#ifdef EXTRA_CONSTRAINT
	    else
	      {
		/* Otherwise we can't assume anything about the nature of
		   the constraint except that it isn't purely registers.
		   Treat it like "g" and hope for the best.  */
		allows_reg = 1;
		allows_mem = 1;
	      }
#endif
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	      || GET_CODE (output_rtx[i]) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 1);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }
  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       TREE_STRING_POINTER (string),
			       empty_string, 0, argvec, constraints,
			       filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      const char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      /* ??? Can this happen, and does the error message make any sense?  */
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      c_len = strlen (constraint);
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor `&'.  */

      for (j = 0; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':  case '=':  case '&':
	    if (constraint == orig_constraint)
	      {
		error ("input operand constraint contains `%c'",
		       constraint[j]);
		return;
	      }
	    break;

	  case '%':
	    if (constraint == orig_constraint
		&& i + 1 == ninputs - ninout)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	  case '?':  case '!':  case '*':  case '#':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (constraint[j] >= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* Try and find the real constraint for this dup.  */
	    if ((j == 0 && c_len == 1)
		|| (j == 1 && c_len == 2 && constraint[0] == '%'))
	      {
		tree o = outputs;

		for (j = constraint[j] - '0'; j > 0; --j)
		  o = TREE_CHAIN (o);

		constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
		c_len = strlen (constraint);
		j = 0;
		break;
	      }

	    /* Fall through.  */

	  case 'p':  case 'r':
	    allows_reg = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  default:
	    if (! ISALPHA (constraint[j]))
	      {
		error ("invalid punctuation `%c' in constraint",
		       constraint[j]);
		return;
	      }
	    if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
	      allows_reg = 1;
#ifdef EXTRA_CONSTRAINT
	    else
	      {
		/* Otherwise we can't assume anything about the nature of
		   the constraint except that it isn't purely registers.
		   Treat it like "g" and hope for the best.  */
		allows_reg = 1;
		allows_mem = 1;
	      }
#endif
	    break;
	  }

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      /* Never pass a CONCAT to an ASM.  */
      generating_concat_p = 0;
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints", i);
	  else if (CONSTANT_P (op))
	    op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				  op);
	  else if (GET_CODE (op) == REG
		   || GET_CODE (op) == SUBREG
		   || GET_CODE (op) == CONCAT)
	    {
	      tree type = TREE_TYPE (TREE_VALUE (tail));
	      tree qual_type = build_qualified_type (type,
						     (TYPE_QUALS (type)
						      | TYPE_QUAL_CONST));
	      rtx memloc = assign_temp (qual_type, 1, 1, 1);

	      emit_move_insn (memloc, op);
	      op = memloc;
	    }

	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
	    /* We won't recognize volatile memory as available as a
	       memory_operand at this point.  Ignore it.  */
	    ;
	  else if (queued_subexp_p (op))
	    ;
	  else
	    /* ??? Leave this only until we have experience with what
	       happens in combine and elsewhere when constraints are
	       not satisfied.  */
	    warning ("asm operand %d probably doesn't match constraints", i);
	}
      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     orig_constraint);
      i++;
    }
1790 /* Protect all the operands from the queue now that they have all been
1791 evaluated. */
1793 generating_concat_p = 0;
1795 for (i = 0; i < ninputs - ninout; i++)
1796 ASM_OPERANDS_INPUT (body, i)
1797 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1799 for (i = 0; i < noutputs; i++)
1800 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1802 /* For in-out operands, copy output rtx to input rtx. */
1803 for (i = 0; i < ninout; i++)
1805 int j = inout_opnum[i];
1807 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1808 = output_rtx[j];
1809 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1810 = gen_rtx_ASM_INPUT (inout_mode[i], digit_string (j));
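/* For illustration (a hedged sketch, not from the original source):
   a read-write operand such as

	asm ("incl %0" : "+r" (x));

   is treated as an output plus a hidden matching input whose
   constraint is the digit string for the output's operand number,
   which is exactly what the gen_rtx_ASM_INPUT call above builds.  */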
1813 generating_concat_p = old_generating_concat_p;
1815 /* Now, for each output, construct an rtx
1816 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1817 ARGVEC CONSTRAINTS))
1818 If there is more than one, put them inside a PARALLEL. */
1820 if (noutputs == 1 && nclobbers == 0)
1822 ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
1823 = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1824 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1827 else if (noutputs == 0 && nclobbers == 0)
1829 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1830 insn = emit_insn (body);
1833 else
1835 rtx obody = body;
1836 int num = noutputs;
1838 if (num == 0)
1839 num = 1;
1841 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1843 /* For each output operand, store a SET. */
1844 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1846 XVECEXP (body, 0, i)
1847 = gen_rtx_SET (VOIDmode,
1848 output_rtx[i],
1849 gen_rtx_ASM_OPERANDS
1850 (GET_MODE (output_rtx[i]),
1851 TREE_STRING_POINTER (string),
1852 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1853 i, argvec, constraints,
1854 filename, line));
1856 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1859 /* If there are no outputs (but there are some clobbers)
1860 store the bare ASM_OPERANDS into the PARALLEL. */
1862 if (i == 0)
1863 XVECEXP (body, 0, i++) = obody;
1865 /* Store (clobber REG) for each clobbered register specified. */
1867 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1869 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1870 int j = decode_reg_name (regname);
1872 if (j < 0)
1874 if (j == -3) /* `cc', which is not a register */
1875 continue;
1877 if (j == -4) /* `memory', don't cache memory across asm */
1879 XVECEXP (body, 0, i++)
1880 = gen_rtx_CLOBBER (VOIDmode,
1881 gen_rtx_MEM
1882 (BLKmode,
1883 gen_rtx_SCRATCH (VOIDmode)));
1884 continue;
1887 /* Ignore unknown register, error already signaled. */
1888 continue;
1891 /* Use QImode since that's guaranteed to clobber just one reg. */
1892 XVECEXP (body, 0, i++)
1893 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1896 insn = emit_insn (body);
1899 /* For any outputs that needed reloading into registers, spill them
1900 back to where they belong. */
1901 for (i = 0; i < noutputs; ++i)
1902 if (real_output_rtx[i])
1903 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1905 free_temp_slots ();
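/* A sketch of the RTL shape this can produce (illustrative; operand
   details depend on the target and the asm): one output plus one
   register clobber yields roughly

	(parallel [(set (reg:SI <out>)
			(asm_operands ("<template>") ("=r") 0
				      [<inputs>] [<constraints>]))
		   (clobber (reg:QI <n>))])

   built by the PARALLEL branch above, with MEM_VOLATILE_P set on the
   ASM_OPERANDS when VOL is set.  */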
1908 /* Generate RTL to evaluate the expression EXP
1909 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1911 void
1912 expand_expr_stmt (exp)
1913 tree exp;
1915 /* If -W, warn about statements with no side effects,
1916 except for an explicit cast to void (e.g. for assert()), and
1917 except inside a ({...}) where they may be useful. */
1918 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1920 if (! TREE_SIDE_EFFECTS (exp))
1922 if ((extra_warnings || warn_unused_value)
1923 && !(TREE_CODE (exp) == CONVERT_EXPR
1924 && VOID_TYPE_P (TREE_TYPE (exp))))
1925 warning_with_file_and_line (emit_filename, emit_lineno,
1926 "statement with no effect");
1928 else if (warn_unused_value)
1929 warn_if_unused_value (exp);
1932 /* If EXP is of function type and we are expanding statements for
1933 value, convert it to pointer-to-function. */
1934 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1935 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1937 /* The call to `expand_expr' could cause last_expr_type and
1938 last_expr_value to get reset. Therefore, we set last_expr_value
1939 and last_expr_type *after* calling expand_expr. */
1940 last_expr_value = expand_expr (exp,
1941 (expr_stmts_for_value
1942 ? NULL_RTX : const0_rtx),
1943 VOIDmode, 0);
1944 last_expr_type = TREE_TYPE (exp);
1946 /* If all we do is reference a volatile value in memory,
1947 copy it to a register to be sure it is actually touched. */
1948 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1949 && TREE_THIS_VOLATILE (exp))
1951 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1952 ;
1953 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1954 copy_to_reg (last_expr_value);
1955 else
1957 rtx lab = gen_label_rtx ();
1959 /* Compare the value with itself to reference it. */
1960 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1961 expand_expr (TYPE_SIZE (last_expr_type),
1962 NULL_RTX, VOIDmode, 0),
1963 BLKmode, 0,
1964 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1965 lab);
1966 emit_label (lab);
1970 /* If this expression is part of a ({...}) and is in memory, we may have
1971 to preserve temporaries. */
1972 preserve_temp_slots (last_expr_value);
1974 /* Free any temporaries used to evaluate this expression. Any temporary
1975 used as a result of this expression will already have been preserved
1976 above. */
1977 free_temp_slots ();
1979 emit_queue ();
1982 /* Warn if EXP contains any computations whose results are not used.
1983 Return 1 if a warning is printed; 0 otherwise. */
1985 int
1986 warn_if_unused_value (exp)
1987 tree exp;
1989 if (TREE_USED (exp))
1990 return 0;
1992 /* Don't warn about void constructs. This includes casting to void,
1993 void function calls, and statement expressions with a final cast
1994 to void. */
1995 if (VOID_TYPE_P (TREE_TYPE (exp)))
1996 return 0;
1998 /* If this is an expression with side effects, don't warn. */
1999 if (TREE_SIDE_EFFECTS (exp))
2000 return 0;
2002 switch (TREE_CODE (exp))
2004 case PREINCREMENT_EXPR:
2005 case POSTINCREMENT_EXPR:
2006 case PREDECREMENT_EXPR:
2007 case POSTDECREMENT_EXPR:
2008 case MODIFY_EXPR:
2009 case INIT_EXPR:
2010 case TARGET_EXPR:
2011 case CALL_EXPR:
2012 case METHOD_CALL_EXPR:
2013 case RTL_EXPR:
2014 case TRY_CATCH_EXPR:
2015 case WITH_CLEANUP_EXPR:
2016 case EXIT_EXPR:
2017 return 0;
2019 case BIND_EXPR:
2020 /* For a binding, warn if no side effect within it. */
2021 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2023 case SAVE_EXPR:
2024 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2026 case TRUTH_ORIF_EXPR:
2027 case TRUTH_ANDIF_EXPR:
2028 /* In && or ||, warn if 2nd operand has no side effect. */
2029 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2031 case COMPOUND_EXPR:
2032 if (TREE_NO_UNUSED_WARNING (exp))
2033 return 0;
2034 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2035 return 1;
2036 /* Let people do `(foo (), 0)' without a warning. */
2037 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2038 return 0;
2039 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2041 case NOP_EXPR:
2042 case CONVERT_EXPR:
2043 case NON_LVALUE_EXPR:
2044 /* Don't warn about conversions not explicit in the user's program. */
2045 if (TREE_NO_UNUSED_WARNING (exp))
2046 return 0;
2047 /* Assignment to a cast usually results in a cast of a modify.
2048 Don't complain about that. There can be an arbitrary number of
2049 casts before the modify, so we must loop until we find the first
2050 non-cast expression and then test to see if that is a modify. */
2052 tree tem = TREE_OPERAND (exp, 0);
2054 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2055 tem = TREE_OPERAND (tem, 0);
2057 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2058 || TREE_CODE (tem) == CALL_EXPR)
2059 return 0;
2061 goto warn;
2063 case INDIRECT_REF:
2064 /* Don't warn about automatic dereferencing of references, since
2065 the user cannot control it. */
2066 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2067 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2068 /* Fall through. */
2070 default:
2071 /* Referencing a volatile value is a side effect, so don't warn. */
2072 if ((DECL_P (exp)
2073 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2074 && TREE_THIS_VOLATILE (exp))
2075 return 0;
2077 /* If this is an expression which has no operands, there is no value
2078 to be unused. There are no such language-independent codes,
2079 but front ends may define such. */
2080 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2081 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2082 return 0;
2084 warn:
2085 warning_with_file_and_line (emit_filename, emit_lineno,
2086 "value computed is not used");
2087 return 1;
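/* Illustrative cases (not from the original source): a statement
   like `x++, y;' has side effects, so it reaches this function and
   warns via the COMPOUND_EXPR case and the `warn' label above;
   `(void) f ();' is silenced by the VOID_TYPE_P check; and
   `(foo (), 0)' is silenced by the constant-second-operand test in
   the COMPOUND_EXPR case.  */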
2091 /* Clear out the memory of the last expression evaluated. */
2093 void
2094 clear_last_expr ()
2096 last_expr_type = 0;
2099 /* Begin a statement which will return a value.
2100 Return the RTL_EXPR for this statement expr.
2101 The caller must save that value and pass it to expand_end_stmt_expr. */
2103 tree
2104 expand_start_stmt_expr ()
2106 tree t;
2108 /* Make the RTL_EXPR node temporary, not momentary,
2109 so that rtl_expr_chain doesn't become garbage. */
2110 t = make_node (RTL_EXPR);
2111 do_pending_stack_adjust ();
2112 start_sequence_for_rtl_expr (t);
2113 NO_DEFER_POP;
2114 expr_stmts_for_value++;
2115 return t;
2118 /* Restore the previous state at the end of a statement that returns a value.
2119 Returns a tree node representing the statement's value and the
2120 insns to compute the value.
2122 The nodes of that expression have been freed by now, so we cannot use them.
2123 But we don't want to do that anyway; the expression has already been
2124 evaluated and now we just want to use the value. So generate a RTL_EXPR
2125 with the proper type and RTL value.
2127 If the last substatement was not an expression,
2128 return something with type `void'. */
2130 tree
2131 expand_end_stmt_expr (t)
2132 tree t;
2134 OK_DEFER_POP;
2136 if (last_expr_type == 0)
2138 last_expr_type = void_type_node;
2139 last_expr_value = const0_rtx;
2141 else if (last_expr_value == 0)
2142 /* There are some cases where this can happen, such as when the
2143 statement is void type. */
2144 last_expr_value = const0_rtx;
2145 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2146 /* Remove any possible QUEUED. */
2147 last_expr_value = protect_from_queue (last_expr_value, 0);
2149 emit_queue ();
2151 TREE_TYPE (t) = last_expr_type;
2152 RTL_EXPR_RTL (t) = last_expr_value;
2153 RTL_EXPR_SEQUENCE (t) = get_insns ();
2155 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2157 end_sequence ();
2159 /* Don't consider deleting this expr or containing exprs at tree level. */
2160 TREE_SIDE_EFFECTS (t) = 1;
2161 /* Propagate volatility of the actual RTL expr. */
2162 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2164 last_expr_type = 0;
2165 expr_stmts_for_value--;
2167 return t;
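/* Sketch of the expected calling sequence for a ({...}) construct
   (the real drivers live in the front ends):

	tree t = expand_start_stmt_expr ();
	expand_expr_stmt (substmt);	... once per substatement ...
	t = expand_end_stmt_expr (t);	... t is now an RTL_EXPR ...
*/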
2170 /* Generate RTL for the start of an if-then. COND is the expression
2171 whose truth should be tested.
2173 If EXITFLAG is nonzero, this conditional is visible to
2174 `exit_something'. */
2176 void
2177 expand_start_cond (cond, exitflag)
2178 tree cond;
2179 int exitflag;
2181 struct nesting *thiscond = ALLOC_NESTING ();
2183 /* Make an entry on cond_stack for the cond we are entering. */
2185 thiscond->next = cond_stack;
2186 thiscond->all = nesting_stack;
2187 thiscond->depth = ++nesting_depth;
2188 thiscond->data.cond.next_label = gen_label_rtx ();
2189 /* Before we encounter an `else', we don't need a separate exit label
2190 unless there are supposed to be exit statements
2191 to exit this conditional. */
2192 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2193 thiscond->data.cond.endif_label = thiscond->exit_label;
2194 cond_stack = thiscond;
2195 nesting_stack = thiscond;
2197 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2200 /* Generate RTL between then-clause and the elseif-clause
2201 of an if-then-elseif-.... */
2203 void
2204 expand_start_elseif (cond)
2205 tree cond;
2207 if (cond_stack->data.cond.endif_label == 0)
2208 cond_stack->data.cond.endif_label = gen_label_rtx ();
2209 emit_jump (cond_stack->data.cond.endif_label);
2210 emit_label (cond_stack->data.cond.next_label);
2211 cond_stack->data.cond.next_label = gen_label_rtx ();
2212 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2215 /* Generate RTL between the then-clause and the else-clause
2216 of an if-then-else. */
2218 void
2219 expand_start_else ()
2221 if (cond_stack->data.cond.endif_label == 0)
2222 cond_stack->data.cond.endif_label = gen_label_rtx ();
2224 emit_jump (cond_stack->data.cond.endif_label);
2225 emit_label (cond_stack->data.cond.next_label);
2226 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2229 /* After calling expand_start_else, turn this "else" into an "else if"
2230 by providing another condition. */
2232 void
2233 expand_elseif (cond)
2234 tree cond;
2236 cond_stack->data.cond.next_label = gen_label_rtx ();
2237 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2240 /* Generate RTL for the end of an if-then.
2241 Pop the record for it off of cond_stack. */
2243 void
2244 expand_end_cond ()
2246 struct nesting *thiscond = cond_stack;
2248 do_pending_stack_adjust ();
2249 if (thiscond->data.cond.next_label)
2250 emit_label (thiscond->data.cond.next_label);
2251 if (thiscond->data.cond.endif_label)
2252 emit_label (thiscond->data.cond.endif_label);
2254 POPSTACK (cond_stack);
2255 last_expr_type = 0;
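/* Illustrative driver sequence for an if/else if/else chain (a
   sketch; front ends make the actual calls while parsing):

	expand_start_cond (cond1, 0);	... then-clause ...
	expand_start_elseif (cond2);	... else-if clause ...
	expand_start_else ();		... else-clause ...
	expand_end_cond ();
*/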
2258 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2259 loop should be exited by `exit_something'. This is a loop for which
2260 `expand_continue' will jump to the top of the loop.
2262 Make an entry on loop_stack to record the labels associated with
2263 this loop. */
2265 struct nesting *
2266 expand_start_loop (exit_flag)
2267 int exit_flag;
2269 register struct nesting *thisloop = ALLOC_NESTING ();
2271 /* Make an entry on loop_stack for the loop we are entering. */
2273 thisloop->next = loop_stack;
2274 thisloop->all = nesting_stack;
2275 thisloop->depth = ++nesting_depth;
2276 thisloop->data.loop.start_label = gen_label_rtx ();
2277 thisloop->data.loop.end_label = gen_label_rtx ();
2278 thisloop->data.loop.alt_end_label = 0;
2279 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2280 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2281 loop_stack = thisloop;
2282 nesting_stack = thisloop;
2284 do_pending_stack_adjust ();
2285 emit_queue ();
2286 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2287 emit_label (thisloop->data.loop.start_label);
2289 return thisloop;
2292 /* Like expand_start_loop but for a loop where the continuation point
2293 (for expand_continue_loop) will be specified explicitly. */
2295 struct nesting *
2296 expand_start_loop_continue_elsewhere (exit_flag)
2297 int exit_flag;
2299 struct nesting *thisloop = expand_start_loop (exit_flag);
2300 loop_stack->data.loop.continue_label = gen_label_rtx ();
2301 return thisloop;
2304 /* Begin a null, aka do { } while (0) "loop". But since the contents
2305 of said loop can still contain a break, we must frob the loop nest. */
2307 struct nesting *
2308 expand_start_null_loop ()
2310 register struct nesting *thisloop = ALLOC_NESTING ();
2312 /* Make an entry on loop_stack for the loop we are entering. */
2314 thisloop->next = loop_stack;
2315 thisloop->all = nesting_stack;
2316 thisloop->depth = ++nesting_depth;
2317 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2318 thisloop->data.loop.end_label = gen_label_rtx ();
2319 thisloop->data.loop.alt_end_label = NULL_RTX;
2320 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2321 thisloop->exit_label = thisloop->data.loop.end_label;
2322 loop_stack = thisloop;
2323 nesting_stack = thisloop;
2325 return thisloop;
2328 /* Specify the continuation point for a loop started with
2329 expand_start_loop_continue_elsewhere.
2330 Use this at the point in the code to which a continue statement
2331 should jump. */
2333 void
2334 expand_loop_continue_here ()
2336 do_pending_stack_adjust ();
2337 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2338 emit_label (loop_stack->data.loop.continue_label);
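/* E.g. a C `for (init; cond; incr) body' might be driven roughly as
   follows (a sketch; the exact calls live in the front ends):

	... expand init ...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	... expand body ...
	expand_loop_continue_here ();
	... expand incr ...
	expand_end_loop ();
*/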
2341 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2342 Pop the block off of loop_stack. */
2344 void
2345 expand_end_loop ()
2347 rtx start_label = loop_stack->data.loop.start_label;
2348 rtx insn = get_last_insn ();
2349 int needs_end_jump = 1;
2351 /* Mark the continue-point at the top of the loop if none elsewhere. */
2352 if (start_label == loop_stack->data.loop.continue_label)
2353 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2355 do_pending_stack_adjust ();
2357 /* If optimizing, perhaps reorder the loop.
2358 First, try to use a condjump near the end.
2359 expand_exit_loop_if_false ends loops with unconditional jumps,
2360 like this:
2362 if (test) goto label;
2363 optional: cleanup
2364 goto loop_stack->data.loop.end_label
2365 barrier
2366 label:
2368 If we find such a pattern, we can end the loop earlier. */
2370 if (optimize
2371 && GET_CODE (insn) == CODE_LABEL
2372 && LABEL_NAME (insn) == NULL
2373 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2375 rtx label = insn;
2376 rtx jump = PREV_INSN (PREV_INSN (label));
2378 if (GET_CODE (jump) == JUMP_INSN
2379 && GET_CODE (PATTERN (jump)) == SET
2380 && SET_DEST (PATTERN (jump)) == pc_rtx
2381 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2382 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2383 == loop_stack->data.loop.end_label))
2385 rtx prev;
2387 /* The test might be complex and reference LABEL multiple times,
2388 like the loop in loop_iterations to set vtop. To handle this,
2389 we move LABEL. */
2390 insn = PREV_INSN (label);
2391 reorder_insns (label, label, start_label);
2393 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2395 /* We ignore line number notes, but if we see any other note,
2396 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2397 NOTE_INSN_LOOP_*, we disable this optimization. */
2398 if (GET_CODE (prev) == NOTE)
2400 if (NOTE_LINE_NUMBER (prev) < 0)
2401 break;
2402 continue;
2404 if (GET_CODE (prev) == CODE_LABEL)
2405 break;
2406 if (GET_CODE (prev) == JUMP_INSN)
2408 if (GET_CODE (PATTERN (prev)) == SET
2409 && SET_DEST (PATTERN (prev)) == pc_rtx
2410 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2411 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2412 == LABEL_REF)
2413 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2415 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2416 = start_label;
2417 emit_note_after (NOTE_INSN_LOOP_END, prev);
2418 needs_end_jump = 0;
2420 break;
2426 /* If the loop starts with a loop exit, roll that to the end where
2427 it will optimize together with the jump back.
2429 We look for the conditional branch to the exit, except that once
2430 we find such a branch, we don't look past 30 instructions.
2432 In more detail, if the loop presently looks like this (in pseudo-C):
2434 start_label:
2435 if (test) goto end_label;
2436 body;
2437 goto start_label;
2438 end_label:
2440 transform it to look like:
2442 goto start_label;
2443 newstart_label:
2444 body;
2445 start_label:
2446 if (test) goto end_label;
2447 goto newstart_label;
2448 end_label:
2450 Here, the `test' may actually consist of some reasonably complex
2451 code, terminating in a test. */
2453 if (optimize
2454 && needs_end_jump
2455 &&
2456 ! (GET_CODE (insn) == JUMP_INSN
2457 && GET_CODE (PATTERN (insn)) == SET
2458 && SET_DEST (PATTERN (insn)) == pc_rtx
2459 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2461 int eh_regions = 0;
2462 int num_insns = 0;
2463 rtx last_test_insn = NULL_RTX;
2465 /* Scan insns from the top of the loop looking for a qualified
2466 conditional exit. */
2467 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2468 insn = NEXT_INSN (insn))
2470 if (GET_CODE (insn) == NOTE)
2472 if (optimize < 2
2473 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2474 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2475 /* The code that actually moves the exit test will
2476 carefully leave BLOCK notes in their original
2477 location. That means, however, that we can't debug
2478 the exit test itself. So, we refuse to move code
2479 containing BLOCK notes at low optimization levels. */
2480 break;
2482 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2483 ++eh_regions;
2484 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2486 --eh_regions;
2487 if (eh_regions < 0)
2488 /* We've come to the end of an EH region, but
2489 never saw the beginning of that region. That
2490 means that an EH region begins before the top
2491 of the loop, and ends in the middle of it. The
2492 existence of such a situation violates a basic
2493 assumption in this code, since that would imply
2494 that even when EH_REGIONS is zero, we might
2495 move code out of an exception region. */
2496 abort ();
2499 /* We must not walk into a nested loop. */
2500 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2501 break;
2503 /* We already know this INSN is a NOTE, so there's no
2504 point in looking at it to see if it's a JUMP. */
2505 continue;
2508 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2509 num_insns++;
2511 if (last_test_insn && num_insns > 30)
2512 break;
2514 if (eh_regions > 0)
2515 /* We don't want to move a partial EH region. Consider:
2517 while ( ( { try {
2518 if (cond ()) 0;
2519 else {
2520 bar();
2521 1;
2522 }
2523 } catch (...) {
2524 1;
2525 } )) {
2526 body;
2527 }
2529 This isn't legal C++, but here's what it's supposed to
2530 mean: if cond() is true, stop looping. Otherwise,
2531 call bar, and keep looping. In addition, if cond
2532 throws an exception, catch it and keep looping. Such
2533 constructs are certainly legal in LISP.
2535 We should not move the `if (cond()) 0' test since then
2536 the EH-region for the try-block would be broken up.
2537 (In this case we would move the EH_BEG note for the `try'
2538 and `if cond()' but not the call to bar() or the
2539 EH_END note.)
2541 So we don't look for tests within an EH region. */
2542 continue;
2544 if (GET_CODE (insn) == JUMP_INSN
2545 && GET_CODE (PATTERN (insn)) == SET
2546 && SET_DEST (PATTERN (insn)) == pc_rtx)
2548 /* This is indeed a jump. */
2549 rtx dest1 = NULL_RTX;
2550 rtx dest2 = NULL_RTX;
2551 rtx potential_last_test;
2552 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2554 /* A conditional jump. */
2555 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2556 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2557 potential_last_test = insn;
2559 else
2561 /* An unconditional jump. */
2562 dest1 = SET_SRC (PATTERN (insn));
2563 /* Include the BARRIER after the JUMP. */
2564 potential_last_test = NEXT_INSN (insn);
2567 do {
2568 if (dest1 && GET_CODE (dest1) == LABEL_REF
2569 && ((XEXP (dest1, 0)
2570 == loop_stack->data.loop.alt_end_label)
2571 || (XEXP (dest1, 0)
2572 == loop_stack->data.loop.end_label)))
2574 last_test_insn = potential_last_test;
2575 break;
2578 /* If this was a conditional jump, there may be
2579 another label at which we should look. */
2580 dest1 = dest2;
2581 dest2 = NULL_RTX;
2582 } while (dest1);
2586 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2588 /* We found one. Move everything from there up
2589 to the end of the loop, and add a jump into the loop
2590 to jump to there. */
2591 register rtx newstart_label = gen_label_rtx ();
2592 register rtx start_move = start_label;
2593 rtx next_insn;
2595 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2596 then we want to move this note also. */
2597 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2598 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2599 == NOTE_INSN_LOOP_CONT))
2600 start_move = PREV_INSN (start_move);
2602 emit_label_after (newstart_label, PREV_INSN (start_move));
2604 /* Actually move the insns. Start at the beginning, and
2605 keep copying insns until we've copied the
2606 last_test_insn. */
2607 for (insn = start_move; insn; insn = next_insn)
2609 /* Figure out which insn comes after this one. We have
2610 to do this before we move INSN. */
2611 if (insn == last_test_insn)
2612 /* We've moved all the insns. */
2613 next_insn = NULL_RTX;
2614 else
2615 next_insn = NEXT_INSN (insn);
2617 if (GET_CODE (insn) == NOTE
2618 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2619 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2620 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2621 NOTE_INSN_BLOCK_ENDs because the correct generation
2622 of debugging information depends on these appearing
2623 in the same order in the RTL and in the tree
2624 structure, where they are represented as BLOCKs.
2625 So, we don't move block notes. Of course, moving
2626 the code inside the block is likely to make it
2627 impossible to debug the instructions in the exit
2628 test, but such is the price of optimization. */
2629 continue;
2631 /* Move the INSN. */
2632 reorder_insns (insn, insn, get_last_insn ());
2635 emit_jump_insn_after (gen_jump (start_label),
2636 PREV_INSN (newstart_label));
2637 emit_barrier_after (PREV_INSN (newstart_label));
2638 start_label = newstart_label;
2642 if (needs_end_jump)
2644 emit_jump (start_label);
2645 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2647 emit_label (loop_stack->data.loop.end_label);
2649 POPSTACK (loop_stack);
2651 last_expr_type = 0;
2654 /* Finish a null loop, aka do { } while (0). */
2656 void
2657 expand_end_null_loop ()
2659 do_pending_stack_adjust ();
2660 emit_label (loop_stack->data.loop.end_label);
2662 POPSTACK (loop_stack);
2664 last_expr_type = 0;
2667 /* Generate a jump to the current loop's continue-point.
2668 This is usually the top of the loop, but may be specified
2669 explicitly elsewhere. If not currently inside a loop,
2670 return 0 and do nothing; caller will print an error message. */
2672 int
2673 expand_continue_loop (whichloop)
2674 struct nesting *whichloop;
2676 last_expr_type = 0;
2677 if (whichloop == 0)
2678 whichloop = loop_stack;
2679 if (whichloop == 0)
2680 return 0;
2681 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2682 NULL_RTX);
2683 return 1;
2686 /* Generate a jump to exit the current loop. If not currently inside a loop,
2687 return 0 and do nothing; caller will print an error message. */
2689 int
2690 expand_exit_loop (whichloop)
2691 struct nesting *whichloop;
2693 last_expr_type = 0;
2694 if (whichloop == 0)
2695 whichloop = loop_stack;
2696 if (whichloop == 0)
2697 return 0;
2698 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2699 return 1;
2702 /* Generate a conditional jump to exit the current loop if COND
2703 evaluates to zero. If not currently inside a loop,
2704 return 0 and do nothing; caller will print an error message. */
2706 int
2707 expand_exit_loop_if_false (whichloop, cond)
2708 struct nesting *whichloop;
2709 tree cond;
2711 rtx label = gen_label_rtx ();
2712 rtx last_insn;
2713 last_expr_type = 0;
2715 if (whichloop == 0)
2716 whichloop = loop_stack;
2717 if (whichloop == 0)
2718 return 0;
2719 /* In order to handle fixups, we actually create a conditional jump
2720 around an unconditional branch to exit the loop. If fixups are
2721 necessary, they go before the unconditional branch. */
2723 do_jump (cond, NULL_RTX, label);
2724 last_insn = get_last_insn ();
2725 if (GET_CODE (last_insn) == CODE_LABEL)
2726 whichloop->data.loop.alt_end_label = last_insn;
2727 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2728 NULL_RTX);
2729 emit_label (label);
2731 return 1;
2734 /* Return nonzero if the loop nest is empty. Else return zero. */
2736 int
2737 stmt_loop_nest_empty ()
2739 /* cfun->stmt can be NULL if we are building a call to get the
2740 EH context for a setjmp/longjmp EH target and the current
2741 function was a deferred inline function. */
2742 return (cfun->stmt == NULL || loop_stack == NULL);
2745 /* Return non-zero if we should preserve sub-expressions as separate
2746 pseudos. We never do so if we aren't optimizing. We always do so
2747 if -fexpensive-optimizations.
2749 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2750 the loop may still be a small one. */
2752 int
2753 preserve_subexpressions_p ()
2755 rtx insn;
2757 if (flag_expensive_optimizations)
2758 return 1;
2760 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2761 return 0;
2763 insn = get_last_insn_anywhere ();
2765 return (insn
2766 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2767 < n_non_fixed_regs * 3));
2771 /* Generate a jump to exit the current loop, conditional, binding contour
2772 or case statement. Not all such constructs are visible to this function,
2773 only those started with EXIT_FLAG nonzero. Individual languages use
2774 the EXIT_FLAG parameter to control which kinds of constructs you can
2775 exit this way.
2777 If not currently inside anything that can be exited,
2778 return 0 and do nothing; caller will print an error message. */
2780 int
2781 expand_exit_something ()
2783 struct nesting *n;
2784 last_expr_type = 0;
2785 for (n = nesting_stack; n; n = n->all)
2786 if (n->exit_label != 0)
2788 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2789 return 1;
2792 return 0;
2795 /* Generate RTL to return from the current function, with no value.
2796 (That is, we do not do anything about returning any value.) */
2798 void
2799 expand_null_return ()
2801 struct nesting *block = block_stack;
2802 rtx last_insn = get_last_insn ();
2804 /* If this function was declared to return a value, but we
2805 didn't, clobber the return registers so that they are not
2806 propagated live to the rest of the function. */
2807 clobber_return_register ();
2809 /* Does any pending block have cleanups? */
2810 while (block && block->data.block.cleanups == 0)
2811 block = block->next;
2813 /* If yes, use a goto to return, since that runs cleanups. */
2815 expand_null_return_1 (last_insn, block != 0);
2818 /* Generate RTL to return from the current function, with value VAL. */
2820 static void
2821 expand_value_return (val)
2822 rtx val;
2824 struct nesting *block = block_stack;
2825 rtx last_insn = get_last_insn ();
2826 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2828 /* Copy the value to the return location
2829 unless it's already there. */
2831 if (return_reg != val)
2833 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2834 #ifdef PROMOTE_FUNCTION_RETURN
2835 int unsignedp = TREE_UNSIGNED (type);
2836 enum machine_mode old_mode
2837 = DECL_MODE (DECL_RESULT (current_function_decl));
2838 enum machine_mode mode
2839 = promote_mode (type, old_mode, &unsignedp, 1);
2841 if (mode != old_mode)
2842 val = convert_modes (mode, old_mode, val, unsignedp);
2843 #endif
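/* For instance (illustrative): when the function returns `short' on
   a target whose PROMOTE_FUNCTION_RETURN widens return values to
   SImode, promote_mode selects SImode above and convert_modes emits
   the appropriate sign- or zero-extension of VAL.  */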
2844 if (GET_CODE (return_reg) == PARALLEL)
2845 emit_group_load (return_reg, val, int_size_in_bytes (type),
2846 TYPE_ALIGN (type));
2847 else
2848 emit_move_insn (return_reg, val);
2851 /* Does any pending block have cleanups? */
2853 while (block && block->data.block.cleanups == 0)
2854 block = block->next;
2856 /* If yes, use a goto to return, since that runs cleanups.
2857 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2859 expand_null_return_1 (last_insn, block != 0);
2862 /* Output a return with no value. If LAST_INSN is nonzero,
2863 pretend that the return takes place after LAST_INSN.
2864 If USE_GOTO is nonzero then don't use a return instruction;
2865 go to the return label instead. This causes any cleanups
2866 of pending blocks to be executed normally. */
2868 static void
2869 expand_null_return_1 (last_insn, use_goto)
2870 rtx last_insn;
2871 int use_goto;
2873 rtx end_label = cleanup_label ? cleanup_label : return_label;
2875 clear_pending_stack_adjust ();
2876 do_pending_stack_adjust ();
2877 last_expr_type = 0;
2879 /* PCC-struct return always uses an epilogue. */
2880 if (current_function_returns_pcc_struct || use_goto)
2882 if (end_label == 0)
2883 end_label = return_label = gen_label_rtx ();
2884 expand_goto_internal (NULL_TREE, end_label, last_insn);
2885 return;
2888 /* Otherwise output a simple return-insn if one is available,
2889 unless it won't do the job. */
2890 #ifdef HAVE_return
2891 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2893 emit_jump_insn (gen_return ());
2894 emit_barrier ();
2895 return;
2897 #endif
2899 /* Otherwise jump to the epilogue. */
2900 expand_goto_internal (NULL_TREE, end_label, last_insn);
2903 /* Generate RTL to evaluate the expression RETVAL and return it
2904 from the current function. */
2906 void
2907 expand_return (retval)
2908 tree retval;
2910 /* If there are any cleanups to be performed, then they will
2911 be inserted following LAST_INSN. It is desirable
2912 that the last_insn, for such purposes, should be the
2913 last insn before computing the return value. Otherwise, cleanups
2914 which call functions can clobber the return value. */
2915 /* ??? rms: I think that is erroneous, because in C++ it would
2916 run destructors on variables that might be used in the subsequent
2917 computation of the return value. */
2918 rtx last_insn = 0;
2919 rtx result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2920 register rtx val = 0;
2921 tree retval_rhs;
2922 int cleanups;
2924 /* If function wants no value, give it none. */
2925 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2927 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2928 emit_queue ();
2929 expand_null_return ();
2930 return;
2933 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2934 /* This is not sufficient. We also need to watch for cleanups of the
2935 expression we are about to expand. Unfortunately, we cannot know
2936 if it has cleanups until we expand it, and we want to change how we
2937 expand it depending upon whether we need cleanups. We can't win. */
2938 #if 0
2939 cleanups = any_pending_cleanups (1);
2940 #else
2941 cleanups = 1;
2942 #endif
2944 if (retval == error_mark_node)
2945 retval_rhs = NULL_TREE;
2946 else if (TREE_CODE (retval) == RESULT_DECL)
2947 retval_rhs = retval;
2948 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2949 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2950 retval_rhs = TREE_OPERAND (retval, 1);
2951 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2952 /* Recognize tail-recursive call to void function. */
2953 retval_rhs = retval;
2954 else
2955 retval_rhs = NULL_TREE;
2957 /* Only use `last_insn' if there are cleanups which must be run. */
2958 if (cleanups || cleanup_label != 0)
2959 last_insn = get_last_insn ();
2961 /* Distribute return down conditional expr if either of the sides
2962 may involve tail recursion (see test below). This enhances the number
2963 of tail recursions we see. Don't do this always since it can produce
2964 sub-optimal code in some cases and we distribute assignments into
2965 conditional expressions when it would help. */
2967 if (optimize && retval_rhs != 0
2968 && frame_offset == 0
2969 && TREE_CODE (retval_rhs) == COND_EXPR
2970 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2971 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2973 rtx label = gen_label_rtx ();
2974 tree expr;
2976 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2977 start_cleanup_deferral ();
2978 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2979 DECL_RESULT (current_function_decl),
2980 TREE_OPERAND (retval_rhs, 1));
2981 TREE_SIDE_EFFECTS (expr) = 1;
2982 expand_return (expr);
2983 emit_label (label);
2985 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2986 DECL_RESULT (current_function_decl),
2987 TREE_OPERAND (retval_rhs, 2));
2988 TREE_SIDE_EFFECTS (expr) = 1;
2989 expand_return (expr);
2990 end_cleanup_deferral ();
2991 return;
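/* E.g. (illustrative) `return p ? f (x) : g (x);' is rewritten by
   the branch above into two separate returns, one per arm, so each
   arm's call can be recognized as a tail recursion on its own.  */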
2994 /* If the result is an aggregate that is being returned in one (or more)
2995 registers, load the registers here. The compiler currently can't handle
2996 copying a BLKmode value into registers. We could put this code in a
2997 more general area (for use by everyone instead of just function
2998 call/return), but until this feature is generally usable it is kept here
2999 (and in expand_call). The value must go into a pseudo in case there
3000 are cleanups that will clobber the real return register. */
3002 if (retval_rhs != 0
3003 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3004 && GET_CODE (result_rtl) == REG)
3006 int i;
3007 unsigned HOST_WIDE_INT bitpos, xbitpos;
3008 unsigned HOST_WIDE_INT big_endian_correction = 0;
3009 unsigned HOST_WIDE_INT bytes
3010 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3011 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3012 unsigned int bitsize
3013 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3014 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3015 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3016 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3017 enum machine_mode tmpmode, result_reg_mode;
3019 if (bytes == 0)
3021 expand_null_return ();
3022 return;
3025 /* Structures whose size is not a multiple of a word are aligned
3026 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3027 machine, this means we must skip the empty high order bytes when
3028 calculating the bit offset. */
3029 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3030 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3031 * BITS_PER_UNIT));
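/* Worked example (assuming a 32-bit big-endian target, so
   UNITS_PER_WORD == 4 and BITS_PER_WORD == 32): a 6-byte structure
   gives bytes == 6, n_regs == 2, and big_endian_correction
   == 32 - (6 % 4) * 8 == 16, so copying into the first destination
   word starts at bit 16, right-justifying the value.  */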
3033 /* Copy the structure BITSIZE bits at a time. */
3034 for (bitpos = 0, xbitpos = big_endian_correction;
3035 bitpos < bytes * BITS_PER_UNIT;
3036 bitpos += bitsize, xbitpos += bitsize)
3038 /* We need a new destination pseudo each time xbitpos is
3039 on a word boundary and when xbitpos == big_endian_correction
3040 (the first time through). */
3041 if (xbitpos % BITS_PER_WORD == 0
3042 || xbitpos == big_endian_correction)
3044 /* Generate an appropriate register. */
3045 dst = gen_reg_rtx (word_mode);
3046 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3048 /* Clobber the destination before we move anything into it. */
3049 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3052 /* We need a new source operand each time bitpos is on a word
3053 boundary. */
3054 if (bitpos % BITS_PER_WORD == 0)
3055 src = operand_subword_force (result_val,
3056 bitpos / BITS_PER_WORD,
3057 BLKmode);
3059 /* Use bitpos for the source extraction (left justified) and
3060 xbitpos for the destination store (right justified). */
3061 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3062 extract_bit_field (src, bitsize,
3063 bitpos % BITS_PER_WORD, 1,
3064 NULL_RTX, word_mode, word_mode,
3065 bitsize, BITS_PER_WORD),
3066 bitsize, BITS_PER_WORD);
3069 /* Find the smallest integer mode large enough to hold the
3070 entire structure and use that mode instead of BLKmode
3071 on the USE insn for the return register. */
3072 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3073 tmpmode != VOIDmode;
3074 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3075 /* Have we found a large enough mode? */
3076 if (GET_MODE_SIZE (tmpmode) >= bytes)
3077 break;
3079 /* No suitable mode found. */
3080 if (tmpmode == VOIDmode)
3081 abort ();
3083 PUT_MODE (result_rtl, tmpmode);
3085 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3086 result_reg_mode = word_mode;
3087 else
3088 result_reg_mode = tmpmode;
3089 result_reg = gen_reg_rtx (result_reg_mode);
3091 emit_queue ();
3092 for (i = 0; i < n_regs; i++)
3093 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3094 result_pseudos[i]);
3096 if (tmpmode != result_reg_mode)
3097 result_reg = gen_lowpart (tmpmode, result_reg);
3099 expand_value_return (result_reg);
3101 else if (cleanups
3102 && retval_rhs != 0
3103 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3104 && (GET_CODE (result_rtl) == REG
3105 || (GET_CODE (result_rtl) == PARALLEL)))
3107 /* Calculate the return value into a temporary (usually a pseudo
3108 reg). */
3109 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3110 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3112 val = assign_temp (nt, 0, 0, 1);
3113 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3114 val = force_not_mem (val);
3115 emit_queue ();
3116 /* Return the calculated value, doing cleanups first. */
3117 expand_value_return (val);
3119 else
3121 /* No cleanups or no hard reg used;
3122 calculate value into hard return reg. */
3123 expand_expr (retval, const0_rtx, VOIDmode, 0);
3124 emit_queue ();
3125 expand_value_return (result_rtl);
3129 /* Return 1 if the end of the generated RTX is not a barrier.
3130 This means code already compiled can drop through. */
3132 int
3133 drop_through_at_end_p ()
3135 rtx insn = get_last_insn ();
3136 while (insn && GET_CODE (insn) == NOTE)
3137 insn = PREV_INSN (insn);
3138 return insn && GET_CODE (insn) != BARRIER;
3141 /* Attempt to optimize a potential tail recursion call into a goto.
3142 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3143 where to place the jump to the tail recursion label.
3145 Return TRUE if the call was optimized into a goto. */
3147 int
3148 optimize_tail_recursion (arguments, last_insn)
3149 tree arguments;
3150 rtx last_insn;
3152 /* Finish checking validity, and if valid emit code to set the
3153 argument variables for the new call. */
3154 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3156 if (tail_recursion_label == 0)
3158 tail_recursion_label = gen_label_rtx ();
3159 emit_label_after (tail_recursion_label,
3160 tail_recursion_reentry);
3162 emit_queue ();
3163 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3164 emit_barrier ();
3165 return 1;
3167 return 0;
3170 /* Emit code to alter this function's formal parms for a tail-recursive call.
3171 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3172 FORMALS is the chain of decls of formals.
3173 Return 1 if this can be done;
3174 otherwise return 0 and do not emit any code. */
3176 static int
3177 tail_recursion_args (actuals, formals)
3178 tree actuals, formals;
3180 register tree a = actuals, f = formals;
3181 register int i;
3182 register rtx *argvec;
3184 /* Check that number and types of actuals are compatible
3185 with the formals. This is not always true in valid C code.
3186 Also check that no formal needs to be addressable
3187 and that all formals are scalars. */
3189 /* Also count the args. */
3191 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3193 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3194 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3195 return 0;
3196 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3197 return 0;
3199 if (a != 0 || f != 0)
3200 return 0;
3202 /* Compute all the actuals. */
3204 argvec = (rtx *) alloca (i * sizeof (rtx));
3206 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3207 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3209 /* Find which actual values refer to current values of previous formals.
3210 Copy each of them now, before any formal is changed. */
3212 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3214 int copy = 0;
3215 register int j;
3216 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3217 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3219 copy = 1;
3220 break;
3222 if (copy)
3223 argvec[i] = copy_to_reg (argvec[i]);
3226 /* Store the values of the actuals into the formals. */
3228 for (f = formals, a = actuals, i = 0; f;
3229 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3231 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3232 emit_move_insn (DECL_RTL (f), argvec[i]);
3233 else
3234 convert_move (DECL_RTL (f), argvec[i],
3235 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3238 free_temp_slots ();
3239 return 1;
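/* Worked example (a sketch; the function is illustrative):

	int fact (n, acc) int n, acc;
	{ return n ? fact (n - 1, acc * n) : acc; }

   Here argvec[0] holds n - 1 and argvec[1] holds acc * n.  Since
   argvec[1] mentions the formal `n', which is stored into first,
   the loop above copies it to a fresh pseudo before any store; the
   recursive call then behaves like

	tmp = acc * n; n = n - 1; acc = tmp;

   followed by the jump to tail_recursion_label emitted by
   optimize_tail_recursion.  */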
3242 /* Generate the RTL code for entering a binding contour.
3243 The variables are declared one by one, by calls to `expand_decl'.
3245 FLAGS is a bitwise or of the following flags:
3247 1 - Nonzero if this construct should be visible to
3248 `exit_something'.
3250 2 - Nonzero if this contour does not require a
3251 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3252 language-independent code should set this flag because they
3253 will not create corresponding BLOCK nodes. (There should be
3254 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3255 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3256 when expand_end_bindings is called.
3258 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3259 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3260 note. */
3262 void
3263 expand_start_bindings_and_block (flags, block)
3264 int flags;
3265 tree block;
3267 struct nesting *thisblock = ALLOC_NESTING ();
3268 rtx note;
3269 int exit_flag = ((flags & 1) != 0);
3270 int block_flag = ((flags & 2) == 0);
3272 /* If a BLOCK is supplied, then the caller should be requesting a
3273 NOTE_INSN_BLOCK_BEG note. */
3274 if (!block_flag && block)
3275 abort ();
3277 /* Create a note to mark the beginning of the block. */
3278 if (block_flag)
3280 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3281 NOTE_BLOCK (note) = block;
3283 else
3284 note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
3286 /* Make an entry on block_stack for the block we are entering. */
3288 thisblock->next = block_stack;
3289 thisblock->all = nesting_stack;
3290 thisblock->depth = ++nesting_depth;
3291 thisblock->data.block.stack_level = 0;
3292 thisblock->data.block.cleanups = 0;
3293 thisblock->data.block.n_function_calls = 0;
3294 thisblock->data.block.exception_region = 0;
3295 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3297 thisblock->data.block.conditional_code = 0;
3298 thisblock->data.block.last_unconditional_cleanup = note;
3299 /* When we insert instructions after the last unconditional cleanup,
3300 we don't adjust last_insn. That means that a later add_insn will
3301 clobber the instructions we've just added. The easiest way to
3302 fix this is to just insert another instruction here, so that the
3303 instructions inserted after the last unconditional cleanup are
3304 never the last instruction. */
3305 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3306 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3308 if (block_stack
3309 && !(block_stack->data.block.cleanups == NULL_TREE
3310 && block_stack->data.block.outer_cleanups == NULL_TREE))
3311 thisblock->data.block.outer_cleanups
3312 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3313 block_stack->data.block.outer_cleanups);
3314 else
3315 thisblock->data.block.outer_cleanups = 0;
3316 thisblock->data.block.label_chain = 0;
3317 thisblock->data.block.innermost_stack_block = stack_block_stack;
3318 thisblock->data.block.first_insn = note;
3319 thisblock->data.block.block_start_count = ++current_block_start_count;
3320 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3321 block_stack = thisblock;
3322 nesting_stack = thisblock;
3324 /* Make a new level for allocating stack slots. */
3325 push_temp_slots ();
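/* Typical pairing, sketched (the front ends drive the actual
   calls):

	expand_start_bindings (0);
	expand_decl (decl);		... once per local ...
	... expand the statements ...
	expand_end_bindings (decls, 1, 0);
*/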
3328 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3329 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3330 expand_expr are made. After we end the region, we know that all
3331 space for all temporaries that were created by TARGET_EXPRs will be
3332 destroyed and their space freed for reuse. */
3334 void
3335 expand_start_target_temps ()
3337 /* This is so that even if the result is preserved, the space
3338 allocated will be freed, as we know that it is no longer in use. */
3339 push_temp_slots ();
3341 /* Start a new binding layer that will keep track of all cleanup
3342 actions to be performed. */
3343 expand_start_bindings (2);
3345 target_temp_slot_level = temp_slot_level;
3348 void
3349 expand_end_target_temps ()
3351 expand_end_bindings (NULL_TREE, 0, 0);
3353 /* This is so that even if the result is preserved, the space
3354 allocated will be freed, as we know that it is no longer in use. */
3355 pop_temp_slots ();
3358 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3359 in question represents the outermost pair of curly braces (i.e. the "body
3360 block") of a function or method.
3362 For any BLOCK node representing a "body block" of a function or method, the
3363 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3364 represents the outermost (function) scope for the function or method (i.e.
3365 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3366 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3368 int
3369 is_body_block (stmt)
3370 register tree stmt;
3372 if (TREE_CODE (stmt) == BLOCK)
3374 tree parent = BLOCK_SUPERCONTEXT (stmt);
3376 if (parent && TREE_CODE (parent) == BLOCK)
3378 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3380 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3381 return 1;
3385 return 0;
3388 /* Mark top block of block_stack as an implicit binding for an
3389 exception region. This is used to prevent infinite recursion when
3390 ending a binding with expand_end_bindings. It is only ever called
3391 by expand_eh_region_start, as that is the only way to create a
3392 block stack for an exception region. */
3394 void
3395 mark_block_as_eh_region ()
3397 block_stack->data.block.exception_region = 1;
3398 if (block_stack->next
3399 && block_stack->next->data.block.conditional_code)
3401 block_stack->data.block.conditional_code
3402 = block_stack->next->data.block.conditional_code;
3403 block_stack->data.block.last_unconditional_cleanup
3404 = block_stack->next->data.block.last_unconditional_cleanup;
3405 block_stack->data.block.cleanup_ptr
3406 = block_stack->next->data.block.cleanup_ptr;
3410 /* True if we are currently emitting insns in an area of output code
3411 that is controlled by a conditional expression. This is used by
3412 the cleanup handling code to generate conditional cleanup actions. */
3414 int
3415 conditional_context ()
3417 return block_stack && block_stack->data.block.conditional_code;
3420 /* Mark top block of block_stack as not for an implicit binding for an
3421 exception region. This is only ever done by expand_eh_region_end
3422 to let expand_end_bindings know that it is being called explicitly
3423 to end the binding layer for just the binding layer associated with
3424 the exception region; otherwise expand_end_bindings would try to
3425 end all implicit binding layers for exception regions, and then
3426 one normal binding layer. */
3428 void
3429 mark_block_as_not_eh_region ()
3431 block_stack->data.block.exception_region = 0;
3434 /* True if the top block of block_stack was marked as for an exception
3435 region by mark_block_as_eh_region. */
3437 int
3438 is_eh_region ()
3440 return cfun && block_stack && block_stack->data.block.exception_region;
3443 /* Emit a handler label for a nonlocal goto handler.
3444 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3446 static rtx
3447 expand_nl_handler_label (slot, before_insn)
3448 rtx slot, before_insn;
3450 rtx insns;
3451 rtx handler_label = gen_label_rtx ();
3453 /* Don't let jump_optimize delete the handler. */
3454 LABEL_PRESERVE_P (handler_label) = 1;
3456 start_sequence ();
3457 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3458 insns = get_insns ();
3459 end_sequence ();
3460 emit_insns_before (insns, before_insn);
3462 emit_label (handler_label);
3464 return handler_label;
3467 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3468 handler. */
3469 static void
3470 expand_nl_goto_receiver ()
3472 #ifdef HAVE_nonlocal_goto
3473 if (! HAVE_nonlocal_goto)
3474 #endif
3475 /* First adjust our frame pointer to its actual value. It was
3476 previously set to the start of the virtual area corresponding to
3477 the stacked variables when we branched here and now needs to be
3478 adjusted to the actual hardware fp value.
3480 Assignments to virtual registers are converted by
3481 instantiate_virtual_regs into the corresponding assignment
3482 to the underlying register (fp in this case) that makes
3483 the original assignment true.
3484 So the following insn will actually be
3485 decrementing fp by STARTING_FRAME_OFFSET. */
3486 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3488 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3489 if (fixed_regs[ARG_POINTER_REGNUM])
3491 #ifdef ELIMINABLE_REGS
3492 /* If the argument pointer can be eliminated in favor of the
3493 frame pointer, we don't need to restore it. We assume here
3494 that if such an elimination is present, it can always be used.
3495 This is the case on all known machines; if we don't make this
3496 assumption, we do unnecessary saving on many machines. */
3497 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3498 size_t i;
3500 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3501 if (elim_regs[i].from == ARG_POINTER_REGNUM
3502 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3503 break;
3505 if (i == ARRAY_SIZE (elim_regs))
3506 #endif
3508 /* Now restore our arg pointer from the address at which it
3509 was saved in our stack frame.
3510 If there hasn't been space allocated for it yet, make
3511 some now. */
3512 if (arg_pointer_save_area == 0)
3513 arg_pointer_save_area
3514 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3515 emit_move_insn (virtual_incoming_args_rtx,
3516 /* We need a pseudo here, or else
3517 instantiate_virtual_regs_1 complains. */
3518 copy_to_reg (arg_pointer_save_area));
3521 #endif
3523 #ifdef HAVE_nonlocal_goto_receiver
3524 if (HAVE_nonlocal_goto_receiver)
3525 emit_insn (gen_nonlocal_goto_receiver ());
3526 #endif
3529 /* Make handlers for nonlocal gotos taking place in the function calls in
3530 block THISBLOCK. */
3532 static void
3533 expand_nl_goto_receivers (thisblock)
3534 struct nesting *thisblock;
3536 tree link;
3537 rtx afterward = gen_label_rtx ();
3538 rtx insns, slot;
3539 rtx label_list;
3540 int any_invalid;
3542 /* Record the handler address in the stack slot for that purpose,
3543 during this block, saving and restoring the outer value. */
3544 if (thisblock->next != 0)
3545 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3547 rtx save_receiver = gen_reg_rtx (Pmode);
3548 emit_move_insn (XEXP (slot, 0), save_receiver);
3550 start_sequence ();
3551 emit_move_insn (save_receiver, XEXP (slot, 0));
3552 insns = get_insns ();
3553 end_sequence ();
3554 emit_insns_before (insns, thisblock->data.block.first_insn);
3557 /* Jump around the handlers; they run only when specially invoked. */
3558 emit_jump (afterward);
3560 /* Make a separate handler for each label. */
3561 link = nonlocal_labels;
3562 slot = nonlocal_goto_handler_slots;
3563 label_list = NULL_RTX;
3564 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3565 /* Skip any labels we shouldn't be able to jump to from here;
3566 we generate one special handler for all of them below, which just calls
3567 abort. */
3568 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3570 rtx lab;
3571 lab = expand_nl_handler_label (XEXP (slot, 0),
3572 thisblock->data.block.first_insn);
3573 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3575 expand_nl_goto_receiver ();
3577 /* Jump to the "real" nonlocal label. */
3578 expand_goto (TREE_VALUE (link));
3581 /* A second pass over all nonlocal labels; this time we handle those
3582 we should not be able to jump to at this point. */
3583 link = nonlocal_labels;
3584 slot = nonlocal_goto_handler_slots;
3585 any_invalid = 0;
3586 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3587 if (DECL_TOO_LATE (TREE_VALUE (link)))
3589 rtx lab;
3590 lab = expand_nl_handler_label (XEXP (slot, 0),
3591 thisblock->data.block.first_insn);
3592 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3593 any_invalid = 1;
3596 if (any_invalid)
3598 expand_nl_goto_receiver ();
3599 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3600 VOIDmode, 0);
3601 emit_barrier ();
3604 nonlocal_goto_handler_labels = label_list;
3605 emit_label (afterward);
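/* The code laid out by this function looks roughly like (a sketch):

	jump afterward
   handler-1:	restore frame and arg pointers; goto <nonlocal label 1>
	...
   invalid:	restore frame and arg pointers; call abort
   afterward:

   so the handlers are reachable only through the addresses stored in
   the nonlocal goto handler slots.  */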
3608 /* Warn about any unused VARS (which may contain nodes other than
3609 VAR_DECLs, but such nodes are ignored). The nodes are connected
3610 via the TREE_CHAIN field. */
3612 void
3613 warn_about_unused_variables (vars)
3614 tree vars;
3616 tree decl;
3618 if (warn_unused_variable)
3619 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3620 if (TREE_CODE (decl) == VAR_DECL
3621 && ! TREE_USED (decl)
3622 && ! DECL_IN_SYSTEM_HEADER (decl)
3623 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3624 warning_with_decl (decl, "unused variable `%s'");
3627 /* Generate RTL code to terminate a binding contour.
3629 VARS is the chain of VAR_DECL nodes for the variables bound in this
3630 contour. There may actually be other nodes in this chain, but any
3631 nodes other than VAR_DECLs are ignored.
3633 MARK_ENDS is nonzero if we should put a note at the beginning
3634 and end of this binding contour.
3636 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3637 (That is true automatically if the contour has a saved stack level.) */
3639 void
3640 expand_end_bindings (vars, mark_ends, dont_jump_in)
3641 tree vars;
3642 int mark_ends;
3643 int dont_jump_in;
3645 register struct nesting *thisblock;
3647 while (block_stack->data.block.exception_region)
3649 /* Because we don't need or want a new temporary level and
3650 because we didn't create one in expand_eh_region_start,
3651 create a fake one now to avoid removing one in
3652 expand_end_bindings. */
3653 push_temp_slots ();
3655 block_stack->data.block.exception_region = 0;
3657 expand_end_bindings (NULL_TREE, 0, 0);
3660 /* Since expand_eh_region_start does an expand_start_bindings, we
3661 have to first end all the bindings that were created by
3662 expand_eh_region_start. */
3664 thisblock = block_stack;
3666 /* If any of the variables in this scope were not used, warn the
3667 user. */
3668 warn_about_unused_variables (vars);
3670 if (thisblock->exit_label)
3672 do_pending_stack_adjust ();
3673 emit_label (thisblock->exit_label);
3676 /* If necessary, make handlers for nonlocal gotos taking
3677 place in the function calls in this block. */
3678 if (function_call_count != thisblock->data.block.n_function_calls
3679 && nonlocal_labels
3680 /* Make handler for outermost block
3681 if there were any nonlocal gotos to this function. */
3682 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3683 /* Make handler for inner block if it has something
3684 special to do when you jump out of it. */
3685 : (thisblock->data.block.cleanups != 0
3686 || thisblock->data.block.stack_level != 0)))
3687 expand_nl_goto_receivers (thisblock);
3689 /* Don't allow jumping into a block that has a stack level.
3690 Cleanups are allowed, though. */
3691 if (dont_jump_in
3692 || thisblock->data.block.stack_level != 0)
3694 struct label_chain *chain;
3696 /* Any labels in this block are no longer valid to go to.
3697 Mark them to cause an error message. */
3698 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3700 DECL_TOO_LATE (chain->label) = 1;
3701 /* If any goto without a fixup came to this label,
3702 that must be an error, because gotos without fixups
3703 come from outside all saved stack-levels. */
3704 if (TREE_ADDRESSABLE (chain->label))
3705 error_with_decl (chain->label,
3706 "label `%s' used before containing binding contour");
3710 /* Restore the stack level in effect before the block
3711 (only if variable-size objects were allocated). */
3712 /* Perform any cleanups associated with the block. */
3714 if (thisblock->data.block.stack_level != 0
3715 || thisblock->data.block.cleanups != 0)
3717 int reachable;
3718 rtx insn;
3720 /* Don't let cleanups affect ({...}) constructs. */
3721 int old_expr_stmts_for_value = expr_stmts_for_value;
3722 rtx old_last_expr_value = last_expr_value;
3723 tree old_last_expr_type = last_expr_type;
3724 expr_stmts_for_value = 0;
3726 /* Only clean up here if this point can actually be reached. */
3727 insn = get_last_insn ();
3728 if (GET_CODE (insn) == NOTE)
3729 insn = prev_nonnote_insn (insn);
3730 reachable = (! insn || GET_CODE (insn) != BARRIER);
3732 /* Do the cleanups. */
3733 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3734 if (reachable)
3735 do_pending_stack_adjust ();
3737 expr_stmts_for_value = old_expr_stmts_for_value;
3738 last_expr_value = old_last_expr_value;
3739 last_expr_type = old_last_expr_type;
3741 /* Restore the stack level. */
3743 if (reachable && thisblock->data.block.stack_level != 0)
3745 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3746 thisblock->data.block.stack_level, NULL_RTX);
3747 if (nonlocal_goto_handler_slots != 0)
3748 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3749 NULL_RTX);
3752 /* Any gotos out of this block must also do these things.
3753 Also report any gotos with fixups that came to labels in this
3754 level. */
3755 fixup_gotos (thisblock,
3756 thisblock->data.block.stack_level,
3757 thisblock->data.block.cleanups,
3758 thisblock->data.block.first_insn,
3759 dont_jump_in);
3762 /* Mark the beginning and end of the scope if requested.
3763 We do this now, after running cleanups on the variables
3764 just going out of scope, so they are in scope for their cleanups. */
3766 if (mark_ends)
3768 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3769 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3771 else
3772 /* Get rid of the beginning-mark if we don't make an end-mark. */
3773 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3775 /* Restore the temporary level of TARGET_EXPRs. */
3776 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3778 /* Restore block_stack level for containing block. */
3780 stack_block_stack = thisblock->data.block.innermost_stack_block;
3781 POPSTACK (block_stack);
3783 /* Pop the stack slot nesting and free any slots at this level. */
3784 pop_temp_slots ();
3787 /* Generate code to save the stack pointer at the start of the current block
3788 and set up to restore it on exit. */
3790 void
3791 save_stack_pointer ()
3793 struct nesting *thisblock = block_stack;
3795 if (thisblock->data.block.stack_level == 0)
3797 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3798 &thisblock->data.block.stack_level,
3799 thisblock->data.block.first_insn);
3800 stack_block_stack = thisblock;
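/* Illustrative sketch (not compiled): a source fragment that exercises
   this path.  The inner block allocates a variable-size object, so it
   gets a saved stack level on entry, and expand_end_bindings emits the
   matching emit_stack_restore when the block is left.  */
#if 0
extern void use_buffer (char *);

void
example_vla (int n)
{
  {
    char buf[n];                /* Dynamic size: forces save_stack_pointer.  */
    use_buffer (buf);
  }                             /* Stack level restored here.  */
}
#endif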
3804 /* Generate RTL for the automatic variable declaration DECL.
3805 (Other kinds of declarations are simply ignored if seen here.) */
3807 void
3808 expand_decl (decl)
3809 register tree decl;
3811 struct nesting *thisblock;
3812 tree type;
3814 type = TREE_TYPE (decl);
3816 /* Only automatic variables need any expansion done.
3817 Static and external variables, and external functions,
3818 will be handled by `assemble_variable' (called from finish_decl).
3819 TYPE_DECL and CONST_DECL require nothing.
3820 PARM_DECLs are handled in `assign_parms'. */
3822 if (TREE_CODE (decl) != VAR_DECL)
3823 return;
3824 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3825 return;
3827 thisblock = block_stack;
3829 /* Create the RTL representation for the variable. */
3831 if (type == error_mark_node)
3832 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3834 else if (DECL_SIZE (decl) == 0)
3835 /* Variable with incomplete type. */
3837 if (DECL_INITIAL (decl) == 0)
3838 /* The error message has already been issued; now avoid a crash. */
3839 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3840 else
3841 /* An initializer is going to decide the size of this array.
3842 Until we know the size, represent its address with a reg. */
3843 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3845 set_mem_attributes (DECL_RTL (decl), decl, 1);
3847 else if (DECL_MODE (decl) != BLKmode
3848 /* If -ffloat-store, don't put explicit float vars
3849 into regs. */
3850 && !(flag_float_store
3851 && TREE_CODE (type) == REAL_TYPE)
3852 && ! TREE_THIS_VOLATILE (decl)
3853 && (DECL_REGISTER (decl) || optimize)
3854 /* If -fcheck-memory-usage, check all variables. */
3855 && ! current_function_check_memory_usage)
3857 /* Automatic variable that can go in a register. */
3858 int unsignedp = TREE_UNSIGNED (type);
3859 enum machine_mode reg_mode
3860 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3862 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3863 mark_user_reg (DECL_RTL (decl));
3865 if (POINTER_TYPE_P (type))
3866 mark_reg_pointer (DECL_RTL (decl),
3867 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3869 maybe_set_unchanging (DECL_RTL (decl), decl);
3871 /* If something wants our address, try to use ADDRESSOF. */
3872 if (TREE_ADDRESSABLE (decl))
3873 put_var_into_stack (decl);
3876 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3877 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3878 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3879 STACK_CHECK_MAX_VAR_SIZE)))
3881 /* Variable of fixed size that goes on the stack. */
3882 rtx oldaddr = 0;
3883 rtx addr;
3885 /* If we previously made RTL for this decl, it must be an array
3886 whose size was determined by the initializer.
3887 The old address was a register; set that register now
3888 to the proper address. */
3889 if (DECL_RTL (decl) != 0)
3891 if (GET_CODE (DECL_RTL (decl)) != MEM
3892 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3893 abort ();
3894 oldaddr = XEXP (DECL_RTL (decl), 0);
3897 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3899 /* Set the alignment we actually gave this decl. */
3900 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3901 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3902 DECL_USER_ALIGN (decl) = 0;
3904 if (oldaddr)
3906 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3907 if (addr != oldaddr)
3908 emit_move_insn (oldaddr, addr);
3911 else
3912 /* Dynamic-size object: must push space on the stack. */
3914 rtx address, size;
3916 /* Record the stack pointer on entry to this block, if we have
3917 not already done so. */
3918 do_pending_stack_adjust ();
3919 save_stack_pointer ();
3921 /* In function-at-a-time mode, variable_size doesn't expand this,
3922 so do it now. */
3923 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3924 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3925 const0_rtx, VOIDmode, 0);
3927 /* Compute the variable's size, in bytes. */
3928 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3929 free_temp_slots ();
3931 /* Allocate space on the stack for the variable. Note that
3932 DECL_ALIGN says how the variable is to be aligned and we
3933 cannot use it to conclude anything about the alignment of
3934 the size. */
3935 address = allocate_dynamic_stack_space (size, NULL_RTX,
3936 TYPE_ALIGN (TREE_TYPE (decl)));
3938 /* Reference the variable indirectly through that rtx. */
3939 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3941 set_mem_attributes (DECL_RTL (decl), decl, 1);
3943 /* Indicate the alignment we actually gave this variable. */
3944 #ifdef STACK_BOUNDARY
3945 DECL_ALIGN (decl) = STACK_BOUNDARY;
3946 #else
3947 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3948 #endif
3949 DECL_USER_ALIGN (decl) = 0;
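/* Illustrative sketch (not compiled): one local of each storage class
   that expand_decl distinguishes above.  */
#if 0
extern void use_mem (void *);

int
example_locals (int n)
{
  int i = 0;                    /* Scalar, non-volatile: gen_reg_rtx path.  */
  int a[16];                    /* Fixed size: assign_temp stack slot.  */
  int v[n];                     /* Dynamic size: allocate_dynamic_stack_space.  */

  use_mem (a);
  use_mem (v);
  return i;
}
#endif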
3953 /* Emit code to perform the initialization of a declaration DECL. */
3955 void
3956 expand_decl_init (decl)
3957 tree decl;
3959 int was_used = TREE_USED (decl);
3961 /* If this is a CONST_DECL, we don't have to generate any code, but
3962 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3963 to be set while in the obstack containing the constant. If we don't
3964 do this, we can lose if we have functions nested three deep and the middle
3965 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3966 the innermost function is the first to expand that STRING_CST. */
3967 if (TREE_CODE (decl) == CONST_DECL)
3969 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3970 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3971 EXPAND_INITIALIZER);
3972 return;
3975 if (TREE_STATIC (decl))
3976 return;
3978 /* Compute and store the initial value now. */
3980 if (DECL_INITIAL (decl) == error_mark_node)
3982 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3984 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3985 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3986 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3987 0, 0);
3988 emit_queue ();
3990 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3992 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3993 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3994 emit_queue ();
3997 /* Don't let the initialization count as "using" the variable. */
3998 TREE_USED (decl) = was_used;
4000 /* Free any temporaries we made while initializing the decl. */
4001 preserve_temp_slots (NULL_RTX);
4002 free_temp_slots ();
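/* Illustrative note: because TREE_USED is restored above, a variable
   whose only appearance is its own initializer is still reported by
   warn_about_unused_variables.  E.g. (not compiled):  */
#if 0
void
example_init_only (void)
{
  int x = 5;                    /* Initialized but never read: still "unused".  */
}
#endif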
4005 /* CLEANUP is an expression to be executed at exit from this binding contour;
4006 for example, in C++, it might call the destructor for this variable.
4008 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4009 CLEANUP multiple times, and have the correct semantics. This
4010 happens in exception handling, and for gotos, returns, and breaks
4011 that leave the current scope.
4013 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4014 that is not associated with any particular variable. */
4016 int
4017 expand_decl_cleanup (decl, cleanup)
4018 tree decl, cleanup;
4020 struct nesting *thisblock;
4022 /* Error if we are not in any block. */
4023 if (cfun == 0 || block_stack == 0)
4024 return 0;
4026 thisblock = block_stack;
4028 /* Record the cleanup if there is one. */
4030 if (cleanup != 0)
4032 tree t;
4033 rtx seq;
4034 tree *cleanups = &thisblock->data.block.cleanups;
4035 int cond_context = conditional_context ();
4037 if (cond_context)
4039 rtx flag = gen_reg_rtx (word_mode);
4040 rtx set_flag_0;
4041 tree cond;
4043 start_sequence ();
4044 emit_move_insn (flag, const0_rtx);
4045 set_flag_0 = get_insns ();
4046 end_sequence ();
4048 thisblock->data.block.last_unconditional_cleanup
4049 = emit_insns_after (set_flag_0,
4050 thisblock->data.block.last_unconditional_cleanup);
4052 emit_move_insn (flag, const1_rtx);
4054 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4055 DECL_RTL (cond) = flag;
4057 /* Conditionalize the cleanup. */
4058 cleanup = build (COND_EXPR, void_type_node,
4059 truthvalue_conversion (cond),
4060 cleanup, integer_zero_node);
4061 cleanup = fold (cleanup);
4063 cleanups = thisblock->data.block.cleanup_ptr;
4066 cleanup = unsave_expr (cleanup);
4068 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4070 if (! cond_context)
4071 /* If this block has a cleanup, it belongs in stack_block_stack. */
4072 stack_block_stack = thisblock;
4074 if (cond_context)
4076 start_sequence ();
4079 /* If this was optimized so that there is no exception region for the
4080 cleanup, then mark the TREE_LIST node, so that we can later tell
4081 if we need to call expand_eh_region_end. */
4082 if (! using_eh_for_cleanups_p
4083 || expand_eh_region_start_tree (decl, cleanup))
4084 TREE_ADDRESSABLE (t) = 1;
4085 /* If that started a new EH region, we're in a new block. */
4086 thisblock = block_stack;
4088 if (cond_context)
4090 seq = get_insns ();
4091 end_sequence ();
4092 if (seq)
4093 thisblock->data.block.last_unconditional_cleanup
4094 = emit_insns_after (seq,
4095 thisblock->data.block.last_unconditional_cleanup);
4097 else
4099 thisblock->data.block.last_unconditional_cleanup
4100 = get_last_insn ();
4101 /* When we insert instructions after the last unconditional cleanup,
4102 we don't adjust last_insn. That means that a later add_insn will
4103 clobber the instructions we've just added. The easiest way to
4104 fix this is to just insert another instruction here, so that the
4105 instructions inserted after the last unconditional cleanup are
4106 never the last instruction. */
4107 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4108 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4111 return 1;
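/* Hypothetical front-end sketch (not compiled): how a caller might
   register a destructor call as a cleanup.  `build_cleanup_call' is a
   stand-in for whatever the front end uses to build the call tree.  */
#if 0
static void
example_declare_with_cleanup (decl)
     tree decl;
{
  expand_decl (decl);
  expand_decl_init (decl);
  if (! expand_decl_cleanup (decl, build_cleanup_call (decl)))
    error ("cleanup needed but no binding contour is active");
}
#endif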
4114 /* Like expand_decl_cleanup, but suppress generating an exception handler
4115 to perform the cleanup. */
4117 #if 0
4118 int
4119 expand_decl_cleanup_no_eh (decl, cleanup)
4120 tree decl, cleanup;
4122 int save_eh = using_eh_for_cleanups_p;
4123 int result;
4125 using_eh_for_cleanups_p = 0;
4126 result = expand_decl_cleanup (decl, cleanup);
4127 using_eh_for_cleanups_p = save_eh;
4129 return result;
4131 #endif
4133 /* Arrange for the top element of the dynamic cleanup chain to be
4134 popped if we exit the current binding contour. DECL is the
4135 associated declaration, if any, otherwise NULL_TREE. If the
4136 current contour is left via an exception, then __sjthrow will pop
4137 the top element off the dynamic cleanup chain. The code that
4138 avoids doing the action we push into the cleanup chain in the
4139 exceptional case is contained in expand_cleanups.
4141 This routine is only used by expand_eh_region_start, and that is
4142 the only way in which an exception region should be started. This
4143 routine is only used when using the setjmp/longjmp codegen method
4144 for exception handling. */
4146 int
4147 expand_dcc_cleanup (decl)
4148 tree decl;
4150 struct nesting *thisblock;
4151 tree cleanup;
4153 /* Error if we are not in any block. */
4154 if (cfun == 0 || block_stack == 0)
4155 return 0;
4156 thisblock = block_stack;
4158 /* Record the cleanup for the dynamic handler chain. */
4160 cleanup = make_node (POPDCC_EXPR);
4162 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4163 thisblock->data.block.cleanups
4164 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4166 /* If this block has a cleanup, it belongs in stack_block_stack. */
4167 stack_block_stack = thisblock;
4168 return 1;
4171 /* Arrange for the top element of the dynamic handler chain to be
4172 popped if we exit the current binding contour. DECL is the
4173 associated declaration, if any, otherwise NULL_TREE. If the current
4174 contour is left via an exception, then __sjthrow will pop the top
4175 element off the dynamic handler chain. The code that avoids doing
4176 the action we push into the handler chain in the exceptional case
4177 is contained in expand_cleanups.
4179 This routine is only used by expand_eh_region_start, and that is
4180 the only way in which an exception region should be started. This
4181 routine is only used when using the setjmp/longjmp codegen method
4182 for exception handling. */
4184 int
4185 expand_dhc_cleanup (decl)
4186 tree decl;
4188 struct nesting *thisblock;
4189 tree cleanup;
4191 /* Error if we are not in any block. */
4192 if (cfun == 0 || block_stack == 0)
4193 return 0;
4194 thisblock = block_stack;
4196 /* Record the cleanup for the dynamic handler chain. */
4198 cleanup = make_node (POPDHC_EXPR);
4200 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4201 thisblock->data.block.cleanups
4202 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4204 /* If this block has a cleanup, it belongs in stack_block_stack. */
4205 stack_block_stack = thisblock;
4206 return 1;
4209 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4210 DECL_ELTS is the list of elements that belong to DECL's type.
4211 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4213 void
4214 expand_anon_union_decl (decl, cleanup, decl_elts)
4215 tree decl, cleanup, decl_elts;
4217 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4218 rtx x;
4219 tree t;
4221 /* If any of the elements are addressable, so is the entire union. */
4222 for (t = decl_elts; t; t = TREE_CHAIN (t))
4223 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4225 TREE_ADDRESSABLE (decl) = 1;
4226 break;
4229 expand_decl (decl);
4230 expand_decl_cleanup (decl, cleanup);
4231 x = DECL_RTL (decl);
4233 /* Go through the elements, assigning RTL to each. */
4234 for (t = decl_elts; t; t = TREE_CHAIN (t))
4236 tree decl_elt = TREE_VALUE (t);
4237 tree cleanup_elt = TREE_PURPOSE (t);
4238 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4240 /* Propagate the union's alignment to the elements. */
4241 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4242 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4244 /* If the element has BLKmode and the union doesn't, the union is
4245 aligned such that the element doesn't need to have BLKmode, so
4246 change the element's mode to the appropriate one for its size. */
4247 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4248 DECL_MODE (decl_elt) = mode
4249 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4251 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4252 instead create a new MEM rtx with the proper mode. */
4253 if (GET_CODE (x) == MEM)
4255 if (mode == GET_MODE (x))
4256 DECL_RTL (decl_elt) = x;
4257 else
4259 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
4260 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4263 else if (GET_CODE (x) == REG)
4265 if (mode == GET_MODE (x))
4266 DECL_RTL (decl_elt) = x;
4267 else
4268 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4270 else
4271 abort ();
4273 /* Record the cleanup if there is one. */
4275 if (cleanup != 0)
4276 thisblock->data.block.cleanups
4277 = tree_cons (decl_elt, cleanup_elt,
4278 thisblock->data.block.cleanups);
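/* Source-level view (illustrative, not compiled): for the C++/GNU C
   fragment below, DECL is the unnamed union object and DECL_ELTS
   carries `i' and `f', both of which end up aliasing DECL's storage
   via the MEM/REG sharing done above.  */
#if 0
void
example_anon_union (void)
{
  union { int i; float f; };    /* Anonymous union (GNU C / C++).  */
  i = 1;
  f = 2.0f;
}
#endif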
4282 /* Expand a list of cleanups LIST.
4283 Elements may be expressions or may be nested lists.
4285 If DONT_DO is nonnull, then any list-element
4286 whose TREE_PURPOSE matches DONT_DO is omitted.
4287 This is sometimes used to avoid a cleanup associated with
4288 a value that is being returned out of the scope.
4290 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4291 goto, and we handle protection regions specially in that case.
4293 If REACHABLE, we emit code; otherwise we just inform the exception
4294 handling code about this finalization. */
4296 static void
4297 expand_cleanups (list, dont_do, in_fixup, reachable)
4298 tree list;
4299 tree dont_do;
4300 int in_fixup;
4301 int reachable;
4303 tree tail;
4304 for (tail = list; tail; tail = TREE_CHAIN (tail))
4305 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4307 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4308 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4309 else
4311 if (! in_fixup)
4313 tree cleanup = TREE_VALUE (tail);
4315 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4316 if (TREE_CODE (cleanup) != POPDHC_EXPR
4317 && TREE_CODE (cleanup) != POPDCC_EXPR
4318 /* See expand_eh_region_start_tree for this case. */
4319 && ! TREE_ADDRESSABLE (tail))
4321 cleanup = protect_with_terminate (cleanup);
4322 expand_eh_region_end (cleanup);
4326 if (reachable)
4328 /* Cleanups may be run multiple times. For example,
4329 when exiting a binding contour, we expand the
4330 cleanups associated with that contour. When a goto
4331 within that binding contour has a target outside that
4332 contour, it will expand all cleanups from its scope to
4333 the target. Though the cleanups are expanded multiple
4334 times, the control paths are non-overlapping so the
4335 cleanups will not be executed twice. */
4337 /* We may need to protect fixups with rethrow regions. */
4338 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4340 if (protect)
4341 expand_fixup_region_start ();
4343 /* The cleanup might contain try-blocks, so we have to
4344 preserve our current queue. */
4345 push_ehqueue ();
4346 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4347 pop_ehqueue ();
4348 if (protect)
4349 expand_fixup_region_end (TREE_VALUE (tail));
4350 free_temp_slots ();
4356 /* Mark the context we are emitting RTL for as a conditional
4357 context, so that any cleanup actions we register with
4358 expand_decl_cleanup will be properly conditionalized when those
4359 cleanup actions are later performed. Must be called before any
4360 expression (tree) is expanded that is within a conditional context. */
4362 void
4363 start_cleanup_deferral ()
4365 /* block_stack can be NULL if we are inside the parameter list. It is
4366 OK to do nothing, because cleanups aren't possible here. */
4367 if (block_stack)
4368 ++block_stack->data.block.conditional_code;
4371 /* Mark the end of a conditional region of code. Because cleanup
4372 deferrals may be nested, we may still be in a conditional region
4373 after we end the currently deferred cleanups; only after we end all
4374 deferred cleanups are we back in unconditional code. */
4376 void
4377 end_cleanup_deferral ()
4379 /* block_stack can be NULL if we are inside the parameter list. It is
4380 OK to do nothing, because cleanups aren't possible here. */
4381 if (block_stack)
4382 --block_stack->data.block.conditional_code;
4385 /* Move all cleanups from the current block_stack
4386 to the containing block_stack, where they are assumed to
4387 have been created. If anything can cause a temporary to
4388 be created, but not expanded for more than one level of
4389 block_stacks, then this code will have to change. */
4391 void
4392 move_cleanups_up ()
4394 struct nesting *block = block_stack;
4395 struct nesting *outer = block->next;
4397 outer->data.block.cleanups
4398 = chainon (block->data.block.cleanups,
4399 outer->data.block.cleanups);
4400 block->data.block.cleanups = 0;
4403 tree
4404 last_cleanup_this_contour ()
4406 if (block_stack == 0)
4407 return 0;
4409 return block_stack->data.block.cleanups;
4412 /* Return 1 if there are any pending cleanups at this point.
4413 If THIS_CONTOUR is nonzero, check the current contour as well.
4414 Otherwise, look only at the contours that enclose this one. */
4416 int
4417 any_pending_cleanups (this_contour)
4418 int this_contour;
4420 struct nesting *block;
4422 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4423 return 0;
4425 if (this_contour && block_stack->data.block.cleanups != NULL)
4426 return 1;
4427 if (block_stack->data.block.cleanups == 0
4428 && block_stack->data.block.outer_cleanups == 0)
4429 return 0;
4431 for (block = block_stack->next; block; block = block->next)
4432 if (block->data.block.cleanups != 0)
4433 return 1;
4435 return 0;
4438 /* Enter a case (Pascal) or switch (C) statement.
4439 Push a block onto case_stack and nesting_stack
4440 to accumulate the case-labels that are seen
4441 and to record the labels generated for the statement.
4443 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4444 Otherwise, this construct is transparent for `exit_something'.
4446 EXPR is the index-expression to be dispatched on.
4447 TYPE is its nominal type. We could simply convert EXPR to this type,
4448 but instead we take short cuts. */
4450 void
4451 expand_start_case (exit_flag, expr, type, printname)
4452 int exit_flag;
4453 tree expr;
4454 tree type;
4455 const char *printname;
4457 register struct nesting *thiscase = ALLOC_NESTING ();
4459 /* Make an entry on case_stack for the case we are entering. */
4461 thiscase->next = case_stack;
4462 thiscase->all = nesting_stack;
4463 thiscase->depth = ++nesting_depth;
4464 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4465 thiscase->data.case_stmt.case_list = 0;
4466 thiscase->data.case_stmt.index_expr = expr;
4467 thiscase->data.case_stmt.nominal_type = type;
4468 thiscase->data.case_stmt.default_label = 0;
4469 thiscase->data.case_stmt.printname = printname;
4470 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4471 case_stack = thiscase;
4472 nesting_stack = thiscase;
4474 do_pending_stack_adjust ();
4476 /* Make sure case_stmt.start points to something that won't
4477 need any transformation before expand_end_case. */
4478 if (GET_CODE (get_last_insn ()) != NOTE)
4479 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4481 thiscase->data.case_stmt.start = get_last_insn ();
4483 start_cleanup_deferral ();
4486 /* Start a "dummy case statement" within which case labels are invalid
4487 and are not connected to any larger real case statement.
4488 This can be used if you don't want to let a case statement jump
4489 into the middle of certain kinds of constructs. */
4491 void
4492 expand_start_case_dummy ()
4494 register struct nesting *thiscase = ALLOC_NESTING ();
4496 /* Make an entry on case_stack for the dummy. */
4498 thiscase->next = case_stack;
4499 thiscase->all = nesting_stack;
4500 thiscase->depth = ++nesting_depth;
4501 thiscase->exit_label = 0;
4502 thiscase->data.case_stmt.case_list = 0;
4503 thiscase->data.case_stmt.start = 0;
4504 thiscase->data.case_stmt.nominal_type = 0;
4505 thiscase->data.case_stmt.default_label = 0;
4506 case_stack = thiscase;
4507 nesting_stack = thiscase;
4508 start_cleanup_deferral ();
4511 /* End a dummy case statement. */
4513 void
4514 expand_end_case_dummy ()
4516 end_cleanup_deferral ();
4517 POPSTACK (case_stack);
4520 /* Return the data type of the index-expression
4521 of the innermost case statement, or null if none. */
4523 tree
4524 case_index_expr_type ()
4526 if (case_stack)
4527 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4528 return 0;
4531 static void
4532 check_seenlabel ()
4534 /* If this is the first label, warn if any insns have been emitted. */
4535 if (case_stack->data.case_stmt.line_number_status >= 0)
4537 rtx insn;
4539 restore_line_number_status
4540 (case_stack->data.case_stmt.line_number_status);
4541 case_stack->data.case_stmt.line_number_status = -1;
4543 for (insn = case_stack->data.case_stmt.start;
4544 insn;
4545 insn = NEXT_INSN (insn))
4547 if (GET_CODE (insn) == CODE_LABEL)
4548 break;
4549 if (GET_CODE (insn) != NOTE
4550 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4553 insn = PREV_INSN (insn);
4554 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4556 /* If insn is zero, then there must have been a syntax error. */
4557 if (insn)
4558 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4559 NOTE_LINE_NUMBER (insn),
4560 "unreachable code at beginning of %s",
4561 case_stack->data.case_stmt.printname);
4562 break;
4568 /* Accumulate one case or default label inside a case or switch statement.
4569 VALUE is the value of the case (a null pointer, for a default label).
4570 The function CONVERTER, when applied to arguments T and V,
4571 converts the value V to the type T.
4573 If not currently inside a case or switch statement, return 1 and do
4574 nothing. The caller will print a language-specific error message.
4575 If VALUE is a duplicate or overlaps, return 2 and do nothing
4576 except store the (first) duplicate node in *DUPLICATE.
4577 If VALUE is out of range, return 3 and do nothing.
4578 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4579 Return 0 on success.
4581 Extended to handle range statements. */
4583 int
4584 pushcase (value, converter, label, duplicate)
4585 register tree value;
4586 tree (*converter) PARAMS ((tree, tree));
4587 register tree label;
4588 tree *duplicate;
4590 tree index_type;
4591 tree nominal_type;
4593 /* Fail if not inside a real case statement. */
4594 if (! (case_stack && case_stack->data.case_stmt.start))
4595 return 1;
4597 if (stack_block_stack
4598 && stack_block_stack->depth > case_stack->depth)
4599 return 5;
4601 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4602 nominal_type = case_stack->data.case_stmt.nominal_type;
4604 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4605 if (index_type == error_mark_node)
4606 return 0;
4608 /* Convert VALUE to the type in which the comparisons are nominally done. */
4609 if (value != 0)
4610 value = (*converter) (nominal_type, value);
4612 check_seenlabel ();
4614 /* Fail if this value is out of range for the actual type of the index
4615 (which may be narrower than NOMINAL_TYPE). */
4616 if (value != 0
4617 && (TREE_CONSTANT_OVERFLOW (value)
4618 || ! int_fits_type_p (value, index_type)))
4619 return 3;
4621 return add_case_node (value, value, label, duplicate);
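/* Hypothetical caller sketch (not compiled): mapping pushcase's return
   codes onto diagnostics, in the style of the C front end's case-label
   handling.  `my_convert' stands in for the language's CONVERTER.  */
#if 0
static void
example_do_case (value, label)
     tree value, label;
{
  tree duplicate = NULL_TREE;

  switch (pushcase (value, my_convert, label, &duplicate))
    {
    case 0: break;              /* Success.  */
    case 1: error ("case label not within a switch statement"); break;
    case 2: error_with_decl (duplicate, "duplicate case value"); break;
    case 3: error ("case label value out of range"); break;
    case 5: error ("case label within scope of cleanup or variable array"); break;
    }
}
#endif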
4624 /* Like pushcase but this case applies to all values between VALUE1 and
4625 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4626 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4627 starts at VALUE1 and ends at the highest value of the index type.
4628 If both are NULL, this case applies to all values.
4630 The return value is the same as that of pushcase but there is one
4631 additional error code: 4 means the specified range was empty. */
4633 int
4634 pushcase_range (value1, value2, converter, label, duplicate)
4635 register tree value1, value2;
4636 tree (*converter) PARAMS ((tree, tree));
4637 register tree label;
4638 tree *duplicate;
4640 tree index_type;
4641 tree nominal_type;
4643 /* Fail if not inside a real case statement. */
4644 if (! (case_stack && case_stack->data.case_stmt.start))
4645 return 1;
4647 if (stack_block_stack
4648 && stack_block_stack->depth > case_stack->depth)
4649 return 5;
4651 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4652 nominal_type = case_stack->data.case_stmt.nominal_type;
4654 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4655 if (index_type == error_mark_node)
4656 return 0;
4658 check_seenlabel ();
4660 /* Convert VALUEs to type in which the comparisons are nominally done
4661 and replace any unspecified value with the corresponding bound. */
4662 if (value1 == 0)
4663 value1 = TYPE_MIN_VALUE (index_type);
4664 if (value2 == 0)
4665 value2 = TYPE_MAX_VALUE (index_type);
4667 /* Fail if the range is empty. Do this before any conversion since
4668 we want to allow out-of-range empty ranges. */
4669 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4670 return 4;
4672 /* If the max was unbounded, use the max of the nominal_type we are
4673 converting to. Do this after the < check above to suppress false
4674 positives. */
4675 if (value2 == 0)
4676 value2 = TYPE_MAX_VALUE (nominal_type);
4678 value1 = (*converter) (nominal_type, value1);
4679 value2 = (*converter) (nominal_type, value2);
4681 /* Fail if these values are out of range. */
4682 if (TREE_CONSTANT_OVERFLOW (value1)
4683 || ! int_fits_type_p (value1, index_type))
4684 return 3;
4686 if (TREE_CONSTANT_OVERFLOW (value2)
4687 || ! int_fits_type_p (value2, index_type))
4688 return 3;
4690 return add_case_node (value1, value2, label, duplicate);
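/* Illustrative source construct (not compiled): the GNU C case-range
   extension reaches pushcase_range with VALUE1 == '0' and
   VALUE2 == '9'; a range with an omitted bound would pass NULL and
   pick up TYPE_MIN_VALUE or TYPE_MAX_VALUE above.  */
#if 0
int
example_digit_p (int c)
{
  switch (c)
    {
    case '0' ... '9':
      return 1;
    default:
      return 0;
    }
}
#endif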
4693 /* Do the actual insertion of a case label for pushcase and pushcase_range
4694 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4695 slowdown for large switch statements. */
4697 int
4698 add_case_node (low, high, label, duplicate)
4699 tree low, high;
4700 tree label;
4701 tree *duplicate;
4703 struct case_node *p, **q, *r;
4705 /* If there's no HIGH value, then this is not a case range; it's
4706 just a simple case label. But that's just a degenerate case
4707 range. */
4708 if (!high)
4709 high = low;
4711 /* Handle default labels specially. */
4712 if (!high && !low)
4714 if (case_stack->data.case_stmt.default_label != 0)
4716 *duplicate = case_stack->data.case_stmt.default_label;
4717 return 2;
4719 case_stack->data.case_stmt.default_label = label;
4720 expand_label (label);
4721 return 0;
4724 q = &case_stack->data.case_stmt.case_list;
4725 p = *q;
4727 while ((r = *q))
4729 p = r;
4731 /* Keep going past elements distinctly greater than HIGH. */
4732 if (tree_int_cst_lt (high, p->low))
4733 q = &p->left;
4735 /* or distinctly less than LOW. */
4736 else if (tree_int_cst_lt (p->high, low))
4737 q = &p->right;
4739 else
4741 /* We have an overlap; this is an error. */
4742 *duplicate = p->code_label;
4743 return 2;
4747 /* Add this label to the chain, and succeed. */
4749 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4750 r->low = low;
4752 /* If the bounds are equal, turn this into the one-value case. */
4753 if (tree_int_cst_equal (low, high))
4754 r->high = r->low;
4755 else
4756 r->high = high;
4758 r->code_label = label;
4759 expand_label (label);
4761 *q = r;
4762 r->parent = p;
4763 r->left = 0;
4764 r->right = 0;
4765 r->balance = 0;
4767 while (p)
4769 struct case_node *s;
4771 if (r == p->left)
4773 int b;
4775 if (! (b = p->balance))
4776 /* Growth propagation from left side. */
4777 p->balance = -1;
4778 else if (b < 0)
4780 if (r->balance < 0)
4782 /* R-Rotation */
4783 if ((p->left = s = r->right))
4784 s->parent = p;
4786 r->right = p;
4787 p->balance = 0;
4788 r->balance = 0;
4789 s = p->parent;
4790 p->parent = r;
4792 if ((r->parent = s))
4794 if (s->left == p)
4795 s->left = r;
4796 else
4797 s->right = r;
4799 else
4800 case_stack->data.case_stmt.case_list = r;
4802 else
4803 /* r->balance == +1 */
4805 /* LR-Rotation */
4807 int b2;
4808 struct case_node *t = r->right;
4810 if ((p->left = s = t->right))
4811 s->parent = p;
4813 t->right = p;
4814 if ((r->right = s = t->left))
4815 s->parent = r;
4817 t->left = r;
4818 b = t->balance;
4819 b2 = b < 0;
4820 p->balance = b2;
4821 b2 = -b2 - b;
4822 r->balance = b2;
4823 t->balance = 0;
4824 s = p->parent;
4825 p->parent = t;
4826 r->parent = t;
4828 if ((t->parent = s))
4830 if (s->left == p)
4831 s->left = t;
4832 else
4833 s->right = t;
4835 else
4836 case_stack->data.case_stmt.case_list = t;
4838 break;
4841 else
4843 /* p->balance == +1; growth of left side balances the node. */
4844 p->balance = 0;
4845 break;
4848 else
4849 /* r == p->right */
4851 int b;
4853 if (! (b = p->balance))
4854 /* Growth propagation from right side. */
4855 p->balance++;
4856 else if (b > 0)
4858 if (r->balance > 0)
4860 /* L-Rotation */
4862 if ((p->right = s = r->left))
4863 s->parent = p;
4865 r->left = p;
4866 p->balance = 0;
4867 r->balance = 0;
4868 s = p->parent;
4869 p->parent = r;
4870 if ((r->parent = s))
4872 if (s->left == p)
4873 s->left = r;
4874 else
4875 s->right = r;
4878 else
4879 case_stack->data.case_stmt.case_list = r;
4882 else
4883 /* r->balance == -1 */
4885 /* RL-Rotation */
4886 int b2;
4887 struct case_node *t = r->left;
4889 if ((p->right = s = t->left))
4890 s->parent = p;
4892 t->left = p;
4894 if ((r->left = s = t->right))
4895 s->parent = r;
4897 t->right = r;
4898 b = t->balance;
4899 b2 = b < 0;
4900 r->balance = b2;
4901 b2 = -b2 - b;
4902 p->balance = b2;
4903 t->balance = 0;
4904 s = p->parent;
4905 p->parent = t;
4906 r->parent = t;
4908 if ((t->parent = s))
4910 if (s->left == p)
4911 s->left = t;
4912 else
4913 s->right = t;
4916 else
4917 case_stack->data.case_stmt.case_list = t;
4919 break;
4921 else
4923 /* p->balance == -1; growth of right side balances the node. */
4924 p->balance = 0;
4925 break;
4929 r = p;
4930 p = p->parent;
4933 return 0;
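/* Illustrative trace of the rebalancing above: inserting the case
   values 1, 2, 3 in that order first chains them down the right,
   driving the root's balance to +2; the L-Rotation then lifts 2:

        1                  2
         \                / \
          2      ==>     1   3
           \
            3

   after which every node has balance 0 again.  */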
4936 /* Returns the number of possible values of TYPE.
4937 Returns -1 if the number is unknown, variable, or if the number does not
4938 fit in a HOST_WIDE_INT.
4939 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4940 do not increase monotonically (there may be duplicates);
4941 to 1 if the values increase monotonically, but not always by 1;
4942 otherwise sets it to 0. */
4944 HOST_WIDE_INT
4945 all_cases_count (type, spareness)
4946 tree type;
4947 int *spareness;
4949 tree t;
4950 HOST_WIDE_INT count, minval, lastval;
4952 *spareness = 0;
4954 switch (TREE_CODE (type))
4956 case BOOLEAN_TYPE:
4957 count = 2;
4958 break;
4960 case CHAR_TYPE:
4961 count = 1 << BITS_PER_UNIT;
4962 break;
4964 default:
4965 case INTEGER_TYPE:
4966 if (TYPE_MAX_VALUE (type) != 0
4967 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4968 TYPE_MIN_VALUE (type))))
4969 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4970 convert (type, integer_zero_node))))
4971 && host_integerp (t, 1))
4972 count = tree_low_cst (t, 1);
4973 else
4974 return -1;
4975 break;
4977 case ENUMERAL_TYPE:
4978 /* Don't waste time with enumeral types with huge values. */
4979 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4980 || TYPE_MAX_VALUE (type) == 0
4981 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4982 return -1;
4984 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4985 count = 0;
4987 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4989 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4991 if (*spareness == 2 || thisval < lastval)
4992 *spareness = 2;
4993 else if (thisval != minval + count)
4994 *spareness = 1;
4996 count++;
5000 return count;
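/* Illustrative results: for `enum e { A, B, C };' this returns 3 with
   *SPARENESS == 0; for `enum s { P = 0, Q = 4 };' it returns 2 (the
   number of literals, not the span of values) with *SPARENESS == 1,
   because Q does not equal minval + count when it is scanned.  */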
5003 #define BITARRAY_TEST(ARRAY, INDEX) \
5004 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5005 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
5006 #define BITARRAY_SET(ARRAY, INDEX) \
5007 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5008 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
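/* Minimal usage sketch for the macros above (not compiled):  */
#if 0
static int
example_bitarray (void)
{
  unsigned char cases_seen[4] = { 0, 0, 0, 0 };

  BITARRAY_SET (cases_seen, 5);                 /* Mark value 5 as seen.  */
  return BITARRAY_TEST (cases_seen, 5) != 0;    /* Yields 1.  */
}
#endif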
5010 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
5011 with the case values we have seen, assuming the case expression
5012 has the given TYPE.
5013 SPARSENESS is as determined by all_cases_count.
5015 The time needed is proportional to COUNT, unless
5016 SPARSENESS is 2, in which case quadratic time is needed. */
5018 void
5019 mark_seen_cases (type, cases_seen, count, sparseness)
5020 tree type;
5021 unsigned char *cases_seen;
5022 HOST_WIDE_INT count;
5023 int sparseness;
5025 tree next_node_to_try = NULL_TREE;
5026 HOST_WIDE_INT next_node_offset = 0;
5028 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5029 tree val = make_node (INTEGER_CST);
5031 TREE_TYPE (val) = type;
5032 if (! root)
5033 /* Do nothing. */
5035 else if (sparseness == 2)
5037 tree t;
5038 unsigned HOST_WIDE_INT xlo;
5040 /* This less efficient loop is only needed to handle
5041 duplicate case values (multiple enum constants
5042 with the same value). */
5043 TREE_TYPE (val) = TREE_TYPE (root->low);
5044 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5045 t = TREE_CHAIN (t), xlo++)
5047 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5048 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5049 n = root;
5052 /* Keep going past elements distinctly greater than VAL. */
5053 if (tree_int_cst_lt (val, n->low))
5054 n = n->left;
5056 /* or distinctly less than VAL. */
5057 else if (tree_int_cst_lt (n->high, val))
5058 n = n->right;
5060 else
5062 /* We have found a matching range. */
5063 BITARRAY_SET (cases_seen, xlo);
5064 break;
5067 while (n);
5070 else
5072 if (root->left)
5073 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5075 for (n = root; n; n = n->right)
5077 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5078 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5079 while (! tree_int_cst_lt (n->high, val))
5081 /* Calculate (into xlo) the "offset" of the integer (val).
5082 The element with lowest value has offset 0, the next smallest
5083 element has offset 1, etc. */
5085 unsigned HOST_WIDE_INT xlo;
5086 HOST_WIDE_INT xhi;
5087 tree t;
5089 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5091 /* The TYPE_VALUES will be in increasing order, so
5092 start searching where we last ended. */
5093 t = next_node_to_try;
5094 xlo = next_node_offset;
5095 xhi = 0;
5096 for (;;)
5098 if (t == NULL_TREE)
5100 t = TYPE_VALUES (type);
5101 xlo = 0;
5103 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5105 next_node_to_try = TREE_CHAIN (t);
5106 next_node_offset = xlo + 1;
5107 break;
5109 xlo++;
5110 t = TREE_CHAIN (t);
5111 if (t == next_node_to_try)
5113 xlo = -1;
5114 break;
5118 else
5120 t = TYPE_MIN_VALUE (type);
5121 if (t)
5122 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5123 &xlo, &xhi);
5124 else
5125 xlo = xhi = 0;
5126 add_double (xlo, xhi,
5127 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5128 &xlo, &xhi);
5131 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5132 BITARRAY_SET (cases_seen, xlo);
5134 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5135 1, 0,
5136 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5142 /* Called when the index of a switch statement is an enumerated type
5143 and there is no default label.
5145 Checks that all enumeration literals are covered by the case
5146 expressions of a switch. Also, warn if there are any extra
5147 switch cases that are *not* elements of the enumerated type.
5149 If all enumeration literals were covered by the case expressions,
5150 turn one of the expressions into the default expression since it should
5151 not be possible to fall through such a switch. */
5153 void
5154 check_for_full_enumeration_handling (type)
5155 tree type;
5157 register struct case_node *n;
5158 register tree chain;
5159 #if 0 /* variable used by 'if 0'ed code below. */
5160 register struct case_node **l;
5161 int all_values = 1;
5162 #endif
5164 /* True iff the selector type is a numbered set mode. */
5165 int sparseness = 0;
5167 /* The number of possible selector values. */
5168 HOST_WIDE_INT size;
5170 /* For each possible selector value, a one iff it has been matched
5171 by a case value alternative. */
5172 unsigned char *cases_seen;
5174 /* The allocated size of cases_seen, in chars. */
5175 HOST_WIDE_INT bytes_needed;
5177 if (! warn_switch)
5178 return;
5180 size = all_cases_count (type, &sparseness);
5181 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5183 if (size > 0 && size < 600000
5184 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5185 this optimization if we don't have enough memory rather than
5186 aborting, as xmalloc would do. */
5187 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
5189 HOST_WIDE_INT i;
5190 tree v = TYPE_VALUES (type);
5192 /* The time complexity of this code is normally O(N), where
5193 N is the number of members in the enumerated type.
5194 However, if type is an ENUMERAL_TYPE whose values do not
5195 increase monotonically, O(N*log(N)) time may be needed. */
5197 mark_seen_cases (type, cases_seen, size, sparseness);
5199 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5200 if (BITARRAY_TEST (cases_seen, i) == 0)
5201 warning ("enumeration value `%s' not handled in switch",
5202 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5204 free (cases_seen);
5207 /* Now we go the other way around; we warn if there are case
5208 expressions that don't correspond to enumerators. This can
5209 occur since C and C++ don't enforce type-checking of
5210 assignments to enumeration variables. */
5212 if (case_stack->data.case_stmt.case_list
5213 && case_stack->data.case_stmt.case_list->left)
5214 case_stack->data.case_stmt.case_list
5215 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5216 if (warn_switch)
5217 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5219 for (chain = TYPE_VALUES (type);
5220 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5221 chain = TREE_CHAIN (chain))
5224 if (!chain)
5226 if (TYPE_NAME (type) == 0)
5227 warning ("case value `%ld' not in enumerated type",
5228 (long) TREE_INT_CST_LOW (n->low));
5229 else
5230 warning ("case value `%ld' not in enumerated type `%s'",
5231 (long) TREE_INT_CST_LOW (n->low),
5232 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5233 == IDENTIFIER_NODE)
5234 ? TYPE_NAME (type)
5235 : DECL_NAME (TYPE_NAME (type))));
5237 if (!tree_int_cst_equal (n->low, n->high))
5239 for (chain = TYPE_VALUES (type);
5240 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5241 chain = TREE_CHAIN (chain))
5244 if (!chain)
5246 if (TYPE_NAME (type) == 0)
5247 warning ("case value `%ld' not in enumerated type",
5248 (long) TREE_INT_CST_LOW (n->high));
5249 else
5250 warning ("case value `%ld' not in enumerated type `%s'",
5251 (long) TREE_INT_CST_LOW (n->high),
5252 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5253 == IDENTIFIER_NODE)
5254 ? TYPE_NAME (type)
5255 : DECL_NAME (TYPE_NAME (type))));
5260 #if 0
5261 /* ??? This optimization is disabled because it causes valid programs to
5262 fail. ANSI C does not guarantee that an expression with enum type
5263 will have a value that is the same as one of the enumeration literals. */
5265 /* If all values were found as case labels, make one of them the default
5266 label. Thus, this switch will never fall through. We arbitrarily pick
5267 the last one to make the default since this is likely the most
5268 efficient choice. */
5270 if (all_values)
5272 for (l = &case_stack->data.case_stmt.case_list;
5273 (*l)->right != 0;
5274 l = &(*l)->right)
5277 case_stack->data.case_stmt.default_label = (*l)->code_label;
5278 *l = 0;
5280 #endif /* 0 */
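/* Illustrative input (not compiled): with -Wswitch and no default
   label, the switch below draws "enumeration value `blue' not handled
   in switch", and `case 42' draws "case value `42' not in enumerated
   type `color'".  */
#if 0
enum color { red, green, blue };

int
example_color_code (c)
     enum color c;
{
  switch (c)
    {
    case red:   return 0;
    case green: return 1;
    case 42:    return 2;       /* Not an enumerator of `color'.  */
    }
  return -1;
}
#endif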
5283 /* Free CN, and its children. */
5285 static void
5286 free_case_nodes (cn)
5287 case_node_ptr cn;
5289 if (cn)
5291 free_case_nodes (cn->left);
5292 free_case_nodes (cn->right);
5293 free (cn);
5298 /* Terminate a case (Pascal) or switch (C) statement
5299 in which ORIG_INDEX is the expression to be tested.
5300 Generate the code to test it and jump to the right place. */
5302 void
5303 expand_end_case (orig_index)
5304 tree orig_index;
5306 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5307 rtx default_label = 0;
5308 register struct case_node *n;
5309 unsigned int count;
5310 rtx index;
5311 rtx table_label;
5312 int ncases;
5313 rtx *labelvec;
5314 register int i;
5315 rtx before_case;
5316 register struct nesting *thiscase = case_stack;
5317 tree index_expr, index_type;
5318 int unsignedp;
5320 /* Don't crash due to previous errors. */
5321 if (thiscase == NULL)
5322 return;
5324 table_label = gen_label_rtx ();
5325 index_expr = thiscase->data.case_stmt.index_expr;
5326 index_type = TREE_TYPE (index_expr);
5327 unsignedp = TREE_UNSIGNED (index_type);
5329 do_pending_stack_adjust ();
5331 /* This might get a spurious warning in the presence of a syntax error;
5332 it could be fixed by moving the call to check_seenlabel after the
5333 check for error_mark_node, and copying the code of check_seenlabel that
5334 deals with case_stack->data.case_stmt.line_number_status /
5335 restore_line_number_status in front of the call to end_cleanup_deferral;
5336 however, this might miss some useful warnings in the presence of
5337 non-syntax errors. */
5338 check_seenlabel ();
5340 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5341 if (index_type != error_mark_node)
5343 /* If switch expression was an enumerated type, check that all
5344 enumeration literals are covered by the cases.
5345 No sense trying this if there's a default case, however. */
5347 if (!thiscase->data.case_stmt.default_label
5348 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5349 && TREE_CODE (index_expr) != INTEGER_CST)
5350 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5352 /* If we don't have a default-label, create one here,
5353 after the body of the switch. */
5354 if (thiscase->data.case_stmt.default_label == 0)
5356 thiscase->data.case_stmt.default_label
5357 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5358 expand_label (thiscase->data.case_stmt.default_label);
5360 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5362 before_case = get_last_insn ();
5364 if (thiscase->data.case_stmt.case_list
5365 && thiscase->data.case_stmt.case_list->left)
5366 thiscase->data.case_stmt.case_list
5367 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5369 /* Simplify the case-list before we count it. */
5370 group_case_nodes (thiscase->data.case_stmt.case_list);
5372 /* Get upper and lower bounds of case values.
5373 Also convert all the case values to the index expr's data type. */
5375 count = 0;
5376 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5378 /* Check that the low and high label values are integers. */
5379 if (TREE_CODE (n->low) != INTEGER_CST)
5380 abort ();
5381 if (TREE_CODE (n->high) != INTEGER_CST)
5382 abort ();
5384 n->low = convert (index_type, n->low);
5385 n->high = convert (index_type, n->high);
5387 /* Count the elements and track the largest and smallest
5388 of them (treating them as signed even if they are not). */
5389 if (count++ == 0)
5391 minval = n->low;
5392 maxval = n->high;
5394 else
5396 if (INT_CST_LT (n->low, minval))
5397 minval = n->low;
5398 if (INT_CST_LT (maxval, n->high))
5399 maxval = n->high;
5401 /* A range counts double, since it requires two compares. */
5402 if (! tree_int_cst_equal (n->low, n->high))
5403 count++;
5406 orig_minval = minval;
5408 /* Compute span of values. */
5409 if (count != 0)
5410 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5412 end_cleanup_deferral ();
5414 if (count == 0)
5416 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5417 emit_queue ();
5418 emit_jump (default_label);
5421 /* If the range of values is much bigger than the number of values,
5422 make a sequence of conditional branches instead of a dispatch table.
5423 If the switch-index is a constant, do it this way
5424 because we can optimize it. */
5426 #ifndef CASE_VALUES_THRESHOLD
5427 #ifdef HAVE_casesi
5428 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5429 #else
5430 /* If the machine does not have a case insn that compares the
5431 bounds, dispatch tables carry extra overhead,
5432 which raises the threshold for using them. */
5433 #define CASE_VALUES_THRESHOLD 5
5434 #endif /* HAVE_casesi */
5435 #endif /* CASE_VALUES_THRESHOLD */
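/* Worked numbers for the test below: a switch over `case 0' through
   `case 9' has count == 10 and range == 9, so it falls through to the
   dispatch-table code; `case 1' plus `case 1000' has count == 2 and
   range == 999, which takes the compare-and-branch path both because
   count is under the threshold and because range > 10 * count.  */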
5437 else if (count < CASE_VALUES_THRESHOLD
5438 || compare_tree_int (range, 10 * count) > 0
5439 /* RANGE may be signed, and really large ranges will show up
5440 as negative numbers. */
5441 || compare_tree_int (range, 0) < 0
5442 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5443 || flag_pic
5444 #endif
5445 || TREE_CODE (index_expr) == INTEGER_CST
5446 /* These will reduce to a constant. */
5447 || (TREE_CODE (index_expr) == CALL_EXPR
5448 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5449 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5450 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
5451 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5452 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5453 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5455 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5457 /* If the index is a short or char for which we do not have
5458 an insn to handle comparisons directly, convert it to
5459 a full integer now, rather than letting each comparison
5460 generate the conversion. */
5462 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5463 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5464 == CODE_FOR_nothing))
5466 enum machine_mode wider_mode;
5467 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5468 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5469 if (cmp_optab->handlers[(int) wider_mode].insn_code
5470 != CODE_FOR_nothing)
5472 index = convert_to_mode (wider_mode, index, unsignedp);
5473 break;
5477 emit_queue ();
5478 do_pending_stack_adjust ();
5480 index = protect_from_queue (index, 0);
5481 if (GET_CODE (index) == MEM)
5482 index = copy_to_reg (index);
5483 if (GET_CODE (index) == CONST_INT
5484 || TREE_CODE (index_expr) == INTEGER_CST)
5486 /* Make a tree node with the proper constant value
5487 if we don't already have one. */
5488 if (TREE_CODE (index_expr) != INTEGER_CST)
5490 index_expr
5491 = build_int_2 (INTVAL (index),
5492 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5493 index_expr = convert (index_type, index_expr);
5496 /* For constant index expressions we need only
5497 issue an unconditional branch to the appropriate
5498 target code. The job of removing any unreachable
5499 code is left to the optimization phase if the
5500 "-O" option is specified. */
5501 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5502 if (! tree_int_cst_lt (index_expr, n->low)
5503 && ! tree_int_cst_lt (n->high, index_expr))
5504 break;
5506 if (n)
5507 emit_jump (label_rtx (n->code_label));
5508 else
5509 emit_jump (default_label);
5511 else
5513 /* If the index expression is not constant, we generate
5514 a binary decision tree to select the appropriate
5515 target code. This is done as follows:
5517 The list of cases is rearranged into a binary tree,
5518 nearly optimal assuming equal probability for each case.
5520 The tree is transformed into RTL, eliminating
5521 redundant test conditions at the same time.
5523 If program flow could reach the end of the
5524 decision tree, an unconditional jump to the
5525 default code is emitted. */
              use_cost_table
                = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
                   && estimate_case_costs (thiscase->data.case_stmt.case_list));
              balance_case_nodes (&thiscase->data.case_stmt.case_list,
                                  NULL_PTR);
              emit_case_nodes (index, thiscase->data.case_stmt.case_list,
                               default_label, index_type);
              emit_jump_if_reachable (default_label);
            }
        }
      else
        {
          int win = 0;
#ifdef HAVE_casesi
          if (HAVE_casesi)
            {
              enum machine_mode index_mode = SImode;
              int index_bits = GET_MODE_BITSIZE (index_mode);
              rtx op1, op2;
              enum machine_mode op_mode;

              /* Convert the index to SImode.  */
              if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
                  > GET_MODE_BITSIZE (index_mode))
                {
                  enum machine_mode omode = TYPE_MODE (index_type);
                  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

                  /* We must handle the endpoints in the original mode.  */
                  index_expr = build (MINUS_EXPR, index_type,
                                      index_expr, minval);
                  minval = integer_zero_node;
                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                  emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                           omode, 1, 0, default_label);
                  /* Now we can safely truncate.  */
                  index = convert_to_mode (index_mode, index, 0);
                }
              else
                {
                  if (TYPE_MODE (index_type) != index_mode)
                    {
                      index_expr = convert (type_for_size (index_bits, 0),
                                            index_expr);
                      index_type = TREE_TYPE (index_expr);
                    }

                  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
                }
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
              if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
                  (index, op_mode))
                index = copy_to_mode_reg (op_mode, index);

              op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

              op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
              if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
                  (op1, op_mode))
                op1 = copy_to_mode_reg (op_mode, op1);

              op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

              op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
              if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
                  (op2, op_mode))
                op2 = copy_to_mode_reg (op_mode, op2);

              emit_jump_insn (gen_casesi (index, op1, op2,
                                          table_label, default_label));
              win = 1;
            }
#endif
#ifdef HAVE_tablejump
          if (! win && HAVE_tablejump)
            {
              index_type = thiscase->data.case_stmt.nominal_type;
              index_expr = fold (build (MINUS_EXPR, index_type,
                                        convert (index_type, index_expr),
                                        convert (index_type, minval)));
              index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
              emit_queue ();
              index = protect_from_queue (index, 0);
              do_pending_stack_adjust ();

              do_tablejump (index, TYPE_MODE (index_type),
                            expand_expr (range, NULL_RTX, VOIDmode, 0),
                            table_label, default_label);
              win = 1;
            }
#endif
          if (! win)
            abort ();

          /* Get table of labels to jump to, in order of case index.  */

          ncases = TREE_INT_CST_LOW (range) + 1;
          labelvec = (rtx *) alloca (ncases * sizeof (rtx));
          memset ((char *) labelvec, 0, ncases * sizeof (rtx));

          for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
            {
              register HOST_WIDE_INT i
                = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

              while (1)
                {
                  labelvec[i]
                    = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
                  if (i + TREE_INT_CST_LOW (orig_minval)
                      == TREE_INT_CST_LOW (n->high))
                    break;
                  i++;
                }
            }

          /* Fill in the gaps with the default.  */
          for (i = 0; i < ncases; i++)
            if (labelvec[i] == 0)
              labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
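
          /* Illustratively, a switch with cases 1 and 3..4 has
             orig_minval 1 and range 3, so ncases is 4 and the vector
             becomes { L1, default, L3_4, L3_4 }, indexed by the
             case value minus orig_minval.  */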

          /* Output the table.  */
          emit_label (table_label);

          if (CASE_VECTOR_PC_RELATIVE || flag_pic)
            emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
                                                   gen_rtx_LABEL_REF (Pmode, table_label),
                                                   gen_rtvec_v (ncases, labelvec),
                                                   const0_rtx, const0_rtx));
          else
            emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
                                              gen_rtvec_v (ncases, labelvec)));

          /* If the case insn drops through the table,
             after the table we must jump to the default-label.
             Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
          emit_jump (default_label);
#else
          emit_barrier ();
#endif
        }

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
                     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  free_case_nodes (case_stack->data.case_stmt.case_list);
  POPSTACK (case_stack);

  free_temp_slots ();
}

/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */

static struct case_node *
case_tree2list (node, right)
     struct case_node *node, *right;
{
  struct case_node *left;

  if (node->right)
    right = case_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return case_tree2list (left, node);
    }

  return node;
}
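
/* Illustratively, flattening a tree whose root holds 5, with 3 on the
   left and 8 on the right, via case_tree2list (root, NULL) yields the
   ascending chain 3 -> 5 -> 8 linked through the RIGHT fields, with
   every LEFT field cleared.  */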

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
        emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
        mode = GET_MODE (op2);
      emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
                               0, label);
    }
}

/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

        chars above space:  16
        digits:             16
        default:            12
        space, punct:       8
        tab:                4
        newline:            2
        other "\" chars:    1
        remaining chars:    0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = cost_table_ + 1;

      for (i = 0; i < 128; i++)
        {
          if (ISALNUM (i))
            cost_table[i] = 16;
          else if (ISPUNCT (i))
            cost_table[i] = 8;
          else if (ISCNTRL (i))
            cost_table[i] = -1;
        }

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
        return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
           i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
        if (cost_table[i] < 0)
          return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
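
/* Illustratively, a switch whose cases are 'a', '0', ' ' and '\n'
   looks like text scanning, so those labels get weights 16, 16, 8
   and 2 respectively; a switch mentioning the value 200, or a control
   character such as '\001', does not qualify and we return 0.  */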

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
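
/* The grouping applies only when the labels reach the same code,
   as in

        case 1:
        case 2:
        case 3:
          do_something ();
          break;

   where all three labels jump to the same place, so a single
   1..3 range node suffices.  */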

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      rtx lb2;
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
             /* Do they jump to the same place?  */
             && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
                 || (lb != 0 && lb2 != 0
                     && simplejump_p (lb)
                     && simplejump_p (lb2)
                     && rtx_equal_p (SET_SRC (PATTERN (lb)),
                                     SET_SRC (PATTERN (lb2)))))
             /* Are their ranges consecutive?  */
             && tree_int_cst_equal (np->low,
                                    fold (build (PLUS_EXPR,
                                                 TREE_TYPE (node->high),
                                                 node->high,
                                                 integer_one_node)))
             /* An overflow is not consecutive.  */
             && tree_int_cst_lt (node->high,
                                 fold (build (PLUS_EXPR,
                                              TREE_TYPE (node->high),
                                              node->high,
                                              integer_one_node))))
        {
          node->high = np->high;
        }
      /* NP is the first node after NODE which can't be grouped with it.
         Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
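
/* Illustratively, an ordered list of the seven values 1..7 is split
   at 4, with 1..3 becoming the left branch and 5..7 the right one;
   each half is then balanced the same way:

                  4
                /   \
               2     6
              / \   / \
             1   3 5   7  */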

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += cost_table[TREE_INT_CST_LOW (np->high)];
            }

          if (use_cost_table)
            cost += cost_table[TREE_INT_CST_LOW (np->low)];

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total
                 cost.  Here I is set to half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
                  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total
                 cost, where ranges count as 2.
                 Here I is set to half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */
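
/* For example, if a parent node ends at 4 and control reached this
   node only because the index compared greater than 4, then a node
   whose low bound is 5 needs no "index < 5" test; the parent already
   ruled those values out.  */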

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps resulting from the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
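
/* As a rough sketch of the output, a single-valued node for 5 whose
   subtrees for 3 and 8 are not bounded expands to

        if (index == 5) goto L5;
        if (index > 5) goto test_right;
        if (index == 3) goto L3;
        goto default;
     test_right:
        if (index == 8) goto L8;
        goto default;

   where the trailing jump to the default label is supplied by the
   caller via emit_jump_if_reachable.  */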

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       expand_expr (node->high, NULL_RTX,
                                                    VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (node->right->code_label));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       expand_expr (node->high, NULL_RTX,
                                                    VOIDmode, 0),
                                       LT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (node->left->code_label));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index,
                                       expand_expr (node->high, NULL_RTX,
                                                    VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (test_label));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If the left-hand subtree does nothing,
                 go to the default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would avoid only one
             right child; it costs too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           expand_expr (node->high, NULL_RTX,
                                                        VOIDmode, 0),
                                           LT, NULL_RTX, mode, unsignedp, 0,
                                           default_label);
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index, expand_expr (node->high,
                                                               NULL_RTX,
                                                               VOIDmode, 0),
                                           GT, NULL_RTX, mode, unsignedp, 0,
                                           default_label);
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                     GT, NULL_RTX, mode, unsignedp, 0,
                                     label_rtx (node->right->code_label));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_cmp_and_jump_insns (index,
                                       expand_expr (node->high, NULL_RTX,
                                                    VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (test_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
                                                       VOIDmode, 0),
                                   GE, NULL_RTX, mode, unsignedp, 0,
                                   label_rtx (node->code_label));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       expand_expr (node->low, NULL_RTX,
                                                    VOIDmode, 0),
                                       LT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                       VOIDmode, 0),
                                   LE, NULL_RTX, mode, unsignedp, 0,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       expand_expr (node->high, NULL_RTX,
                                                    VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   expand_expr (node->low, NULL_RTX,
                                                VOIDmode, 0),
                                   GE, NULL_RTX, mode, unsignedp, 0,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       expand_expr (node->high, NULL_RTX,
                                                    VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       expand_expr (node->low, NULL_RTX,
                                                    VOIDmode, 0),
                                       LT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}