/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
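
/* For illustration only (a sketch, not normative documentation of
   every entry point): a front end expanding the C statement
   `if (cond) then_stmt;' would make calls roughly like

     expand_start_cond (cond_expr, 0);   -- emit the test, push nesting
     ... expand then_stmt ...
     expand_end_cond ();                 -- emit the join label, pop nesting

   with expand_start_else called between the clauses when an
   else-clause is present.  */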
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
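
/* A sketch of the two output forms, for a hypothetical front end.
   A dense C switch such as

     switch (i) { case 0: ...  case 1: ...  case 2: ... }

   is emitted as a single dispatch through a branch table, while a
   sparse one such as

     switch (i) { case 1: ...  case 1000: ... }

   is emitted as a balanced tree of compare-and-jump insns built from
   the case_node chain below.  */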

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
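
/* Thus, for example, COST_TABLE (-1) refers to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128], so the full signed range
   -1 .. 127 fits in the 129-element array without a negative
   subscript.  */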

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
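
/* A sketch of the convention: expanding a C `break' searches these
   stacks for the innermost construct whose `exit_label' is non-null
   and jumps to that label; constructs begun with a null `exit_label'
   are simply not candidates for `break'.  */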

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)						\
do { struct nesting *target = STACK;				\
     struct nesting *this;					\
     do { this = nesting_stack;					\
	  if (loop_stack == this)				\
	    loop_stack = loop_stack->next;			\
	  if (cond_stack == this)				\
	    cond_stack = cond_stack->next;			\
	  if (block_stack == this)				\
	    block_stack = block_stack->next;			\
	  if (stack_block_stack == this)			\
	    stack_block_stack = stack_block_stack->next;	\
	  if (case_stack == this)				\
	    case_stack = case_stack->next;			\
	  nesting_depth = nesting_stack->depth - 1;		\
	  nesting_stack = this->all;				\
	  obstack_free (&stmt_obstack, this); }			\
   while (this != target); } while (0)
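
/* Typical use, as a sketch: when a construct such as a loop is ended,
   its end-function does

     POPSTACK (loop_stack);

   which frees every `struct nesting' pushed since that loop began and
   keeps all of the per-type stacks consistent with `nesting_stack'.  */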

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following label_chain.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences		PARAMS ((int, const char *));
static void expand_goto_internal	PARAMS ((tree, rtx, rtx));
static int expand_fixup			PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label	PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver	PARAMS ((void));
static void expand_nl_goto_receivers	PARAMS ((struct nesting *));
static void fixup_gotos			PARAMS ((struct nesting *, rtx, tree,
						 rtx, int));
static void expand_null_return_1	PARAMS ((rtx));
static void expand_value_return		PARAMS ((rtx));
static int tail_recursion_args		PARAMS ((tree, tree));
static void expand_cleanups		PARAMS ((tree, tree, int, int));
static void check_seenlabel		PARAMS ((void));
static void do_jump_if_equal		PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs		PARAMS ((case_node_ptr));
static void group_case_nodes		PARAMS ((case_node_ptr));
static void balance_case_nodes		PARAMS ((case_node_ptr *,
						 case_node_ptr));
static int node_has_low_bound		PARAMS ((case_node_ptr, tree));
static int node_has_high_bound		PARAMS ((case_node_ptr, tree));
static int node_is_bounded		PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable	PARAMS ((rtx));
static void emit_case_nodes		PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting		PARAMS ((struct nesting *));
static void mark_loop_nesting		PARAMS ((struct nesting *));
static void mark_block_nesting		PARAMS ((struct nesting *));
static void mark_case_nesting		PARAMS ((struct nesting *));
static void mark_case_node		PARAMS ((struct case_node *));
static void mark_goto_fixup		PARAMS ((struct goto_fixup *));
static void free_case_nodes		PARAMS ((case_node_ptr));

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{

  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}

/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	{
	  ggc_mark (l);
	  ggc_mark_tree (l->label);
	}

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      mark_case_node (n->data.case_stmt.case_list);
      n = n->next;
    }
}

/* Mark C for GC.  */

static void
mark_case_node (c)
     struct case_node *c;
{
  if (c != 0)
    {
      ggc_mark_tree (c->low);
      ggc_mark_tree (c->high);
      ggc_mark_tree (c->code_label);

      mark_case_node (c->right);
      mark_case_node (c->left);
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark (g);
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}

/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}

/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
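
/* For example (a sketch): for the C code `lab: ;' the front end calls
   expand_label on the LABEL_DECL for `lab'; a later `goto lab;' then
   reaches that CODE_LABEL through expand_goto below.  */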

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	    break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL (the code below aborts otherwise).
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  insert_block (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
	end = emit_note (NULL, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
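
/* For instance, n_occurrences (',', "=g,r") is 1; the asm expanders
   below use this to count constraint alternatives.  */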

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
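
/* For example (a sketch): given the non-canonical constraint "r=" for
   operand 0, this function warns and rewrites *CONSTRAINT_P to "=r";
   given "+r", it sets *IS_INOUT and likewise produces "=r".  */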

bool
parse_output_constraint (constraint_p,
			 operand_num,
			 ninputs,
			 noutputs,
			 allows_mem,
			 allows_reg,
			 is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Make sure we can specify the matching operand.  */
  if (*is_inout && operand_num > 9)
    {
      error ("output operand constraint %d contains `+'",
	     operand_num);
      return false;
    }

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; ++p)
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains '+' or '=' at illegal position.");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
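
/* As an illustrative sketch, for the GNU C statement

     asm volatile ("foo %1,%0" : "=r" (x) : "g" (y) : "cc");

   STRING is the template "foo %1,%0", OUTPUTS pairs the constraint
   "=r" with the lvalue `x', INPUTS pairs "g" with `y', CLOBBERS names
   "cc", and VOL is nonzero because of `volatile'.  */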
1475 void
1476 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1477 tree string, outputs, inputs, clobbers;
1478 int vol;
1479 const char *filename;
1480 int line;
1482 rtvec argvec, constraints;
1483 rtx body;
1484 int ninputs = list_length (inputs);
1485 int noutputs = list_length (outputs);
1486 int ninout = 0;
1487 int nclobbers;
1488 tree tail;
1489 register int i;
1490 /* Vector of RTX's of evaluated output operands. */
1491 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1492 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1493 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1494 enum machine_mode *inout_mode
1495 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1496 const char **output_constraints
1497 = alloca (noutputs * sizeof (const char *));
1498 /* The insn we have emitted. */
1499 rtx insn;
1500 int old_generating_concat_p = generating_concat_p;
1502 /* An ASM with no outputs needs to be treated as volatile, for now. */
1503 if (noutputs == 0)
1504 vol = 1;
1506 if (current_function_check_memory_usage)
1508 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1509 return;
1512 #ifdef MD_ASM_CLOBBERS
1513 /* Sometimes we wish to automatically clobber registers across an asm.
1514 Case in point is when the i386 backend moved from cc0 to a hard reg --
1515 maintaining source-level compatability means automatically clobbering
1516 the flags register. */
1517 MD_ASM_CLOBBERS (clobbers);
1518 #endif
1520 if (current_function_check_memory_usage)
1522 error ("`asm' cannot be used in function where memory usage is checked");
1523 return;
1526 /* Count the number of meaningful clobbered registers, ignoring what
1527 we would ignore later. */
1528 nclobbers = 0;
1529 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1531 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1533 i = decode_reg_name (regname);
1534 if (i >= 0 || i == -4)
1535 ++nclobbers;
1536 else if (i == -2)
1537 error ("unknown register name `%s' in `asm'", regname);
1540 last_expr_type = 0;
1542 /* Check that the number of alternatives is constant across all
1543 operands. */
1544 if (outputs || inputs)
1546 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1547 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1548 tree next = inputs;
1550 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1552 error ("too many alternatives in `asm'");
1553 return;
1556 tmp = outputs;
1557 while (tmp)
1559 const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
1561 if (n_occurrences (',', constraint) != nalternatives)
1563 error ("operand constraints for `asm' differ in number of alternatives");
1564 return;
1567 if (TREE_CHAIN (tmp))
1568 tmp = TREE_CHAIN (tmp);
1569 else
1570 tmp = next, next = 0;
1574 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1576 tree val = TREE_VALUE (tail);
1577 tree type = TREE_TYPE (val);
1578 const char *constraint;
1579 bool is_inout;
1580 bool allows_reg;
1581 bool allows_mem;
1583 /* If there's an erroneous arg, emit no insn. */
1584 if (type == error_mark_node)
1585 return;
1587 /* Make sure constraint has `=' and does not have `+'. Also, see
1588 if it allows any register. Be liberal on the latter test, since
1589 the worst that happens if we get it wrong is we issue an error
1590 message. */
1592 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1593 output_constraints[i] = constraint;
1595 /* Try to parse the output constraint. If that fails, there's
1596 no point in going further. */
1597 if (!parse_output_constraint (&output_constraints[i],
1599 ninputs,
1600 noutputs,
1601 &allows_mem,
1602 &allows_reg,
1603 &is_inout))
1604 return;
1606 /* If an output operand is not a decl or indirect ref and our constraint
1607 allows a register, make a temporary to act as an intermediate.
1608 Make the asm insn write into that, then our caller will copy it to
1609 the real output operand. Likewise for promoted variables. */
1611 generating_concat_p = 0;
1613 real_output_rtx[i] = NULL_RTX;
1614 if ((TREE_CODE (val) == INDIRECT_REF
1615 && allows_mem)
1616 || (DECL_P (val)
1617 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1618 && ! (GET_CODE (DECL_RTL (val)) == REG
1619 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1620 || ! allows_reg
1621 || is_inout)
1623 if (! allows_reg)
1624 mark_addressable (TREE_VALUE (tail));
1626 output_rtx[i]
1627 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1628 EXPAND_MEMORY_USE_WO);
1630 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1631 error ("output number %d not directly addressable", i);
1632 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1633 || GET_CODE (output_rtx[i]) == CONCAT)
1635 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1636 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1637 if (is_inout)
1638 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1641 else
1643 output_rtx[i] = assign_temp (type, 0, 0, 1);
1644 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1647 generating_concat_p = old_generating_concat_p;
1649 if (is_inout)
1651 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1652 inout_opnum[ninout++] = i;
1656 ninputs += ninout;
1657 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1659 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1660 return;
1663 /* Make vectors for the expression-rtx and constraint strings. */
1665 argvec = rtvec_alloc (ninputs);
1666 constraints = rtvec_alloc (ninputs);
1668 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1669 : GET_MODE (output_rtx[0])),
1670 TREE_STRING_POINTER (string),
1671 empty_string, 0, argvec, constraints,
1672 filename, line);
1674 MEM_VOLATILE_P (body) = vol;
1676 /* Eval the inputs and put them into ARGVEC.
1677 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1679 i = 0;
1680 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1682 int j;
1683 int allows_reg = 0, allows_mem = 0;
1684 const char *constraint, *orig_constraint;
1685 int c_len;
1686 rtx op;
1688 /* If there's an erroneous arg, emit no insn,
1689 because the ASM_INPUT would get VOIDmode
1690 and that could cause a crash in reload. */
1691 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1692 return;
1694 /* ??? Can this happen, and does the error message make any sense? */
1695 if (TREE_PURPOSE (tail) == NULL_TREE)
1697 error ("hard register `%s' listed as input operand to `asm'",
1698 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1699 return;
1702 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1703 c_len = strlen (constraint);
1704 orig_constraint = constraint;
1706 /* Make sure constraint has neither `=', `+', nor '&'. */
1708 for (j = 0; j < c_len; j++)
1709 switch (constraint[j])
1711 case '+': case '=': case '&':
1712 if (constraint == orig_constraint)
1714 error ("input operand constraint contains `%c'",
1715 constraint[j]);
1716 return;
1718 break;
1720 case '%':
1721 if (constraint == orig_constraint
1722 && i + 1 == ninputs - ninout)
1724 error ("`%%' constraint used with last operand");
1725 return;
1727 break;
1729 case 'V': case 'm': case 'o':
1730 allows_mem = 1;
1731 break;
1733 case '<': case '>':
1734 case '?': case '!': case '*': case '#':
1735 case 'E': case 'F': case 'G': case 'H':
1736 case 's': case 'i': case 'n':
1737 case 'I': case 'J': case 'K': case 'L': case 'M':
1738 case 'N': case 'O': case 'P': case ',':
1739 break;
1741 /* Whether or not a numeric constraint allows a register is
1742 decided by the matching constraint, and so there is no need
1743 to do anything special with them. We must handle them in
1744 the default case, so that we don't unnecessarily force
1745 operands to memory. */
1746 case '0': case '1': case '2': case '3': case '4':
1747 case '5': case '6': case '7': case '8': case '9':
1748 if (constraint[j] >= '0' + noutputs)
1750 error
1751 ("matching constraint references invalid operand number");
1752 return;
1755 /* Try and find the real constraint for this dup. */
1756 if ((j == 0 && c_len == 1)
1757 || (j == 1 && c_len == 2 && constraint[0] == '%'))
1759 tree o = outputs;
1761 for (j = constraint[j] - '0'; j > 0; --j)
1762 o = TREE_CHAIN (o);
1764 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1765 c_len = strlen (constraint);
1766 j = 0;
1767 break;
1770 /* Fall through. */
1772 case 'p': case 'r':
1773 allows_reg = 1;
1774 break;
1776 case 'g': case 'X':
1777 allows_reg = 1;
1778 allows_mem = 1;
1779 break;
1781 default:
1782 if (! ISALPHA (constraint[j]))
1784 error ("invalid punctuation `%c' in constraint",
1785 constraint[j]);
1786 return;
1788 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1789 allows_reg = 1;
1790 #ifdef EXTRA_CONSTRAINT
1791 else
1793 /* Otherwise we can't assume anything about the nature of
1794 the constraint except that it isn't purely registers.
1795 Treat it like "g" and hope for the best. */
1796 allows_reg = 1;
1797 allows_mem = 1;
1799 #endif
1800 break;
1803 if (! allows_reg && allows_mem)
1804 mark_addressable (TREE_VALUE (tail));
1806 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1808 /* Never pass a CONCAT to an ASM. */
1809 generating_concat_p = 0;
1810 if (GET_CODE (op) == CONCAT)
1811 op = force_reg (GET_MODE (op), op);
1813 if (asm_operand_ok (op, constraint) <= 0)
1815 if (allows_reg)
1816 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1817 else if (!allows_mem)
1818 warning ("asm operand %d probably doesn't match constraints", i);
1819 else if (CONSTANT_P (op))
1820 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1821 op);
1822 else if (GET_CODE (op) == REG
1823 || GET_CODE (op) == SUBREG
1824 || GET_CODE (op) == ADDRESSOF
1825 || GET_CODE (op) == CONCAT)
1827 tree type = TREE_TYPE (TREE_VALUE (tail));
1828 tree qual_type = build_qualified_type (type,
1829 (TYPE_QUALS (type)
1830 | TYPE_QUAL_CONST));
1831 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1833 emit_move_insn (memloc, op);
1834 op = memloc;
1837 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1838 /* We won't recognize volatile memory as available as a
1839 memory_operand at this point. Ignore it. */
1841 else if (queued_subexp_p (op))
1843 else
1844 /* ??? Leave this only until we have experience with what
1845 happens in combine and elsewhere when constraints are
1846 not satisfied. */
1847 warning ("asm operand %d probably doesn't match constraints", i);
1849 generating_concat_p = old_generating_concat_p;
1850 ASM_OPERANDS_INPUT (body, i) = op;
1852 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1853 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1854 orig_constraint);
1855 i++;
1858 /* Protect all the operands from the queue now that they have all been
1859 evaluated. */
1861 generating_concat_p = 0;
1863 for (i = 0; i < ninputs - ninout; i++)
1864 ASM_OPERANDS_INPUT (body, i)
1865 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1867 for (i = 0; i < noutputs; i++)
1868 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1870 /* For in-out operands, copy output rtx to input rtx. */
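/* Illustrative note (an assumption, not from the original source): an
   in-out operand such as
       asm ("incl %0" : "+r" (x));
   was split earlier into an "=r" output plus a matching digit input;
   the loop below makes that input slot reuse the output's rtx.  */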
1871 for (i = 0; i < ninout; i++)
1873 int j = inout_opnum[i];
1875 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1876 = output_rtx[j];
1877 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1878 = gen_rtx_ASM_INPUT (inout_mode[i], digit_string (j));
1881 generating_concat_p = old_generating_concat_p;
1883 /* Now, for each output, construct an rtx
1884 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1885 ARGVEC CONSTRAINTS))
1886 If there is more than one, put them inside a PARALLEL. */
1888 if (noutputs == 1 && nclobbers == 0)
1890 ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
1891 = output_constraints[0];
1892 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1895 else if (noutputs == 0 && nclobbers == 0)
1897 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1898 insn = emit_insn (body);
1901 else
1903 rtx obody = body;
1904 int num = noutputs;
1906 if (num == 0)
1907 num = 1;
1909 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1911 /* For each output operand, store a SET. */
1912 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1914 XVECEXP (body, 0, i)
1915 = gen_rtx_SET (VOIDmode,
1916 output_rtx[i],
1917 gen_rtx_ASM_OPERANDS
1918 (GET_MODE (output_rtx[i]),
1919 TREE_STRING_POINTER (string),
1920 output_constraints[i],
1921 i, argvec, constraints,
1922 filename, line));
1924 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1927 /* If there are no outputs (but there are some clobbers)
1928 store the bare ASM_OPERANDS into the PARALLEL. */
1930 if (i == 0)
1931 XVECEXP (body, 0, i++) = obody;
1933 /* Store (clobber REG) for each clobbered register specified. */
1935 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1937 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1938 int j = decode_reg_name (regname);
1940 if (j < 0)
1942 if (j == -3) /* `cc', which is not a register */
1943 continue;
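/* Illustrative note (an assumption, not from the original source):
   the `memory' case below is what implements the classic optimization
   barrier
       asm volatile ("" : : : "memory");
   the clobber of a BLKmode scratch MEM tells later passes that the
   asm may modify arbitrary memory.  */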
1945 if (j == -4) /* `memory', don't cache memory across asm */
1947 XVECEXP (body, 0, i++)
1948 = gen_rtx_CLOBBER (VOIDmode,
1949 gen_rtx_MEM
1950 (BLKmode,
1951 gen_rtx_SCRATCH (VOIDmode)));
1952 continue;
1955 /* Ignore unknown register, error already signaled. */
1956 continue;
1959 /* Use QImode since that's guaranteed to clobber just one reg. */
1960 XVECEXP (body, 0, i++)
1961 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1964 insn = emit_insn (body);
1967 /* For any outputs that needed reloading into registers, spill them
1968 back to where they belong. */
1969 for (i = 0; i < noutputs; ++i)
1970 if (real_output_rtx[i])
1971 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1973 free_temp_slots ();
1976 /* Generate RTL to evaluate the expression EXP
1977 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1979 void
1980 expand_expr_stmt (exp)
1981 tree exp;
1983 /* If -W, warn about statements with no side effects,
1984 except for an explicit cast to void (e.g. for assert()), and
1985 except inside a ({...}) where they may be useful. */
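/* Illustrative examples (assumed, not from the original source):
       x + 1;         warns: statement with no effect
       (void) x;      no warning: explicit cast to void
   and inside `({ x + 1; })' the final expression is the construct's
   value, so it is not warned about either.  */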
1986 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1988 if (! TREE_SIDE_EFFECTS (exp))
1990 if ((extra_warnings || warn_unused_value)
1991 && !(TREE_CODE (exp) == CONVERT_EXPR
1992 && VOID_TYPE_P (TREE_TYPE (exp))))
1993 warning_with_file_and_line (emit_filename, emit_lineno,
1994 "statement with no effect");
1996 else if (warn_unused_value)
1997 warn_if_unused_value (exp);
2000 /* If EXP is of function type and we are expanding statements for
2001 value, convert it to pointer-to-function. */
2002 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2003 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2005 /* The call to `expand_expr' could cause last_expr_type and
2006 last_expr_value to get reset. Therefore, we set last_expr_value
2007 and last_expr_type *after* calling expand_expr. */
2008 last_expr_value = expand_expr (exp,
2009 (expr_stmts_for_value
2010 ? NULL_RTX : const0_rtx),
2011 VOIDmode, 0);
2012 last_expr_type = TREE_TYPE (exp);
2014 /* If all we do is reference a volatile value in memory,
2015 copy it to a register to be sure it is actually touched. */
2016 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
2017 && TREE_THIS_VOLATILE (exp))
2019 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
2021 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
2022 copy_to_reg (last_expr_value);
2023 else
2025 rtx lab = gen_label_rtx ();
2027 /* Compare the value with itself to reference it. */
2028 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
2029 expand_expr (TYPE_SIZE (last_expr_type),
2030 NULL_RTX, VOIDmode, 0),
2031 BLKmode, 0,
2032 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
2033 lab);
2034 emit_label (lab);
2038 /* If this expression is part of a ({...}) and is in memory, we may have
2039 to preserve temporaries. */
2040 preserve_temp_slots (last_expr_value);
2042 /* Free any temporaries used to evaluate this expression. Any temporary
2043 used as a result of this expression will already have been preserved
2044 above. */
2045 free_temp_slots ();
2047 emit_queue ();
2050 /* Warn if EXP contains any computations whose results are not used.
2051 Return 1 if a warning is printed; 0 otherwise. */
2054 warn_if_unused_value (exp)
2055 tree exp;
2057 if (TREE_USED (exp))
2058 return 0;
2060 /* Don't warn about void constructs. This includes casting to void,
2061 void function calls, and statement expressions with a final cast
2062 to void. */
2063 if (VOID_TYPE_P (TREE_TYPE (exp)))
2064 return 0;
2066 /* If this is an expression with side effects, don't warn. */
2067 if (TREE_SIDE_EFFECTS (exp))
2068 return 0;
2070 switch (TREE_CODE (exp))
2072 case PREINCREMENT_EXPR:
2073 case POSTINCREMENT_EXPR:
2074 case PREDECREMENT_EXPR:
2075 case POSTDECREMENT_EXPR:
2076 case MODIFY_EXPR:
2077 case INIT_EXPR:
2078 case TARGET_EXPR:
2079 case CALL_EXPR:
2080 case METHOD_CALL_EXPR:
2081 case RTL_EXPR:
2082 case TRY_CATCH_EXPR:
2083 case WITH_CLEANUP_EXPR:
2084 case EXIT_EXPR:
2085 return 0;
2087 case BIND_EXPR:
2088 /* For a binding, warn if no side effect within it. */
2089 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2091 case SAVE_EXPR:
2092 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2094 case TRUTH_ORIF_EXPR:
2095 case TRUTH_ANDIF_EXPR:
2096 /* In && or ||, warn if 2nd operand has no side effect. */
2097 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2099 case COMPOUND_EXPR:
2100 if (TREE_NO_UNUSED_WARNING (exp))
2101 return 0;
2102 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2103 return 1;
2104 /* Let people do `(foo (), 0)' without a warning. */
2105 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2106 return 0;
2107 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2109 case NOP_EXPR:
2110 case CONVERT_EXPR:
2111 case NON_LVALUE_EXPR:
2112 /* Don't warn about conversions not explicit in the user's program. */
2113 if (TREE_NO_UNUSED_WARNING (exp))
2114 return 0;
2115 /* Assignment to a cast usually results in a cast of a modify.
2116 Don't complain about that. There can be an arbitrary number of
2117 casts before the modify, so we must loop until we find the first
2118 non-cast expression and then test to see if that is a modify. */
2120 tree tem = TREE_OPERAND (exp, 0);
2122 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2123 tem = TREE_OPERAND (tem, 0);
2125 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2126 || TREE_CODE (tem) == CALL_EXPR)
2127 return 0;
2129 goto warn;
2131 case INDIRECT_REF:
2132 /* Don't warn about automatic dereferencing of references, since
2133 the user cannot control it. */
2134 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2135 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2136 /* Fall through. */
2138 default:
2139 /* Referencing a volatile value is a side effect, so don't warn. */
2140 if ((DECL_P (exp)
2141 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2142 && TREE_THIS_VOLATILE (exp))
2143 return 0;
2145 /* If this is an expression which has no operands, there is no value
2146 to be unused. There are no such language-independent codes,
2147 but front ends may define such. */
2148 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2149 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2150 return 0;
2152 warn:
2153 warning_with_file_and_line (emit_filename, emit_lineno,
2154 "value computed is not used");
2155 return 1;
2159 /* Clear out the memory of the last expression evaluated. */
2161 void
2162 clear_last_expr ()
2164 last_expr_type = 0;
2167 /* Begin a statement which will return a value.
2168 Return the RTL_EXPR for this statement expr.
2169 The caller must save that value and pass it to expand_end_stmt_expr. */
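/* Illustrative usage (a sketch, not from the original source): for the
   GNU C statement expression
       z = ({ int t = f (x); t + 1; });
   the front end calls expand_start_stmt_expr before expanding the
   body and hands the RTL_EXPR returned here to expand_end_stmt_expr,
   which fills in the value of the final `t + 1'.  */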
2171 tree
2172 expand_start_stmt_expr ()
2174 tree t;
2176 /* Make the RTL_EXPR node temporary, not momentary,
2177 so that rtl_expr_chain doesn't become garbage. */
2178 t = make_node (RTL_EXPR);
2179 do_pending_stack_adjust ();
2180 start_sequence_for_rtl_expr (t);
2181 NO_DEFER_POP;
2182 expr_stmts_for_value++;
2183 return t;
2186 /* Restore the previous state at the end of a statement that returns a value.
2187 Returns a tree node representing the statement's value and the
2188 insns to compute the value.
2190 The nodes of that expression have been freed by now, so we cannot use them.
2191 But we don't want to do that anyway; the expression has already been
2192 evaluated and now we just want to use the value. So generate a RTL_EXPR
2193 with the proper type and RTL value.
2195 If the last substatement was not an expression,
2196 return something with type `void'. */
2198 tree
2199 expand_end_stmt_expr (t)
2200 tree t;
2202 OK_DEFER_POP;
2204 if (last_expr_type == 0)
2206 last_expr_type = void_type_node;
2207 last_expr_value = const0_rtx;
2209 else if (last_expr_value == 0)
2210 /* There are some cases where this can happen, such as when the
2211 statement has void type. */
2212 last_expr_value = const0_rtx;
2213 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2214 /* Remove any possible QUEUED. */
2215 last_expr_value = protect_from_queue (last_expr_value, 0);
2217 emit_queue ();
2219 TREE_TYPE (t) = last_expr_type;
2220 RTL_EXPR_RTL (t) = last_expr_value;
2221 RTL_EXPR_SEQUENCE (t) = get_insns ();
2223 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2225 end_sequence ();
2227 /* Don't consider deleting this expr or containing exprs at tree level. */
2228 TREE_SIDE_EFFECTS (t) = 1;
2229 /* Propagate volatility of the actual RTL expr. */
2230 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2232 last_expr_type = 0;
2233 expr_stmts_for_value--;
2235 return t;
2238 /* Generate RTL for the start of an if-then. COND is the expression
2239 whose truth should be tested.
2241 If EXITFLAG is nonzero, this conditional is visible to
2242 `exit_something'. */
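/* Illustrative call sequence (a sketch, not from the original source)
   for `if (a) s1; else if (b) s2; else s3;':
       expand_start_cond (a, 0);   ...expand s1...
       expand_start_elseif (b);    ...expand s2...
       expand_start_else ();       ...expand s3...
       expand_end_cond ();  */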
2244 void
2245 expand_start_cond (cond, exitflag)
2246 tree cond;
2247 int exitflag;
2249 struct nesting *thiscond = ALLOC_NESTING ();
2251 /* Make an entry on cond_stack for the cond we are entering. */
2253 thiscond->next = cond_stack;
2254 thiscond->all = nesting_stack;
2255 thiscond->depth = ++nesting_depth;
2256 thiscond->data.cond.next_label = gen_label_rtx ();
2257 /* Before we encounter an `else', we don't need a separate exit label
2258 unless there are supposed to be exit statements
2259 to exit this conditional. */
2260 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2261 thiscond->data.cond.endif_label = thiscond->exit_label;
2262 cond_stack = thiscond;
2263 nesting_stack = thiscond;
2265 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2268 /* Generate RTL between the then-clause and the elseif-clause
2269 of an if-then-elseif-.... */
2271 void
2272 expand_start_elseif (cond)
2273 tree cond;
2275 if (cond_stack->data.cond.endif_label == 0)
2276 cond_stack->data.cond.endif_label = gen_label_rtx ();
2277 emit_jump (cond_stack->data.cond.endif_label);
2278 emit_label (cond_stack->data.cond.next_label);
2279 cond_stack->data.cond.next_label = gen_label_rtx ();
2280 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2283 /* Generate RTL between the then-clause and the else-clause
2284 of an if-then-else. */
2286 void
2287 expand_start_else ()
2289 if (cond_stack->data.cond.endif_label == 0)
2290 cond_stack->data.cond.endif_label = gen_label_rtx ();
2292 emit_jump (cond_stack->data.cond.endif_label);
2293 emit_label (cond_stack->data.cond.next_label);
2294 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2297 /* After calling expand_start_else, turn this "else" into an "else if"
2298 by providing another condition. */
2300 void
2301 expand_elseif (cond)
2302 tree cond;
2304 cond_stack->data.cond.next_label = gen_label_rtx ();
2305 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2308 /* Generate RTL for the end of an if-then.
2309 Pop the record for it off of cond_stack. */
2311 void
2312 expand_end_cond ()
2314 struct nesting *thiscond = cond_stack;
2316 do_pending_stack_adjust ();
2317 if (thiscond->data.cond.next_label)
2318 emit_label (thiscond->data.cond.next_label);
2319 if (thiscond->data.cond.endif_label)
2320 emit_label (thiscond->data.cond.endif_label);
2322 POPSTACK (cond_stack);
2323 last_expr_type = 0;
2326 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2327 loop should be exited by `exit_something'. This is a loop for which
2328 `expand_continue' will jump to the top of the loop.
2330 Make an entry on loop_stack to record the labels associated with
2331 this loop. */
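/* Illustrative call sequence (an assumption about front-end usage,
   not from the original source): `while (cond) body' might be
   expanded roughly as
       expand_start_loop (1);
       expand_exit_loop_if_false (0, cond);
       ...expand body...
       expand_end_loop ();  */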
2333 struct nesting *
2334 expand_start_loop (exit_flag)
2335 int exit_flag;
2337 register struct nesting *thisloop = ALLOC_NESTING ();
2339 /* Make an entry on loop_stack for the loop we are entering. */
2341 thisloop->next = loop_stack;
2342 thisloop->all = nesting_stack;
2343 thisloop->depth = ++nesting_depth;
2344 thisloop->data.loop.start_label = gen_label_rtx ();
2345 thisloop->data.loop.end_label = gen_label_rtx ();
2346 thisloop->data.loop.alt_end_label = 0;
2347 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2348 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2349 loop_stack = thisloop;
2350 nesting_stack = thisloop;
2352 do_pending_stack_adjust ();
2353 emit_queue ();
2354 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2355 emit_label (thisloop->data.loop.start_label);
2357 return thisloop;
2360 /* Like expand_start_loop but for a loop where the continuation point
2361 (for expand_continue_loop) will be specified explicitly. */
2363 struct nesting *
2364 expand_start_loop_continue_elsewhere (exit_flag)
2365 int exit_flag;
2367 struct nesting *thisloop = expand_start_loop (exit_flag);
2368 loop_stack->data.loop.continue_label = gen_label_rtx ();
2369 return thisloop;
2372 /* Begin a null, aka do { } while (0) "loop". But since the contents
2373 of said loop can still contain a break, we must frob the loop nest. */
2375 struct nesting *
2376 expand_start_null_loop ()
2378 register struct nesting *thisloop = ALLOC_NESTING ();
2380 /* Make an entry on loop_stack for the loop we are entering. */
2382 thisloop->next = loop_stack;
2383 thisloop->all = nesting_stack;
2384 thisloop->depth = ++nesting_depth;
2385 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2386 thisloop->data.loop.end_label = gen_label_rtx ();
2387 thisloop->data.loop.alt_end_label = NULL_RTX;
2388 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2389 thisloop->exit_label = thisloop->data.loop.end_label;
2390 loop_stack = thisloop;
2391 nesting_stack = thisloop;
2393 return thisloop;
2396 /* Specify the continuation point for a loop started with
2397 expand_start_loop_continue_elsewhere.
2398 Use this at the point in the code to which a continue statement
2399 should jump. */
2401 void
2402 expand_loop_continue_here ()
2404 do_pending_stack_adjust ();
2405 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2406 emit_label (loop_stack->data.loop.continue_label);
2409 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2410 Pop the block off of loop_stack. */
2412 void
2413 expand_end_loop ()
2415 rtx start_label = loop_stack->data.loop.start_label;
2416 rtx insn = get_last_insn ();
2417 int needs_end_jump = 1;
2419 /* Mark the continue-point at the top of the loop if none elsewhere. */
2420 if (start_label == loop_stack->data.loop.continue_label)
2421 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2423 do_pending_stack_adjust ();
2425 /* If optimizing, perhaps reorder the loop.
2426 First, try to use a condjump near the end.
2427 expand_exit_loop_if_false ends loops with unconditional jumps,
2428 like this:
2430 if (test) goto label;
2431 optional: cleanup
2432 goto loop_stack->data.loop.end_label
2433 barrier
2434 label:
2436 If we find such a pattern, we can end the loop earlier. */
2438 if (optimize
2439 && GET_CODE (insn) == CODE_LABEL
2440 && LABEL_NAME (insn) == NULL
2441 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2443 rtx label = insn;
2444 rtx jump = PREV_INSN (PREV_INSN (label));
2446 if (GET_CODE (jump) == JUMP_INSN
2447 && GET_CODE (PATTERN (jump)) == SET
2448 && SET_DEST (PATTERN (jump)) == pc_rtx
2449 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2450 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2451 == loop_stack->data.loop.end_label))
2453 rtx prev;
2455 /* The test might be complex and reference LABEL multiple times,
2456 like the loop in loop_iterations to set vtop. To handle this,
2457 we move LABEL. */
2458 insn = PREV_INSN (label);
2459 reorder_insns (label, label, start_label);
2461 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2463 /* We ignore line number notes, but if we see any other note,
2464 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2465 NOTE_INSN_LOOP_*, we disable this optimization. */
2466 if (GET_CODE (prev) == NOTE)
2468 if (NOTE_LINE_NUMBER (prev) < 0)
2469 break;
2470 continue;
2472 if (GET_CODE (prev) == CODE_LABEL)
2473 break;
2474 if (GET_CODE (prev) == JUMP_INSN)
2476 if (GET_CODE (PATTERN (prev)) == SET
2477 && SET_DEST (PATTERN (prev)) == pc_rtx
2478 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2479 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2480 == LABEL_REF)
2481 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2483 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2484 = start_label;
2485 emit_note_after (NOTE_INSN_LOOP_END, prev);
2486 needs_end_jump = 0;
2488 break;
2494 /* If the loop starts with a loop exit, roll that to the end where
2495 it will optimize together with the jump back.
2497 We look for the conditional branch to the exit, except that once
2498 we find such a branch, we don't look past 30 instructions.
2500 In more detail, if the loop presently looks like this (in pseudo-C):
2502 start_label:
2503 if (test) goto end_label;
2504 body;
2505 goto start_label;
2506 end_label:
2508 transform it to look like:
2510 goto start_label;
2511 newstart_label:
2512 body;
2513 start_label:
2514 if (test) goto end_label;
2515 goto newstart_label;
2516 end_label:
2518 Here, the `test' may actually consist of some reasonably complex
2519 code, terminating in a test. */
2521 if (optimize
2522 && needs_end_jump
2524 ! (GET_CODE (insn) == JUMP_INSN
2525 && GET_CODE (PATTERN (insn)) == SET
2526 && SET_DEST (PATTERN (insn)) == pc_rtx
2527 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2529 int eh_regions = 0;
2530 int num_insns = 0;
2531 rtx last_test_insn = NULL_RTX;
2533 /* Scan insns from the top of the loop looking for a qualified
2534 conditional exit. */
2535 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2536 insn = NEXT_INSN (insn))
2538 if (GET_CODE (insn) == NOTE)
2540 if (optimize < 2
2541 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2542 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2543 /* The code that actually moves the exit test will
2544 carefully leave BLOCK notes in their original
2545 location. That means, however, that we can't debug
2546 the exit test itself. So, we refuse to move code
2547 containing BLOCK notes at low optimization levels. */
2548 break;
2550 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2551 ++eh_regions;
2552 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2554 --eh_regions;
2555 if (eh_regions < 0)
2556 /* We've come to the end of an EH region, but
2557 never saw the beginning of that region. That
2558 means that an EH region begins before the top
2559 of the loop, and ends in the middle of it. The
2560 existence of such a situation violates a basic
2561 assumption in this code, since that would imply
2562 that even when EH_REGIONS is zero, we might
2563 move code out of an exception region. */
2564 abort ();
2567 /* We must not walk into a nested loop. */
2568 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2569 break;
2571 /* We already know this INSN is a NOTE, so there's no
2572 point in looking at it to see if it's a JUMP. */
2573 continue;
2576 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2577 num_insns++;
2579 if (last_test_insn && num_insns > 30)
2580 break;
2582 if (eh_regions > 0)
2583 /* We don't want to move a partial EH region. Consider:
2585 while ( ( { try {
2586 if (cond ()) 0;
2587 else {
2588 bar();
2591 } catch (...) {
2593 } )) {
2594 body;
2597 This isn't legal C++, but here's what it's supposed to
2598 mean: if cond() is true, stop looping. Otherwise,
2599 call bar, and keep looping. In addition, if cond
2600 throws an exception, catch it and keep looping. Such
2601 constructs are certainly legal in LISP.
2603 We should not move the `if (cond()) 0' test since then
2604 the EH-region for the try-block would be broken up.
2605 (In this case we would move the EH_BEG note for the `try'
2606 and `if cond()' but not the call to bar() or the
2607 EH_END note.)
2609 So we don't look for tests within an EH region. */
2610 continue;
2612 if (GET_CODE (insn) == JUMP_INSN
2613 && GET_CODE (PATTERN (insn)) == SET
2614 && SET_DEST (PATTERN (insn)) == pc_rtx)
2616 /* This is indeed a jump. */
2617 rtx dest1 = NULL_RTX;
2618 rtx dest2 = NULL_RTX;
2619 rtx potential_last_test;
2620 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2622 /* A conditional jump. */
2623 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2624 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2625 potential_last_test = insn;
2627 else
2629 /* An unconditional jump. */
2630 dest1 = SET_SRC (PATTERN (insn));
2631 /* Include the BARRIER after the JUMP. */
2632 potential_last_test = NEXT_INSN (insn);
2635 do {
2636 if (dest1 && GET_CODE (dest1) == LABEL_REF
2637 && ((XEXP (dest1, 0)
2638 == loop_stack->data.loop.alt_end_label)
2639 || (XEXP (dest1, 0)
2640 == loop_stack->data.loop.end_label)))
2642 last_test_insn = potential_last_test;
2643 break;
2646 /* If this was a conditional jump, there may be
2647 another label at which we should look. */
2648 dest1 = dest2;
2649 dest2 = NULL_RTX;
2650 } while (dest1);
2654 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2656 /* We found one. Move everything from there up
2657 to the end of the loop, and add a jump into the loop
2658 to jump to there. */
2659 register rtx newstart_label = gen_label_rtx ();
2660 register rtx start_move = start_label;
2661 rtx next_insn;
2663 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2664 then we want to move this note also. */
2665 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2666 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2667 == NOTE_INSN_LOOP_CONT))
2668 start_move = PREV_INSN (start_move);
2670 emit_label_after (newstart_label, PREV_INSN (start_move));
2672 /* Actually move the insns. Start at the beginning, and
2673 keep copying insns until we've copied the
2674 last_test_insn. */
2675 for (insn = start_move; insn; insn = next_insn)
2677 /* Figure out which insn comes after this one. We have
2678 to do this before we move INSN. */
2679 if (insn == last_test_insn)
2680 /* We've moved all the insns. */
2681 next_insn = NULL_RTX;
2682 else
2683 next_insn = NEXT_INSN (insn);
2685 if (GET_CODE (insn) == NOTE
2686 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2687 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2688 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2689 NOTE_INSN_BLOCK_ENDs because the correct generation
2690 of debugging information depends on these appearing
2691 in the same order in the RTL and in the tree
2692 structure, where they are represented as BLOCKs.
2693 So, we don't move block notes. Of course, moving
2694 the code inside the block is likely to make it
2695 impossible to debug the instructions in the exit
2696 test, but such is the price of optimization. */
2697 continue;
2699 /* Move the INSN. */
2700 reorder_insns (insn, insn, get_last_insn ());
2703 emit_jump_insn_after (gen_jump (start_label),
2704 PREV_INSN (newstart_label));
2705 emit_barrier_after (PREV_INSN (newstart_label));
2706 start_label = newstart_label;
2710 if (needs_end_jump)
2712 emit_jump (start_label);
2713 emit_note (NULL, NOTE_INSN_LOOP_END);
2715 emit_label (loop_stack->data.loop.end_label);
2717 POPSTACK (loop_stack);
2719 last_expr_type = 0;
2722 /* Finish a null loop, aka do { } while (0). */
2724 void
2725 expand_end_null_loop ()
2727 do_pending_stack_adjust ();
2728 emit_label (loop_stack->data.loop.end_label);
2730 POPSTACK (loop_stack);
2732 last_expr_type = 0;
2735 /* Generate a jump to the current loop's continue-point.
2736 This is usually the top of the loop, but may be specified
2737 explicitly elsewhere. If not currently inside a loop,
2738 return 0 and do nothing; caller will print an error message. */
2741 expand_continue_loop (whichloop)
2742 struct nesting *whichloop;
2744 last_expr_type = 0;
2745 if (whichloop == 0)
2746 whichloop = loop_stack;
2747 if (whichloop == 0)
2748 return 0;
2749 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2750 NULL_RTX);
2751 return 1;
2754 /* Generate a jump to exit the current loop. If not currently inside a loop,
2755 return 0 and do nothing; caller will print an error message. */
2758 expand_exit_loop (whichloop)
2759 struct nesting *whichloop;
2761 last_expr_type = 0;
2762 if (whichloop == 0)
2763 whichloop = loop_stack;
2764 if (whichloop == 0)
2765 return 0;
2766 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2767 return 1;
2770 /* Generate a conditional jump to exit the current loop if COND
2771 evaluates to zero. If not currently inside a loop,
2772 return 0 and do nothing; caller will print an error message. */
2775 expand_exit_loop_if_false (whichloop, cond)
2776 struct nesting *whichloop;
2777 tree cond;
2779 rtx label = gen_label_rtx ();
2780 rtx last_insn;
2781 last_expr_type = 0;
2783 if (whichloop == 0)
2784 whichloop = loop_stack;
2785 if (whichloop == 0)
2786 return 0;
2787 /* In order to handle fixups, we actually create a conditional jump
2788 around an unconditional branch to exit the loop. If fixups are
2789 necessary, they go before the unconditional branch. */
2791 do_jump (cond, NULL_RTX, label);
2792 last_insn = get_last_insn ();
2793 if (GET_CODE (last_insn) == CODE_LABEL)
2794 whichloop->data.loop.alt_end_label = last_insn;
2795 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2796 NULL_RTX);
2797 emit_label (label);
2799 return 1;
2802 /* Return nonzero if the loop nest is empty. Else return zero. */
2805 stmt_loop_nest_empty ()
2807 /* cfun->stmt can be NULL if we are building a call to get the
2808 EH context for a setjmp/longjmp EH target and the current
2809 function was a deferred inline function. */
2810 return (cfun->stmt == NULL || loop_stack == NULL);
2813 /* Return non-zero if we should preserve sub-expressions as separate
2814 pseudos. We never do so if we aren't optimizing. We always do so
2815 if -fexpensive-optimizations.
2817 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2818 the loop may still be a small one. */
2821 preserve_subexpressions_p ()
2823 rtx insn;
2825 if (flag_expensive_optimizations)
2826 return 1;
2828 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2829 return 0;
2831 insn = get_last_insn_anywhere ();
2833 return (insn
2834 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2835 < n_non_fixed_regs * 3));
2839 /* Generate a jump to exit the current loop, conditional, binding contour
2840 or case statement. Not all such constructs are visible to this function,
2841 only those started with EXIT_FLAG nonzero. Individual languages use
2842 the EXIT_FLAG parameter to control which kinds of constructs you can
2843 exit this way.
2845 If not currently inside anything that can be exited,
2846 return 0 and do nothing; caller will print an error message. */
2849 expand_exit_something ()
2851 struct nesting *n;
2852 last_expr_type = 0;
2853 for (n = nesting_stack; n; n = n->all)
2854 if (n->exit_label != 0)
2856 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2857 return 1;
2860 return 0;
2863 /* Generate RTL to return from the current function, with no value.
2864 (That is, we do not do anything about returning any value.) */
2866 void
2867 expand_null_return ()
2869 rtx last_insn = get_last_insn ();
2871 /* If this function was declared to return a value, but we
2872 didn't, clobber the return registers so that they are not
2873 propagated live to the rest of the function. */
2874 clobber_return_register ();
2876 expand_null_return_1 (last_insn);
2879 /* Generate RTL to return from the current function, with value VAL. */
2881 static void
2882 expand_value_return (val)
2883 rtx val;
2885 rtx last_insn = get_last_insn ();
2886 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2888 /* Copy the value to the return location
2889 unless it's already there. */
2891 if (return_reg != val)
2893 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2894 #ifdef PROMOTE_FUNCTION_RETURN
2895 int unsignedp = TREE_UNSIGNED (type);
2896 enum machine_mode old_mode
2897 = DECL_MODE (DECL_RESULT (current_function_decl));
2898 enum machine_mode mode
2899 = promote_mode (type, old_mode, &unsignedp, 1);
2901 if (mode != old_mode)
2902 val = convert_modes (mode, old_mode, val, unsignedp);
2903 #endif
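/* Illustrative note (an assumption, not from the original source): on
   a target defining PROMOTE_FUNCTION_RETURN, a function returning
   `short' hands its result back in a full register, so VAL is widened
   above from HImode to the promoted mode before the copy below.  */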
2904 if (GET_CODE (return_reg) == PARALLEL)
2905 emit_group_load (return_reg, val, int_size_in_bytes (type),
2906 TYPE_ALIGN (type));
2907 else
2908 emit_move_insn (return_reg, val);
2911 expand_null_return_1 (last_insn);
2914 /* Output a return with no value. If LAST_INSN is nonzero,
2915 pretend that the return takes place after LAST_INSN. */
2917 static void
2918 expand_null_return_1 (last_insn)
2919 rtx last_insn;
2921 rtx end_label = cleanup_label ? cleanup_label : return_label;
2923 clear_pending_stack_adjust ();
2924 do_pending_stack_adjust ();
2925 last_expr_type = 0;
2927 if (end_label == 0)
2928 end_label = return_label = gen_label_rtx ();
2929 expand_goto_internal (NULL_TREE, end_label, last_insn);
2932 /* Generate RTL to evaluate the expression RETVAL and return it
2933 from the current function. */
2935 void
2936 expand_return (retval)
2937 tree retval;
2939 /* If there are any cleanups to be performed, then they will
2940 be inserted following LAST_INSN. It is desirable
2941 that the last_insn, for such purposes, should be the
2942 last insn before computing the return value. Otherwise, cleanups
2943 which call functions can clobber the return value. */
2944 /* ??? rms: I think that is erroneous, because in C++ it would
2945 run destructors on variables that might be used in the subsequent
2946 computation of the return value. */
2947 rtx last_insn = 0;
2948 rtx result_rtl;
2949 register rtx val = 0;
2950 tree retval_rhs;
2952 /* If function wants no value, give it none. */
2953 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2955 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2956 emit_queue ();
2957 expand_null_return ();
2958 return;
2961 if (retval == error_mark_node)
2963 /* Treat this like a return of no value from a function that
2964 returns a value. */
2965 expand_null_return ();
2966 return;
2968 else if (TREE_CODE (retval) == RESULT_DECL)
2969 retval_rhs = retval;
2970 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2971 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2972 retval_rhs = TREE_OPERAND (retval, 1);
2973 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2974 /* Recognize tail-recursive call to void function. */
2975 retval_rhs = retval;
2976 else
2977 retval_rhs = NULL_TREE;
2979 last_insn = get_last_insn ();
2981 /* Distribute return down conditional expr if either of the sides
2982 may involve tail recursion (see test below). This enhances the number
2983 of tail recursions we see. Don't do this always since it can produce
2984 sub-optimal code in some cases and we distribute assignments into
2985 conditional expressions when it would help. */
2987 if (optimize && retval_rhs != 0
2988 && frame_offset == 0
2989 && TREE_CODE (retval_rhs) == COND_EXPR
2990 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2991 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2993 rtx label = gen_label_rtx ();
2994 tree expr;
2996 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2997 start_cleanup_deferral ();
2998 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2999 DECL_RESULT (current_function_decl),
3000 TREE_OPERAND (retval_rhs, 1));
3001 TREE_SIDE_EFFECTS (expr) = 1;
3002 expand_return (expr);
3003 emit_label (label);
3005 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3006 DECL_RESULT (current_function_decl),
3007 TREE_OPERAND (retval_rhs, 2));
3008 TREE_SIDE_EFFECTS (expr) = 1;
3009 expand_return (expr);
3010 end_cleanup_deferral ();
3011 return;
3014 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3016 /* If the result is an aggregate that is being returned in one (or more)
3017 registers, load the registers here. The compiler currently can't handle
3018 copying a BLKmode value into registers. We could put this code in a
3019 more general area (for use by everyone instead of just function
3020 call/return), but until this feature is generally usable it is kept here
3021 (and in expand_call). The value must go into a pseudo in case there
3022 are cleanups that will clobber the real return register. */
3024 if (retval_rhs != 0
3025 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3026 && GET_CODE (result_rtl) == REG)
3028 int i;
3029 unsigned HOST_WIDE_INT bitpos, xbitpos;
3030 unsigned HOST_WIDE_INT big_endian_correction = 0;
3031 unsigned HOST_WIDE_INT bytes
3032 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3033 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3034 unsigned int bitsize
3035 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3036 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3037 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3038 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3039 enum machine_mode tmpmode, result_reg_mode;
3041 if (bytes == 0)
3043 expand_null_return ();
3044 return;
3047 /* Structures whose size is not a multiple of a word are aligned
3048 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3049 machine, this means we must skip the empty high order bytes when
3050 calculating the bit offset. */
3051 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3052 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3053 * BITS_PER_UNIT));
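/* Worked example (assumed parameters, not from the original source):
   with 32-bit words and a 6-byte structure, bytes % UNITS_PER_WORD
   is 2, so big_endian_correction = 32 - 2*8 = 16 and the value ends
   up right-justified within its registers.  */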
3055 /* Copy the structure BITSIZE bits at a time. */
3056 for (bitpos = 0, xbitpos = big_endian_correction;
3057 bitpos < bytes * BITS_PER_UNIT;
3058 bitpos += bitsize, xbitpos += bitsize)
3060 /* We need a new destination pseudo each time xbitpos is
3061 on a word boundary and when xbitpos == big_endian_correction
3062 (the first time through). */
3063 if (xbitpos % BITS_PER_WORD == 0
3064 || xbitpos == big_endian_correction)
3066 /* Generate an appropriate register. */
3067 dst = gen_reg_rtx (word_mode);
3068 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3070 /* Clobber the destination before we move anything into it. */
3071 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3074 /* We need a new source operand each time bitpos is on a word
3075 boundary. */
3076 if (bitpos % BITS_PER_WORD == 0)
3077 src = operand_subword_force (result_val,
3078 bitpos / BITS_PER_WORD,
3079 BLKmode);
3081 /* Use bitpos for the source extraction (left justified) and
3082 xbitpos for the destination store (right justified). */
3083 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3084 extract_bit_field (src, bitsize,
3085 bitpos % BITS_PER_WORD, 1,
3086 NULL_RTX, word_mode, word_mode,
3087 bitsize, BITS_PER_WORD),
3088 bitsize, BITS_PER_WORD);
3091 /* Find the smallest integer mode large enough to hold the
3092 entire structure and use that mode instead of BLKmode
3093 on the USE insn for the return register. */
3094 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3095 tmpmode != VOIDmode;
3096 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3097 /* Have we found a large enough mode? */
3098 if (GET_MODE_SIZE (tmpmode) >= bytes)
3099 break;
3101 /* No suitable mode found. */
3102 if (tmpmode == VOIDmode)
3103 abort ();
3105 PUT_MODE (result_rtl, tmpmode);
3107 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3108 result_reg_mode = word_mode;
3109 else
3110 result_reg_mode = tmpmode;
3111 result_reg = gen_reg_rtx (result_reg_mode);
3113 emit_queue ();
3114 for (i = 0; i < n_regs; i++)
3115 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3116 result_pseudos[i]);
3118 if (tmpmode != result_reg_mode)
3119 result_reg = gen_lowpart (tmpmode, result_reg);
3121 expand_value_return (result_reg);
3123 else if (retval_rhs != 0
3124 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3125 && (GET_CODE (result_rtl) == REG
3126 || (GET_CODE (result_rtl) == PARALLEL)))
3128 /* Calculate the return value into a temporary (usually a pseudo
3129 reg). */
3130 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3131 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3133 val = assign_temp (nt, 0, 0, 1);
3134 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3135 val = force_not_mem (val);
3136 emit_queue ();
3137 /* Return the calculated value, doing cleanups first. */
3138 expand_value_return (val);
3140 else
3142 /* No cleanups or no hard reg used;
3143 calculate value into hard return reg. */
3144 expand_expr (retval, const0_rtx, VOIDmode, 0);
3145 emit_queue ();
3146 expand_value_return (result_rtl);
3150 /* Return 1 if the end of the generated RTX is not a barrier.
3151 This means code already compiled can drop through. */
3154 drop_through_at_end_p ()
3156 rtx insn = get_last_insn ();
3157 while (insn && GET_CODE (insn) == NOTE)
3158 insn = PREV_INSN (insn);
3159 return insn && GET_CODE (insn) != BARRIER;
3162 /* Attempt to optimize a potential tail recursion call into a goto.
3163 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3164 where to place the jump to the tail recursion label.
3166 Return TRUE if the call was optimized into a goto. */
3169 optimize_tail_recursion (arguments, last_insn)
3170 tree arguments;
3171 rtx last_insn;
3173 /* Finish checking validity, and if valid emit code to set the
3174 argument variables for the new call. */
3175 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3177 if (tail_recursion_label == 0)
3179 tail_recursion_label = gen_label_rtx ();
3180 emit_label_after (tail_recursion_label,
3181 tail_recursion_reentry);
3183 emit_queue ();
3184 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3185 emit_barrier ();
3186 return 1;
3188 return 0;
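/* Illustrative effect (a sketch, not from the original source): in
       int sum (int n, int acc)
       { if (n == 0) return acc; return sum (n - 1, acc + n); }
   the tail call becomes stores of `n - 1' and `acc + n' into the
   DECL_RTL of N and ACC, followed by a jump to tail_recursion_label
   at the top of the function.  */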
3191 /* Emit code to alter this function's formal parms for a tail-recursive call.
3192 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3193 FORMALS is the chain of decls of formals.
3194 Return 1 if this can be done;
3195 otherwise return 0 and do not emit any code. */
3197 static int
3198 tail_recursion_args (actuals, formals)
3199 tree actuals, formals;
3201 register tree a = actuals, f = formals;
3202 register int i;
3203 register rtx *argvec;
3205 /* Check that number and types of actuals are compatible
3206 with the formals. This is not always true in valid C code.
3207 Also check that no formal needs to be addressable
3208 and that all formals are scalars. */
3210 /* Also count the args. */
3212 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3214 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3215 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3216 return 0;
3217 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3218 return 0;
3220 if (a != 0 || f != 0)
3221 return 0;
3223 /* Compute all the actuals. */
3225 argvec = (rtx *) alloca (i * sizeof (rtx));
3227 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3228 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3230 /* Find which actual values refer to current values of previous formals.
3231 Copy each of them now, before any formal is changed. */
3233 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3235 int copy = 0;
3236 register int j;
3237 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3238 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3240 copy = 1;
3241 break;
3243 if (copy)
3244 argvec[i] = copy_to_reg (argvec[i]);
3247 /* Store the values of the actuals into the formals. */
3249 for (f = formals, a = actuals, i = 0; f;
3250 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3252 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3253 emit_move_insn (DECL_RTL (f), argvec[i]);
3254 else
3255 convert_move (DECL_RTL (f), argvec[i],
3256 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3259 free_temp_slots ();
3260 return 1;
3263 /* Generate the RTL code for entering a binding contour.
3264 The variables are declared one by one, by calls to `expand_decl'.
3266 FLAGS is a bitwise or of the following flags:
3268 1 - Nonzero if this construct should be visible to
3269 `exit_something'.
3271 2 - Nonzero if this contour does not require a
3272 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3273 language-independent code should set this flag because they
3274 will not create corresponding BLOCK nodes. (There should be
3275 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3276 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3277 when expand_end_bindings is called.
3279 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3280 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3281 note. */
3283 void
3284 expand_start_bindings_and_block (flags, block)
3285 int flags;
3286 tree block;
3288 struct nesting *thisblock = ALLOC_NESTING ();
3289 rtx note;
3290 int exit_flag = ((flags & 1) != 0);
3291 int block_flag = ((flags & 2) == 0);
3293 /* If a BLOCK is supplied, then the caller should be requesting a
3294 NOTE_INSN_BLOCK_BEG note. */
3295 if (!block_flag && block)
3296 abort ();
3298 /* Create a note to mark the beginning of the block. */
3299 if (block_flag)
3301 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3302 NOTE_BLOCK (note) = block;
3304 else
3305 note = emit_note (NULL, NOTE_INSN_DELETED);
3307 /* Make an entry on block_stack for the block we are entering. */
3309 thisblock->next = block_stack;
3310 thisblock->all = nesting_stack;
3311 thisblock->depth = ++nesting_depth;
3312 thisblock->data.block.stack_level = 0;
3313 thisblock->data.block.cleanups = 0;
3314 thisblock->data.block.n_function_calls = 0;
3315 thisblock->data.block.exception_region = 0;
3316 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3318 thisblock->data.block.conditional_code = 0;
3319 thisblock->data.block.last_unconditional_cleanup = note;
3320 /* When we insert instructions after the last unconditional cleanup,
3321 we don't adjust last_insn. That means that a later add_insn will
3322 clobber the instructions we've just added. The easiest way to
3323 fix this is to just insert another instruction here, so that the
3324 instructions inserted after the last unconditional cleanup are
3325 never the last instruction. */
3326 emit_note (NULL, NOTE_INSN_DELETED);
3327 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3329 if (block_stack
3330 && !(block_stack->data.block.cleanups == NULL_TREE
3331 && block_stack->data.block.outer_cleanups == NULL_TREE))
3332 thisblock->data.block.outer_cleanups
3333 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3334 block_stack->data.block.outer_cleanups);
3335 else
3336 thisblock->data.block.outer_cleanups = 0;
3337 thisblock->data.block.label_chain = 0;
3338 thisblock->data.block.innermost_stack_block = stack_block_stack;
3339 thisblock->data.block.first_insn = note;
3340 thisblock->data.block.block_start_count = ++current_block_start_count;
3341 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3342 block_stack = thisblock;
3343 nesting_stack = thisblock;
3345 /* Make a new level for allocating stack slots. */
3346 push_temp_slots ();
3349 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3350 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3351 expand_expr are made. After we end the region, we know that all
3352 space for all temporaries that were created by TARGET_EXPRs will be
3353 destroyed and their space freed for reuse. */
3355 void
3356 expand_start_target_temps ()
3358 /* This is so that even if the result is preserved, the space
3359 allocated will be freed, as we know that it is no longer in use. */
3360 push_temp_slots ();
3362 /* Start a new binding layer that will keep track of all cleanup
3363 actions to be performed. */
3364 expand_start_bindings (2);
3366 target_temp_slot_level = temp_slot_level;
3369 void
3370 expand_end_target_temps ()
3372 expand_end_bindings (NULL_TREE, 0, 0);
3374 /* This is so that even if the result is preserved, the space
3375 allocated will be freed, as we know that it is no longer in use. */
3376 pop_temp_slots ();
3379 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3380 in question represents the outermost pair of curly braces (i.e. the "body
3381 block") of a function or method.
3383 For any BLOCK node representing a "body block" of a function or method, the
3384 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3385 represents the outermost (function) scope for the function or method (i.e.
3386 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3387 *that* node in turn will point to the relevant FUNCTION_DECL node. */
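/* Illustrative nesting (a sketch, not from the original source) for
   `int f (int x) { int y; ... }':
       FUNCTION_DECL `f'
         <-- BLOCK for the outermost (parameter) scope
               <-- BLOCK for the body: is_body_block returns 1.  */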
3390 is_body_block (stmt)
3391 register tree stmt;
3393 if (TREE_CODE (stmt) == BLOCK)
3395 tree parent = BLOCK_SUPERCONTEXT (stmt);
3397 if (parent && TREE_CODE (parent) == BLOCK)
3399 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3401 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3402 return 1;
3406 return 0;
3409 /* True if we are currently emitting insns in an area of output code
3410 that is controlled by a conditional expression. This is used by
3411 the cleanup handling code to generate conditional cleanup actions. */
3414 conditional_context ()
3416 return block_stack && block_stack->data.block.conditional_code;
3419 /* Return an opaque pointer to the current nesting level, so frontend code
3420 can check its own sanity. */
3422 struct nesting *
3423 current_nesting_level ()
3425 return cfun ? block_stack : 0;
3428 /* Emit a handler label for a nonlocal goto handler.
3429 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3431 static rtx
3432 expand_nl_handler_label (slot, before_insn)
3433 rtx slot, before_insn;
3435 rtx insns;
3436 rtx handler_label = gen_label_rtx ();
3438 /* Don't let cleanup_cfg delete the handler. */
3439 LABEL_PRESERVE_P (handler_label) = 1;
3441 start_sequence ();
3442 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3443 insns = get_insns ();
3444 end_sequence ();
3445 emit_insns_before (insns, before_insn);
3447 emit_label (handler_label);
3449 return handler_label;
3452 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3453 handler. */
3454 static void
3455 expand_nl_goto_receiver ()
3457 #ifdef HAVE_nonlocal_goto
3458 if (! HAVE_nonlocal_goto)
3459 #endif
3460 /* First adjust our frame pointer to its actual value. It was
3461 previously set to the start of the virtual area corresponding to
3462 the stacked variables when we branched here and now needs to be
3463 adjusted to the actual hardware fp value.
3465 Assignments to virtual registers are converted by
3466 instantiate_virtual_regs into the corresponding assignment
3467 to the underlying register (fp in this case) that makes
3468 the original assignment true.
3469 So the following insn will actually be
3470 decrementing fp by STARTING_FRAME_OFFSET. */
3471 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3473 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3474 if (fixed_regs[ARG_POINTER_REGNUM])
3476 #ifdef ELIMINABLE_REGS
3477 /* If the argument pointer can be eliminated in favor of the
3478 frame pointer, we don't need to restore it. We assume here
3479 that if such an elimination is present, it can always be used.
3480 This is the case on all known machines; if we don't make this
3481 assumption, we do unnecessary saving on many machines. */
3482 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3483 size_t i;
3485 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3486 if (elim_regs[i].from == ARG_POINTER_REGNUM
3487 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3488 break;
3490 if (i == ARRAY_SIZE (elim_regs))
3491 #endif
3493 /* Now restore our arg pointer from the address at which it
3494 was saved in our stack frame.
3495 If there hasn't been space allocated for it yet, make
3496 some now. */
3497 if (arg_pointer_save_area == 0)
3498 arg_pointer_save_area
3499 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3500 emit_move_insn (virtual_incoming_args_rtx,
3501 /* We need a pseudo here, or else
3502 instantiate_virtual_regs_1 complains. */
3503 copy_to_reg (arg_pointer_save_area));
3506 #endif
3508 #ifdef HAVE_nonlocal_goto_receiver
3509 if (HAVE_nonlocal_goto_receiver)
3510 emit_insn (gen_nonlocal_goto_receiver ());
3511 #endif
3514 /* Make handlers for nonlocal gotos taking place in the function calls in
3515 block THISBLOCK. */
3517 static void
3518 expand_nl_goto_receivers (thisblock)
3519 struct nesting *thisblock;
3521 tree link;
3522 rtx afterward = gen_label_rtx ();
3523 rtx insns, slot;
3524 rtx label_list;
3525 int any_invalid;
3527 /* Record the handler address in the stack slot for that purpose,
3528 during this block, saving and restoring the outer value. */
3529 if (thisblock->next != 0)
3530 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3532 rtx save_receiver = gen_reg_rtx (Pmode);
3533 emit_move_insn (XEXP (slot, 0), save_receiver);
3535 start_sequence ();
3536 emit_move_insn (save_receiver, XEXP (slot, 0));
3537 insns = get_insns ();
3538 end_sequence ();
3539 emit_insns_before (insns, thisblock->data.block.first_insn);
3542 /* Jump around the handlers; they run only when specially invoked. */
3543 emit_jump (afterward);
3545 /* Make a separate handler for each label. */
3546 link = nonlocal_labels;
3547 slot = nonlocal_goto_handler_slots;
3548 label_list = NULL_RTX;
3549 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3550 /* Skip any labels we shouldn't be able to jump to from here;
3551 we generate one special handler for all of them below, which just calls
3552 abort. */
3553 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3555 rtx lab;
3556 lab = expand_nl_handler_label (XEXP (slot, 0),
3557 thisblock->data.block.first_insn);
3558 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3560 expand_nl_goto_receiver ();
3562 /* Jump to the "real" nonlocal label. */
3563 expand_goto (TREE_VALUE (link));
3566 /* A second pass over all nonlocal labels; this time we handle those
3567 we should not be able to jump to at this point. */
3568 link = nonlocal_labels;
3569 slot = nonlocal_goto_handler_slots;
3570 any_invalid = 0;
3571 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3572 if (DECL_TOO_LATE (TREE_VALUE (link)))
3574 rtx lab;
3575 lab = expand_nl_handler_label (XEXP (slot, 0),
3576 thisblock->data.block.first_insn);
3577 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3578 any_invalid = 1;
3581 if (any_invalid)
3583 expand_nl_goto_receiver ();
3584 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3585 VOIDmode, 0);
3586 emit_barrier ();
3589 nonlocal_goto_handler_labels = label_list;
3590 emit_label (afterward);
3593 /* Warn about any unused VARS (which may contain nodes other than
3594 VAR_DECLs, but such nodes are ignored). The nodes are connected
3595 via the TREE_CHAIN field. */
3597 void
3598 warn_about_unused_variables (vars)
3599 tree vars;
3601 tree decl;
3603 if (warn_unused_variable)
3604 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3605 if (TREE_CODE (decl) == VAR_DECL
3606 && ! TREE_USED (decl)
3607 && ! DECL_IN_SYSTEM_HEADER (decl)
3608 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3609 warning_with_decl (decl, "unused variable `%s'");
3612 /* Generate RTL code to terminate a binding contour.
3614 VARS is the chain of VAR_DECL nodes for the variables bound in this
3615 contour. There may actually be other nodes in this chain, but any
3616 nodes other than VAR_DECLS are ignored.
3618 MARK_ENDS is nonzero if we should put a note at the beginning
3619 and end of this binding contour.
3621 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3622 (That is true automatically if the contour has a saved stack level.) */
3624 void
3625 expand_end_bindings (vars, mark_ends, dont_jump_in)
3626 tree vars;
3627 int mark_ends;
3628 int dont_jump_in;
3630 register struct nesting *thisblock = block_stack;
3632 /* If any of the variables in this scope were not used, warn the
3633 user. */
3634 warn_about_unused_variables (vars);
3636 if (thisblock->exit_label)
3638 do_pending_stack_adjust ();
3639 emit_label (thisblock->exit_label);
3642 /* If necessary, make handlers for nonlocal gotos taking
3643 place in the function calls in this block. */
3644 if (function_call_count != thisblock->data.block.n_function_calls
3645 && nonlocal_labels
3646 /* Make handler for outermost block
3647 if there were any nonlocal gotos to this function. */
3648 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3649 /* Make handler for inner block if it has something
3650 special to do when you jump out of it. */
3651 : (thisblock->data.block.cleanups != 0
3652 || thisblock->data.block.stack_level != 0)))
3653 expand_nl_goto_receivers (thisblock);
3655 /* Don't allow jumping into a block that has a stack level.
3656 Cleanups are allowed, though. */
3657 if (dont_jump_in
3658 || thisblock->data.block.stack_level != 0)
3660 struct label_chain *chain;
3662 /* Any labels in this block are no longer valid to go to.
3663 Mark them to cause an error message. */
3664 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3666 DECL_TOO_LATE (chain->label) = 1;
3667 /* If any goto without a fixup came to this label,
3668 that must be an error, because gotos without fixups
3669 come from outside all saved stack-levels. */
3670 if (TREE_ADDRESSABLE (chain->label))
3671 error_with_decl (chain->label,
3672 "label `%s' used before containing binding contour");
3676 /* Restore stack level in effect before the block
3677 (only if variable-size objects allocated). */
3678 /* Perform any cleanups associated with the block. */
3680 if (thisblock->data.block.stack_level != 0
3681 || thisblock->data.block.cleanups != 0)
3683 int reachable;
3684 rtx insn;
3686 /* Don't let cleanups affect ({...}) constructs. */
3687 int old_expr_stmts_for_value = expr_stmts_for_value;
3688 rtx old_last_expr_value = last_expr_value;
3689 tree old_last_expr_type = last_expr_type;
3690 expr_stmts_for_value = 0;
3692 /* Only clean up here if this point can actually be reached. */
3693 insn = get_last_insn ();
3694 if (GET_CODE (insn) == NOTE)
3695 insn = prev_nonnote_insn (insn);
3696 reachable = (! insn || GET_CODE (insn) != BARRIER);
3698 /* Do the cleanups. */
3699 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3700 if (reachable)
3701 do_pending_stack_adjust ();
3703 expr_stmts_for_value = old_expr_stmts_for_value;
3704 last_expr_value = old_last_expr_value;
3705 last_expr_type = old_last_expr_type;
3707 /* Restore the stack level. */
3709 if (reachable && thisblock->data.block.stack_level != 0)
3711 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3712 thisblock->data.block.stack_level, NULL_RTX);
3713 if (nonlocal_goto_handler_slots != 0)
3714 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3715 NULL_RTX);
3718 /* Any gotos out of this block must also do these things.
3719 Also report any gotos with fixups that came to labels in this
3720 level. */
3721 fixup_gotos (thisblock,
3722 thisblock->data.block.stack_level,
3723 thisblock->data.block.cleanups,
3724 thisblock->data.block.first_insn,
3725 dont_jump_in);
3728 /* Mark the beginning and end of the scope if requested.
3729 We do this now, after running cleanups on the variables
3730 just going out of scope, so they are in scope for their cleanups. */
3732 if (mark_ends)
3734 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3735 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3737 else
3738 /* Get rid of the beginning-mark if we don't make an end-mark. */
3739 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3741 /* Restore the temporary level of TARGET_EXPRs. */
3742 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3744 /* Restore block_stack level for containing block. */
3746 stack_block_stack = thisblock->data.block.innermost_stack_block;
3747 POPSTACK (block_stack);
3749 /* Pop the stack slot nesting and free any slots at this level. */
3750 pop_temp_slots ();
3753 /* Generate code to save the stack pointer at the start of the current block
3754 and set up to restore it on exit. */
3756 void
3757 save_stack_pointer ()
3759 struct nesting *thisblock = block_stack;
3761 if (thisblock->data.block.stack_level == 0)
3763 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3764 &thisblock->data.block.stack_level,
3765 thisblock->data.block.first_insn);
3766 stack_block_stack = thisblock;
3770 /* Generate RTL for the automatic variable declaration DECL.
3771 (Other kinds of declarations are simply ignored if seen here.) */
3773 void
3774 expand_decl (decl)
3775 register tree decl;
3777 struct nesting *thisblock;
3778 tree type;
3780 type = TREE_TYPE (decl);
3782 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3783 type in case this node is used in a reference. */
3784 if (TREE_CODE (decl) == CONST_DECL)
3786 DECL_MODE (decl) = TYPE_MODE (type);
3787 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3788 DECL_SIZE (decl) = TYPE_SIZE (type);
3789 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3790 return;
3793 /* Otherwise, only automatic variables need any expansion done. Static and
3794 external variables, and external functions, will be handled by
3795 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3796 nothing. PARM_DECLs are handled in `assign_parms'. */
3797 if (TREE_CODE (decl) != VAR_DECL)
3798 return;
3800 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3801 return;
3803 thisblock = block_stack;
3805 /* Create the RTL representation for the variable. */
3807 if (type == error_mark_node)
3808 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3810 else if (DECL_SIZE (decl) == 0)
3811 /* Variable with incomplete type. */
3813 rtx x;
3814 if (DECL_INITIAL (decl) == 0)
3815 /* Error message was already done; now avoid a crash. */
3816 x = gen_rtx_MEM (BLKmode, const0_rtx);
3817 else
3818 /* An initializer is going to decide the size of this array.
3819 Until we know the size, represent its address with a reg. */
3820 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3822 set_mem_attributes (x, decl, 1);
3823 SET_DECL_RTL (decl, x);
3825 else if (DECL_MODE (decl) != BLKmode
3826 /* If -ffloat-store, don't put explicit float vars
3827 into regs. */
3828 && !(flag_float_store
3829 && TREE_CODE (type) == REAL_TYPE)
3830 && ! TREE_THIS_VOLATILE (decl)
3831 && (DECL_REGISTER (decl) || optimize)
3832 /* If -fcheck-memory-usage, check all variables. */
3833 && ! current_function_check_memory_usage)
3835 /* Automatic variable that can go in a register. */
3836 int unsignedp = TREE_UNSIGNED (type);
3837 enum machine_mode reg_mode
3838 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3840 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3841 mark_user_reg (DECL_RTL (decl));
3843 if (POINTER_TYPE_P (type))
3844 mark_reg_pointer (DECL_RTL (decl),
3845 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3847 maybe_set_unchanging (DECL_RTL (decl), decl);
3849 /* If something wants our address, try to use ADDRESSOF. */
3850 if (TREE_ADDRESSABLE (decl))
3851 put_var_into_stack (decl);
3854 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3855 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3856 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3857 STACK_CHECK_MAX_VAR_SIZE)))
3859 /* Variable of fixed size that goes on the stack. */
3860 rtx oldaddr = 0;
3861 rtx addr;
3863 /* If we previously made RTL for this decl, it must be an array
3864 whose size was determined by the initializer.
3865 The old address was a register; set that register now
3866 to the proper address. */
3867 if (DECL_RTL_SET_P (decl))
3869 if (GET_CODE (DECL_RTL (decl)) != MEM
3870 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3871 abort ();
3872 oldaddr = XEXP (DECL_RTL (decl), 0);
3875 SET_DECL_RTL (decl,
3876 assign_temp (TREE_TYPE (decl), 1, 1, 1));
3878 /* Set alignment we actually gave this decl. */
3879 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3880 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3881 DECL_USER_ALIGN (decl) = 0;
3883 if (oldaddr)
3885 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3886 if (addr != oldaddr)
3887 emit_move_insn (oldaddr, addr);
3890 else
3891 /* Dynamic-size object: must push space on the stack. */
3893 rtx address, size, x;
3895 /* Record the stack pointer on entry to the block, if we have
3896 not already done so. */
3897 do_pending_stack_adjust ();
3898 save_stack_pointer ();
3900 /* In function-at-a-time mode, variable_size doesn't expand this,
3901 so do it now. */
3902 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3903 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3904 const0_rtx, VOIDmode, 0);
3906 /* Compute the variable's size, in bytes. */
3907 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3908 free_temp_slots ();
3910 /* Allocate space on the stack for the variable. Note that
3911 DECL_ALIGN says how the variable is to be aligned and we
3912 cannot use it to conclude anything about the alignment of
3913 the size. */
3914 address = allocate_dynamic_stack_space (size, NULL_RTX,
3915 TYPE_ALIGN (TREE_TYPE (decl)));
3917 /* Reference the variable indirectly through that rtx. */
3918 x = gen_rtx_MEM (DECL_MODE (decl), address);
3919 set_mem_attributes (x, decl, 1);
3920 SET_DECL_RTL (decl, x);
3923 /* Indicate the alignment we actually gave this variable. */
3924 #ifdef STACK_BOUNDARY
3925 DECL_ALIGN (decl) = STACK_BOUNDARY;
3926 #else
3927 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3928 #endif
3929 DECL_USER_ALIGN (decl) = 0;
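/* The chain of tests above picks one of three storage classes for an
   automatic variable.  A condensed standalone sketch of that decision
   order; the flags are hypothetical stand-ins for the tree/rtl
   predicates used above:  */

#if 0 /* illustrative sketch */
enum var_storage { VS_REGISTER, VS_FIXED_STACK, VS_DYNAMIC_STACK };

static enum var_storage
classify_storage (int is_scalar, int is_volatile, int reg_or_optimize,
		  int size_is_constant)
{
  /* Scalar, non-volatile locals may live in a pseudo register.  */
  if (is_scalar && ! is_volatile && reg_or_optimize)
    return VS_REGISTER;

  /* Constant-size variables get a fixed stack slot.  */
  if (size_is_constant)
    return VS_FIXED_STACK;

  /* Everything else (e.g. variable-length arrays) must push stack
     space dynamically at run time.  */
  return VS_DYNAMIC_STACK;
}
#endif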
3933 /* Emit code to perform the initialization of a declaration DECL. */
3935 void
3936 expand_decl_init (decl)
3937 tree decl;
3939 int was_used = TREE_USED (decl);
3941 /* If this is a CONST_DECL, we don't have to generate any code, but
3942 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3943 to be set while in the obstack containing the constant. If we don't
3944 do this, we can lose if we have functions nested three deep and the middle
3945 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3946 the innermost function is the first to expand that STRING_CST. */
3947 if (TREE_CODE (decl) == CONST_DECL)
3949 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3950 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3951 EXPAND_INITIALIZER);
3952 return;
3955 if (TREE_STATIC (decl))
3956 return;
3958 /* Compute and store the initial value now. */
3960 if (DECL_INITIAL (decl) == error_mark_node)
3962 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3964 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3965 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3966 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3967 0, 0);
3968 emit_queue ();
3970 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3972 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3973 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3974 emit_queue ();
3977 /* Don't let the initialization count as "using" the variable. */
3978 TREE_USED (decl) = was_used;
3980 /* Free any temporaries we made while initializing the decl. */
3981 preserve_temp_slots (NULL_RTX);
3982 free_temp_slots ();
3985 /* CLEANUP is an expression to be executed at exit from this binding contour;
3986 for example, in C++, it might call the destructor for this variable.
3988 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3989 CLEANUP multiple times, and have the correct semantics. This
3990 happens in exception handling, and for gotos, returns, and breaks
3991 that leave the current scope.
3993 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3994 that is not associated with any particular variable. */
3996 int
3997 expand_decl_cleanup (decl, cleanup)
3998 tree decl, cleanup;
4000 struct nesting *thisblock;
4002 /* Error if we are not in any block. */
4003 if (cfun == 0 || block_stack == 0)
4004 return 0;
4006 thisblock = block_stack;
4008 /* Record the cleanup if there is one. */
4010 if (cleanup != 0)
4012 tree t;
4013 rtx seq;
4014 tree *cleanups = &thisblock->data.block.cleanups;
4015 int cond_context = conditional_context ();
4017 if (cond_context)
4019 rtx flag = gen_reg_rtx (word_mode);
4020 rtx set_flag_0;
4021 tree cond;
4023 start_sequence ();
4024 emit_move_insn (flag, const0_rtx);
4025 set_flag_0 = get_insns ();
4026 end_sequence ();
4028 thisblock->data.block.last_unconditional_cleanup
4029 = emit_insns_after (set_flag_0,
4030 thisblock->data.block.last_unconditional_cleanup);
4032 emit_move_insn (flag, const1_rtx);
4034 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4035 SET_DECL_RTL (cond, flag);
4037 /* Conditionalize the cleanup. */
4038 cleanup = build (COND_EXPR, void_type_node,
4039 truthvalue_conversion (cond),
4040 cleanup, integer_zero_node);
4041 cleanup = fold (cleanup);
4043 cleanups = thisblock->data.block.cleanup_ptr;
4046 cleanup = unsave_expr (cleanup);
4048 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4050 if (! cond_context)
4051 /* If this block has a cleanup, it belongs in stack_block_stack. */
4052 stack_block_stack = thisblock;
4054 if (cond_context)
4056 start_sequence ();
4059 if (! using_eh_for_cleanups_p)
4060 TREE_ADDRESSABLE (t) = 1;
4061 else
4062 expand_eh_region_start ();
4064 if (cond_context)
4066 seq = get_insns ();
4067 end_sequence ();
4068 if (seq)
4069 thisblock->data.block.last_unconditional_cleanup
4070 = emit_insns_after (seq,
4071 thisblock->data.block.last_unconditional_cleanup);
4073 else
4075 thisblock->data.block.last_unconditional_cleanup
4076 = get_last_insn ();
4077 /* When we insert instructions after the last unconditional cleanup,
4078 we don't adjust last_insn. That means that a later add_insn will
4079 clobber the instructions we've just added. The easiest way to
4080 fix this is to just insert another instruction here, so that the
4081 instructions inserted after the last unconditional cleanup are
4082 never the last instruction. */
4083 emit_note (NULL, NOTE_INSN_DELETED);
4084 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4087 return 1;
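/* In a conditional context the code above guards the cleanup with a
   flag: the flag is forced to 0 at the last unconditional point, set
   to 1 next to the construction, and tested before the cleanup runs.
   The same discipline in plain C (names are illustrative):  */

#if 0 /* illustrative sketch */
static void run_cleanup (void);	/* stands in for the CLEANUP expr */

static void
conditional_cleanup_demo (int cond)
{
  int constructed = 0;		/* emitted at the unconditional point */

  if (cond)
    /* ... construct the object ... */
    constructed = 1;		/* emitted next to the construction */

  /* At scope exit the cleanup is itself conditionalized, so it runs
     only on paths that actually reached the construction.  */
  if (constructed)
    run_cleanup ();
}
#endif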
4090 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4091 DECL_ELTS is the list of elements that belong to DECL's type.
4092 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4094 void
4095 expand_anon_union_decl (decl, cleanup, decl_elts)
4096 tree decl, cleanup, decl_elts;
4098 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4099 rtx x;
4100 tree t;
4102 /* If any of the elements are addressable, so is the entire union. */
4103 for (t = decl_elts; t; t = TREE_CHAIN (t))
4104 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4106 TREE_ADDRESSABLE (decl) = 1;
4107 break;
4110 expand_decl (decl);
4111 expand_decl_cleanup (decl, cleanup);
4112 x = DECL_RTL (decl);
4114 /* Go through the elements, assigning RTL to each. */
4115 for (t = decl_elts; t; t = TREE_CHAIN (t))
4117 tree decl_elt = TREE_VALUE (t);
4118 tree cleanup_elt = TREE_PURPOSE (t);
4119 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4121 /* Propagate the union's alignment to the elements. */
4122 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4123 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4125 /* If the element has BLKmode and the union doesn't, the union is
4126 aligned such that the element doesn't need to have BLKmode, so
4127 change the element's mode to the appropriate one for its size. */
4128 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4129 DECL_MODE (decl_elt) = mode
4130 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4132 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4133 instead create a new MEM rtx with the proper mode. */
4134 if (GET_CODE (x) == MEM)
4136 if (mode == GET_MODE (x))
4137 SET_DECL_RTL (decl_elt, x);
4138 else
4139 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4141 else if (GET_CODE (x) == REG)
4143 if (mode == GET_MODE (x))
4144 SET_DECL_RTL (decl_elt, x);
4145 else
4146 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4148 else
4149 abort ();
4151 /* Record the cleanup if there is one. */
4153 if (cleanup != 0)
4154 thisblock->data.block.cleanups
4155 = tree_cons (decl_elt, cleanup_elt,
4156 thisblock->data.block.cleanups);
4160 /* Expand a list of cleanups LIST.
4161 Elements may be expressions or may be nested lists.
4163 If DONT_DO is nonnull, then any list-element
4164 whose TREE_PURPOSE matches DONT_DO is omitted.
4165 This is sometimes used to avoid a cleanup associated with
4166 a value that is being returned out of the scope.
4168 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4169 goto and handle protection regions specially in that case.
4171 If REACHABLE, we emit code, otherwise just inform the exception handling
4172 code about this finalization. */
4174 static void
4175 expand_cleanups (list, dont_do, in_fixup, reachable)
4176 tree list;
4177 tree dont_do;
4178 int in_fixup;
4179 int reachable;
4181 tree tail;
4182 for (tail = list; tail; tail = TREE_CHAIN (tail))
4183 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4185 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4186 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4187 else
4189 if (! in_fixup && using_eh_for_cleanups_p)
4190 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4192 if (reachable)
4194 /* Cleanups may be run multiple times. For example,
4195 when exiting a binding contour, we expand the
4196 cleanups associated with that contour. When a goto
4197 within that binding contour has a target outside that
4198 contour, it will expand all cleanups from its scope to
4199 the target. Though the cleanups are expanded multiple
4200 times, the control paths are non-overlapping so the
4201 cleanups will not be executed twice. */
4203 /* We may need to protect from outer cleanups. */
4204 if (in_fixup && using_eh_for_cleanups_p)
4206 expand_eh_region_start ();
4208 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4210 expand_eh_region_end_fixup (TREE_VALUE (tail));
4212 else
4213 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4215 free_temp_slots ();
4221 /* Mark the context in which we are emitting RTL as a conditional
4222 context, so that any cleanup actions we register with
4223 expand_decl_init will be properly conditionalized when those
4224 cleanup actions are later performed. Must be called before any
4225 expression (tree) is expanded that is within a conditional context. */
4227 void
4228 start_cleanup_deferral ()
4230 /* block_stack can be NULL if we are inside the parameter list. It is
4231 OK to do nothing, because cleanups aren't possible here. */
4232 if (block_stack)
4233 ++block_stack->data.block.conditional_code;
4236 /* Mark the end of a conditional region of code. Because cleanup
4237 deferrals may be nested, we may still be in a conditional region
4238 after we end the currently deferred cleanups; only after we end all
4239 deferred cleanups are we back in unconditional code. */
4241 void
4242 end_cleanup_deferral ()
4244 /* block_stack can be NULL if we are inside the parameter list. It is
4245 OK to do nothing, because cleanups aren't possible here. */
4246 if (block_stack)
4247 --block_stack->data.block.conditional_code;
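/* start_cleanup_deferral and end_cleanup_deferral above just maintain
   a nesting counter; the context is conditional whenever the counter
   is positive.  A standalone sketch of that counter discipline (only
   the names echo the functions above):  */

#if 0 /* illustrative sketch */
static int conditional_depth;

static void enter_conditional (void) { ++conditional_depth; }
static void leave_conditional (void) { --conditional_depth; }

/* Nonzero while any conditional region is still open, even when
   regions are nested -- the analogue of conditional_context ().  */
static int in_conditional_context (void) { return conditional_depth > 0; }
#endif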
4250 /* Move all cleanups from the current block_stack
4251 to the containing block_stack, where they are assumed to
4252 have been created. If anything can cause a temporary to
4253 be created, but not expanded for more than one level of
4254 block_stacks, then this code will have to change. */
4256 void
4257 move_cleanups_up ()
4259 struct nesting *block = block_stack;
4260 struct nesting *outer = block->next;
4262 outer->data.block.cleanups
4263 = chainon (block->data.block.cleanups,
4264 outer->data.block.cleanups);
4265 block->data.block.cleanups = 0;
4268 tree
4269 last_cleanup_this_contour ()
4271 if (block_stack == 0)
4272 return 0;
4274 return block_stack->data.block.cleanups;
4277 /* Return 1 if there are any pending cleanups at this point.
4278 If THIS_CONTOUR is nonzero, check the current contour as well.
4279 Otherwise, look only at the contours that enclose this one. */
4281 int
4282 any_pending_cleanups (this_contour)
4283 int this_contour;
4285 struct nesting *block;
4287 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4288 return 0;
4290 if (this_contour && block_stack->data.block.cleanups != NULL)
4291 return 1;
4292 if (block_stack->data.block.cleanups == 0
4293 && block_stack->data.block.outer_cleanups == 0)
4294 return 0;
4296 for (block = block_stack->next; block; block = block->next)
4297 if (block->data.block.cleanups != 0)
4298 return 1;
4300 return 0;
4303 /* Enter a case (Pascal) or switch (C) statement.
4304 Push a block onto case_stack and nesting_stack
4305 to accumulate the case-labels that are seen
4306 and to record the labels generated for the statement.
4308 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4309 Otherwise, this construct is transparent for `exit_something'.
4311 EXPR is the index-expression to be dispatched on.
4312 TYPE is its nominal type. We could simply convert EXPR to this type,
4313 but instead we take short cuts. */
4315 void
4316 expand_start_case (exit_flag, expr, type, printname)
4317 int exit_flag;
4318 tree expr;
4319 tree type;
4320 const char *printname;
4322 register struct nesting *thiscase = ALLOC_NESTING ();
4324 /* Make an entry on case_stack for the case we are entering. */
4326 thiscase->next = case_stack;
4327 thiscase->all = nesting_stack;
4328 thiscase->depth = ++nesting_depth;
4329 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4330 thiscase->data.case_stmt.case_list = 0;
4331 thiscase->data.case_stmt.index_expr = expr;
4332 thiscase->data.case_stmt.nominal_type = type;
4333 thiscase->data.case_stmt.default_label = 0;
4334 thiscase->data.case_stmt.printname = printname;
4335 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4336 case_stack = thiscase;
4337 nesting_stack = thiscase;
4339 do_pending_stack_adjust ();
4341 /* Make sure case_stmt.start points to something that won't
4342 need any transformation before expand_end_case. */
4343 if (GET_CODE (get_last_insn ()) != NOTE)
4344 emit_note (NULL, NOTE_INSN_DELETED);
4346 thiscase->data.case_stmt.start = get_last_insn ();
4348 start_cleanup_deferral ();
4351 /* Start a "dummy case statement" within which case labels are invalid
4352 and are not connected to any larger real case statement.
4353 This can be used if you don't want to let a case statement jump
4354 into the middle of certain kinds of constructs. */
4356 void
4357 expand_start_case_dummy ()
4359 register struct nesting *thiscase = ALLOC_NESTING ();
4361 /* Make an entry on case_stack for the dummy. */
4363 thiscase->next = case_stack;
4364 thiscase->all = nesting_stack;
4365 thiscase->depth = ++nesting_depth;
4366 thiscase->exit_label = 0;
4367 thiscase->data.case_stmt.case_list = 0;
4368 thiscase->data.case_stmt.start = 0;
4369 thiscase->data.case_stmt.nominal_type = 0;
4370 thiscase->data.case_stmt.default_label = 0;
4371 case_stack = thiscase;
4372 nesting_stack = thiscase;
4373 start_cleanup_deferral ();
4376 /* End a dummy case statement. */
4378 void
4379 expand_end_case_dummy ()
4381 end_cleanup_deferral ();
4382 POPSTACK (case_stack);
4385 /* Return the data type of the index-expression
4386 of the innermost case statement, or null if none. */
4388 tree
4389 case_index_expr_type ()
4391 if (case_stack)
4392 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4393 return 0;
4396 static void
4397 check_seenlabel ()
4399 /* If this is the first label, warn if any insns have been emitted. */
4400 if (case_stack->data.case_stmt.line_number_status >= 0)
4402 rtx insn;
4404 restore_line_number_status
4405 (case_stack->data.case_stmt.line_number_status);
4406 case_stack->data.case_stmt.line_number_status = -1;
4408 for (insn = case_stack->data.case_stmt.start;
4409 insn;
4410 insn = NEXT_INSN (insn))
4412 if (GET_CODE (insn) == CODE_LABEL)
4413 break;
4414 if (GET_CODE (insn) != NOTE
4415 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4418 insn = PREV_INSN (insn);
4419 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4421 /* If insn is zero, then there must have been a syntax error. */
4422 if (insn)
4423 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4424 NOTE_LINE_NUMBER (insn),
4425 "unreachable code at beginning of %s",
4426 case_stack->data.case_stmt.printname);
4427 break;
4433 /* Accumulate one case or default label inside a case or switch statement.
4434 VALUE is the value of the case (a null pointer, for a default label).
4435 The function CONVERTER, when applied to arguments T and V,
4436 converts the value V to the type T.
4438 If not currently inside a case or switch statement, return 1 and do
4439 nothing. The caller will print a language-specific error message.
4440 If VALUE is a duplicate or overlaps, return 2 and do nothing
4441 except store the (first) duplicate node in *DUPLICATE.
4442 If VALUE is out of range, return 3 and do nothing.
4443 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4444 Return 0 on success.
4446 Extended to handle range statements. */
4448 int
4449 pushcase (value, converter, label, duplicate)
4450 register tree value;
4451 tree (*converter) PARAMS ((tree, tree));
4452 register tree label;
4453 tree *duplicate;
4455 tree index_type;
4456 tree nominal_type;
4458 /* Fail if not inside a real case statement. */
4459 if (! (case_stack && case_stack->data.case_stmt.start))
4460 return 1;
4462 if (stack_block_stack
4463 && stack_block_stack->depth > case_stack->depth)
4464 return 5;
4466 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4467 nominal_type = case_stack->data.case_stmt.nominal_type;
4469 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4470 if (index_type == error_mark_node)
4471 return 0;
4473 /* Convert VALUE to the type in which the comparisons are nominally done. */
4474 if (value != 0)
4475 value = (*converter) (nominal_type, value);
4477 check_seenlabel ();
4479 /* Fail if this value is out of range for the actual type of the index
4480 (which may be narrower than NOMINAL_TYPE). */
4481 if (value != 0
4482 && (TREE_CONSTANT_OVERFLOW (value)
4483 || ! int_fits_type_p (value, index_type)))
4484 return 3;
4486 return add_case_node (value, value, label, duplicate);
4489 /* Like pushcase but this case applies to all values between VALUE1 and
4490 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4491 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4492 starts at VALUE1 and ends at the highest value of the index type.
4493 If both are NULL, this case applies to all values.
4495 The return value is the same as that of pushcase but there is one
4496 additional error code: 4 means the specified range was empty. */
4498 int
4499 pushcase_range (value1, value2, converter, label, duplicate)
4500 register tree value1, value2;
4501 tree (*converter) PARAMS ((tree, tree));
4502 register tree label;
4503 tree *duplicate;
4505 tree index_type;
4506 tree nominal_type;
4508 /* Fail if not inside a real case statement. */
4509 if (! (case_stack && case_stack->data.case_stmt.start))
4510 return 1;
4512 if (stack_block_stack
4513 && stack_block_stack->depth > case_stack->depth)
4514 return 5;
4516 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4517 nominal_type = case_stack->data.case_stmt.nominal_type;
4519 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4520 if (index_type == error_mark_node)
4521 return 0;
4523 check_seenlabel ();
4525 /* Convert VALUEs to type in which the comparisons are nominally done
4526 and replace any unspecified value with the corresponding bound. */
4527 if (value1 == 0)
4528 value1 = TYPE_MIN_VALUE (index_type);
4529 if (value2 == 0)
4530 value2 = TYPE_MAX_VALUE (index_type);
4532 /* Fail if the range is empty. Do this before any conversion since
4533 we want to allow out-of-range empty ranges. */
4534 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4535 return 4;
4537 /* If the max was unbounded, use the max of the nominal_type we are
4538 converting to. Do this after the < check above to suppress false
4539 positives. */
4540 if (value2 == 0)
4541 value2 = TYPE_MAX_VALUE (nominal_type);
4543 value1 = (*converter) (nominal_type, value1);
4544 value2 = (*converter) (nominal_type, value2);
4546 /* Fail if these values are out of range. */
4547 if (TREE_CONSTANT_OVERFLOW (value1)
4548 || ! int_fits_type_p (value1, index_type))
4549 return 3;
4551 if (TREE_CONSTANT_OVERFLOW (value2)
4552 || ! int_fits_type_p (value2, index_type))
4553 return 3;
4555 return add_case_node (value1, value2, label, duplicate);
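/* The order of the checks above matters: missing bounds are widened
   first, the empty-range test runs before any conversion so that an
   empty out-of-range case reports "empty" rather than "out of range",
   and only then are the bounds range-checked.  The same order over
   plain longs, returning the same codes (0 ok, 3 out of range,
   4 empty) -- a sketch, not the tree-based version above:  */

#if 0 /* illustrative sketch */
static int
check_case_range (long lo, long hi, int have_lo, int have_hi,
		  long type_min, long type_max)
{
  if (! have_lo)
    lo = type_min;
  if (! have_hi)
    hi = type_max;

  if (hi < lo)
    return 4;			/* empty range */

  if (lo < type_min || hi > type_max)
    return 3;			/* out of range for the index type */

  return 0;
}
#endif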
4558 /* Do the actual insertion of a case label for pushcase and pushcase_range
4559 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4560 slowdown for large switch statements. */
4562 static int
4563 add_case_node (low, high, label, duplicate)
4564 tree low, high;
4565 tree label;
4566 tree *duplicate;
4568 struct case_node *p, **q, *r;
4570 /* If there's no HIGH value, then this is not a case range; it's
4571 just a simple case label. But that's just a degenerate case
4572 range. */
4573 if (!high)
4574 high = low;
4576 /* Handle default labels specially. */
4577 if (!high && !low)
4579 if (case_stack->data.case_stmt.default_label != 0)
4581 *duplicate = case_stack->data.case_stmt.default_label;
4582 return 2;
4584 case_stack->data.case_stmt.default_label = label;
4585 expand_label (label);
4586 return 0;
4589 q = &case_stack->data.case_stmt.case_list;
4590 p = *q;
4592 while ((r = *q))
4594 p = r;
4596 /* Keep going past elements distinctly greater than HIGH. */
4597 if (tree_int_cst_lt (high, p->low))
4598 q = &p->left;
4600 /* or distinctly less than LOW. */
4601 else if (tree_int_cst_lt (p->high, low))
4602 q = &p->right;
4604 else
4606 /* We have an overlap; this is an error. */
4607 *duplicate = p->code_label;
4608 return 2;
4612 /* Add this label to the chain, and succeed. */
4614 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4615 r->low = low;
4617 /* If the bounds are equal, turn this into the one-value case. */
4618 if (tree_int_cst_equal (low, high))
4619 r->high = r->low;
4620 else
4621 r->high = high;
4623 r->code_label = label;
4624 expand_label (label);
4626 *q = r;
4627 r->parent = p;
4628 r->left = 0;
4629 r->right = 0;
4630 r->balance = 0;
4632 while (p)
4634 struct case_node *s;
4636 if (r == p->left)
4638 int b;
4640 if (! (b = p->balance))
4641 /* Growth propagation from left side. */
4642 p->balance = -1;
4643 else if (b < 0)
4645 if (r->balance < 0)
4647 /* R-Rotation */
4648 if ((p->left = s = r->right))
4649 s->parent = p;
4651 r->right = p;
4652 p->balance = 0;
4653 r->balance = 0;
4654 s = p->parent;
4655 p->parent = r;
4657 if ((r->parent = s))
4659 if (s->left == p)
4660 s->left = r;
4661 else
4662 s->right = r;
4664 else
4665 case_stack->data.case_stmt.case_list = r;
4667 else
4668 /* r->balance == +1 */
4670 /* LR-Rotation */
4672 int b2;
4673 struct case_node *t = r->right;
4675 if ((p->left = s = t->right))
4676 s->parent = p;
4678 t->right = p;
4679 if ((r->right = s = t->left))
4680 s->parent = r;
4682 t->left = r;
4683 b = t->balance;
4684 b2 = b < 0;
4685 p->balance = b2;
4686 b2 = -b2 - b;
4687 r->balance = b2;
4688 t->balance = 0;
4689 s = p->parent;
4690 p->parent = t;
4691 r->parent = t;
4693 if ((t->parent = s))
4695 if (s->left == p)
4696 s->left = t;
4697 else
4698 s->right = t;
4700 else
4701 case_stack->data.case_stmt.case_list = t;
4703 break;
4706 else
4708 /* p->balance == +1; growth of left side balances the node. */
4709 p->balance = 0;
4710 break;
4713 else
4714 /* r == p->right */
4716 int b;
4718 if (! (b = p->balance))
4719 /* Growth propagation from right side. */
4720 p->balance++;
4721 else if (b > 0)
4723 if (r->balance > 0)
4725 /* L-Rotation */
4727 if ((p->right = s = r->left))
4728 s->parent = p;
4730 r->left = p;
4731 p->balance = 0;
4732 r->balance = 0;
4733 s = p->parent;
4734 p->parent = r;
4735 if ((r->parent = s))
4737 if (s->left == p)
4738 s->left = r;
4739 else
4740 s->right = r;
4743 else
4744 case_stack->data.case_stmt.case_list = r;
4747 else
4748 /* r->balance == -1 */
4750 /* RL-Rotation */
4751 int b2;
4752 struct case_node *t = r->left;
4754 if ((p->right = s = t->left))
4755 s->parent = p;
4757 t->left = p;
4759 if ((r->left = s = t->right))
4760 s->parent = r;
4762 t->right = r;
4763 b = t->balance;
4764 b2 = b < 0;
4765 r->balance = b2;
4766 b2 = -b2 - b;
4767 p->balance = b2;
4768 t->balance = 0;
4769 s = p->parent;
4770 p->parent = t;
4771 r->parent = t;
4773 if ((t->parent = s))
4775 if (s->left == p)
4776 s->left = t;
4777 else
4778 s->right = t;
4781 else
4782 case_stack->data.case_stmt.case_list = t;
4784 break;
4786 else
4788 /* p->balance == -1; growth of right side balances the node. */
4789 p->balance = 0;
4790 break;
4794 r = p;
4795 p = p->parent;
4798 return 0;
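/* The rebalancing above is the classical AVL insertion, written
   iteratively with parent pointers and balance factors.  The same
   invariant is easier to see in a compact recursive, height-based
   form -- a standalone sketch over int keys, not the case-node
   version above:  */

#if 0 /* illustrative sketch */
struct avl { int key; int height; struct avl *left, *right; };

static int avl_h (struct avl *n) { return n ? n->height : 0; }

static void
avl_fix (struct avl *n)
{
  int lh = avl_h (n->left), rh = avl_h (n->right);
  n->height = 1 + (lh > rh ? lh : rh);
}

static struct avl *
rot_right (struct avl *y)
{
  struct avl *x = y->left;
  y->left = x->right; x->right = y;
  avl_fix (y); avl_fix (x);
  return x;
}

static struct avl *
rot_left (struct avl *x)
{
  struct avl *y = x->right;
  x->right = y->left; y->left = x;
  avl_fix (x); avl_fix (y);
  return y;
}

static struct avl *
avl_insert (struct avl *n, struct avl *fresh)
{
  int balance;

  if (n == 0)
    {
      fresh->height = 1;
      fresh->left = fresh->right = 0;
      return fresh;
    }

  if (fresh->key < n->key)
    n->left = avl_insert (n->left, fresh);
  else
    n->right = avl_insert (n->right, fresh);

  avl_fix (n);
  balance = avl_h (n->left) - avl_h (n->right);

  if (balance > 1)		/* left-heavy: R- or LR-rotation */
    {
      if (fresh->key >= n->left->key)
	n->left = rot_left (n->left);
      return rot_right (n);
    }
  if (balance < -1)		/* right-heavy: L- or RL-rotation */
    {
      if (fresh->key < n->right->key)
	n->right = rot_right (n->right);
      return rot_left (n);
    }
  return n;
}
#endif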
4801 /* Returns the number of possible values of TYPE.
4802 Returns -1 if the number is unknown, variable, or if the number does not
4803 fit in a HOST_WIDE_INT.
4804 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4805 do not increase monotonically (there may be duplicates);
4806 to 1 if the values increase monotonically, but not always by 1;
4807 otherwise sets it to 0. */
4809 HOST_WIDE_INT
4810 all_cases_count (type, sparseness)
4811 tree type;
4812 int *sparseness;
4814 tree t;
4815 HOST_WIDE_INT count, minval, lastval;
4817 *sparseness = 0;
4819 switch (TREE_CODE (type))
4821 case BOOLEAN_TYPE:
4822 count = 2;
4823 break;
4825 case CHAR_TYPE:
4826 count = 1 << BITS_PER_UNIT;
4827 break;
4829 default:
4830 case INTEGER_TYPE:
4831 if (TYPE_MAX_VALUE (type) != 0
4832 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4833 TYPE_MIN_VALUE (type))))
4834 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4835 convert (type, integer_one_node))))
4836 && host_integerp (t, 1))
4837 count = tree_low_cst (t, 1);
4838 else
4839 return -1;
4840 break;
4842 case ENUMERAL_TYPE:
4843 /* Don't waste time with enumeral types with huge values. */
4844 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4845 || TYPE_MAX_VALUE (type) == 0
4846 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4847 return -1;
4849 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4850 count = 0;
4852 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4854 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4856 if (*sparseness == 2 || thisval < lastval)
4857 *sparseness = 2;
4858 else if (thisval != minval + count)
4859 *sparseness = 1;
4861 count++;
4865 return count;
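/* Restated over a plain array of enumerator values: COUNT is just the
   number of values, and sparseness is 2 if the sequence ever
   decreases, 1 if it increases but skips, 0 if it is exactly
   min, min+1, ...  A standalone sketch of the classification:  */

#if 0 /* illustrative sketch */
static long
count_enum_values (const long *vals, long n, int *sparseness)
{
  long i;

  *sparseness = 0;
  for (i = 1; i < n; i++)
    {
      if (vals[i] < vals[i - 1])
	{
	  *sparseness = 2;	/* not monotonic */
	  break;
	}
      if (vals[i] != vals[0] + i)
	*sparseness = 1;	/* monotonic, but with gaps */
    }
  return n;
}
#endif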
4868 #define BITARRAY_TEST(ARRAY, INDEX) \
4869 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4870 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4871 #define BITARRAY_SET(ARRAY, INDEX) \
4872 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4873 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
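/* These macros implement an ordinary packed bit set: bit INDEX lives
   in byte INDEX / HOST_BITS_PER_CHAR, at bit position
   INDEX % HOST_BITS_PER_CHAR within that byte.  A small usage sketch
   (sizing the array with CHAR_BIT, which matches HOST_BITS_PER_CHAR
   on the host):  */

#if 0 /* illustrative sketch */
#include <limits.h>
#include <string.h>

static int
bitset_demo (void)
{
  unsigned char seen[(100 + CHAR_BIT - 1) / CHAR_BIT];	/* 100 bits */

  memset (seen, 0, sizeof seen);
  BITARRAY_SET (seen, 42);
  return BITARRAY_TEST (seen, 42) != 0;	/* yields 1: bit 42 is set */
}
#endif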
4875 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4876 with the case values we have seen, assuming the case expression
4877 has the given TYPE.
4878 SPARSENESS is as determined by all_cases_count.
4880 The time needed is proportional to COUNT, unless
4881 SPARSENESS is 2, in which case quadratic time is needed. */
4883 void
4884 mark_seen_cases (type, cases_seen, count, sparseness)
4885 tree type;
4886 unsigned char *cases_seen;
4887 HOST_WIDE_INT count;
4888 int sparseness;
4890 tree next_node_to_try = NULL_TREE;
4891 HOST_WIDE_INT next_node_offset = 0;
4893 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4894 tree val = make_node (INTEGER_CST);
4896 TREE_TYPE (val) = type;
4897 if (! root)
4898 /* Do nothing. */
4900 else if (sparseness == 2)
4902 tree t;
4903 unsigned HOST_WIDE_INT xlo;
4905 /* This less efficient loop is only needed to handle
4906 duplicate case values (multiple enum constants
4907 with the same value). */
4908 TREE_TYPE (val) = TREE_TYPE (root->low);
4909 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4910 t = TREE_CHAIN (t), xlo++)
4912 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4913 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4914 n = root;
4917 /* Keep going past elements distinctly greater than VAL. */
4918 if (tree_int_cst_lt (val, n->low))
4919 n = n->left;
4921 /* or distinctly less than VAL. */
4922 else if (tree_int_cst_lt (n->high, val))
4923 n = n->right;
4925 else
4927 /* We have found a matching range. */
4928 BITARRAY_SET (cases_seen, xlo);
4929 break;
4932 while (n);
4935 else
4937 if (root->left)
4938 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4940 for (n = root; n; n = n->right)
4942 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4943 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4944 while (! tree_int_cst_lt (n->high, val))
4946 /* Calculate (into xlo) the "offset" of the integer (val).
4947 The element with lowest value has offset 0, the next smallest
4948 element has offset 1, etc. */
4950 unsigned HOST_WIDE_INT xlo;
4951 HOST_WIDE_INT xhi;
4952 tree t;
4954 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4956 /* The TYPE_VALUES will be in increasing order, so
4957 start searching where we last ended. */
4958 t = next_node_to_try;
4959 xlo = next_node_offset;
4960 xhi = 0;
4961 for (;;)
4963 if (t == NULL_TREE)
4965 t = TYPE_VALUES (type);
4966 xlo = 0;
4968 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4970 next_node_to_try = TREE_CHAIN (t);
4971 next_node_offset = xlo + 1;
4972 break;
4974 xlo++;
4975 t = TREE_CHAIN (t);
4976 if (t == next_node_to_try)
4978 xlo = -1;
4979 break;
4983 else
4985 t = TYPE_MIN_VALUE (type);
4986 if (t)
4987 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4988 &xlo, &xhi);
4989 else
4990 xlo = xhi = 0;
4991 add_double (xlo, xhi,
4992 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4993 &xlo, &xhi);
4996 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
4997 BITARRAY_SET (cases_seen, xlo);
4999 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5000 1, 0,
5001 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5007 /* Called when the index of a switch statement is an enumerated type
5008 and there is no default label.
5010 Checks that all enumeration literals are covered by the case
5011 expressions of a switch. Also, warn if there are any extra
5012 switch cases that are *not* elements of the enumerated type.
5014 If all enumeration literals were covered by the case expressions,
5015 turn one of the expressions into the default expression since it should
5016 not be possible to fall through such a switch. */
5018 void
5019 check_for_full_enumeration_handling (type)
5020 tree type;
5022 register struct case_node *n;
5023 register tree chain;
5024 #if 0 /* variable used by 'if 0'ed code below. */
5025 register struct case_node **l;
5026 int all_values = 1;
5027 #endif
5029 /* True iff the selector type is a numbered set mode. */
5030 int sparseness = 0;
5032 /* The number of possible selector values. */
5033 HOST_WIDE_INT size;
5035 /* For each possible selector value, a one iff it has been matched
5036 by a case value alternative. */
5037 unsigned char *cases_seen;
5039 /* The allocated size of cases_seen, in chars. */
5040 HOST_WIDE_INT bytes_needed;
5042 if (! warn_switch)
5043 return;
5045 size = all_cases_count (type, &sparseness);
5046 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5048 if (size > 0 && size < 600000
5049 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5050 this optimization if we don't have enough memory rather than
5051 aborting, as xmalloc would do. */
5052 && (cases_seen =
5053 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5055 HOST_WIDE_INT i;
5056 tree v = TYPE_VALUES (type);
5058 /* The time complexity of this code is normally O(N), where
5059 N is the number of members in the enumerated type.
5060 However, if type is an ENUMERAL_TYPE whose values do not
5061 increase monotonically, O(N*log(N)) time may be needed. */
5063 mark_seen_cases (type, cases_seen, size, sparseness);
5065 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5066 if (BITARRAY_TEST (cases_seen, i) == 0)
5067 warning ("enumeration value `%s' not handled in switch",
5068 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5070 free (cases_seen);
5073 /* Now we go the other way around; we warn if there are case
5074 expressions that don't correspond to enumerators. This can
5075 occur since C and C++ don't enforce type-checking of
5076 assignments to enumeration variables. */
5078 if (case_stack->data.case_stmt.case_list
5079 && case_stack->data.case_stmt.case_list->left)
5080 case_stack->data.case_stmt.case_list
5081 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5082 if (warn_switch)
5083 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5085 for (chain = TYPE_VALUES (type);
5086 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5087 chain = TREE_CHAIN (chain))
5090 if (!chain)
5092 if (TYPE_NAME (type) == 0)
5093 warning ("case value `%ld' not in enumerated type",
5094 (long) TREE_INT_CST_LOW (n->low));
5095 else
5096 warning ("case value `%ld' not in enumerated type `%s'",
5097 (long) TREE_INT_CST_LOW (n->low),
5098 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5099 == IDENTIFIER_NODE)
5100 ? TYPE_NAME (type)
5101 : DECL_NAME (TYPE_NAME (type))));
5103 if (!tree_int_cst_equal (n->low, n->high))
5105 for (chain = TYPE_VALUES (type);
5106 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5107 chain = TREE_CHAIN (chain))
5110 if (!chain)
5112 if (TYPE_NAME (type) == 0)
5113 warning ("case value `%ld' not in enumerated type",
5114 (long) TREE_INT_CST_LOW (n->high));
5115 else
5116 warning ("case value `%ld' not in enumerated type `%s'",
5117 (long) TREE_INT_CST_LOW (n->high),
5118 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5119 == IDENTIFIER_NODE)
5120 ? TYPE_NAME (type)
5121 : DECL_NAME (TYPE_NAME (type))));
5126 #if 0
5127 /* ??? This optimization is disabled because it causes valid programs to
5128 fail. ANSI C does not guarantee that an expression with enum type
5129 will have a value that is the same as one of the enumeration literals. */
5131 /* If all values were found as case labels, make one of them the default
5132 label. Thus, this switch will never fall through. We arbitrarily pick
5133 the last one to make the default since this is likely the most
5134 efficient choice. */
5136 if (all_values)
5138 for (l = &case_stack->data.case_stmt.case_list;
5139 (*l)->right != 0;
5140 l = &(*l)->right)
5143 case_stack->data.case_stmt.default_label = (*l)->code_label;
5144 *l = 0;
5146 #endif /* 0 */
5149 /* Free CN and its children. */
5151 static void
5152 free_case_nodes (cn)
5153 case_node_ptr cn;
5155 if (cn)
5157 free_case_nodes (cn->left);
5158 free_case_nodes (cn->right);
5159 free (cn);
5164 /* Terminate a case (Pascal) or switch (C) statement
5165 in which ORIG_INDEX is the expression to be tested.
5166 Generate the code to test it and jump to the right place. */
5168 void
5169 expand_end_case (orig_index)
5170 tree orig_index;
5172 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5173 rtx default_label = 0;
5174 register struct case_node *n;
5175 unsigned int count;
5176 rtx index;
5177 rtx table_label;
5178 int ncases;
5179 rtx *labelvec;
5180 register int i;
5181 rtx before_case;
5182 register struct nesting *thiscase = case_stack;
5183 tree index_expr, index_type;
5184 int unsignedp;
5186 /* Don't crash due to previous errors. */
5187 if (thiscase == NULL)
5188 return;
5190 table_label = gen_label_rtx ();
5191 index_expr = thiscase->data.case_stmt.index_expr;
5192 index_type = TREE_TYPE (index_expr);
5193 unsignedp = TREE_UNSIGNED (index_type);
5195 do_pending_stack_adjust ();
5197 /* This might get a spurious warning in the presence of a syntax error;
5198 it could be fixed by moving the call to check_seenlabel after the
5199 check for error_mark_node, and copying the code of check_seenlabel that
5200 deals with case_stack->data.case_stmt.line_number_status /
5201 restore_line_number_status in front of the call to end_cleanup_deferral.
5202 However, this might miss some useful warnings in the presence of
5203 non-syntax errors. */
5204 check_seenlabel ();
5206 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5207 if (index_type != error_mark_node)
5209 /* If switch expression was an enumerated type, check that all
5210 enumeration literals are covered by the cases.
5211 No sense trying this if there's a default case, however. */
5213 if (!thiscase->data.case_stmt.default_label
5214 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5215 && TREE_CODE (index_expr) != INTEGER_CST)
5216 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5218 /* If we don't have a default-label, create one here,
5219 after the body of the switch. */
5220 if (thiscase->data.case_stmt.default_label == 0)
5222 thiscase->data.case_stmt.default_label
5223 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5224 expand_label (thiscase->data.case_stmt.default_label);
5226 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5228 before_case = get_last_insn ();
5230 if (thiscase->data.case_stmt.case_list
5231 && thiscase->data.case_stmt.case_list->left)
5232 thiscase->data.case_stmt.case_list
5233 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5235 /* Simplify the case-list before we count it. */
5236 group_case_nodes (thiscase->data.case_stmt.case_list);
5238 /* Get upper and lower bounds of case values.
5239 Also convert all the case values to the index expr's data type. */
5241 count = 0;
5242 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5244 /* Check low and high label values are integers. */
5245 if (TREE_CODE (n->low) != INTEGER_CST)
5246 abort ();
5247 if (TREE_CODE (n->high) != INTEGER_CST)
5248 abort ();
5250 n->low = convert (index_type, n->low);
5251 n->high = convert (index_type, n->high);
5253 /* Count the elements and track the largest and smallest
5254 of them (treating them as signed even if they are not). */
5255 if (count++ == 0)
5257 minval = n->low;
5258 maxval = n->high;
5260 else
5262 if (INT_CST_LT (n->low, minval))
5263 minval = n->low;
5264 if (INT_CST_LT (maxval, n->high))
5265 maxval = n->high;
5267 /* A range counts double, since it requires two compares. */
5268 if (! tree_int_cst_equal (n->low, n->high))
5269 count++;
5272 orig_minval = minval;
5274 /* Compute span of values. */
5275 if (count != 0)
5276 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5278 end_cleanup_deferral ();
5280 if (count == 0)
5282 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5283 emit_queue ();
5284 emit_jump (default_label);
5287 /* If the range of values is much bigger than the number of values,
5288 make a sequence of conditional branches instead of a dispatch.
5289 If the switch-index is a constant, do it this way
5290 because we can optimize it. */
5292 #ifndef CASE_VALUES_THRESHOLD
5293 #ifdef HAVE_casesi
5294 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5295 #else
5296 /* If machine does not have a case insn that compares the
5297 bounds, this means extra overhead for dispatch tables
5298 which raises the threshold for using them. */
5299 #define CASE_VALUES_THRESHOLD 5
5300 #endif /* HAVE_casesi */
5301 #endif /* CASE_VALUES_THRESHOLD */
5303 else if (count < CASE_VALUES_THRESHOLD
5304 || compare_tree_int (range, 10 * count) > 0
5305 /* RANGE may be signed, and really large ranges will show up
5306 as negative numbers. */
5307 || compare_tree_int (range, 0) < 0
5308 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5309 || flag_pic
5310 #endif
5311 || TREE_CODE (index_expr) == INTEGER_CST
5312 /* These will reduce to a constant. */
5313 || (TREE_CODE (index_expr) == CALL_EXPR
5314 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5315 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5316 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
5317 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5318 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5319 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5321 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5323 /* If the index is a short or char for which we do not have
5324 an insn to handle comparisons directly, convert it to
5325 a full integer now, rather than letting each comparison
5326 generate the conversion. */
5328 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5329 && ! have_insn_for (COMPARE, GET_MODE (index)))
5331 enum machine_mode wider_mode;
5332 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5333 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5334 if (have_insn_for (COMPARE, wider_mode))
5336 index = convert_to_mode (wider_mode, index, unsignedp);
5337 break;
5341 emit_queue ();
5342 do_pending_stack_adjust ();
5344 index = protect_from_queue (index, 0);
5345 if (GET_CODE (index) == MEM)
5346 index = copy_to_reg (index);
5347 if (GET_CODE (index) == CONST_INT
5348 || TREE_CODE (index_expr) == INTEGER_CST)
5350 /* Make a tree node with the proper constant value
5351 if we don't already have one. */
5352 if (TREE_CODE (index_expr) != INTEGER_CST)
5354 index_expr
5355 = build_int_2 (INTVAL (index),
5356 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5357 index_expr = convert (index_type, index_expr);
5360 /* For constant index expressions we need only
5361 issue an unconditional branch to the appropriate
5362 target code. The job of removing any unreachable
5363 code is left to the optimization phase if the
5364 "-O" option is specified. */
5365 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5366 if (! tree_int_cst_lt (index_expr, n->low)
5367 && ! tree_int_cst_lt (n->high, index_expr))
5368 break;
5370 if (n)
5371 emit_jump (label_rtx (n->code_label));
5372 else
5373 emit_jump (default_label);
5375 else
5377 /* If the index expression is not constant we generate
5378 a binary decision tree to select the appropriate
5379 target code. This is done as follows:
5381 The list of cases is rearranged into a binary tree,
5382 nearly optimal assuming equal probability for each case.
5384 The tree is transformed into RTL, eliminating
5385 redundant test conditions at the same time.
5387 If program flow could reach the end of the
5388 decision tree an unconditional jump to the
5389 default code is emitted. */
5391 use_cost_table
5392 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5393 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5394 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5395 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5396 default_label, index_type);
5397 emit_jump_if_reachable (default_label);
5400 else
5402 int win = 0;
5403 #ifdef HAVE_casesi
5404 if (HAVE_casesi)
5406 enum machine_mode index_mode = SImode;
5407 int index_bits = GET_MODE_BITSIZE (index_mode);
5408 rtx op1, op2;
5409 enum machine_mode op_mode;
5411 /* Convert the index to SImode. */
5412 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5413 > GET_MODE_BITSIZE (index_mode))
5415 enum machine_mode omode = TYPE_MODE (index_type);
5416 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5418 /* We must handle the endpoints in the original mode. */
5419 index_expr = build (MINUS_EXPR, index_type,
5420 index_expr, minval);
5421 minval = integer_zero_node;
5422 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5423 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5424 omode, 1, 0, default_label);
5425 /* Now we can safely truncate. */
5426 index = convert_to_mode (index_mode, index, 0);
5428 else
5430 if (TYPE_MODE (index_type) != index_mode)
5432 index_expr = convert (type_for_size (index_bits, 0),
5433 index_expr);
5434 index_type = TREE_TYPE (index_expr);
5437 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5439 emit_queue ();
5440 index = protect_from_queue (index, 0);
5441 do_pending_stack_adjust ();
5443 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5444 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
5445 (index, op_mode))
5446 index = copy_to_mode_reg (op_mode, index);
5448 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5450 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5451 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
5452 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
5453 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
5454 (op1, op_mode))
5455 op1 = copy_to_mode_reg (op_mode, op1);
5457 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5459 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
5460 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
5461 op2, TREE_UNSIGNED (TREE_TYPE (range)));
5462 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
5463 (op2, op_mode))
5464 op2 = copy_to_mode_reg (op_mode, op2);
5466 emit_jump_insn (gen_casesi (index, op1, op2,
5467 table_label, default_label));
5468 win = 1;
5470 #endif
5471 #ifdef HAVE_tablejump
5472 if (! win && HAVE_tablejump)
5474 index_type = thiscase->data.case_stmt.nominal_type;
5475 index_expr = fold (build (MINUS_EXPR, index_type,
5476 convert (index_type, index_expr),
5477 convert (index_type, minval)));
5478 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5479 emit_queue ();
5480 index = protect_from_queue (index, 0);
5481 do_pending_stack_adjust ();
5483 do_tablejump (index, TYPE_MODE (index_type),
5484 convert_modes (TYPE_MODE (index_type),
5485 TYPE_MODE (TREE_TYPE (range)),
5486 expand_expr (range, NULL_RTX,
5487 VOIDmode, 0),
5488 TREE_UNSIGNED (TREE_TYPE (range))),
5489 table_label, default_label);
5490 win = 1;
5492 #endif
5493 if (! win)
5494 abort ();
5496 /* Get table of labels to jump to, in order of case index. */
5498 ncases = TREE_INT_CST_LOW (range) + 1;
5499 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5500 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5502 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5504 register HOST_WIDE_INT i
5505 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5507 while (1)
5509 labelvec[i]
5510 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5511 if (i + TREE_INT_CST_LOW (orig_minval)
5512 == TREE_INT_CST_LOW (n->high))
5513 break;
5514 i++;
5518 /* Fill in the gaps with the default. */
5519 for (i = 0; i < ncases; i++)
5520 if (labelvec[i] == 0)
5521 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5523 /* Output the table. */
5524 emit_label (table_label);
5526 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5527 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5528 gen_rtx_LABEL_REF (Pmode, table_label),
5529 gen_rtvec_v (ncases, labelvec),
5530 const0_rtx, const0_rtx));
5531 else
5532 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5533 gen_rtvec_v (ncases, labelvec)));
5535 /* If the case insn drops through the table,
5536 after the table we must jump to the default-label.
5537 Otherwise record no drop-through after the table. */
5538 #ifdef CASE_DROPS_THROUGH
5539 emit_jump (default_label);
5540 #else
5541 emit_barrier ();
5542 #endif
5545 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5546 reorder_insns (before_case, get_last_insn (),
5547 thiscase->data.case_stmt.start);
5549 else
5550 end_cleanup_deferral ();
5552 if (thiscase->exit_label)
5553 emit_label (thiscase->exit_label);
5555 free_case_nodes (case_stack->data.case_stmt.case_list);
5556 POPSTACK (case_stack);
5558 free_temp_slots ();
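/* Stripped of the RTL details, the strategy choice above is a density
   test: a dispatch table pays off only when there are enough cases,
   the span MAX-MIN is at most about ten times the number of cases,
   and the index is not a compile-time constant.  A standalone
   restatement, assuming the non-casesi threshold of 5 used above:  */

#if 0 /* illustrative sketch */
static int
use_jump_table_p (unsigned long count, unsigned long range,
		  int index_is_constant)
{
  if (index_is_constant)
    return 0;			/* reduces to a direct jump anyway */
  if (count < 5)
    return 0;			/* too few cases: compare-and-branch wins */
  if (range > 10 * count)
    return 0;			/* too sparse: table would be mostly default */
  return 1;
}
#endif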
5561 /* Convert the tree NODE into a list linked by the right field, with the left
5562 field zeroed. RIGHT is used for recursion; it is a list to be placed
5563 rightmost in the resulting list. */
5565 static struct case_node *
5566 case_tree2list (node, right)
5567 struct case_node *node, *right;
5569 struct case_node *left;
5571 if (node->right)
5572 right = case_tree2list (node->right, right);
5574 node->right = right;
5575 if ((left = node->left))
5577 node->left = 0;
5578 return case_tree2list (left, node);
5581 return node;
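/* case_tree2list is an in-place right-threading of the tree: the
   right spine becomes the list and each left subtree is spliced in
   front of its parent, yielding ascending order.  The same recursion
   over a bare int tree -- a sketch, not the case_node version:  */

#if 0 /* illustrative sketch */
struct inode { int key; struct inode *left, *right; };

static struct inode *
tree2list (struct inode *node, struct inode *rest)
{
  struct inode *left;

  if (node->right)
    rest = tree2list (node->right, rest);
  node->right = rest;		/* chain larger keys after NODE */

  if ((left = node->left) != 0)
    {
      node->left = 0;
      return tree2list (left, node);	/* splice smaller keys in front */
    }
  return node;
}
#endif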
5584 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5586 static void
5587 do_jump_if_equal (op1, op2, label, unsignedp)
5588 rtx op1, op2, label;
5589 int unsignedp;
5591 if (GET_CODE (op1) == CONST_INT
5592 && GET_CODE (op2) == CONST_INT)
5594 if (INTVAL (op1) == INTVAL (op2))
5595 emit_jump (label);
5597 else
5599 enum machine_mode mode = GET_MODE (op1);
5600 if (mode == VOIDmode)
5601 mode = GET_MODE (op2);
5602 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5603 0, label);
5607 /* Not all case values are encountered equally. This function
5608 uses a heuristic to weight case labels, in cases where that
5609 looks like a reasonable thing to do.
5611 Right now, all we try to guess is text, and we establish the
5612 following weights:
5614 chars above space: 16
5615 digits: 16
5616 default: 12
5617 space, punct: 8
5618 tab: 4
5619 newline: 2
5620 other "\" chars: 1
5621 remaining chars: 0
5623 If we find any cases in the switch that are not either -1 or in the range
5624 of valid ASCII characters, or are control characters other than those
5625 commonly used with "\", don't treat this switch as scanning text.
5627 Return 1 if these nodes are suitable for cost estimation, otherwise
5628 return 0. */
5630 static int
5631 estimate_case_costs (node)
5632 case_node_ptr node;
5634 tree min_ascii = integer_minus_one_node;
5635 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5636 case_node_ptr n;
5637 int i;
5639 /* If we haven't already made the cost table, make it now. Note that the
5640 lower bound of the table is -1, not zero. */
5642 if (! cost_table_initialized)
5644 cost_table_initialized = 1;
5646 for (i = 0; i < 128; i++)
5648 if (ISALNUM (i))
5649 COST_TABLE (i) = 16;
5650 else if (ISPUNCT (i))
5651 COST_TABLE (i) = 8;
5652 else if (ISCNTRL (i))
5653 COST_TABLE (i) = -1;
5656 COST_TABLE (' ') = 8;
5657 COST_TABLE ('\t') = 4;
5658 COST_TABLE ('\0') = 4;
5659 COST_TABLE ('\n') = 2;
5660 COST_TABLE ('\f') = 1;
5661 COST_TABLE ('\v') = 1;
5662 COST_TABLE ('\b') = 1;
5665 /* See if all the case expressions look like text.  It is text if the
5666 lowest constant is >= -1 and the highest constant is <= 127.  Do all comparisons
5667 as signed arithmetic since we don't want to ever access cost_table with a
5668 value less than -1. Also check that none of the constants in a range
5669 are strange control characters. */
5671 for (n = node; n; n = n->right)
5673 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5674 return 0;
5676 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5677 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5678 if (COST_TABLE (i) < 0)
5679 return 0;
5682 /* All interesting values are within the range of interesting
5683 ASCII characters. */
5684 return 1;
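/* Editorial sketch (added commentary): the cost table built above,
   restated as a plain lookup function over 0..127.  The specific
   overrides must win over the class tests, since tab, newline etc. are
   also control characters; the ordering below therefore mirrors the
   code, not the summary table in the comment before the function.
   The helper name is hypothetical.  */
#if 0
#include <ctype.h>

static int
cost_sketch (int c)    /* valid for c in 0..127 */
{
  switch (c)
    {
    case ' ':  return 8;
    case '\t': case '\0': return 4;
    case '\n': return 2;
    case '\f': case '\v': case '\b': return 1;
    default:
      if (isalnum (c)) return 16;
      if (ispunct (c)) return 8;
      if (iscntrl (c)) return -1;   /* "strange" control character */
      return 0;
    }
}
#endif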
5687 /* Scan an ordered list of case nodes
5688 combining those with consecutive values or ranges.
5690 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5692 static void
5693 group_case_nodes (head)
5694 case_node_ptr head;
5696 case_node_ptr node = head;
5698 while (node)
5700 rtx lb = next_real_insn (label_rtx (node->code_label));
5701 rtx lb2;
5702 case_node_ptr np = node;
5704 /* Try to group the successors of NODE with NODE. */
5705 while (((np = np->right) != 0)
5706 /* Do they jump to the same place? */
5707 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5708 || (lb != 0 && lb2 != 0
5709 && simplejump_p (lb)
5710 && simplejump_p (lb2)
5711 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5712 SET_SRC (PATTERN (lb2)))))
5713 /* Are their ranges consecutive? */
5714 && tree_int_cst_equal (np->low,
5715 fold (build (PLUS_EXPR,
5716 TREE_TYPE (node->high),
5717 node->high,
5718 integer_one_node)))
5719 /* An overflow is not consecutive. */
5720 && tree_int_cst_lt (node->high,
5721 fold (build (PLUS_EXPR,
5722 TREE_TYPE (node->high),
5723 node->high,
5724 integer_one_node))))
5726 node->high = np->high;
5728 /* NP is the first node after NODE which can't be grouped with it.
5729 Delete the nodes in between, and move on to that node. */
5730 node->right = np;
5731 node = np;
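/* Editorial example (added commentary): with three single-valued
   entries 1:, 2:, 3: whose labels all reach the same place, the loop
   above widens the first node twice, so

     [1,1] -> [2,2] -> [3,3]   becomes   [1,3]

   A minimal stand-in of the grouping on a toy node type (names
   hypothetical; the real code compares jump targets via the insn
   stream rather than a `target' pointer):  */
#if 0
#include <limits.h>

struct toy_range { long low, high; void *target; struct toy_range *right; };

static void
group_sketch (struct toy_range *node)
{
  while (node)
    {
      struct toy_range *np = node;

      while ((np = np->right) != 0
             && np->target == node->target  /* same jump destination */
             && node->high != LONG_MAX      /* widening would not overflow */
             && np->low == node->high + 1)  /* ranges are consecutive */
        node->high = np->high;

      /* NP is the first node that can't be grouped; unlink the rest.  */
      node->right = np;
      node = np;
    }
}
#endif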
5735 /* Take an ordered list of case nodes
5736 and transform them into a near optimal binary tree,
5737 on the assumption that any target code selection value is as
5738 likely as any other.
5740 The transformation is performed by splitting the ordered
5741 list into two equal sections plus a pivot. The parts are
5742 then attached to the pivot as left and right branches. Each
5743 branch is then transformed recursively. */
5745 static void
5746 balance_case_nodes (head, parent)
5747 case_node_ptr *head;
5748 case_node_ptr parent;
5750 register case_node_ptr np;
5752 np = *head;
5753 if (np)
5755 int cost = 0;
5756 int i = 0;
5757 int ranges = 0;
5758 register case_node_ptr *npp;
5759 case_node_ptr left;
5761 /* Count the number of entries on this branch.  Also count the ranges. */
5763 while (np)
5765 if (!tree_int_cst_equal (np->low, np->high))
5767 ranges++;
5768 if (use_cost_table)
5769 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5772 if (use_cost_table)
5773 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5775 i++;
5776 np = np->right;
5779 if (i > 2)
5781 /* Split this list if it is long enough for that to help. */
5782 npp = head;
5783 left = *npp;
5784 if (use_cost_table)
5786 /* Find the place in the list that bisects the list's total cost;
5787 here I gets half the total cost. */
5788 int n_moved = 0;
5789 i = (cost + 1) / 2;
5790 while (1)
5792 /* Skip nodes while their cost does not reach that amount. */
5793 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5794 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5795 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5796 if (i <= 0)
5797 break;
5798 npp = &(*npp)->right;
5799 n_moved += 1;
5801 if (n_moved == 0)
5803 /* Leave this branch lopsided, but optimize left-hand
5804 side and fill in `parent' fields for right-hand side. */
5805 np = *head;
5806 np->parent = parent;
5807 balance_case_nodes (&np->left, np);
5808 for (; np->right; np = np->right)
5809 np->right->parent = np;
5810 return;
5813 /* If there are just three nodes, split at the middle one. */
5814 else if (i == 3)
5815 npp = &(*npp)->right;
5816 else
5818 /* Find the place in the list that bisects the list's total cost,
5819 where ranges count as 2.
5820 Here I gets half the total cost. */
5821 i = (i + ranges + 1) / 2;
5822 while (1)
5824 /* Skip nodes while their cost does not reach that amount. */
5825 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5826 i--;
5827 i--;
5828 if (i <= 0)
5829 break;
5830 npp = &(*npp)->right;
5833 *head = np = *npp;
5834 *npp = 0;
5835 np->parent = parent;
5836 np->left = left;
5838 /* Optimize each of the two split parts. */
5839 balance_case_nodes (&np->left, np);
5840 balance_case_nodes (&np->right, np);
5842 else
5844 /* Else leave this branch as one level,
5845 but fill in `parent' fields. */
5846 np = *head;
5847 np->parent = parent;
5848 for (; np->right; np = np->right)
5849 np->right->parent = np;
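/* Editorial example (added commentary): for seven equally weighted
   single-valued nodes

     1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7

   the split above picks 4 as the pivot, hangs 1..3 to its left and
   5..7 to its right, and recurses on each half:

            4
          /   \
         2     6
        / \   / \
       1   3 5   7

   With the cost table in use, the pivot instead moves toward the
   heavier end so that roughly half the total weight lies on each
   side.  */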
5854 /* Search the parent sections of the case node tree
5855 to see if a test for the lower bound of NODE would be redundant.
5856 INDEX_TYPE is the type of the index expression.
5858 The instructions to generate the case decision tree are
5859 output in the same order as nodes are processed, so it is
5860 known that if a parent node tests the value one below the
5861 current node's low bound, the current node is bounded at its
5862 lower end.  Thus the test would be redundant. */
5864 static int
5865 node_has_low_bound (node, index_type)
5866 case_node_ptr node;
5867 tree index_type;
5869 tree low_minus_one;
5870 case_node_ptr pnode;
5872 /* If the lower bound of this node is the lowest value in the index type,
5873 we need not test it. */
5875 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5876 return 1;
5878 /* If this node has a left branch, the value at the left must be less
5879 than that at this node, so it cannot be bounded at the bottom and
5880 we need not bother testing any further. */
5882 if (node->left)
5883 return 0;
5885 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5886 node->low, integer_one_node));
5888 /* If the subtraction above overflowed, we can't verify anything.
5889 Otherwise, look for a parent that tests our value - 1. */
5891 if (! tree_int_cst_lt (low_minus_one, node->low))
5892 return 0;
5894 for (pnode = node->parent; pnode; pnode = pnode->parent)
5895 if (tree_int_cst_equal (low_minus_one, pnode->high))
5896 return 1;
5898 return 0;
5901 /* Search the parent sections of the case node tree
5902 to see if a test for the upper bound of NODE would be redundant.
5903 INDEX_TYPE is the type of the index expression.
5905 The instructions to generate the case decision tree are
5906 output in the same order as nodes are processed, so it is
5907 known that if a parent node tests the value one above the
5908 current node's high bound, the current node is bounded at its
5909 upper end.  Thus the test would be redundant. */
5911 static int
5912 node_has_high_bound (node, index_type)
5913 case_node_ptr node;
5914 tree index_type;
5916 tree high_plus_one;
5917 case_node_ptr pnode;
5919 /* If there is no upper bound, obviously no test is needed. */
5921 if (TYPE_MAX_VALUE (index_type) == NULL)
5922 return 1;
5924 /* If the upper bound of this node is the highest value in the type
5925 of the index expression, we need not test against it. */
5927 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5928 return 1;
5930 /* If this node has a right branch, the value at the right must be greater
5931 than that at this node, so it cannot be bounded at the top and
5932 we need not bother testing any further. */
5934 if (node->right)
5935 return 0;
5937 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5938 node->high, integer_one_node));
5940 /* If the addition above overflowed, we can't verify anything.
5941 Otherwise, look for a parent that tests our value + 1. */
5943 if (! tree_int_cst_lt (node->high, high_plus_one))
5944 return 0;
5946 for (pnode = node->parent; pnode; pnode = pnode->parent)
5947 if (tree_int_cst_equal (high_plus_one, pnode->low))
5948 return 1;
5950 return 0;
5953 /* Search the parent sections of the
5954 case node tree to see if both tests for the upper and lower
5955 bounds of NODE would be redundant. */
5957 static int
5958 node_is_bounded (node, index_type)
5959 case_node_ptr node;
5960 tree index_type;
5962 return (node_has_low_bound (node, index_type)
5963 && node_has_high_bound (node, index_type));
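/* Editorial example (added commentary): suppose a parent node covers
   10..49 and the current node covers 50..59.  Control reaches the
   current node's code only after the parent's test against 49, so
   node_has_low_bound finds low - 1 == 49 on the parent chain and the
   `index < 50' test is pruned.  If 59 also happens to be
   TYPE_MAX_VALUE of the index type, node_has_high_bound holds too,
   the node is fully bounded, and emit_case_nodes can emit an
   unconditional jump.  */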
5966 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5968 static void
5969 emit_jump_if_reachable (label)
5970 rtx label;
5972 if (GET_CODE (get_last_insn ()) != BARRIER)
5973 emit_jump (label);
5976 /* Emit step-by-step code to select a case for the value of INDEX.
5977 The decision tree thus generated follows the form of the
5978 case-node binary tree NODE, whose nodes represent test conditions.
5979 INDEX_TYPE is the type of the index of the switch.
5981 Care is taken to prune redundant tests from the decision tree
5982 by detecting any boundary conditions already checked by
5983 emitted rtx. (See node_has_high_bound, node_has_low_bound
5984 and node_is_bounded, above.)
5986 Where the test conditions can be shown to be redundant we emit
5987 an unconditional jump to the target code. As a further
5988 optimization, the subordinates of a tree node are examined to
5989 check for bounded nodes.  In this case conditional and/or
5990 unconditional jumps resulting from the boundary check for the
5991 current node are arranged to target the subordinates' associated
5992 code for out-of-bound conditions on the current node.
5994 We can assume that when control reaches the code generated here,
5995 the index value has already been compared with the parents
5996 of this node, and determined to be on the same side of each parent
5997 as this node is. Thus, if this node tests for the value 51,
5998 and a parent tested for 52, we don't need to consider
5999 the possibility of a value greater than 51. If another parent
6000 tests for the value 50, then this node need not test anything. */
6002 static void
6003 emit_case_nodes (index, node, default_label, index_type)
6004 rtx index;
6005 case_node_ptr node;
6006 rtx default_label;
6007 tree index_type;
6009 /* If INDEX has an unsigned type, we must make unsigned branches. */
6010 int unsignedp = TREE_UNSIGNED (index_type);
6011 enum machine_mode mode = GET_MODE (index);
6012 enum machine_mode imode = TYPE_MODE (index_type);
6014 /* See if our parents have already tested everything for us.
6015 If they have, emit an unconditional jump for this node. */
6016 if (node_is_bounded (node, index_type))
6017 emit_jump (label_rtx (node->code_label));
6019 else if (tree_int_cst_equal (node->low, node->high))
6021 /* Node is single valued. First see if the index expression matches
6022 this node and then check our children, if any. */
6024 do_jump_if_equal (index,
6025 convert_modes (mode, imode,
6026 expand_expr (node->low, NULL_RTX,
6027 VOIDmode, 0),
6028 unsignedp),
6029 label_rtx (node->code_label), unsignedp);
6031 if (node->right != 0 && node->left != 0)
6033 /* This node has children on both sides.
6034 Dispatch to one side or the other
6035 by comparing the index value with this node's value.
6036 If one subtree is bounded, check that one first,
6037 so we can avoid real branches in the tree. */
6039 if (node_is_bounded (node->right, index_type))
6041 emit_cmp_and_jump_insns (index,
6042 convert_modes
6043 (mode, imode,
6044 expand_expr (node->high, NULL_RTX,
6045 VOIDmode, 0),
6046 unsignedp),
6047 GT, NULL_RTX, mode, unsignedp, 0,
6048 label_rtx (node->right->code_label));
6049 emit_case_nodes (index, node->left, default_label, index_type);
6052 else if (node_is_bounded (node->left, index_type))
6054 emit_cmp_and_jump_insns (index,
6055 convert_modes
6056 (mode, imode,
6057 expand_expr (node->high, NULL_RTX,
6058 VOIDmode, 0),
6059 unsignedp),
6060 LT, NULL_RTX, mode, unsignedp, 0,
6061 label_rtx (node->left->code_label));
6062 emit_case_nodes (index, node->right, default_label, index_type);
6065 else
6067 /* Neither child node is bounded.  First distinguish the two sides;
6068 then emit the code for one side at a time. */
6070 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6072 /* See if the value is on the right. */
6073 emit_cmp_and_jump_insns (index,
6074 convert_modes
6075 (mode, imode,
6076 expand_expr (node->high, NULL_RTX,
6077 VOIDmode, 0),
6078 unsignedp),
6079 GT, NULL_RTX, mode, unsignedp, 0,
6080 label_rtx (test_label));
6082 /* Value must be on the left.
6083 Handle the left-hand subtree. */
6084 emit_case_nodes (index, node->left, default_label, index_type);
6085 /* If left-hand subtree does nothing,
6086 go to default. */
6087 emit_jump_if_reachable (default_label);
6089 /* Code branches here for the right-hand subtree. */
6090 expand_label (test_label);
6091 emit_case_nodes (index, node->right, default_label, index_type);
6095 else if (node->right != 0 && node->left == 0)
6097 /* Here we have a right child but no left, so we issue a conditional
6098 branch to default and process the right child.
6100 Omit the conditional branch to default if it would avoid only one
6101 right child; it costs too much space to save so little time. */
6103 if (node->right->right || node->right->left
6104 || !tree_int_cst_equal (node->right->low, node->right->high))
6106 if (!node_has_low_bound (node, index_type))
6108 emit_cmp_and_jump_insns (index,
6109 convert_modes
6110 (mode, imode,
6111 expand_expr (node->high, NULL_RTX,
6112 VOIDmode, 0),
6113 unsignedp),
6114 LT, NULL_RTX, mode, unsignedp, 0,
6115 default_label);
6118 emit_case_nodes (index, node->right, default_label, index_type);
6120 else
6121 /* We cannot process node->right normally
6122 since we haven't ruled out the numbers less than
6123 this node's value. So handle node->right explicitly. */
6124 do_jump_if_equal (index,
6125 convert_modes
6126 (mode, imode,
6127 expand_expr (node->right->low, NULL_RTX,
6128 VOIDmode, 0),
6129 unsignedp),
6130 label_rtx (node->right->code_label), unsignedp);
6133 else if (node->right == 0 && node->left != 0)
6135 /* Just one subtree, on the left. */
6137 #if 0 /* The following code and comment were formerly part
6138 of the condition here, but they didn't work
6139 and I don't understand what the idea was. -- rms. */
6140 /* If our "most probable entry" is less probable
6141 than the default label, emit a jump to
6142 the default label using condition codes
6143 already lying around. With no right branch,
6144 a branch-greater-than will get us to the default
6145 label correctly. */
6146 if (use_cost_table
6147 && COST_TABLE (TREE_INT_CST_LOW (node->high)) < 12)
6149 #endif /* 0 */
6150 if (node->left->left || node->left->right
6151 || !tree_int_cst_equal (node->left->low, node->left->high))
6153 if (!node_has_high_bound (node, index_type))
6155 emit_cmp_and_jump_insns (index,
6156 convert_modes
6157 (mode, imode,
6158 expand_expr (node->high, NULL_RTX,
6159 VOIDmode, 0),
6160 unsignedp),
6161 GT, NULL_RTX, mode, unsignedp, 0,
6162 default_label);
6165 emit_case_nodes (index, node->left, default_label, index_type);
6167 else
6168 /* We cannot process node->left normally
6169 since we haven't ruled out the numbers greater than
6170 this node's value. So handle node->left explicitly. */
6171 do_jump_if_equal (index,
6172 convert_modes
6173 (mode, imode,
6174 expand_expr (node->left->low, NULL_RTX,
6175 VOIDmode, 0),
6176 unsignedp),
6177 label_rtx (node->left->code_label), unsignedp);
6180 else
6182 /* Node is a range. These cases are very similar to those for a single
6183 value, except that we do not start by testing whether this node
6184 is the one to branch to. */
6186 if (node->right != 0 && node->left != 0)
6188 /* Node has subtrees on both sides.
6189 If the right-hand subtree is bounded,
6190 test for it first, since we can go straight there.
6191 Otherwise, we need to make a branch in the control structure,
6192 then handle the two subtrees. */
6193 tree test_label = 0;
6195 if (node_is_bounded (node->right, index_type))
6196 /* Right hand node is fully bounded so we can eliminate any
6197 testing and branch directly to the target code. */
6198 emit_cmp_and_jump_insns (index,
6199 convert_modes
6200 (mode, imode,
6201 expand_expr (node->high, NULL_RTX,
6202 VOIDmode, 0),
6203 unsignedp),
6204 GT, NULL_RTX, mode, unsignedp, 0,
6205 label_rtx (node->right->code_label));
6206 else
6208 /* Right hand node requires testing.
6209 Branch to a label where we will handle it later. */
6211 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6212 emit_cmp_and_jump_insns (index,
6213 convert_modes
6214 (mode, imode,
6215 expand_expr (node->high, NULL_RTX,
6216 VOIDmode, 0),
6217 unsignedp),
6218 GT, NULL_RTX, mode, unsignedp, 0,
6219 label_rtx (test_label));
6222 /* Value belongs to this node or to the left-hand subtree. */
6224 emit_cmp_and_jump_insns (index,
6225 convert_modes
6226 (mode, imode,
6227 expand_expr (node->low, NULL_RTX,
6228 VOIDmode, 0),
6229 unsignedp),
6230 GE, NULL_RTX, mode, unsignedp, 0,
6231 label_rtx (node->code_label));
6233 /* Handle the left-hand subtree. */
6234 emit_case_nodes (index, node->left, default_label, index_type);
6236 /* If right node had to be handled later, do that now. */
6238 if (test_label)
6240 /* If the left-hand subtree fell through,
6241 don't let it fall into the right-hand subtree. */
6242 emit_jump_if_reachable (default_label);
6244 expand_label (test_label);
6245 emit_case_nodes (index, node->right, default_label, index_type);
6249 else if (node->right != 0 && node->left == 0)
6251 /* Deal with values to the left of this node,
6252 if they are possible. */
6253 if (!node_has_low_bound (node, index_type))
6255 emit_cmp_and_jump_insns (index,
6256 convert_modes
6257 (mode, imode,
6258 expand_expr (node->low, NULL_RTX,
6259 VOIDmode, 0),
6260 unsignedp),
6261 LT, NULL_RTX, mode, unsignedp, 0,
6262 default_label);
6265 /* Value belongs to this node or to the right-hand subtree. */
6267 emit_cmp_and_jump_insns (index,
6268 convert_modes
6269 (mode, imode,
6270 expand_expr (node->high, NULL_RTX,
6271 VOIDmode, 0),
6272 unsignedp),
6273 LE, NULL_RTX, mode, unsignedp, 0,
6274 label_rtx (node->code_label));
6276 emit_case_nodes (index, node->right, default_label, index_type);
6279 else if (node->right == 0 && node->left != 0)
6281 /* Deal with values to the right of this node,
6282 if they are possible. */
6283 if (!node_has_high_bound (node, index_type))
6285 emit_cmp_and_jump_insns (index,
6286 convert_modes
6287 (mode, imode,
6288 expand_expr (node->high, NULL_RTX,
6289 VOIDmode, 0),
6290 unsignedp),
6291 GT, NULL_RTX, mode, unsignedp, 0,
6292 default_label);
6295 /* Value belongs to this node or to the left-hand subtree. */
6297 emit_cmp_and_jump_insns (index,
6298 convert_modes
6299 (mode, imode,
6300 expand_expr (node->low, NULL_RTX,
6301 VOIDmode, 0),
6302 unsignedp),
6303 GE, NULL_RTX, mode, unsignedp, 0,
6304 label_rtx (node->code_label));
6306 emit_case_nodes (index, node->left, default_label, index_type);
6309 else
6311 /* Node has no children so we check low and high bounds to remove
6312 redundant tests. Only one of the bounds can exist,
6313 since otherwise this node is bounded--a case tested already. */
6314 int high_bound = node_has_high_bound (node, index_type);
6315 int low_bound = node_has_low_bound (node, index_type);
6317 if (!high_bound && low_bound)
6319 emit_cmp_and_jump_insns (index,
6320 convert_modes
6321 (mode, imode,
6322 expand_expr (node->high, NULL_RTX,
6323 VOIDmode, 0),
6324 unsignedp),
6325 GT, NULL_RTX, mode, unsignedp, 0,
6326 default_label);
6329 else if (!low_bound && high_bound)
6331 emit_cmp_and_jump_insns (index,
6332 convert_modes
6333 (mode, imode,
6334 expand_expr (node->low, NULL_RTX,
6335 VOIDmode, 0),
6336 unsignedp),
6337 LT, NULL_RTX, mode, unsignedp, 0,
6338 default_label);
6340 else if (!low_bound && !high_bound)
6342 /* Widen LOW and HIGH to the same width as INDEX. */
6343 tree type = type_for_mode (mode, unsignedp);
6344 tree low = build1 (CONVERT_EXPR, type, node->low);
6345 tree high = build1 (CONVERT_EXPR, type, node->high);
6346 rtx low_rtx, new_index, new_bound;
6348 /* Instead of doing two branches, emit one unsigned branch for
6349 (index-low) > (high-low). */
6350 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6351 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6352 NULL_RTX, unsignedp,
6353 OPTAB_WIDEN);
6354 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6355 high, low)),
6356 NULL_RTX, mode, 0);
6358 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6359 mode, 1, 0, default_label);
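/* Editorial note (added commentary): since NEW_INDEX is computed with
   wrap-around semantics, an INDEX below LOW wraps to a large unsigned
   value, so the single unsigned test

     (unsigned) (index - low) > (unsigned) (high - low)

   rejects index < low and index > high at once.  E.g. with low = 10,
   high = 19: index = 9 gives (unsigned) -1, which is huge, and
   index = 25 gives 15 > 9; both branch to default_label.  */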
6362 emit_jump (label_rtx (node->code_label));