/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
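
/* A minimal sketch of that calling sequence, assuming a C-like front end
   (the exit-flag argument is simplified here; see the definitions of these
   functions for the real interface):

       expand_start_cond (cond, 0);
       (expand RTL for the then-clause here)
       expand_end_cond ();

   Each such start-function pushes a `struct nesting' entry, and the
   matching end-function pops it; see the nesting stack below.  */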
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
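
/* A minimal sketch of the two strategies, assuming a C front end.
   A dense switch such as

       switch (x) { case 1: ... case 2: ... case 3: ... }

   is a candidate for a branch table indexed by `x - 1', while a sparse
   one such as

       switch (x) { case 1: ... case 1000: ... case 1000000: ... }

   would instead be compiled to a balanced tree of compare-and-jump insns
   built from the case_node structures below.  */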
struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
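
/* A minimal usage sketch, assuming an `expand_end_WHATEVER' function
   that owns the top entry of one of the typed stacks:

       struct nesting *thiscase = case_stack;
       (emit the dispatch code for the construct here)
       POPSTACK (case_stack);

   POPSTACK unwinds nesting_stack down through `thiscase', keeping every
   typed stack and nesting_depth consistent, and frees the entries back
   to stmt_obstack.  */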
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
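
/* A minimal example of source that creates one of these fixups, assuming
   a C front end with variable-sized arrays:

       goto out;		(forward goto; `out' not yet defined)
       {
	 char buf[n];		(this contour adjusts the stack)
	 ...
       }
      out: ;

   When the goto is expanded, `out' has no rtl yet, so expand_fixup
   records the jump; once `out' is defined, fixup_gotos inserts the
   stack-restore (and any cleanups) just before the jump insn.  */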
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to the following entry in the chain.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};
#define block_stack (current_function->stmt->x_block_stack)
#define stack_block_stack (current_function->stmt->x_stack_block_stack)
#define cond_stack (current_function->stmt->x_cond_stack)
#define loop_stack (current_function->stmt->x_loop_stack)
#define case_stack (current_function->stmt->x_case_stack)
#define nesting_stack (current_function->stmt->x_nesting_stack)
#define nesting_depth (current_function->stmt->x_nesting_depth)
#define current_block_start_count (current_function->stmt->x_block_start_count)
#define last_expr_type (current_function->stmt->x_last_expr_type)
#define last_expr_value (current_function->stmt->x_last_expr_value)
#define expr_stmts_for_value (current_function->stmt->x_expr_stmts_for_value)
#define emit_filename (current_function->stmt->x_emit_filename)
#define emit_lineno (current_function->stmt->x_emit_lineno)
#define goto_fixup_chain (current_function->stmt->x_goto_fixup_chain)
/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Character strings, each containing a single decimal digit.  */
static char *digit_strings[10];
static int n_occurrences		PROTO((int, const char *));
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static rtx expand_nl_handler_label	PROTO((rtx, rtx));
static void expand_nl_goto_receiver	PROTO((void));
static void expand_nl_goto_receivers	PROTO((struct nesting *));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void check_seenlabel		PROTO((void));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
static void mark_cond_nesting		PROTO((struct nesting *));
static void mark_loop_nesting		PROTO((struct nesting *));
static void mark_block_nesting		PROTO((struct nesting *));
static void mark_case_nesting		PROTO((struct nesting *));
static void mark_goto_fixup		PROTO((struct goto_fixup *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}
/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}
/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}
/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	ggc_mark_tree (l->label);

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}
/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct case_node *node;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      node = n->data.case_stmt.case_list;
      while (node)
	{
	  ggc_mark_tree (node->low);
	  ggc_mark_tree (node->high);
	  ggc_mark_tree (node->code_label);
	  node = node->right;
	}

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      n = n->next;
    }
}
/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */

  free (f->stmt);
  f->stmt = NULL;
}
/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}
void
init_stmt ()
{
  int i;

  gcc_obstack_init (&stmt_obstack);
  ggc_add_rtx_root (&last_block_end_note, 1);

  for (i = 0; i < 10; i++)
    {
      digit_strings[i] = ggc_alloc_string (NULL, 1);
      digit_strings[i][0] = '0' + i;
    }
  ggc_add_string_root (digit_strings, 10);
}
void
init_stmt_for_function ()
{
  current_function->stmt
    = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}
/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}
/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     char *file;
     int line;
{
  emit_filename = file;
  emit_lineno = line;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
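
/* A minimal usage sketch, assuming DECL is a LABEL_DECL supplied by the
   front end:

       rtx r1 = label_rtx (decl);    (creates the CODE_LABEL on first use)
       rtx r2 = label_rtx (decl);    (r2 == r1 thereafter)

   so forward references and the eventual expand_label definition all
   agree on a single CODE_LABEL.  */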
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
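
/* A minimal example of source reaching this function, assuming the GNU C
   computed-goto extension:

       void *labels[] = { &&a, &&b };
       goto *labels[i];

   The front end hands the `*labels[i]' expression to
   expand_computed_goto, which emits an indirect jump on its value.  */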
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (handler_slot),
				      copy_rtx (p->x_nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->x_nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
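
/* A minimal example of the nonlocal case, assuming GNU C nested
   functions and local labels:

       void f ()
       {
	 __label__ out;
	 void g () { goto out; }	(goto leaves g for its parent f)
	 ...
	 out: ;
       }

   For the goto inside g, decl_function_context (label) is f, not g, so
   the nonlocal path above is taken; an ordinary local `goto' falls
   through to expand_goto_internal.  */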
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (current_function->x_whole_function_mode_p)
	  {
	    find_loop_tree_blocks ();
	    retrofit_block (block, original_before_jump);
	  }
	else
	  insert_block (block);

	start_sequence ();
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = block;
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
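
/* A minimal example of what reaches expand_asm, assuming a C front end:

       asm ("nop");

   BODY is the STRING_CST "nop"; the whole statement becomes a single
   ASM_INPUT rtx holding the uninterpreted template text.  */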
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;
  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }
  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }
	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */
      switch (c_len)
	{
	default:
	  if ((p = strchr (constraint, '=')) != NULL)
	    break;
	  if ((p = strchr (constraint, '+')) != NULL)
	    break;
	case 0:
	  error ("output operand constraint lacks `='");
	  return;
	}

      if (p != constraint)
	{
	  j = *p;
	  bcopy (constraint, constraint+1, p-constraint);
	  *constraint = j;

	  warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
	}

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode, TREE_STRING_POINTER (string),
			       empty_string, 0, argvec, constraints,
			       filename, line);

  MEM_VOLATILE_P (body) = vol;
  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':  case '=':  case '&':
	    if (constraint == orig_constraint)
	      {
		error ("input operand constraint contains `%c'",
		       constraint[j]);
		return;
	      }
	    break;

	  case '%':
	    if (constraint == orig_constraint
		&& i + 1 == ninputs - ninout)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	  case '?':  case '!':  case '*':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (constraint[j] >= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* Try and find the real constraint for this dup.  */
	    if ((j == 0 && c_len == 1)
		|| (j == 1 && c_len == 2 && constraint[0] == '%'))
	      {
		tree o = outputs;
		for (j = constraint[j] - '0'; j > 0; --j)
		  o = TREE_CHAIN (o);

		c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
		constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
		j = 0;
		break;
	      }

	    /* ... fall through ... */

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;

	  case 'g':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;
	  }

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints", i);
	  else if (CONSTANT_P (op))
	    op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				  op);
	  else if (GET_CODE (op) == REG
		   || GET_CODE (op) == SUBREG
		   || GET_CODE (op) == CONCAT)
	    {
	      tree type = TREE_TYPE (TREE_VALUE (tail));
	      rtx memloc = assign_temp (type, 1, 1, 1);

	      emit_move_insn (memloc, op);
	      op = memloc;
	    }
	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
	    /* We won't recognize volatile memory as available as a
	       memory_operand at this point.  Ignore it.  */
	    ;
	  else if (queued_subexp_p (op))
	    ;
	  else
	    /* ??? Leave this only until we have experience with what
	       happens in combine and elsewhere when constraints are
	       not satisfied.  */
	    warning ("asm operand %d probably doesn't match constraints", i);
	}
      XVECEXP (body, 3, i) = op;

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     orig_constraint);
      i++;
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
    }
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (VOIDmode,
			    TREE_STRING_POINTER (string),
			    TREE_STRING_POINTER (TREE_PURPOSE (tail)),
			    i, argvec, constraints,
			    filename, line));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}
1829 /* Generate RTL to evaluate the expression EXP
1830 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1832 void
1833 expand_expr_stmt (exp)
1834 tree exp;
1836 /* If -W, warn about statements with no side effects,
1837 except for an explicit cast to void (e.g. for assert()), and
1838 except inside a ({...}) where they may be useful. */
1839 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1841 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1842 && !(TREE_CODE (exp) == CONVERT_EXPR
1843 && TREE_TYPE (exp) == void_type_node))
1844 warning_with_file_and_line (emit_filename, emit_lineno,
1845 "statement with no effect");
1846 else if (warn_unused)
1847 warn_if_unused_value (exp);
1850 /* If EXP is of function type and we are expanding statements for
1851 value, convert it to pointer-to-function. */
1852 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1853 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1855 last_expr_type = TREE_TYPE (exp);
1856 last_expr_value = expand_expr (exp,
1857 (expr_stmts_for_value
1858 ? NULL_RTX : const0_rtx),
1859 VOIDmode, 0);
1861 /* If all we do is reference a volatile value in memory,
1862 copy it to a register to be sure it is actually touched. */
1863 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1864 && TREE_THIS_VOLATILE (exp))
1866 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1868 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1869 copy_to_reg (last_expr_value);
1870 else
1872 rtx lab = gen_label_rtx ();
1874 /* Compare the value with itself to reference it. */
1875 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1876 expand_expr (TYPE_SIZE (last_expr_type),
1877 NULL_RTX, VOIDmode, 0),
1878 BLKmode, 0,
1879 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1880 lab);
1881 emit_label (lab);
1885 /* If this expression is part of a ({...}) and is in memory, we may have
1886 to preserve temporaries. */
1887 preserve_temp_slots (last_expr_value);
1889 /* Free any temporaries used to evaluate this expression. Any temporary
1890 used as a result of this expression will already have been preserved
1891 above. */
1892 free_temp_slots ();
1894 emit_queue ();
1897 /* Warn if EXP contains any computations whose results are not used.
1898 Return 1 if a warning is printed; 0 otherwise. */
1901 warn_if_unused_value (exp)
1902 tree exp;
1904 if (TREE_USED (exp))
1905 return 0;
1907 switch (TREE_CODE (exp))
1909 case PREINCREMENT_EXPR:
1910 case POSTINCREMENT_EXPR:
1911 case PREDECREMENT_EXPR:
1912 case POSTDECREMENT_EXPR:
1913 case MODIFY_EXPR:
1914 case INIT_EXPR:
1915 case TARGET_EXPR:
1916 case CALL_EXPR:
1917 case METHOD_CALL_EXPR:
1918 case RTL_EXPR:
1919 case TRY_CATCH_EXPR:
1920 case WITH_CLEANUP_EXPR:
1921 case EXIT_EXPR:
1922 /* We don't warn about COND_EXPR because it may be a useful
1923 construct if either arm contains a side effect. */
1924 case COND_EXPR:
1925 return 0;
1927 case BIND_EXPR:
1928 /* For a binding, warn if no side effect within it. */
1929 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1931 case SAVE_EXPR:
1932 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1934 case TRUTH_ORIF_EXPR:
1935 case TRUTH_ANDIF_EXPR:
1936 /* In && or ||, warn if 2nd operand has no side effect. */
1937 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1939 case COMPOUND_EXPR:
1940 if (TREE_NO_UNUSED_WARNING (exp))
1941 return 0;
1942 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1943 return 1;
1944 /* Let people do `(foo (), 0)' without a warning. */
1945 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1946 return 0;
1947 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1949 case NOP_EXPR:
1950 case CONVERT_EXPR:
1951 case NON_LVALUE_EXPR:
1952 /* Don't warn about values cast to void. */
1953 if (TREE_TYPE (exp) == void_type_node)
1954 return 0;
1955 /* Don't warn about conversions not explicit in the user's program. */
1956 if (TREE_NO_UNUSED_WARNING (exp))
1957 return 0;
1958 /* Assignment to a cast usually results in a cast of a modify.
1959 Don't complain about that. There can be an arbitrary number of
1960 casts before the modify, so we must loop until we find the first
1961 non-cast expression and then test to see if that is a modify. */
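/* For example, `(void) (x = y);' arrives here as a CONVERT_EXPR
   around the MODIFY_EXPR; the loop below strips the conversions,
   finds the MODIFY_EXPR and so suppresses the warning. */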
1963 tree tem = TREE_OPERAND (exp, 0);
1965 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1966 tem = TREE_OPERAND (tem, 0);
1968 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1969 || TREE_CODE (tem) == CALL_EXPR)
1970 return 0;
1972 goto warn;
1974 case INDIRECT_REF:
1975 /* Don't warn about automatic dereferencing of references, since
1976 the user cannot control it. */
1977 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1978 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1979 /* ... fall through ... */
1981 default:
1982 /* Referencing a volatile value is a side effect, so don't warn. */
1983 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1984 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1985 && TREE_THIS_VOLATILE (exp))
1986 return 0;
1987 warn:
1988 warning_with_file_and_line (emit_filename, emit_lineno,
1989 "value computed is not used");
1990 return 1;
1994 /* Clear out the memory of the last expression evaluated. */
1996 void
1997 clear_last_expr ()
1999 last_expr_type = 0;
2002 /* Begin a statement which will return a value.
2003 Return the RTL_EXPR for this statement expr.
2004 The caller must save that value and pass it to expand_end_stmt_expr. */
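/* In sketch form, a front end expanding the GNU C statement
   expression ({ int t = f (); t + 1; }) would do:
   rtl_expr = expand_start_stmt_expr ();
   ... expand the inner statements, ending with
   expand_expr_stmt applied to `t + 1' ...
   result = expand_end_stmt_expr (rtl_expr);  */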
2006 tree
2007 expand_start_stmt_expr ()
2009 int momentary;
2010 tree t;
2012 /* Make the RTL_EXPR node temporary, not momentary,
2013 so that rtl_expr_chain doesn't become garbage. */
2014 momentary = suspend_momentary ();
2015 t = make_node (RTL_EXPR);
2016 resume_momentary (momentary);
2017 do_pending_stack_adjust ();
2018 start_sequence_for_rtl_expr (t);
2019 NO_DEFER_POP;
2020 expr_stmts_for_value++;
2021 return t;
2024 /* Restore the previous state at the end of a statement that returns a value.
2025 Returns a tree node representing the statement's value and the
2026 insns to compute the value.
2028 The nodes of that expression have been freed by now, so we cannot use them.
2029 But we don't want to do that anyway; the expression has already been
2030 evaluated and now we just want to use the value. So generate an RTL_EXPR
2031 with the proper type and RTL value.
2033 If the last substatement was not an expression,
2034 return something with type `void'. */
2036 tree
2037 expand_end_stmt_expr (t)
2038 tree t;
2040 OK_DEFER_POP;
2042 if (last_expr_type == 0)
2044 last_expr_type = void_type_node;
2045 last_expr_value = const0_rtx;
2047 else if (last_expr_value == 0)
2048 /* There are some cases where this can happen, such as when the
2049 statement has void type. */
2050 last_expr_value = const0_rtx;
2051 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2052 /* Remove any possible QUEUED. */
2053 last_expr_value = protect_from_queue (last_expr_value, 0);
2055 emit_queue ();
2057 TREE_TYPE (t) = last_expr_type;
2058 RTL_EXPR_RTL (t) = last_expr_value;
2059 RTL_EXPR_SEQUENCE (t) = get_insns ();
2061 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2063 end_sequence ();
2065 /* Don't consider deleting this expr or containing exprs at tree level. */
2066 TREE_SIDE_EFFECTS (t) = 1;
2067 /* Propagate volatility of the actual RTL expr. */
2068 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2070 last_expr_type = 0;
2071 expr_stmts_for_value--;
2073 return t;
2076 /* Generate RTL for the start of an if-then. COND is the expression
2077 whose truth should be tested.
2079 If EXITFLAG is nonzero, this conditional is visible to
2080 `exit_something'. */
2082 void
2083 expand_start_cond (cond, exitflag)
2084 tree cond;
2085 int exitflag;
2087 struct nesting *thiscond = ALLOC_NESTING ();
2089 /* Make an entry on cond_stack for the cond we are entering. */
2091 thiscond->next = cond_stack;
2092 thiscond->all = nesting_stack;
2093 thiscond->depth = ++nesting_depth;
2094 thiscond->data.cond.next_label = gen_label_rtx ();
2095 /* Before we encounter an `else', we don't need a separate exit label
2096 unless there are supposed to be exit statements
2097 to exit this conditional. */
2098 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2099 thiscond->data.cond.endif_label = thiscond->exit_label;
2100 cond_stack = thiscond;
2101 nesting_stack = thiscond;
2103 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2106 /* Generate RTL between the then-clause and the elseif-clause
2107 of an if-then-elseif-.... */
2109 void
2110 expand_start_elseif (cond)
2111 tree cond;
2113 if (cond_stack->data.cond.endif_label == 0)
2114 cond_stack->data.cond.endif_label = gen_label_rtx ();
2115 emit_jump (cond_stack->data.cond.endif_label);
2116 emit_label (cond_stack->data.cond.next_label);
2117 cond_stack->data.cond.next_label = gen_label_rtx ();
2118 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2121 /* Generate RTL between the then-clause and the else-clause
2122 of an if-then-else. */
2124 void
2125 expand_start_else ()
2127 if (cond_stack->data.cond.endif_label == 0)
2128 cond_stack->data.cond.endif_label = gen_label_rtx ();
2130 emit_jump (cond_stack->data.cond.endif_label);
2131 emit_label (cond_stack->data.cond.next_label);
2132 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2135 /* After calling expand_start_else, turn this "else" into an "else if"
2136 by providing another condition. */
2138 void
2139 expand_elseif (cond)
2140 tree cond;
2142 cond_stack->data.cond.next_label = gen_label_rtx ();
2143 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2146 /* Generate RTL for the end of an if-then.
2147 Pop the record for it off of cond_stack. */
2149 void
2150 expand_end_cond ()
2152 struct nesting *thiscond = cond_stack;
2154 do_pending_stack_adjust ();
2155 if (thiscond->data.cond.next_label)
2156 emit_label (thiscond->data.cond.next_label);
2157 if (thiscond->data.cond.endif_label)
2158 emit_label (thiscond->data.cond.endif_label);
2160 POPSTACK (cond_stack);
2161 last_expr_type = 0;
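/* Taken together, a front end expands
   if (a) S1; else if (b) S2; else S3;
   roughly by the call sequence (a sketch):
   expand_start_cond (a, 0);
   ... expand S1 ...
   expand_start_elseif (b);
   ... expand S2 ...
   expand_start_else ();
   ... expand S3 ...
   expand_end_cond ();  */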
2166 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2167 loop should be exited by `exit_something'. This is a loop for which
2168 `expand_continue' will jump to the top of the loop.
2170 Make an entry on loop_stack to record the labels associated with
2171 this loop. */
2173 struct nesting *
2174 expand_start_loop (exit_flag)
2175 int exit_flag;
2177 register struct nesting *thisloop = ALLOC_NESTING ();
2179 /* Make an entry on loop_stack for the loop we are entering. */
2181 thisloop->next = loop_stack;
2182 thisloop->all = nesting_stack;
2183 thisloop->depth = ++nesting_depth;
2184 thisloop->data.loop.start_label = gen_label_rtx ();
2185 thisloop->data.loop.end_label = gen_label_rtx ();
2186 thisloop->data.loop.alt_end_label = 0;
2187 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2188 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2189 loop_stack = thisloop;
2190 nesting_stack = thisloop;
2192 do_pending_stack_adjust ();
2193 emit_queue ();
2194 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2195 emit_label (thisloop->data.loop.start_label);
2197 return thisloop;
2200 /* Like expand_start_loop but for a loop where the continuation point
2201 (for expand_continue_loop) will be specified explicitly. */
2203 struct nesting *
2204 expand_start_loop_continue_elsewhere (exit_flag)
2205 int exit_flag;
2207 struct nesting *thisloop = expand_start_loop (exit_flag);
2208 loop_stack->data.loop.continue_label = gen_label_rtx ();
2209 return thisloop;
2212 /* Specify the continuation point for a loop started with
2213 expand_start_loop_continue_elsewhere.
2214 Use this at the point in the code to which a continue statement
2215 should jump. */
2217 void
2218 expand_loop_continue_here ()
2220 do_pending_stack_adjust ();
2221 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2222 emit_label (loop_stack->data.loop.continue_label);
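/* A `for' statement whose increment must run at `continue' is
   typically expanded as (a sketch):
   expand_start_loop_continue_elsewhere (1);
   ... expand the body ...
   expand_loop_continue_here ();
   ... expand the increment ...
   expand_end_loop ();
   with the controlling test emitted inside the loop through
   expand_exit_loop_if_false. */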
2225 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2226 Pop the block off of loop_stack. */
2228 void
2229 expand_end_loop ()
2231 rtx start_label = loop_stack->data.loop.start_label;
2232 rtx insn = get_last_insn ();
2233 int needs_end_jump = 1;
2235 /* Mark the continue-point at the top of the loop if none elsewhere. */
2236 if (start_label == loop_stack->data.loop.continue_label)
2237 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2239 do_pending_stack_adjust ();
2241 /* If optimizing, perhaps reorder the loop.
2242 First, try to use a condjump near the end.
2243 expand_exit_loop_if_false ends loops with unconditional jumps,
2244 like this:
2246 if (test) goto label;
2247 optional: cleanup
2248 goto loop_stack->data.loop.end_label
2249 barrier
2250 label:
2252 If we find such a pattern, we can end the loop earlier. */
2254 if (optimize
2255 && GET_CODE (insn) == CODE_LABEL
2256 && LABEL_NAME (insn) == NULL
2257 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2259 rtx label = insn;
2260 rtx jump = PREV_INSN (PREV_INSN (label));
2262 if (GET_CODE (jump) == JUMP_INSN
2263 && GET_CODE (PATTERN (jump)) == SET
2264 && SET_DEST (PATTERN (jump)) == pc_rtx
2265 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2266 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2267 == loop_stack->data.loop.end_label))
2269 rtx prev;
2271 /* The test might be complex and reference LABEL multiple times,
2272 like the loop in loop_iterations to set vtop. To handle this,
2273 we move LABEL. */
2274 insn = PREV_INSN (label);
2275 reorder_insns (label, label, start_label);
2277 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2279 /* We ignore line number notes, but if we see any other note,
2280 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2281 NOTE_INSN_LOOP_*, we disable this optimization. */
2282 if (GET_CODE (prev) == NOTE)
2284 if (NOTE_LINE_NUMBER (prev) < 0)
2285 break;
2286 continue;
2288 if (GET_CODE (prev) == CODE_LABEL)
2289 break;
2290 if (GET_CODE (prev) == JUMP_INSN)
2292 if (GET_CODE (PATTERN (prev)) == SET
2293 && SET_DEST (PATTERN (prev)) == pc_rtx
2294 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2295 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2296 == LABEL_REF)
2297 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2299 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2300 = start_label;
2301 emit_note_after (NOTE_INSN_LOOP_END, prev);
2302 needs_end_jump = 0;
2304 break;
2310 /* If the loop starts with a loop exit, roll that to the end where
2311 it will optimize together with the jump back.
2313 We look for the conditional branch to the exit, except that once
2314 we find such a branch, we don't look past 30 instructions.
2316 In more detail, if the loop presently looks like this (in pseudo-C):
2318 start_label:
2319 if (test) goto end_label;
2320 body;
2321 goto start_label;
2322 end_label:
2324 transform it to look like:
2326 goto start_label;
2327 newstart_label:
2328 body;
2329 start_label:
2330 if (test) goto end_label;
2331 goto newstart_label;
2332 end_label:
2334 Here, the `test' may actually consist of some reasonably complex
2335 code, terminating in a test. */
2337 if (optimize
2338 && needs_end_jump
2340 ! (GET_CODE (insn) == JUMP_INSN
2341 && GET_CODE (PATTERN (insn)) == SET
2342 && SET_DEST (PATTERN (insn)) == pc_rtx
2343 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2345 int eh_regions = 0;
2346 int num_insns = 0;
2347 rtx last_test_insn = NULL_RTX;
2349 /* Scan insns from the top of the loop looking for a qualified
2350 conditional exit. */
2351 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2352 insn = NEXT_INSN (insn))
2354 if (GET_CODE (insn) == NOTE)
2356 if (optimize < 2
2357 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2358 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2359 /* The code that actually moves the exit test will
2360 carefully leave BLOCK notes in their original
2361 location. That means, however, that we can't debug
2362 the exit test itself. So, we refuse to move code
2363 containing BLOCK notes at low optimization levels. */
2364 break;
2366 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2367 ++eh_regions;
2368 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2370 --eh_regions;
2371 if (eh_regions < 0)
2372 /* We've come to the end of an EH region, but
2373 never saw the beginning of that region. That
2374 means that an EH region begins before the top
2375 of the loop, and ends in the middle of it. The
2376 existence of such a situation violates a basic
2377 assumption in this code, since that would imply
2378 that even when EH_REGIONS is zero, we might
2379 move code out of an exception region. */
2380 abort ();
2383 /* We must not walk into a nested loop. */
2384 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2385 break;
2387 /* We already know this INSN is a NOTE, so there's no
2388 point in looking at it to see if it's a JUMP. */
2389 continue;
2392 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2393 num_insns++;
2395 if (last_test_insn && num_insns > 30)
2396 break;
2398 if (eh_regions > 0)
2399 /* We don't want to move a partial EH region. Consider:
2401 while ( ( { try {
2402 if (cond ()) 0;
2403 else {
2404 bar();
2407 } catch (...) {
2409 } )) {
2410 body;
2413 This isn't legal C++, but here's what it's supposed to
2414 mean: if cond() is true, stop looping. Otherwise,
2415 call bar, and keep looping. In addition, if cond
2416 throws an exception, catch it and keep looping. Such
2417 constructs are certainly legal in LISP.
2419 We should not move the `if (cond()) 0' test since then
2420 the EH-region for the try-block would be broken up.
2421 (In this case we would move the EH_BEG note for the `try'
2422 and `if cond()' but not the call to bar() or the
2423 EH_END note.)
2425 So we don't look for tests within an EH region. */
2426 continue;
2428 if (GET_CODE (insn) == JUMP_INSN
2429 && GET_CODE (PATTERN (insn)) == SET
2430 && SET_DEST (PATTERN (insn)) == pc_rtx)
2432 /* This is indeed a jump. */
2433 rtx dest1 = NULL_RTX;
2434 rtx dest2 = NULL_RTX;
2435 rtx potential_last_test;
2436 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2438 /* A conditional jump. */
2439 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2440 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2441 potential_last_test = insn;
2443 else
2445 /* An unconditional jump. */
2446 dest1 = SET_SRC (PATTERN (insn));
2447 /* Include the BARRIER after the JUMP. */
2448 potential_last_test = NEXT_INSN (insn);
2451 do {
2452 if (dest1 && GET_CODE (dest1) == LABEL_REF
2453 && ((XEXP (dest1, 0)
2454 == loop_stack->data.loop.alt_end_label)
2455 || (XEXP (dest1, 0)
2456 == loop_stack->data.loop.end_label)))
2458 last_test_insn = potential_last_test;
2459 break;
2462 /* If this was a conditional jump, there may be
2463 another label at which we should look. */
2464 dest1 = dest2;
2465 dest2 = NULL_RTX;
2466 } while (dest1);
2470 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2472 /* We found one. Move everything from there up
2473 to the end of the loop, and add a jump into the loop
2474 to jump to there. */
2475 register rtx newstart_label = gen_label_rtx ();
2476 register rtx start_move = start_label;
2477 rtx next_insn;
2479 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2480 then we want to move this note also. */
2481 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2482 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2483 == NOTE_INSN_LOOP_CONT))
2484 start_move = PREV_INSN (start_move);
2486 emit_label_after (newstart_label, PREV_INSN (start_move));
2488 /* Actually move the insns. Start at the beginning, and
2489 keep copying insns until we've copied the
2490 last_test_insn. */
2491 for (insn = start_move; insn; insn = next_insn)
2493 /* Figure out which insn comes after this one. We have
2494 to do this before we move INSN. */
2495 if (insn == last_test_insn)
2496 /* We've moved all the insns. */
2497 next_insn = NULL_RTX;
2498 else
2499 next_insn = NEXT_INSN (insn);
2501 if (GET_CODE (insn) == NOTE
2502 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2503 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2504 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2505 NOTE_INSN_BLOCK_ENDs because the correct generation
2506 of debugging information depends on these appearing
2507 in the same order in the RTL and in the tree
2508 structure, where they are represented as BLOCKs.
2509 So, we don't move block notes. Of course, moving
2510 the code inside the block is likely to make it
2511 impossible to debug the instructions in the exit
2512 test, but such is the price of optimization. */
2513 continue;
2515 /* Move the INSN. */
2516 reorder_insns (insn, insn, get_last_insn ());
2519 emit_jump_insn_after (gen_jump (start_label),
2520 PREV_INSN (newstart_label));
2521 emit_barrier_after (PREV_INSN (newstart_label));
2522 start_label = newstart_label;
2526 if (needs_end_jump)
2528 emit_jump (start_label);
2529 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2531 emit_label (loop_stack->data.loop.end_label);
2533 POPSTACK (loop_stack);
2535 last_expr_type = 0;
2538 /* Generate a jump to the current loop's continue-point.
2539 This is usually the top of the loop, but may be specified
2540 explicitly elsewhere. If not currently inside a loop,
2541 return 0 and do nothing; caller will print an error message. */
2544 expand_continue_loop (whichloop)
2545 struct nesting *whichloop;
2547 last_expr_type = 0;
2548 if (whichloop == 0)
2549 whichloop = loop_stack;
2550 if (whichloop == 0)
2551 return 0;
2552 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2553 NULL_RTX);
2554 return 1;
2557 /* Generate a jump to exit the current loop. If not currently inside a loop,
2558 return 0 and do nothing; caller will print an error message. */
2561 expand_exit_loop (whichloop)
2562 struct nesting *whichloop;
2564 last_expr_type = 0;
2565 if (whichloop == 0)
2566 whichloop = loop_stack;
2567 if (whichloop == 0)
2568 return 0;
2569 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2570 return 1;
2573 /* Generate a conditional jump to exit the current loop if COND
2574 evaluates to zero. If not currently inside a loop,
2575 return 0 and do nothing; caller will print an error message. */
2578 expand_exit_loop_if_false (whichloop, cond)
2579 struct nesting *whichloop;
2580 tree cond;
2582 rtx label = gen_label_rtx ();
2583 rtx last_insn;
2584 last_expr_type = 0;
2586 if (whichloop == 0)
2587 whichloop = loop_stack;
2588 if (whichloop == 0)
2589 return 0;
2590 /* In order to handle fixups, we actually create a conditional jump
2591 around an unconditional branch to exit the loop. If fixups are
2592 necessary, they go before the unconditional branch. */
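/* That is, the code emitted below has, in sketch form, the shape
   if (COND) goto label;   -- do_jump
   <fixups, if any>
   goto end_label;         -- expand_goto_internal
   label:  */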
2595 do_jump (cond, NULL_RTX, label);
2596 last_insn = get_last_insn ();
2597 if (GET_CODE (last_insn) == CODE_LABEL)
2598 whichloop->data.loop.alt_end_label = last_insn;
2599 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2600 NULL_RTX);
2601 emit_label (label);
2603 return 1;
2606 /* Return nonzero if the loop nest is empty. Else return zero. */
2609 stmt_loop_nest_empty ()
2611 return (loop_stack == NULL);
2614 /* Return non-zero if we should preserve sub-expressions as separate
2615 pseudos. We never do so if we aren't optimizing. We always do so
2616 if -fexpensive-optimizations.
2618 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2619 the loop may still be a small one. */
2622 preserve_subexpressions_p ()
2624 rtx insn;
2626 if (flag_expensive_optimizations)
2627 return 1;
2629 if (optimize == 0 || current_function == 0
2630 || current_function->stmt == 0 || loop_stack == 0)
2631 return 0;
2633 insn = get_last_insn_anywhere ();
2635 return (insn
2636 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2637 < n_non_fixed_regs * 3));
2641 /* Generate a jump to exit the current loop, conditional, binding contour
2642 or case statement. Not all such constructs are visible to this function,
2643 only those started with EXIT_FLAG nonzero. Individual languages use
2644 the EXIT_FLAG parameter to control which kinds of constructs you can
2645 exit this way.
2647 If not currently inside anything that can be exited,
2648 return 0 and do nothing; caller will print an error message. */
2651 expand_exit_something ()
2653 struct nesting *n;
2654 last_expr_type = 0;
2655 for (n = nesting_stack; n; n = n->all)
2656 if (n->exit_label != 0)
2658 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2659 return 1;
2662 return 0;
2665 /* Generate RTL to return from the current function, with no value.
2666 (That is, we do not do anything about returning any value.) */
2668 void
2669 expand_null_return ()
2671 struct nesting *block = block_stack;
2672 rtx last_insn = 0;
2674 /* Does any pending block have cleanups? */
2676 while (block && block->data.block.cleanups == 0)
2677 block = block->next;
2679 /* If yes, use a goto to return, since that runs cleanups. */
2681 expand_null_return_1 (last_insn, block != 0);
2684 /* Generate RTL to return from the current function, with value VAL. */
2686 static void
2687 expand_value_return (val)
2688 rtx val;
2690 struct nesting *block = block_stack;
2691 rtx last_insn = get_last_insn ();
2692 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2694 /* Copy the value to the return location
2695 unless it's already there. */
2697 if (return_reg != val)
2699 #ifdef PROMOTE_FUNCTION_RETURN
2700 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2701 int unsignedp = TREE_UNSIGNED (type);
2702 enum machine_mode mode
2703 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2704 &unsignedp, 1);
2706 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2707 convert_move (return_reg, val, unsignedp);
2708 else
2709 #endif
2710 emit_move_insn (return_reg, val);
2712 if (GET_CODE (return_reg) == REG
2713 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2714 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2715 /* Handle calls that return values in multiple non-contiguous locations.
2716 The Irix 6 ABI has examples of this. */
2717 else if (GET_CODE (return_reg) == PARALLEL)
2719 int i;
2721 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2723 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2725 if (GET_CODE (x) == REG
2726 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2727 emit_insn (gen_rtx_USE (VOIDmode, x));
2731 /* Does any pending block have cleanups? */
2733 while (block && block->data.block.cleanups == 0)
2734 block = block->next;
2736 /* If yes, use a goto to return, since that runs cleanups.
2737 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2739 expand_null_return_1 (last_insn, block != 0);
2742 /* Output a return with no value. If LAST_INSN is nonzero,
2743 pretend that the return takes place after LAST_INSN.
2744 If USE_GOTO is nonzero then don't use a return instruction;
2745 go to the return label instead. This causes any cleanups
2746 of pending blocks to be executed normally. */
2748 static void
2749 expand_null_return_1 (last_insn, use_goto)
2750 rtx last_insn;
2751 int use_goto;
2753 rtx end_label = cleanup_label ? cleanup_label : return_label;
2755 clear_pending_stack_adjust ();
2756 do_pending_stack_adjust ();
2757 last_expr_type = 0;
2759 /* PCC-struct return always uses an epilogue. */
2760 if (current_function_returns_pcc_struct || use_goto)
2762 if (end_label == 0)
2763 end_label = return_label = gen_label_rtx ();
2764 expand_goto_internal (NULL_TREE, end_label, last_insn);
2765 return;
2768 /* Otherwise output a simple return-insn if one is available,
2769 unless it won't do the job. */
2770 #ifdef HAVE_return
2771 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2773 emit_jump_insn (gen_return ());
2774 emit_barrier ();
2775 return;
2777 #endif
2779 /* Otherwise jump to the epilogue. */
2780 expand_goto_internal (NULL_TREE, end_label, last_insn);
2783 /* Generate RTL to evaluate the expression RETVAL and return it
2784 from the current function. */
2786 void
2787 expand_return (retval)
2788 tree retval;
2790 /* If there are any cleanups to be performed, then they will
2791 be inserted following LAST_INSN. It is desirable
2792 that the last_insn, for such purposes, should be the
2793 last insn before computing the return value. Otherwise, cleanups
2794 which call functions can clobber the return value. */
2795 /* ??? rms: I think that is erroneous, because in C++ it would
2796 run destructors on variables that might be used in the subsequent
2797 computation of the return value. */
2798 rtx last_insn = 0;
2799 register rtx val = 0;
2800 register rtx op0;
2801 tree retval_rhs;
2802 int cleanups;
2804 /* If function wants no value, give it none. */
2805 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2807 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2808 emit_queue ();
2809 expand_null_return ();
2810 return;
2813 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2814 /* This is not sufficient. We also need to watch for cleanups of the
2815 expression we are about to expand. Unfortunately, we cannot know
2816 if it has cleanups until we expand it, and we want to change how we
2817 expand it depending upon if we need cleanups. We can't win. */
2818 #if 0
2819 cleanups = any_pending_cleanups (1);
2820 #else
2821 cleanups = 1;
2822 #endif
2824 if (TREE_CODE (retval) == RESULT_DECL)
2825 retval_rhs = retval;
2826 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2827 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2828 retval_rhs = TREE_OPERAND (retval, 1);
2829 else if (TREE_TYPE (retval) == void_type_node)
2830 /* Recognize tail-recursive call to void function. */
2831 retval_rhs = retval;
2832 else
2833 retval_rhs = NULL_TREE;
2835 /* Only use `last_insn' if there are cleanups which must be run. */
2836 if (cleanups || cleanup_label != 0)
2837 last_insn = get_last_insn ();
2839 /* Distribute return down conditional expr if either of the sides
2840 may involve tail recursion (see test below). This enhances the number
2841 of tail recursions we see. Don't do this always since it can produce
2842 sub-optimal code in some cases and we distribute assignments into
2843 conditional expressions when it would help. */
2845 if (optimize && retval_rhs != 0
2846 && frame_offset == 0
2847 && TREE_CODE (retval_rhs) == COND_EXPR
2848 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2849 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2851 rtx label = gen_label_rtx ();
2852 tree expr;
2854 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2855 start_cleanup_deferral ();
2856 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2857 DECL_RESULT (current_function_decl),
2858 TREE_OPERAND (retval_rhs, 1));
2859 TREE_SIDE_EFFECTS (expr) = 1;
2860 expand_return (expr);
2861 emit_label (label);
2863 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2864 DECL_RESULT (current_function_decl),
2865 TREE_OPERAND (retval_rhs, 2));
2866 TREE_SIDE_EFFECTS (expr) = 1;
2867 expand_return (expr);
2868 end_cleanup_deferral ();
2869 return;
2872 /* Attempt to optimize the call if it is tail recursive. */
2873 if (optimize_tail_recursion (retval_rhs, last_insn))
2874 return;
2876 #ifdef HAVE_return
2877 /* This optimization is safe if there are local cleanups
2878 because expand_null_return takes care of them.
2879 ??? I think it should also be safe when there is a cleanup label,
2880 because expand_null_return takes care of them, too.
2881 Any reason why not? */
2882 if (HAVE_return && cleanup_label == 0
2883 && ! current_function_returns_pcc_struct
2884 && BRANCH_COST <= 1)
2886 /* If this is return x == y; then generate
2887 if (x == y) return 1; else return 0;
2888 if we can do it with explicit return insns and branches are cheap,
2889 but not if we have the corresponding scc insn. */
2890 int has_scc = 0;
2891 if (retval_rhs)
2892 switch (TREE_CODE (retval_rhs))
2894 case EQ_EXPR:
2895 #ifdef HAVE_seq
2896 has_scc = HAVE_seq;
2897 #endif
2898 case NE_EXPR:
2899 #ifdef HAVE_sne
2900 has_scc = HAVE_sne;
2901 #endif
2902 case GT_EXPR:
2903 #ifdef HAVE_sgt
2904 has_scc = HAVE_sgt;
2905 #endif
2906 case GE_EXPR:
2907 #ifdef HAVE_sge
2908 has_scc = HAVE_sge;
2909 #endif
2910 case LT_EXPR:
2911 #ifdef HAVE_slt
2912 has_scc = HAVE_slt;
2913 #endif
2914 case LE_EXPR:
2915 #ifdef HAVE_sle
2916 has_scc = HAVE_sle;
2917 #endif
2918 case TRUTH_ANDIF_EXPR:
2919 case TRUTH_ORIF_EXPR:
2920 case TRUTH_AND_EXPR:
2921 case TRUTH_OR_EXPR:
2922 case TRUTH_NOT_EXPR:
2923 case TRUTH_XOR_EXPR:
2924 if (! has_scc)
2926 op0 = gen_label_rtx ();
2927 jumpifnot (retval_rhs, op0);
2928 expand_value_return (const1_rtx);
2929 emit_label (op0);
2930 expand_value_return (const0_rtx);
2931 return;
2933 break;
2935 default:
2936 break;
2939 #endif /* HAVE_return */
2941 /* If the result is an aggregate that is being returned in one (or more)
2942 registers, load the registers here. The compiler currently can't handle
2943 copying a BLKmode value into registers. We could put this code in a
2944 more general area (for use by everyone instead of just function
2945 call/return), but until this feature is generally usable it is kept here
2946 (and in expand_call). The value must go into a pseudo in case there
2947 are cleanups that will clobber the real return register. */
2949 if (retval_rhs != 0
2950 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2951 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2953 int i, bitpos, xbitpos;
2954 int big_endian_correction = 0;
2955 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2956 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2957 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2958 (unsigned int)BITS_PER_WORD);
2959 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2960 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2961 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2962 enum machine_mode tmpmode, result_reg_mode;
2964 /* Structures whose size is not a multiple of a word are aligned
2965 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2966 machine, this means we must skip the empty high order bytes when
2967 calculating the bit offset. */
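/* A worked example, assuming BITS_PER_WORD == 32 on a big-endian
   target: a 6-byte structure gives bytes % UNITS_PER_WORD == 2, so
   big_endian_correction = 32 - 2 * 8 = 16, and the first bits copied
   are stored 16 bits into the first destination word, skipping its
   empty high-order bytes. */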
2968 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2969 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2970 * BITS_PER_UNIT));
2972 /* Copy the structure BITSIZE bits at a time. */
2973 for (bitpos = 0, xbitpos = big_endian_correction;
2974 bitpos < bytes * BITS_PER_UNIT;
2975 bitpos += bitsize, xbitpos += bitsize)
2977 /* We need a new destination pseudo each time xbitpos is
2978 on a word boundary and when xbitpos == big_endian_correction
2979 (the first time through). */
2980 if (xbitpos % BITS_PER_WORD == 0
2981 || xbitpos == big_endian_correction)
2983 /* Generate an appropriate register. */
2984 dst = gen_reg_rtx (word_mode);
2985 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2987 /* Clobber the destination before we move anything into it. */
2988 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2991 /* We need a new source operand each time bitpos is on a word
2992 boundary. */
2993 if (bitpos % BITS_PER_WORD == 0)
2994 src = operand_subword_force (result_val,
2995 bitpos / BITS_PER_WORD,
2996 BLKmode);
2998 /* Use bitpos for the source extraction (left justified) and
2999 xbitpos for the destination store (right justified). */
3000 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3001 extract_bit_field (src, bitsize,
3002 bitpos % BITS_PER_WORD, 1,
3003 NULL_RTX, word_mode,
3004 word_mode,
3005 bitsize / BITS_PER_UNIT,
3006 BITS_PER_WORD),
3007 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
3010 /* Find the smallest integer mode large enough to hold the
3011 entire structure and use that mode instead of BLKmode
3012 on the USE insn for the return register. */
3013 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
3014 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3015 tmpmode != VOIDmode;
3016 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3018 /* Have we found a large enough mode? */
3019 if (GET_MODE_SIZE (tmpmode) >= bytes)
3020 break;
3023 /* No suitable mode found. */
3024 if (tmpmode == VOIDmode)
3025 abort ();
3027 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
3029 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3030 result_reg_mode = word_mode;
3031 else
3032 result_reg_mode = tmpmode;
3033 result_reg = gen_reg_rtx (result_reg_mode);
3035 emit_queue ();
3036 for (i = 0; i < n_regs; i++)
3037 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3038 result_pseudos[i]);
3040 if (tmpmode != result_reg_mode)
3041 result_reg = gen_lowpart (tmpmode, result_reg);
3043 expand_value_return (result_reg);
3045 else if (cleanups
3046 && retval_rhs != 0
3047 && TREE_TYPE (retval_rhs) != void_type_node
3048 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
3050 /* Calculate the return value into a pseudo reg. */
3051 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
3052 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3053 val = force_not_mem (val);
3054 emit_queue ();
3055 /* Return the calculated value, doing cleanups first. */
3056 expand_value_return (val);
3058 else
3060 /* No cleanups or no hard reg used;
3061 calculate value into hard return reg. */
3062 expand_expr (retval, const0_rtx, VOIDmode, 0);
3063 emit_queue ();
3064 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
3068 /* Return 1 if the end of the generated RTX is not a barrier.
3069 This means code already compiled can drop through. */
3072 drop_through_at_end_p ()
3074 rtx insn = get_last_insn ();
3075 while (insn && GET_CODE (insn) == NOTE)
3076 insn = PREV_INSN (insn);
3077 return insn && GET_CODE (insn) != BARRIER;
3080 /* Test CALL_EXPR to determine if it is a potential tail recursion call
3081 and emit code to optimize the tail recursion. LAST_INSN indicates where
3082 to place the jump to the tail recursion label. Return TRUE if the
3083 call was optimized into a goto.
3085 This is only used by expand_return, but expand_call is expected to
3086 use it soon. */
3089 optimize_tail_recursion (call_expr, last_insn)
3090 tree call_expr;
3091 rtx last_insn;
3093 /* For tail-recursive call to current function,
3094 just jump back to the beginning.
3095 It's unsafe if any auto variable in this function
3096 has its address taken; for simplicity,
3097 require stack frame to be empty. */
3098 if (optimize && call_expr != 0
3099 && frame_offset == 0
3100 && TREE_CODE (call_expr) == CALL_EXPR
3101 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
3102 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
3103 /* Finish checking validity, and if valid emit code
3104 to set the argument variables for the new call. */
3105 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
3106 DECL_ARGUMENTS (current_function_decl)))
3108 if (tail_recursion_label == 0)
3110 tail_recursion_label = gen_label_rtx ();
3111 emit_label_after (tail_recursion_label,
3112 tail_recursion_reentry);
3114 emit_queue ();
3115 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3116 emit_barrier ();
3117 return 1;
3120 return 0;
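/* The effect, sketched at the source level: for
   int f (int n) { ... return f (n - 1); }
   the recursive call becomes roughly
   n = n - 1; goto tail_recursion_label;
   where tail_recursion_label was emitted just after the entry-time
   argument setup (at tail_recursion_reentry). */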
3123 /* Emit code to alter this function's formal parms for a tail-recursive call.
3124 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3125 FORMALS is the chain of decls of formals.
3126 Return 1 if this can be done;
3127 otherwise return 0 and do not emit any code. */
3129 static int
3130 tail_recursion_args (actuals, formals)
3131 tree actuals, formals;
3133 register tree a = actuals, f = formals;
3134 register int i;
3135 register rtx *argvec;
3137 /* Check that number and types of actuals are compatible
3138 with the formals. This is not always true in valid C code.
3139 Also check that no formal needs to be addressable
3140 and that all formals are scalars. */
3142 /* Also count the args. */
3144 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3146 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3147 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3148 return 0;
3149 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3150 return 0;
3152 if (a != 0 || f != 0)
3153 return 0;
3155 /* Compute all the actuals. */
3157 argvec = (rtx *) alloca (i * sizeof (rtx));
3159 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3160 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3162 /* Find which actual values refer to current values of previous formals.
3163 Copy each of them now, before any formal is changed. */
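/* For example, in a tail-recursive call f (y, x) within f (x, y),
   argvec[1] (the current value of x) mentions formal number 0; since
   x is assigned its new value first, argvec[1] must be copied to a
   fresh register before any formal is overwritten. */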
3165 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3167 int copy = 0;
3168 register int j;
3169 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3170 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3171 { copy = 1; break; }
3172 if (copy)
3173 argvec[i] = copy_to_reg (argvec[i]);
3176 /* Store the values of the actuals into the formals. */
3178 for (f = formals, a = actuals, i = 0; f;
3179 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3181 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3182 emit_move_insn (DECL_RTL (f), argvec[i]);
3183 else
3184 convert_move (DECL_RTL (f), argvec[i],
3185 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3188 free_temp_slots ();
3189 return 1;
3192 /* Generate the RTL code for entering a binding contour.
3193 The variables are declared one by one, by calls to `expand_decl'.
3195 FLAGS is a bitwise or of the following flags:
3197 1 - Nonzero if this construct should be visible to
3198 `exit_something'.
3200 2 - Nonzero if this contour does not require a
3201 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3202 language-independent code should set this flag because they
3203 will not create corresponding BLOCK nodes. (There should be
3204 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3205 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3206 when expand_end_bindings is called. */
3208 void
3209 expand_start_bindings (flags)
3210 int flags;
3212 struct nesting *thisblock = ALLOC_NESTING ();
3213 rtx note;
3214 int exit_flag = ((flags & 1) != 0);
3215 int block_flag = ((flags & 2) == 0);
3217 note = emit_note (NULL_PTR,
3218 block_flag ? NOTE_INSN_BLOCK_BEG : NOTE_INSN_DELETED);
3220 /* Make an entry on block_stack for the block we are entering. */
3222 thisblock->next = block_stack;
3223 thisblock->all = nesting_stack;
3224 thisblock->depth = ++nesting_depth;
3225 thisblock->data.block.stack_level = 0;
3226 thisblock->data.block.cleanups = 0;
3227 thisblock->data.block.n_function_calls = 0;
3228 thisblock->data.block.exception_region = 0;
3229 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3231 thisblock->data.block.conditional_code = 0;
3232 thisblock->data.block.last_unconditional_cleanup = note;
3233 /* When we insert instructions after the last unconditional cleanup,
3234 we don't adjust last_insn. That means that a later add_insn will
3235 clobber the instructions we've just added. The easiest way to
3236 fix this is to just insert another instruction here, so that the
3237 instructions inserted after the last unconditional cleanup are
3238 never the last instruction. */
3239 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3240 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3242 if (block_stack
3243 && !(block_stack->data.block.cleanups == NULL_TREE
3244 && block_stack->data.block.outer_cleanups == NULL_TREE))
3245 thisblock->data.block.outer_cleanups
3246 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3247 block_stack->data.block.outer_cleanups);
3248 else
3249 thisblock->data.block.outer_cleanups = 0;
3250 thisblock->data.block.label_chain = 0;
3251 thisblock->data.block.innermost_stack_block = stack_block_stack;
3252 thisblock->data.block.first_insn = note;
3253 thisblock->data.block.block_start_count = ++current_block_start_count;
3254 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3255 block_stack = thisblock;
3256 nesting_stack = thisblock;
3258 /* Make a new level for allocating stack slots. */
3259 push_temp_slots ();
3262 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3263 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3264 expand_expr are made. After we end the region, we know that all
3265 space for all temporaries that were created by TARGET_EXPRs will be
3266 destroyed and their space freed for reuse. */
3268 void
3269 expand_start_target_temps ()
3271 /* This is so that even if the result is preserved, the space
3272 allocated will be freed, as we know that it is no longer in use. */
3273 push_temp_slots ();
3275 /* Start a new binding layer that will keep track of all cleanup
3276 actions to be performed. */
3277 expand_start_bindings (2);
3279 target_temp_slot_level = temp_slot_level;
3282 void
3283 expand_end_target_temps ()
3285 expand_end_bindings (NULL_TREE, 0, 0);
3287 /* This is so that even if the result is preserved, the space
3288 allocated will be freed, as we know that it is no longer in use. */
3289 pop_temp_slots ();
3292 /* Mark top block of block_stack as an implicit binding for an
3293 exception region. This is used to prevent infinite recursion when
3294 ending a binding with expand_end_bindings. It is only ever called
3295 by expand_eh_region_start, as that is the only way to create a
3296 block stack for an exception region. */
3298 void
3299 mark_block_as_eh_region ()
3301 block_stack->data.block.exception_region = 1;
3302 if (block_stack->next
3303 && block_stack->next->data.block.conditional_code)
3305 block_stack->data.block.conditional_code
3306 = block_stack->next->data.block.conditional_code;
3307 block_stack->data.block.last_unconditional_cleanup
3308 = block_stack->next->data.block.last_unconditional_cleanup;
3309 block_stack->data.block.cleanup_ptr
3310 = block_stack->next->data.block.cleanup_ptr;
3314 /* True if we are currently emitting insns in an area of output code
3315 that is controlled by a conditional expression. This is used by
3316 the cleanup handling code to generate conditional cleanup actions. */
3319 conditional_context ()
3321 return block_stack && block_stack->data.block.conditional_code;
3324 /* Mark top block of block_stack as not for an implicit binding for an
3325 exception region. This is only ever done by expand_eh_region_end
3326 to let expand_end_bindings know that it is being called explicitly
3327 to end the binding layer for just the binding layer associated with
3328 the exception region, otherwise expand_end_bindings would try and
3329 end all implicit binding layers for exception regions, and then
3330 one normal binding layer. */
3332 void
3333 mark_block_as_not_eh_region ()
3335 block_stack->data.block.exception_region = 0;
3338 /* True if the top block of block_stack was marked as for an exception
3339 region by mark_block_as_eh_region. */
3342 is_eh_region ()
3344 return (current_function && block_stack
3345 && block_stack->data.block.exception_region);
3348 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3349 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3350 BLOCK node. */
3352 void
3353 remember_end_note (block)
3354 register tree block;
3356 BLOCK_END_NOTE (block) = last_block_end_note;
3357 last_block_end_note = NULL_RTX;
3360 /* Emit a handler label for a nonlocal goto handler.
3361 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3363 static rtx
3364 expand_nl_handler_label (slot, before_insn)
3365 rtx slot, before_insn;
3367 rtx insns;
3368 rtx handler_label = gen_label_rtx ();
3370 /* Don't let jump_optimize delete the handler. */
3371 LABEL_PRESERVE_P (handler_label) = 1;
3373 start_sequence ();
3374 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3375 insns = get_insns ();
3376 end_sequence ();
3377 emit_insns_before (insns, before_insn);
3379 emit_label (handler_label);
3381 return handler_label;
3384 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3385 handler. */
3386 static void
3387 expand_nl_goto_receiver ()
3389 #ifdef HAVE_nonlocal_goto
3390 if (! HAVE_nonlocal_goto)
3391 #endif
3392 /* First adjust our frame pointer to its actual value. It was
3393 previously set to the start of the virtual area corresponding to
3394 the stacked variables when we branched here and now needs to be
3395 adjusted to the actual hardware fp value.
3397 Assignments to virtual registers are converted by
3398 instantiate_virtual_regs into the corresponding assignment
3399 to the underlying register (fp in this case) that makes
3400 the original assignment true.
3401 So the following insn will actually be
3402 decrementing fp by STARTING_FRAME_OFFSET. */
3403 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3405 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3406 if (fixed_regs[ARG_POINTER_REGNUM])
3408 #ifdef ELIMINABLE_REGS
3409 /* If the argument pointer can be eliminated in favor of the
3410 frame pointer, we don't need to restore it. We assume here
3411 that if such an elimination is present, it can always be used.
3412 This is the case on all known machines; if we don't make this
3413 assumption, we do unnecessary saving on many machines. */
3414 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3415 size_t i;
3417 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3418 if (elim_regs[i].from == ARG_POINTER_REGNUM
3419 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3420 break;
3422 if (i == sizeof elim_regs / sizeof elim_regs [0])
3423 #endif
3425 /* Now restore our arg pointer from the address at which it
3426 was saved in our stack frame.
3427 If there hasn't been space allocated for it yet, make
3428 some now. */
3429 if (arg_pointer_save_area == 0)
3430 arg_pointer_save_area
3431 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3432 emit_move_insn (virtual_incoming_args_rtx,
3433 /* We need a pseudo here, or else
3434 instantiate_virtual_regs_1 complains. */
3435 copy_to_reg (arg_pointer_save_area));
3438 #endif
3440 #ifdef HAVE_nonlocal_goto_receiver
3441 if (HAVE_nonlocal_goto_receiver)
3442 emit_insn (gen_nonlocal_goto_receiver ());
3443 #endif
3446 /* Make handlers for nonlocal gotos taking place in the function calls in
3447 block THISBLOCK. */
3449 static void
3450 expand_nl_goto_receivers (thisblock)
3451 struct nesting *thisblock;
3453 tree link;
3454 rtx afterward = gen_label_rtx ();
3455 rtx insns, slot;
3456 rtx label_list;
3457 int any_invalid;
3459 /* Record the handler address in the stack slot for that purpose,
3460 during this block, saving and restoring the outer value. */
3461 if (thisblock->next != 0)
3462 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3464 rtx save_receiver = gen_reg_rtx (Pmode);
3465 emit_move_insn (XEXP (slot, 0), save_receiver);
3467 start_sequence ();
3468 emit_move_insn (save_receiver, XEXP (slot, 0));
3469 insns = get_insns ();
3470 end_sequence ();
3471 emit_insns_before (insns, thisblock->data.block.first_insn);
3474 /* Jump around the handlers; they run only when specially invoked. */
3475 emit_jump (afterward);
3477 /* Make a separate handler for each label. */
3478 link = nonlocal_labels;
3479 slot = nonlocal_goto_handler_slots;
3480 label_list = NULL_RTX;
3481 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3482 /* Skip any labels we shouldn't be able to jump to from here;
3483 we generate one special handler for all of them below, which
3484 just calls abort. */
3485 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3487 rtx lab;
3488 lab = expand_nl_handler_label (XEXP (slot, 0),
3489 thisblock->data.block.first_insn);
3490 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3492 expand_nl_goto_receiver ();
3494 /* Jump to the "real" nonlocal label. */
3495 expand_goto (TREE_VALUE (link));
3498 /* A second pass over all nonlocal labels; this time we handle those
3499 we should not be able to jump to at this point. */
3500 link = nonlocal_labels;
3501 slot = nonlocal_goto_handler_slots;
3502 any_invalid = 0;
3503 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3504 if (DECL_TOO_LATE (TREE_VALUE (link)))
3506 rtx lab;
3507 lab = expand_nl_handler_label (XEXP (slot, 0),
3508 thisblock->data.block.first_insn);
3509 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3510 any_invalid = 1;
3513 if (any_invalid)
3515 expand_nl_goto_receiver ();
3516 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3517 VOIDmode, 0);
3518 emit_barrier ();
3521 nonlocal_goto_handler_labels = label_list;
3522 emit_label (afterward);
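/* The code emitted above has, schematically, this shape:
   goto afterward;
   handler_1: <restore fp and arg pointer> goto real_label_1;
   ...
   handler_for_invalid_labels: <restore> abort ();
   afterward:  */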
3525 /* Warn about any unused VARS (which may contain nodes other than
3526 VAR_DECLs, but such nodes are ignored). The nodes are connected
3527 via the TREE_CHAIN field. */
3529 void
3530 warn_about_unused_variables (vars)
3531 tree vars;
3533 tree decl;
3535 if (warn_unused)
3536 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3537 if (TREE_CODE (decl) == VAR_DECL
3538 && ! TREE_USED (decl)
3539 && ! DECL_IN_SYSTEM_HEADER (decl)
3540 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3541 warning_with_decl (decl, "unused variable `%s'");
3544 /* Generate RTL code to terminate a binding contour.
3546 VARS is the chain of VAR_DECL nodes for the variables bound in this
3547 contour. There may actually be other nodes in this chain, but any
3548 nodes other than VAR_DECLS are ignored.
3550 MARK_ENDS is nonzero if we should put a note at the beginning
3551 and end of this binding contour.
3553 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3554 (That is true automatically if the contour has a saved stack level.) */
3556 void
3557 expand_end_bindings (vars, mark_ends, dont_jump_in)
3558 tree vars;
3559 int mark_ends;
3560 int dont_jump_in;
3562 register struct nesting *thisblock;
3563 register tree decl;
3565 while (block_stack->data.block.exception_region)
3567 /* Because we don't need or want a new temporary level and
3568 because we didn't create one in expand_eh_region_start,
3569 create a fake one now to avoid removing one in
3570 expand_end_bindings. */
3571 push_temp_slots ();
3573 block_stack->data.block.exception_region = 0;
3575 expand_end_bindings (NULL_TREE, 0, 0);
3578 /* Since expand_eh_region_start does an expand_start_bindings, we
3579 have to first end all the bindings that were created by
3580 expand_eh_region_start. */
3582 thisblock = block_stack;
3584 /* If any of the variables in this scope were not used, warn the
3585 user. */
3586 warn_about_unused_variables (vars);
3588 if (thisblock->exit_label)
3590 do_pending_stack_adjust ();
3591 emit_label (thisblock->exit_label);
3594 /* If necessary, make handlers for nonlocal gotos taking
3595 place in the function calls in this block. */
3596 if (function_call_count != thisblock->data.block.n_function_calls
3597 && nonlocal_labels
3598 /* Make handler for outermost block
3599 if there were any nonlocal gotos to this function. */
3600 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3601 /* Make handler for inner block if it has something
3602 special to do when you jump out of it. */
3603 : (thisblock->data.block.cleanups != 0
3604 || thisblock->data.block.stack_level != 0)))
3605 expand_nl_goto_receivers (thisblock);
3607 /* Don't allow jumping into a block that has a stack level.
3608 Cleanups are allowed, though. */
3609 if (dont_jump_in
3610 || thisblock->data.block.stack_level != 0)
3612 struct label_chain *chain;
3614 /* Any labels in this block are no longer valid to go to.
3615 Mark them to cause an error message. */
3616 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3618 DECL_TOO_LATE (chain->label) = 1;
3619 /* If any goto without a fixup came to this label,
3620 that must be an error, because gotos without fixups
3621 come from outside all saved stack-levels. */
3622 if (TREE_ADDRESSABLE (chain->label))
3623 error_with_decl (chain->label,
3624 "label `%s' used before containing binding contour");
3628 /* Restore stack level in effect before the block
3629 (only if variable-size objects allocated). */
3630 /* Perform any cleanups associated with the block. */
3632 if (thisblock->data.block.stack_level != 0
3633 || thisblock->data.block.cleanups != 0)
3635 /* Only clean up here if this point can actually be reached. */
3636 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3638 /* Don't let cleanups affect ({...}) constructs. */
3639 int old_expr_stmts_for_value = expr_stmts_for_value;
3640 rtx old_last_expr_value = last_expr_value;
3641 tree old_last_expr_type = last_expr_type;
3642 expr_stmts_for_value = 0;
3644 /* Do the cleanups. */
3645 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3646 if (reachable)
3647 do_pending_stack_adjust ();
3649 expr_stmts_for_value = old_expr_stmts_for_value;
3650 last_expr_value = old_last_expr_value;
3651 last_expr_type = old_last_expr_type;
3653 /* Restore the stack level. */
3655 if (reachable && thisblock->data.block.stack_level != 0)
3657 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3658 thisblock->data.block.stack_level, NULL_RTX);
3659 if (nonlocal_goto_handler_slots != 0)
3660 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3661 NULL_RTX);
3664 /* Any gotos out of this block must also do these things.
3665 Also report any gotos with fixups that came to labels in this
3666 level. */
3667 fixup_gotos (thisblock,
3668 thisblock->data.block.stack_level,
3669 thisblock->data.block.cleanups,
3670 thisblock->data.block.first_insn,
3671 dont_jump_in);
3674 /* Mark the beginning and end of the scope if requested.
3675 We do this now, after running cleanups on the variables
3676 just going out of scope, so they are in scope for their cleanups. */
3678 if (mark_ends)
3679 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3680 else
3681 /* Get rid of the beginning-mark if we don't make an end-mark. */
3682 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3684 /* If doing stupid register allocation, make sure lives of all
3685 register variables declared here extend through the end of the scope. */
3687 if (obey_regdecls)
3688 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3689 if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
3690 use_variable (DECL_RTL (decl));
3692 /* Restore the temporary level of TARGET_EXPRs. */
3693 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3695 /* Restore block_stack level for containing block. */
3697 stack_block_stack = thisblock->data.block.innermost_stack_block;
3698 POPSTACK (block_stack);
3700 /* Pop the stack slot nesting and free any slots at this level. */
3701 pop_temp_slots ();
3704 /* Generate RTL for the automatic variable declaration DECL.
3705 (Other kinds of declarations are simply ignored if seen here.) */
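/* In outline, the cases below choose among three representations
   (a summary, not an exhaustive list):

     register int i;   ->  a pseudo register, (reg:m N)
     char buf[16];     ->  a fixed stack slot, (mem:BLK ...)
     char vla[n];      ->  a (mem ...) through a register holding the
                           address from allocate_dynamic_stack_space,
                           after saving the entry stack level.  */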
3707 void
3708 expand_decl (decl)
3709 register tree decl;
3711 struct nesting *thisblock;
3712 tree type;
3714 type = TREE_TYPE (decl);
3716 /* Only automatic variables need any expansion done.
3717 Static and external variables, and external functions,
3718 will be handled by `assemble_variable' (called from finish_decl).
3719 TYPE_DECL and CONST_DECL require nothing.
3720 PARM_DECLs are handled in `assign_parms'. */
3722 if (TREE_CODE (decl) != VAR_DECL)
3723 return;
3724 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3725 return;
3727 thisblock = block_stack;
3729 /* Create the RTL representation for the variable. */
3731 if (type == error_mark_node)
3732 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3733 else if (DECL_SIZE (decl) == 0)
3734 /* Variable with incomplete type. */
3736 if (DECL_INITIAL (decl) == 0)
3737 /* Error message was already done; now avoid a crash. */
3738 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3739 else
3740 /* An initializer is going to decide the size of this array.
3741 Until we know the size, represent its address with a reg. */
3742 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3743 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3745 else if (DECL_MODE (decl) != BLKmode
3746 /* If -ffloat-store, don't put explicit float vars
3747 into regs. */
3748 && !(flag_float_store
3749 && TREE_CODE (type) == REAL_TYPE)
3750 && ! TREE_THIS_VOLATILE (decl)
3751 && ! TREE_ADDRESSABLE (decl)
3752 && (DECL_REGISTER (decl) || ! obey_regdecls)
3753 /* If -fcheck-memory-usage, check all variables. */
3754 && ! current_function_check_memory_usage)
3756 /* Automatic variable that can go in a register. */
3757 int unsignedp = TREE_UNSIGNED (type);
3758 enum machine_mode reg_mode
3759 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3761 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3762 mark_user_reg (DECL_RTL (decl));
3764 if (POINTER_TYPE_P (type))
3765 mark_reg_pointer (DECL_RTL (decl),
3766 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3767 / BITS_PER_UNIT));
3770 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3771 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3772 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3773 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3774 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3776 /* Variable of fixed size that goes on the stack. */
3777 rtx oldaddr = 0;
3778 rtx addr;
3780 /* If we previously made RTL for this decl, it must be an array
3781 whose size was determined by the initializer.
3782 The old address was a register; set that register now
3783 to the proper address. */
3784 if (DECL_RTL (decl) != 0)
3786 if (GET_CODE (DECL_RTL (decl)) != MEM
3787 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3788 abort ();
3789 oldaddr = XEXP (DECL_RTL (decl), 0);
3792 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3793 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3794 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3796 /* Set alignment we actually gave this decl. */
3797 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3798 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3800 if (oldaddr)
3802 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3803 if (addr != oldaddr)
3804 emit_move_insn (oldaddr, addr);
3807 /* If this is a memory ref that contains aggregate components,
3808 mark it as such for the cse and loop optimizers. */
3809 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3810 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3811 #if 0
3812 /* If this is in memory because of -ffloat-store,
3813 set the volatile bit, to prevent optimizations from
3814 undoing the effects. */
3815 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3816 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3817 #endif
3819 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3821 else
3822 /* Dynamic-size object: must push space on the stack. */
3824 rtx address, size;
3826 /* Record the stack pointer on entry to block, if we have
3827 not already done so. */
3828 if (thisblock->data.block.stack_level == 0)
3830 do_pending_stack_adjust ();
3831 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3832 &thisblock->data.block.stack_level,
3833 thisblock->data.block.first_insn);
3834 stack_block_stack = thisblock;
3837 /* Compute the variable's size, in bytes. */
3838 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3839 DECL_SIZE (decl),
3840 size_int (BITS_PER_UNIT)),
3841 NULL_RTX, VOIDmode, 0);
3842 free_temp_slots ();
3844 /* Allocate space on the stack for the variable. Note that
3845 DECL_ALIGN says how the variable is to be aligned and we
3846 cannot use it to conclude anything about the alignment of
3847 the size. */
3848 address = allocate_dynamic_stack_space (size, NULL_RTX,
3849 TYPE_ALIGN (TREE_TYPE (decl)));
3851 /* Reference the variable indirect through that rtx. */
3852 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3854 /* If this is a memory ref that contains aggregate components,
3855 mark it as such for the cse and loop optimizers. */
3856 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3857 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3859 /* Indicate the alignment we actually gave this variable. */
3860 #ifdef STACK_BOUNDARY
3861 DECL_ALIGN (decl) = STACK_BOUNDARY;
3862 #else
3863 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3864 #endif
3867 if (TREE_THIS_VOLATILE (decl))
3868 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3869 #if 0 /* A variable is not necessarily unchanging
3870 just because it is const. RTX_UNCHANGING_P
3871 means no change in the function,
3872 not merely no change in the variable's scope.
3873 It is correct to set RTX_UNCHANGING_P if the variable's scope
3874 is the whole function. There's no convenient way to test that. */
3875 if (TREE_READONLY (decl))
3876 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3877 #endif
3879 /* If doing stupid register allocation, make sure life of any
3880 register variable starts here, at the start of its scope. */
3882 if (obey_regdecls)
3883 use_variable (DECL_RTL (decl));
3888 /* Emit code to perform the initialization of a declaration DECL. */
3890 void
3891 expand_decl_init (decl)
3892 tree decl;
3894 int was_used = TREE_USED (decl);
3896 /* If this is a CONST_DECL, we don't have to generate any code, but
3897 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3898 to be set while in the obstack containing the constant. If we don't
3899 do this, we can lose if we have functions nested three deep and the middle
3900 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3901 the innermost function is the first to expand that STRING_CST. */
3902 if (TREE_CODE (decl) == CONST_DECL)
3904 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3905 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3906 EXPAND_INITIALIZER);
3907 return;
3910 if (TREE_STATIC (decl))
3911 return;
3913 /* Compute and store the initial value now. */
3915 if (DECL_INITIAL (decl) == error_mark_node)
3917 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3919 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3920 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3921 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3922 0, 0);
3923 emit_queue ();
3925 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3927 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3928 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3929 emit_queue ();
3932 /* Don't let the initialization count as "using" the variable. */
3933 TREE_USED (decl) = was_used;
3935 /* Free any temporaries we made while initializing the decl. */
3936 preserve_temp_slots (NULL_RTX);
3937 free_temp_slots ();
3940 /* CLEANUP is an expression to be executed at exit from this binding contour;
3941 for example, in C++, it might call the destructor for this variable.
3943 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3944 CLEANUP multiple times and still have the correct semantics. This
3945 happens in exception handling, and for gotos, returns, and breaks
3946 that leave the current scope.
3948 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3949 that is not associated with any particular variable. */
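/* For example, a C++ front end might register a destructor call this
   way (a sketch; build_destructor_call is a hypothetical helper):

     tree cleanup = build_destructor_call (decl);
     if (! expand_decl_cleanup (decl, cleanup))
       error ("cleanup made outside any binding contour");

   A return of 0 means there was no enclosing block to attach the
   cleanup to.  */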
3951 int
3952 expand_decl_cleanup (decl, cleanup)
3953 tree decl, cleanup;
3955 struct nesting *thisblock;
3957 /* Error if we are not in any block. */
3958 if (current_function == 0 || block_stack == 0)
3959 return 0;
3961 thisblock = block_stack;
3963 /* Record the cleanup if there is one. */
3965 if (cleanup != 0)
3967 tree t;
3968 rtx seq;
3969 tree *cleanups = &thisblock->data.block.cleanups;
3970 int cond_context = conditional_context ();
3972 if (cond_context)
3974 rtx flag = gen_reg_rtx (word_mode);
3975 rtx set_flag_0;
3976 tree cond;
3978 start_sequence ();
3979 emit_move_insn (flag, const0_rtx);
3980 set_flag_0 = get_insns ();
3981 end_sequence ();
3983 thisblock->data.block.last_unconditional_cleanup
3984 = emit_insns_after (set_flag_0,
3985 thisblock->data.block.last_unconditional_cleanup);
3987 emit_move_insn (flag, const1_rtx);
3989 /* All cleanups must be on the function_obstack. */
3990 push_obstacks_nochange ();
3991 resume_temporary_allocation ();
3993 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3994 DECL_RTL (cond) = flag;
3996 /* Conditionalize the cleanup. */
3997 cleanup = build (COND_EXPR, void_type_node,
3998 truthvalue_conversion (cond),
3999 cleanup, integer_zero_node);
4000 cleanup = fold (cleanup);
4002 pop_obstacks ();
4004 cleanups = thisblock->data.block.cleanup_ptr;
4007 /* All cleanups must be on the function_obstack. */
4008 push_obstacks_nochange ();
4009 resume_temporary_allocation ();
4010 cleanup = unsave_expr (cleanup);
4011 pop_obstacks ();
4013 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
4015 if (! cond_context)
4016 /* If this block has a cleanup, it belongs in stack_block_stack. */
4017 stack_block_stack = thisblock;
4019 if (cond_context)
4021 start_sequence ();
4024 /* If this was optimized so that there is no exception region for the
4025 cleanup, then mark the TREE_LIST node, so that we can later tell
4026 if we need to call expand_eh_region_end. */
4027 if (! using_eh_for_cleanups_p
4028 || expand_eh_region_start_tree (decl, cleanup))
4029 TREE_ADDRESSABLE (t) = 1;
4030 /* If that started a new EH region, we're in a new block. */
4031 thisblock = block_stack;
4033 if (cond_context)
4035 seq = get_insns ();
4036 end_sequence ();
4037 if (seq)
4038 thisblock->data.block.last_unconditional_cleanup
4039 = emit_insns_after (seq,
4040 thisblock->data.block.last_unconditional_cleanup);
4042 else
4044 thisblock->data.block.last_unconditional_cleanup
4045 = get_last_insn ();
4046 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4049 return 1;
4052 /* Like expand_decl_cleanup, but suppress generating an exception handler
4053 to perform the cleanup. */
4055 #if 0
4056 int
4057 expand_decl_cleanup_no_eh (decl, cleanup)
4058 tree decl, cleanup;
4060 int save_eh = using_eh_for_cleanups_p;
4061 int result;
4063 using_eh_for_cleanups_p = 0;
4064 result = expand_decl_cleanup (decl, cleanup);
4065 using_eh_for_cleanups_p = save_eh;
4067 return result;
4069 #endif
4071 /* Arrange for the top element of the dynamic cleanup chain to be
4072 popped if we exit the current binding contour. DECL is the
4073 associated declaration, if any, otherwise NULL_TREE. If the
4074 current contour is left via an exception, then __sjthrow will pop
4075 the top element off the dynamic cleanup chain. The code that
4076 avoids doing the action we push into the cleanup chain in the
4077 exceptional case is contained in expand_cleanups.
4079 This routine is only used by expand_eh_region_start, and that is
4080 the only way in which an exception region should be started. This
4081 routine is only used when using the setjmp/longjmp codegen method
4082 for exception handling. */
4084 int
4085 expand_dcc_cleanup (decl)
4086 tree decl;
4088 struct nesting *thisblock;
4089 tree cleanup;
4091 /* Error if we are not in any block. */
4092 if (current_function == 0 || block_stack == 0)
4093 return 0;
4094 thisblock = block_stack;
4096 /* Record the cleanup for the dynamic handler chain. */
4098 /* All cleanups must be on the function_obstack. */
4099 push_obstacks_nochange ();
4100 resume_temporary_allocation ();
4101 cleanup = make_node (POPDCC_EXPR);
4102 pop_obstacks ();
4104 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4105 thisblock->data.block.cleanups
4106 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4108 /* If this block has a cleanup, it belongs in stack_block_stack. */
4109 stack_block_stack = thisblock;
4110 return 1;
4113 /* Arrange for the top element of the dynamic handler chain to be
4114 popped if we exit the current binding contour. DECL is the
4115 associated declaration, if any, otherwise NULL_TREE. If the current
4116 contour is left via an exception, then __sjthrow will pop the top
4117 element off the dynamic handler chain. The code that avoids doing
4118 the action we push into the handler chain in the exceptional case
4119 is contained in expand_cleanups.
4121 This routine is only used by expand_eh_region_start, and that is
4122 the only way in which an exception region should be started. This
4123 routine is only used when using the setjmp/longjmp codegen method
4124 for exception handling. */
4126 int
4127 expand_dhc_cleanup (decl)
4128 tree decl;
4130 struct nesting *thisblock;
4131 tree cleanup;
4133 /* Error if we are not in any block. */
4134 if (current_function == 0 || block_stack == 0)
4135 return 0;
4136 thisblock = block_stack;
4138 /* Record the cleanup for the dynamic handler chain. */
4140 /* All cleanups must be on the function_obstack. */
4141 push_obstacks_nochange ();
4142 resume_temporary_allocation ();
4143 cleanup = make_node (POPDHC_EXPR);
4144 pop_obstacks ();
4146 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4147 thisblock->data.block.cleanups
4148 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4150 /* If this block has a cleanup, it belongs in stack_block_stack. */
4151 stack_block_stack = thisblock;
4152 return 1;
4155 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4156 DECL_ELTS is the list of elements that belong to DECL's type.
4157 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
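/* For example, given the C++ member `union { int i; float f; };', the
   front end is assumed to pass the compiler-generated union variable
   as DECL and a TREE_LIST of `i' and `f' as DECL_ELTS; the loop below
   then gives each element RTL that reuses DECL's own MEM or REG in
   the element's mode, so all elements share DECL's storage.  */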
4159 void
4160 expand_anon_union_decl (decl, cleanup, decl_elts)
4161 tree decl, cleanup, decl_elts;
4163 struct nesting *thisblock = current_function == 0 ? 0 : block_stack;
4164 rtx x;
4165 tree t;
4167 /* If any of the elements are addressable, so is the entire union. */
4168 for (t = decl_elts; t; t = TREE_CHAIN (t))
4169 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4171 TREE_ADDRESSABLE (decl) = 1;
4172 break;
4175 expand_decl (decl);
4176 expand_decl_cleanup (decl, cleanup);
4177 x = DECL_RTL (decl);
4179 /* Go through the elements, assigning RTL to each. */
4180 for (t = decl_elts; t; t = TREE_CHAIN (t))
4182 tree decl_elt = TREE_VALUE (t);
4183 tree cleanup_elt = TREE_PURPOSE (t);
4184 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4186 /* Propagate the union's alignment to the elements. */
4187 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4189 /* If the element has BLKmode and the union doesn't, the union is
4190 aligned such that the element doesn't need to have BLKmode, so
4191 change the element's mode to the appropriate one for its size. */
4192 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4193 DECL_MODE (decl_elt) = mode
4194 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
4195 MODE_INT, 1);
4197 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4198 instead create a new MEM rtx with the proper mode. */
4199 if (GET_CODE (x) == MEM)
4201 if (mode == GET_MODE (x))
4202 DECL_RTL (decl_elt) = x;
4203 else
4205 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
4206 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4207 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
4210 else if (GET_CODE (x) == REG)
4212 if (mode == GET_MODE (x))
4213 DECL_RTL (decl_elt) = x;
4214 else
4215 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4217 else
4218 abort ();
4220 /* Record the cleanup if there is one. */
4222 if (cleanup != 0)
4223 thisblock->data.block.cleanups
4224 = temp_tree_cons (decl_elt, cleanup_elt,
4225 thisblock->data.block.cleanups);
4229 /* Expand a list of cleanups LIST.
4230 Elements may be expressions or may be nested lists.
4232 If DONT_DO is nonnull, then any list-element
4233 whose TREE_PURPOSE matches DONT_DO is omitted.
4234 This is sometimes used to avoid a cleanup associated with
4235 a value that is being returned out of the scope.
4237 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4238 goto and handle protection regions specially in that case.
4240 If REACHABLE, we emit code; otherwise we just inform the exception handling
4241 code about this finalization. */
4243 static void
4244 expand_cleanups (list, dont_do, in_fixup, reachable)
4245 tree list;
4246 tree dont_do;
4247 int in_fixup;
4248 int reachable;
4250 tree tail;
4251 for (tail = list; tail; tail = TREE_CHAIN (tail))
4252 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4254 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4255 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4256 else
4258 if (! in_fixup)
4260 tree cleanup = TREE_VALUE (tail);
4262 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4263 if (TREE_CODE (cleanup) != POPDHC_EXPR
4264 && TREE_CODE (cleanup) != POPDCC_EXPR
4265 /* See expand_eh_region_start_tree for this case. */
4266 && ! TREE_ADDRESSABLE (tail))
4268 cleanup = protect_with_terminate (cleanup);
4269 expand_eh_region_end (cleanup);
4273 if (reachable)
4275 /* Cleanups may be run multiple times. For example,
4276 when exiting a binding contour, we expand the
4277 cleanups associated with that contour. When a goto
4278 within that binding contour has a target outside that
4279 contour, it will expand all cleanups from its scope to
4280 the target. Though the cleanups are expanded multiple
4281 times, the control paths are non-overlapping so the
4282 cleanups will not be executed twice. */
4284 /* We may need to protect fixups with rethrow regions. */
4285 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4287 if (protect)
4288 expand_fixup_region_start ();
4290 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4291 if (protect)
4292 expand_fixup_region_end (TREE_VALUE (tail));
4293 free_temp_slots ();
4299 /* Mark the context we are emitting RTL for as a conditional
4300 context, so that any cleanup actions we register with
4301 expand_decl_init will be properly conditionalized when those
4302 cleanup actions are later performed. Must be called before any
4303 expression (tree) is expanded that is within a conditional context. */
4305 void
4306 start_cleanup_deferral ()
4308 /* block_stack can be NULL if we are inside the parameter list. It is
4309 OK to do nothing, because cleanups aren't possible here. */
4310 if (block_stack)
4311 ++block_stack->data.block.conditional_code;
4314 /* Mark the end of a conditional region of code. Because cleanup
4315 deferrals may be nested, we may still be in a conditional region
4316 after we end the currently deferred cleanups; only after we end all
4317 deferred cleanups are we back in unconditional code.
4319 void
4320 end_cleanup_deferral ()
4322 /* block_stack can be NULL if we are inside the parameter list. It is
4323 OK to do nothing, because cleanups aren't possible here. */
4324 if (block_stack)
4325 --block_stack->data.block.conditional_code;
4328 /* Move all cleanups from the current block_stack
4329 to the containing block_stack, where they are assumed to
4330 have been created. If anything can cause a temporary to
4331 be created, but not expanded for more than one level of
4332 block_stacks, then this code will have to change. */
4334 void
4335 move_cleanups_up ()
4337 struct nesting *block = block_stack;
4338 struct nesting *outer = block->next;
4340 outer->data.block.cleanups
4341 = chainon (block->data.block.cleanups,
4342 outer->data.block.cleanups);
4343 block->data.block.cleanups = 0;
4346 tree
4347 last_cleanup_this_contour ()
4349 if (block_stack == 0)
4350 return 0;
4352 return block_stack->data.block.cleanups;
4355 /* Return 1 if there are any pending cleanups at this point.
4356 If THIS_CONTOUR is nonzero, check the current contour as well.
4357 Otherwise, look only at the contours that enclose this one. */
4359 int
4360 any_pending_cleanups (this_contour)
4361 int this_contour;
4363 struct nesting *block;
4365 if (current_function == NULL || current_function->stmt == NULL
4366 || block_stack == 0)
4367 return 0;
4369 if (this_contour && block_stack->data.block.cleanups != NULL)
4370 return 1;
4371 if (block_stack->data.block.cleanups == 0
4372 && block_stack->data.block.outer_cleanups == 0)
4373 return 0;
4375 for (block = block_stack->next; block; block = block->next)
4376 if (block->data.block.cleanups != 0)
4377 return 1;
4379 return 0;
4382 /* Enter a case (Pascal) or switch (C) statement.
4383 Push a block onto case_stack and nesting_stack
4384 to accumulate the case-labels that are seen
4385 and to record the labels generated for the statement.
4387 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4388 Otherwise, this construct is transparent for `exit_something'.
4390 EXPR is the index-expression to be dispatched on.
4391 TYPE is its nominal type. We could simply convert EXPR to this type,
4392 but instead we take short cuts. */
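/* The expected calling sequence for `switch (e)' is roughly this
   (illustrative only; error handling omitted):

     expand_start_case (1, e, TREE_TYPE (e), "switch statement");
     ... expand the body, calling pushcase or pushcase_range
         for each label ...
     expand_end_case (e);

   PRINTNAME ("switch statement" here) appears only in diagnostics,
   e.g. the unreachable-code warning in check_seenlabel.  */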
4394 void
4395 expand_start_case (exit_flag, expr, type, printname)
4396 int exit_flag;
4397 tree expr;
4398 tree type;
4399 const char *printname;
4401 register struct nesting *thiscase = ALLOC_NESTING ();
4403 /* Make an entry on case_stack for the case we are entering. */
4405 thiscase->next = case_stack;
4406 thiscase->all = nesting_stack;
4407 thiscase->depth = ++nesting_depth;
4408 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4409 thiscase->data.case_stmt.case_list = 0;
4410 thiscase->data.case_stmt.index_expr = expr;
4411 thiscase->data.case_stmt.nominal_type = type;
4412 thiscase->data.case_stmt.default_label = 0;
4413 thiscase->data.case_stmt.num_ranges = 0;
4414 thiscase->data.case_stmt.printname = printname;
4415 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4416 case_stack = thiscase;
4417 nesting_stack = thiscase;
4419 do_pending_stack_adjust ();
4421 /* Make sure case_stmt.start points to something that won't
4422 need any transformation before expand_end_case. */
4423 if (GET_CODE (get_last_insn ()) != NOTE)
4424 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4426 thiscase->data.case_stmt.start = get_last_insn ();
4428 start_cleanup_deferral ();
4432 /* Start a "dummy case statement" within which case labels are invalid
4433 and are not connected to any larger real case statement.
4434 This can be used if you don't want to let a case statement jump
4435 into the middle of certain kinds of constructs. */
4437 void
4438 expand_start_case_dummy ()
4440 register struct nesting *thiscase = ALLOC_NESTING ();
4442 /* Make an entry on case_stack for the dummy. */
4444 thiscase->next = case_stack;
4445 thiscase->all = nesting_stack;
4446 thiscase->depth = ++nesting_depth;
4447 thiscase->exit_label = 0;
4448 thiscase->data.case_stmt.case_list = 0;
4449 thiscase->data.case_stmt.start = 0;
4450 thiscase->data.case_stmt.nominal_type = 0;
4451 thiscase->data.case_stmt.default_label = 0;
4452 thiscase->data.case_stmt.num_ranges = 0;
4453 case_stack = thiscase;
4454 nesting_stack = thiscase;
4455 start_cleanup_deferral ();
4458 /* End a dummy case statement. */
4460 void
4461 expand_end_case_dummy ()
4463 end_cleanup_deferral ();
4464 POPSTACK (case_stack);
4467 /* Return the data type of the index-expression
4468 of the innermost case statement, or null if none. */
4470 tree
4471 case_index_expr_type ()
4473 if (case_stack)
4474 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4475 return 0;
4478 static void
4479 check_seenlabel ()
4481 /* If this is the first label, warn if any insns have been emitted. */
4482 if (case_stack->data.case_stmt.line_number_status >= 0)
4484 rtx insn;
4486 restore_line_number_status
4487 (case_stack->data.case_stmt.line_number_status);
4488 case_stack->data.case_stmt.line_number_status = -1;
4490 for (insn = case_stack->data.case_stmt.start;
4491 insn;
4492 insn = NEXT_INSN (insn))
4494 if (GET_CODE (insn) == CODE_LABEL)
4495 break;
4496 if (GET_CODE (insn) != NOTE
4497 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4500 insn = PREV_INSN (insn);
4501 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4503 /* If insn is zero, then there must have been a syntax error. */
4504 if (insn)
4505 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4506 NOTE_LINE_NUMBER (insn),
4507 "unreachable code at beginning of %s",
4508 case_stack->data.case_stmt.printname);
4509 break;
4515 /* Accumulate one case or default label inside a case or switch statement.
4516 VALUE is the value of the case (a null pointer, for a default label).
4517 The function CONVERTER, when applied to arguments T and V,
4518 converts the value V to the type T.
4520 If not currently inside a case or switch statement, return 1 and do
4521 nothing. The caller will print a language-specific error message.
4522 If VALUE is a duplicate or overlaps, return 2 and do nothing
4523 except store the (first) duplicate node in *DUPLICATE.
4524 If VALUE is out of range, return 3 and do nothing.
4525 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4526 Return 0 on success.
4528 Extended to handle range statements. */
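/* Callers are assumed to map the return codes onto diagnostics along
   these lines (a sketch; the exact wording is the front end's):

     switch (pushcase (value, converter, label, &duplicate))
       {
       case 1: error ("case label not within a switch statement"); break;
       case 2: error_with_decl (duplicate, "duplicate case value"); break;
       case 3: error ("case value out of range"); break;
       case 5: error ("case label within scope of cleanup or variable array"); break;
       }  */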
4530 int
4531 pushcase (value, converter, label, duplicate)
4532 register tree value;
4533 tree (*converter) PROTO((tree, tree));
4534 register tree label;
4535 tree *duplicate;
4537 tree index_type;
4538 tree nominal_type;
4540 /* Fail if not inside a real case statement. */
4541 if (! (case_stack && case_stack->data.case_stmt.start))
4542 return 1;
4544 if (stack_block_stack
4545 && stack_block_stack->depth > case_stack->depth)
4546 return 5;
4548 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4549 nominal_type = case_stack->data.case_stmt.nominal_type;
4551 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4552 if (index_type == error_mark_node)
4553 return 0;
4555 /* Convert VALUE to the type in which the comparisons are nominally done. */
4556 if (value != 0)
4557 value = (*converter) (nominal_type, value);
4559 check_seenlabel ();
4561 /* Fail if this value is out of range for the actual type of the index
4562 (which may be narrower than NOMINAL_TYPE). */
4563 if (value != 0 && ! int_fits_type_p (value, index_type))
4564 return 3;
4566 /* Fail if this is a duplicate or overlaps another entry. */
4567 if (value == 0)
4569 if (case_stack->data.case_stmt.default_label != 0)
4571 *duplicate = case_stack->data.case_stmt.default_label;
4572 return 2;
4574 case_stack->data.case_stmt.default_label = label;
4576 else
4577 return add_case_node (value, value, label, duplicate);
4579 expand_label (label);
4580 return 0;
4583 /* Like pushcase but this case applies to all values between VALUE1 and
4584 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4585 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4586 starts at VALUE1 and ends at the highest value of the index type.
4587 If both are NULL, this case applies to all values.
4589 The return value is the same as that of pushcase but there is one
4590 additional error code: 4 means the specified range was empty. */
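/* This is the entry point for the GNU C range extension: for
   `case 1 ... 5:' the front end is expected to pass the two
   INTEGER_CST bounds as VALUE1 and VALUE2.  Return code 4 lets it
   reject an empty range such as `case 5 ... 1:'.  */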
4592 int
4593 pushcase_range (value1, value2, converter, label, duplicate)
4594 register tree value1, value2;
4595 tree (*converter) PROTO((tree, tree));
4596 register tree label;
4597 tree *duplicate;
4599 tree index_type;
4600 tree nominal_type;
4602 /* Fail if not inside a real case statement. */
4603 if (! (case_stack && case_stack->data.case_stmt.start))
4604 return 1;
4606 if (stack_block_stack
4607 && stack_block_stack->depth > case_stack->depth)
4608 return 5;
4610 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4611 nominal_type = case_stack->data.case_stmt.nominal_type;
4613 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4614 if (index_type == error_mark_node)
4615 return 0;
4617 check_seenlabel ();
4619 /* Convert VALUEs to the type in which the comparisons are nominally done
4620 and replace any unspecified value with the corresponding bound. */
4621 if (value1 == 0)
4622 value1 = TYPE_MIN_VALUE (index_type);
4623 if (value2 == 0)
4624 value2 = TYPE_MAX_VALUE (index_type);
4626 /* Fail if the range is empty. Do this before any conversion since
4627 we want to allow out-of-range empty ranges. */
4628 if (value2 && tree_int_cst_lt (value2, value1))
4629 return 4;
4631 value1 = (*converter) (nominal_type, value1);
4633 /* If the max was unbounded, use the max of the nominal_type we are
4634 converting to. Do this after the < check above to suppress false
4635 positives. */
4636 if (!value2)
4637 value2 = TYPE_MAX_VALUE (nominal_type);
4638 value2 = (*converter) (nominal_type, value2);
4640 /* Fail if these values are out of range. */
4641 if (TREE_CONSTANT_OVERFLOW (value1)
4642 || ! int_fits_type_p (value1, index_type))
4643 return 3;
4645 if (TREE_CONSTANT_OVERFLOW (value2)
4646 || ! int_fits_type_p (value2, index_type))
4647 return 3;
4649 return add_case_node (value1, value2, label, duplicate);
4652 /* Do the actual insertion of a case label for pushcase and pushcase_range
4653 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4654 slowdown for large switch statements. */
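/* BALANCE below is the usual AVL factor: -1, 0 or +1 as the left
   subtree is higher than, equal to, or lower than the right one.
   E.g. inserting the values 1, 2, 3 in that order would leave the
   root 1 with balance +2, conceptually; the L-rotation case below
   repairs this to a tree rooted at 2.  (Informal illustration
   only.)  */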
4656 static int
4657 add_case_node (low, high, label, duplicate)
4658 tree low, high;
4659 tree label;
4660 tree *duplicate;
4662 struct case_node *p, **q, *r;
4664 q = &case_stack->data.case_stmt.case_list;
4665 p = *q;
4667 while ((r = *q))
4669 p = r;
4671 /* Keep going past elements distinctly greater than HIGH. */
4672 if (tree_int_cst_lt (high, p->low))
4673 q = &p->left;
4675 /* or distinctly less than LOW. */
4676 else if (tree_int_cst_lt (p->high, low))
4677 q = &p->right;
4679 else
4681 /* We have an overlap; this is an error. */
4682 *duplicate = p->code_label;
4683 return 2;
4687 /* Add this label to the chain, and succeed.
4688 Copy LOW, HIGH so they are on the temporary rather than the momentary
4689 obstack and will thus survive till the end of the case statement. */
4691 r = (struct case_node *) oballoc (sizeof (struct case_node));
4692 r->low = copy_node (low);
4694 /* If the bounds are equal, turn this into the one-value case. */
4696 if (tree_int_cst_equal (low, high))
4697 r->high = r->low;
4698 else
4700 r->high = copy_node (high);
4701 case_stack->data.case_stmt.num_ranges++;
4704 r->code_label = label;
4705 expand_label (label);
4707 *q = r;
4708 r->parent = p;
4709 r->left = 0;
4710 r->right = 0;
4711 r->balance = 0;
4713 while (p)
4715 struct case_node *s;
4717 if (r == p->left)
4719 int b;
4721 if (! (b = p->balance))
4722 /* Growth propagation from left side. */
4723 p->balance = -1;
4724 else if (b < 0)
4726 if (r->balance < 0)
4728 /* R-Rotation */
4729 if ((p->left = s = r->right))
4730 s->parent = p;
4732 r->right = p;
4733 p->balance = 0;
4734 r->balance = 0;
4735 s = p->parent;
4736 p->parent = r;
4738 if ((r->parent = s))
4740 if (s->left == p)
4741 s->left = r;
4742 else
4743 s->right = r;
4745 else
4746 case_stack->data.case_stmt.case_list = r;
4748 else
4749 /* r->balance == +1 */
4751 /* LR-Rotation */
4753 int b2;
4754 struct case_node *t = r->right;
4756 if ((p->left = s = t->right))
4757 s->parent = p;
4759 t->right = p;
4760 if ((r->right = s = t->left))
4761 s->parent = r;
4763 t->left = r;
4764 b = t->balance;
4765 b2 = b < 0;
4766 p->balance = b2;
4767 b2 = -b2 - b;
4768 r->balance = b2;
4769 t->balance = 0;
4770 s = p->parent;
4771 p->parent = t;
4772 r->parent = t;
4774 if ((t->parent = s))
4776 if (s->left == p)
4777 s->left = t;
4778 else
4779 s->right = t;
4781 else
4782 case_stack->data.case_stmt.case_list = t;
4784 break;
4787 else
4789 /* p->balance == +1; growth of left side balances the node. */
4790 p->balance = 0;
4791 break;
4794 else
4795 /* r == p->right */
4797 int b;
4799 if (! (b = p->balance))
4800 /* Growth propagation from right side. */
4801 p->balance++;
4802 else if (b > 0)
4804 if (r->balance > 0)
4806 /* L-Rotation */
4808 if ((p->right = s = r->left))
4809 s->parent = p;
4811 r->left = p;
4812 p->balance = 0;
4813 r->balance = 0;
4814 s = p->parent;
4815 p->parent = r;
4816 if ((r->parent = s))
4818 if (s->left == p)
4819 s->left = r;
4820 else
4821 s->right = r;
4824 else
4825 case_stack->data.case_stmt.case_list = r;
4828 else
4829 /* r->balance == -1 */
4831 /* RL-Rotation */
4832 int b2;
4833 struct case_node *t = r->left;
4835 if ((p->right = s = t->left))
4836 s->parent = p;
4838 t->left = p;
4840 if ((r->left = s = t->right))
4841 s->parent = r;
4843 t->right = r;
4844 b = t->balance;
4845 b2 = b < 0;
4846 r->balance = b2;
4847 b2 = -b2 - b;
4848 p->balance = b2;
4849 t->balance = 0;
4850 s = p->parent;
4851 p->parent = t;
4852 r->parent = t;
4854 if ((t->parent = s))
4856 if (s->left == p)
4857 s->left = t;
4858 else
4859 s->right = t;
4862 else
4863 case_stack->data.case_stmt.case_list = t;
4865 break;
4867 else
4869 /* p->balance == -1; growth of right side balances the node. */
4870 p->balance = 0;
4871 break;
4875 r = p;
4876 p = p->parent;
4879 return 0;
4883 /* Returns the number of possible values of TYPE.
4884 Returns -1 if the number is unknown or variable.
4885 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4886 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4887 do not increase monotonically (there may be duplicates);
4888 to 1 if the values increase monotonically, but not always by 1;
4889 otherwise sets it to 0. */
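/* Worked examples (assuming the usual TYPE_VALUES from the front end):

     enum a { P, Q, R };        ->  returns 3, *SPARSENESS == 0
     enum b { P, Q = 4 };       ->  returns 2, *SPARSENESS == 1
     enum c { P = 1, Q = 1 };   ->  returns 2, *SPARSENESS == 2

   For an INTEGER_TYPE the result is max - min + 1 when that fits in a
   HOST_WIDE_INT.  */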
4891 HOST_WIDE_INT
4892 all_cases_count (type, sparseness)
4893 tree type;
4894 int *sparseness;
4896 HOST_WIDE_INT count;
4897 *sparseness = 0;
4899 switch (TREE_CODE (type))
4901 tree t;
4902 case BOOLEAN_TYPE:
4903 count = 2;
4904 break;
4905 case CHAR_TYPE:
4906 count = 1 << BITS_PER_UNIT;
4907 break;
4908 default:
4909 case INTEGER_TYPE:
4910 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4911 || TYPE_MAX_VALUE (type) == NULL
4912 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4913 return -1;
4914 else
4916 /* count
4917 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4918 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4919 but with overflow checking. */
4920 tree mint = TYPE_MIN_VALUE (type);
4921 tree maxt = TYPE_MAX_VALUE (type);
4922 HOST_WIDE_INT lo, hi;
4923 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4924 &lo, &hi);
4925 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4926 lo, hi, &lo, &hi);
4927 add_double (lo, hi, 1, 0, &lo, &hi);
4928 if (hi != 0 || lo < 0)
4929 return -2;
4930 count = lo;
4932 break;
4933 case ENUMERAL_TYPE:
4934 count = 0;
4935 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4937 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4938 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4939 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4940 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4941 *sparseness = 1;
4942 count++;
4944 if (*sparseness == 1)
4946 tree prev = TREE_VALUE (TYPE_VALUES (type));
4947 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4949 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4951 *sparseness = 2;
4952 break;
4954 prev = TREE_VALUE (t);
4959 return count;
4963 #define BITARRAY_TEST(ARRAY, INDEX) \
4964 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4965 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4966 #define BITARRAY_SET(ARRAY, INDEX) \
4967 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4968 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
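/* For example, with HOST_BITS_PER_CHAR == 8, BITARRAY_SET (seen, 11)
   sets bit 3 of seen[1] and BITARRAY_TEST (seen, 11) then yields
   nonzero; each char of the array covers eight consecutive case
   values.  */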
4970 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4971 with the case values we have seen, assuming the case expression
4972 has the given TYPE.
4973 SPARSENESS is as determined by all_cases_count.
4975 The time needed is proportional to COUNT, unless
4976 SPARSENESS is 2, in which case quadratic time is needed. */
4978 void
4979 mark_seen_cases (type, cases_seen, count, sparseness)
4980 tree type;
4981 unsigned char *cases_seen;
4982 long count;
4983 int sparseness;
4985 tree next_node_to_try = NULL_TREE;
4986 long next_node_offset = 0;
4988 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4989 tree val = make_node (INTEGER_CST);
4990 TREE_TYPE (val) = type;
4991 if (! root)
4992 ; /* Do nothing */
4993 else if (sparseness == 2)
4995 tree t;
4996 HOST_WIDE_INT xlo;
4998 /* This less efficient loop is only needed to handle
4999 duplicate case values (multiple enum constants
5000 with the same value). */
5001 TREE_TYPE (val) = TREE_TYPE (root->low);
5002 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5003 t = TREE_CHAIN (t), xlo++)
5005 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5006 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5007 n = root;
5010 /* Keep going past elements distinctly greater than VAL. */
5011 if (tree_int_cst_lt (val, n->low))
5012 n = n->left;
5014 /* or distinctly less than VAL. */
5015 else if (tree_int_cst_lt (n->high, val))
5016 n = n->right;
5018 else
5020 /* We have found a matching range. */
5021 BITARRAY_SET (cases_seen, xlo);
5022 break;
5025 while (n);
5028 else
5030 if (root->left)
5031 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5032 for (n = root; n; n = n->right)
5034 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5035 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5036 while (! tree_int_cst_lt (n->high, val))
5038 /* Calculate (into xlo) the "offset" of the integer (val).
5039 The element with lowest value has offset 0, the next smallest
5040 element has offset 1, etc. */
5042 HOST_WIDE_INT xlo, xhi;
5043 tree t;
5044 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5046 /* The TYPE_VALUES will be in increasing order, so
5047 start searching where we last ended. */
5048 t = next_node_to_try;
5049 xlo = next_node_offset;
5050 xhi = 0;
5051 for (;;)
5053 if (t == NULL_TREE)
5055 t = TYPE_VALUES (type);
5056 xlo = 0;
5058 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5060 next_node_to_try = TREE_CHAIN (t);
5061 next_node_offset = xlo + 1;
5062 break;
5064 xlo++;
5065 t = TREE_CHAIN (t);
5066 if (t == next_node_to_try)
5068 xlo = -1;
5069 break;
5073 else
5075 t = TYPE_MIN_VALUE (type);
5076 if (t)
5077 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5078 &xlo, &xhi);
5079 else
5080 xlo = xhi = 0;
5081 add_double (xlo, xhi,
5082 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5083 &xlo, &xhi);
5086 if (xhi == 0 && xlo >= 0 && xlo < count)
5087 BITARRAY_SET (cases_seen, xlo);
5088 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5089 1, 0,
5090 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5096 /* Called when the index of a switch statement is an enumerated type
5097 and there is no default label.
5099 Checks that all enumeration literals are covered by the case
5100 expressions of a switch. Also, warn if there are any extra
5101 switch cases that are *not* elements of the enumerated type.
5103 If all enumeration literals were covered by the case expressions,
5104 turn one of the expressions into the default expression since it should
5105 not be possible to fall through such a switch. */
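/* For example, with -Wswitch the code below is expected to warn for

     enum e { A, B, C } v;
     switch (v) { case A: break; case 42: break; }

   both that `B' and `C' are not handled in the switch and that the
   case value 42 is not in the enumerated type.  */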
5107 void
5108 check_for_full_enumeration_handling (type)
5109 tree type;
5111 register struct case_node *n;
5112 register tree chain;
5113 #if 0 /* variable used by 'if 0'ed code below. */
5114 register struct case_node **l;
5115 int all_values = 1;
5116 #endif
5118 /* Sparseness of the selector's values, as computed by all_cases_count. */
5119 int sparseness = 0;
5121 /* The number of possible selector values. */
5122 HOST_WIDE_INT size;
5124 /* For each possible selector value, a one iff it has been matched
5125 by a case value alternative. */
5126 unsigned char *cases_seen;
5128 /* The allocated size of cases_seen, in chars. */
5129 long bytes_needed;
5131 if (! warn_switch)
5132 return;
5134 size = all_cases_count (type, &sparseness);
5135 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5137 if (size > 0 && size < 600000
5138 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5139 this optimization if we don't have enough memory rather than
5140 aborting, as xmalloc would do. */
5141 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
5143 long i;
5144 tree v = TYPE_VALUES (type);
5146 /* The time complexity of this code is normally O(N), where
5147 N is the number of members in the enumerated type.
5148 However, if type is an ENUMERAL_TYPE whose values do not
5149 increase monotonically, O(N*log(N)) time may be needed. */
5151 mark_seen_cases (type, cases_seen, size, sparseness);
5153 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5155 if (BITARRAY_TEST (cases_seen, i) == 0)
5156 warning ("enumeration value `%s' not handled in switch",
5157 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5160 free (cases_seen);
5163 /* Now we go the other way around; we warn if there are case
5164 expressions that don't correspond to enumerators. This can
5165 occur since C and C++ don't enforce type-checking of
5166 assignments to enumeration variables. */
5168 if (case_stack->data.case_stmt.case_list
5169 && case_stack->data.case_stmt.case_list->left)
5170 case_stack->data.case_stmt.case_list
5171 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5172 if (warn_switch)
5173 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5175 for (chain = TYPE_VALUES (type);
5176 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5177 chain = TREE_CHAIN (chain))
5180 if (!chain)
5182 if (TYPE_NAME (type) == 0)
5183 warning ("case value `%ld' not in enumerated type",
5184 (long) TREE_INT_CST_LOW (n->low));
5185 else
5186 warning ("case value `%ld' not in enumerated type `%s'",
5187 (long) TREE_INT_CST_LOW (n->low),
5188 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5189 == IDENTIFIER_NODE)
5190 ? TYPE_NAME (type)
5191 : DECL_NAME (TYPE_NAME (type))));
5193 if (!tree_int_cst_equal (n->low, n->high))
5195 for (chain = TYPE_VALUES (type);
5196 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5197 chain = TREE_CHAIN (chain))
5200 if (!chain)
5202 if (TYPE_NAME (type) == 0)
5203 warning ("case value `%ld' not in enumerated type",
5204 (long) TREE_INT_CST_LOW (n->high));
5205 else
5206 warning ("case value `%ld' not in enumerated type `%s'",
5207 (long) TREE_INT_CST_LOW (n->high),
5208 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5209 == IDENTIFIER_NODE)
5210 ? TYPE_NAME (type)
5211 : DECL_NAME (TYPE_NAME (type))));
5216 #if 0
5217 /* ??? This optimization is disabled because it causes valid programs to
5218 fail. ANSI C does not guarantee that an expression with enum type
5219 will have a value that is the same as one of the enumeration literals. */
5221 /* If all values were found as case labels, make one of them the default
5222 label. Thus, this switch will never fall through. We arbitrarily pick
5223 the last one to make the default since this is likely the most
5224 efficient choice. */
5226 if (all_values)
5228 for (l = &case_stack->data.case_stmt.case_list;
5229 (*l)->right != 0;
5230 l = &(*l)->right)
5233 case_stack->data.case_stmt.default_label = (*l)->code_label;
5234 *l = 0;
5236 #endif /* 0 */
5240 /* Terminate a case (Pascal) or switch (C) statement
5241 in which ORIG_INDEX is the expression to be tested.
5242 Generate the code to test it and jump to the right place. */
5244 void
5245 expand_end_case (orig_index)
5246 tree orig_index;
5248 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5249 rtx default_label = 0;
5250 register struct case_node *n;
5251 unsigned int count;
5252 rtx index;
5253 rtx table_label;
5254 int ncases;
5255 rtx *labelvec;
5256 register int i;
5257 rtx before_case;
5258 register struct nesting *thiscase = case_stack;
5259 tree index_expr, index_type;
5260 int unsignedp;
5262 /* Don't crash due to previous errors. */
5263 if (thiscase == NULL)
5264 return;
5266 table_label = gen_label_rtx ();
5267 index_expr = thiscase->data.case_stmt.index_expr;
5268 index_type = TREE_TYPE (index_expr);
5269 unsignedp = TREE_UNSIGNED (index_type);
5271 do_pending_stack_adjust ();
5273 /* This might get a spurious warning in the presence of a syntax error;
5274 it could be fixed by moving the call to check_seenlabel after the
5275 check for error_mark_node, and copying the code of check_seenlabel that
5276 deals with case_stack->data.case_stmt.line_number_status /
5277 restore_line_number_status in front of the call to end_cleanup_deferral.
5278 However, this might miss some useful warnings in the presence of
5279 non-syntax errors.
5280 check_seenlabel ();
5282 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5283 if (index_type != error_mark_node)
5285 /* If switch expression was an enumerated type, check that all
5286 enumeration literals are covered by the cases.
5287 No sense trying this if there's a default case, however. */
5289 if (!thiscase->data.case_stmt.default_label
5290 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5291 && TREE_CODE (index_expr) != INTEGER_CST)
5292 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5294 /* If we don't have a default-label, create one here,
5295 after the body of the switch. */
5296 if (thiscase->data.case_stmt.default_label == 0)
5298 thiscase->data.case_stmt.default_label
5299 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5300 expand_label (thiscase->data.case_stmt.default_label);
5302 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5304 before_case = get_last_insn ();
5306 if (thiscase->data.case_stmt.case_list
5307 && thiscase->data.case_stmt.case_list->left)
5308 thiscase->data.case_stmt.case_list
5309 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5311 /* Simplify the case-list before we count it. */
5312 group_case_nodes (thiscase->data.case_stmt.case_list);
5314 /* Get upper and lower bounds of case values.
5315 Also convert all the case values to the index expr's data type. */
5317 count = 0;
5318 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5320 /* Check that the low and high label values are integers. */
5321 if (TREE_CODE (n->low) != INTEGER_CST)
5322 abort ();
5323 if (TREE_CODE (n->high) != INTEGER_CST)
5324 abort ();
5326 n->low = convert (index_type, n->low);
5327 n->high = convert (index_type, n->high);
5329 /* Count the elements and track the largest and smallest
5330 of them (treating them as signed even if they are not). */
5331 if (count++ == 0)
5333 minval = n->low;
5334 maxval = n->high;
5336 else
5338 if (INT_CST_LT (n->low, minval))
5339 minval = n->low;
5340 if (INT_CST_LT (maxval, n->high))
5341 maxval = n->high;
5343 /* A range counts double, since it requires two compares. */
5344 if (! tree_int_cst_equal (n->low, n->high))
5345 count++;
5348 orig_minval = minval;
5350 /* Compute span of values. */
5351 if (count != 0)
5352 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5354 end_cleanup_deferral ();
5356 if (count == 0)
5358 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5359 emit_queue ();
5360 emit_jump (default_label);
5363 /* If the range of values is much bigger than the number of values,
5364 make a sequence of conditional branches instead of a dispatch table.
5365 If the switch-index is a constant, do it this way
5366 because we can optimize it. */
5368 #ifndef CASE_VALUES_THRESHOLD
5369 #ifdef HAVE_casesi
5370 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5371 #else
5372 /* If machine does not have a case insn that compares the
5373 bounds, this means extra overhead for dispatch tables
5374 which raises the threshold for using them. */
5375 #define CASE_VALUES_THRESHOLD 5
5376 #endif /* HAVE_casesi */
5377 #endif /* CASE_VALUES_THRESHOLD */
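/* Illustrative readings of the condition below, with HAVE_casesi:
   3 case values always take the compare-and-branch path; 100 values
   spanning [0, 5000] do too, since the range exceeds 10 * count;
   100 values spanning [0, 500] get a dispatch table.  */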
5379 else if (TREE_INT_CST_HIGH (range) != 0
5380 || count < (unsigned int) CASE_VALUES_THRESHOLD
5381 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5382 > 10 * count)
5383 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5384 || flag_pic
5385 #endif
5386 || TREE_CODE (index_expr) == INTEGER_CST
5387 /* These will reduce to a constant. */
5388 || (TREE_CODE (index_expr) == CALL_EXPR
5389 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5390 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5391 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5392 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5393 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5395 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5397 /* If the index is a short or char for which we do not have
5398 an insn to handle comparisons directly, convert it to
5399 a full integer now, rather than letting each comparison
5400 generate the conversion. */
5402 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5403 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5404 == CODE_FOR_nothing))
5406 enum machine_mode wider_mode;
5407 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5408 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5409 if (cmp_optab->handlers[(int) wider_mode].insn_code
5410 != CODE_FOR_nothing)
5412 index = convert_to_mode (wider_mode, index, unsignedp);
5413 break;
5417 emit_queue ();
5418 do_pending_stack_adjust ();
5420 index = protect_from_queue (index, 0);
5421 if (GET_CODE (index) == MEM)
5422 index = copy_to_reg (index);
5423 if (GET_CODE (index) == CONST_INT
5424 || TREE_CODE (index_expr) == INTEGER_CST)
5426 /* Make a tree node with the proper constant value
5427 if we don't already have one. */
5428 if (TREE_CODE (index_expr) != INTEGER_CST)
5430 index_expr
5431 = build_int_2 (INTVAL (index),
5432 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5433 index_expr = convert (index_type, index_expr);
5436 /* For constant index expressions we need only
5437 issue an unconditional branch to the appropriate
5438 target code. The job of removing any unreachable
5439 code is left to the optimisation phase if the
5440 "-O" option is specified. */
5441 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5442 if (! tree_int_cst_lt (index_expr, n->low)
5443 && ! tree_int_cst_lt (n->high, index_expr))
5444 break;
5446 if (n)
5447 emit_jump (label_rtx (n->code_label));
5448 else
5449 emit_jump (default_label);
5451 else
5453 /* If the index expression is not constant we generate
5454 a binary decision tree to select the appropriate
5455 target code. This is done as follows:
5457 The list of cases is rearranged into a binary tree,
5458 nearly optimal assuming equal probability for each case.
5460 The tree is transformed into RTL, eliminating
5461 redundant test conditions at the same time.
5463 If program flow could reach the end of the
5464 decision tree an unconditional jump to the
5465 default code is emitted. */
5467 use_cost_table
5468 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5469 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5470 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5471 NULL_PTR);
5472 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5473 default_label, index_type);
5474 emit_jump_if_reachable (default_label);
5477 else
5479 int win = 0;
5480 #ifdef HAVE_casesi
5481 if (HAVE_casesi)
5483 enum machine_mode index_mode = SImode;
5484 int index_bits = GET_MODE_BITSIZE (index_mode);
5485 rtx op1, op2;
5486 enum machine_mode op_mode;
5488 /* Convert the index to SImode. */
5489 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5490 > GET_MODE_BITSIZE (index_mode))
5492 enum machine_mode omode = TYPE_MODE (index_type);
5493 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5495 /* We must handle the endpoints in the original mode. */
5496 index_expr = build (MINUS_EXPR, index_type,
5497 index_expr, minval);
5498 minval = integer_zero_node;
5499 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5500 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5501 omode, 1, 0, default_label);
5502 /* Now we can safely truncate. */
5503 index = convert_to_mode (index_mode, index, 0);
5505 else
5507 if (TYPE_MODE (index_type) != index_mode)
5509 index_expr = convert (type_for_size (index_bits, 0),
5510 index_expr);
5511 index_type = TREE_TYPE (index_expr);
5514 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5516 emit_queue ();
5517 index = protect_from_queue (index, 0);
5518 do_pending_stack_adjust ();
5520 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5521 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
5522 (index, op_mode))
5523 index = copy_to_mode_reg (op_mode, index);
5525 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5527 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5528 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
5529 (op1, op_mode))
5530 op1 = copy_to_mode_reg (op_mode, op1);
5532 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5534 op_mode = insn_data[(int)CODE_FOR_casesi].operand[2].mode;
5535 if (! (*insn_data[(int)CODE_FOR_casesi].operand[2].predicate)
5536 (op2, op_mode))
5537 op2 = copy_to_mode_reg (op_mode, op2);
5539 emit_jump_insn (gen_casesi (index, op1, op2,
5540 table_label, default_label));
5541 win = 1;
5543 #endif
5544 #ifdef HAVE_tablejump
5545 if (! win && HAVE_tablejump)
5547 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5548 fold (build (MINUS_EXPR, index_type,
5549 index_expr, minval)));
5550 index_type = TREE_TYPE (index_expr);
5551 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5552 emit_queue ();
5553 index = protect_from_queue (index, 0);
5554 do_pending_stack_adjust ();
5556 do_tablejump (index, TYPE_MODE (index_type),
5557 expand_expr (range, NULL_RTX, VOIDmode, 0),
5558 table_label, default_label);
5559 win = 1;
5561 #endif
5562 if (! win)
5563 abort ();
5565 /* Get table of labels to jump to, in order of case index. */
5567 ncases = TREE_INT_CST_LOW (range) + 1;
5568 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5569 bzero ((char *) labelvec, ncases * sizeof (rtx));
5571 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5573 register HOST_WIDE_INT i
5574 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5576 while (1)
5578 labelvec[i]
5579 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5580 if (i + TREE_INT_CST_LOW (orig_minval)
5581 == TREE_INT_CST_LOW (n->high))
5582 break;
5583 i++;
5587 /* Fill in the gaps with the default. */
5588 for (i = 0; i < ncases; i++)
5589 if (labelvec[i] == 0)
5590 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5592 /* Output the table */
5593 emit_label (table_label);
5595 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5596 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5597 gen_rtx_LABEL_REF (Pmode, table_label),
5598 gen_rtvec_v (ncases, labelvec),
5599 const0_rtx, const0_rtx));
5600 else
5601 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5602 gen_rtvec_v (ncases, labelvec)));
5604 /* If the case insn drops through the table,
5605 after the table we must jump to the default-label.
5606 Otherwise record no drop-through after the table. */
5607 #ifdef CASE_DROPS_THROUGH
5608 emit_jump (default_label);
5609 #else
5610 emit_barrier ();
5611 #endif
5614 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5615 reorder_insns (before_case, get_last_insn (),
5616 thiscase->data.case_stmt.start);
5618 else
5619 end_cleanup_deferral ();
5621 if (thiscase->exit_label)
5622 emit_label (thiscase->exit_label);
5624 POPSTACK (case_stack);
5626 free_temp_slots ();
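
/* Illustrative sketch, not part of this file: the dense-table
   construction above, redone over plain ints so the indexing logic is
   easy to check in isolation.  The struct and function names below are
   invented for the example, and the code is kept under #if 0 so it does
   not affect this translation unit.  Each case range [low, high] claims
   a run of slots at offset low - minval; any slot left untouched falls
   back to the default target, exactly as the labelvec loop fills gaps
   with default_label.  */
#if 0
#include <stdio.h>

struct range { int low, high, target; };

static void
fill_table (int *table, int ncases, int minval,
	    const struct range *ranges, int nranges, int deflt)
{
  int i, r;

  for (i = 0; i < ncases; i++)
    table[i] = -1;			/* -1 marks an unclaimed slot.  */

  for (r = 0; r < nranges; r++)
    for (i = ranges[r].low; i <= ranges[r].high; i++)
      table[i - minval] = ranges[r].target;

  for (i = 0; i < ncases; i++)		/* Fill in the gaps with the default.  */
    if (table[i] == -1)
      table[i] = deflt;
}

int
main ()
{
  /* A switch covering 1..3 and 7, over the index range 1..7.  */
  struct range rs[] = { { 1, 3, 100 }, { 7, 7, 200 } };
  int table[7];
  int i;

  fill_table (table, 7, 1, rs, 2, 999);
  for (i = 0; i < 7; i++)
    printf ("index %d -> label %d\n", i + 1, table[i]);
  return 0;
}
#endif
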
/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */

static struct case_node *
case_tree2list (node, right)
     struct case_node *node, *right;
{
  struct case_node *left;

  if (node->right)
    right = case_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return case_tree2list (left, node);
    }

  return node;
}
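
/* Illustrative sketch, not part of this file: case_tree2list's
   flattening scheme on a plain int tree, kept under #if 0.  The right
   subtree is flattened first and becomes the tail; then the left
   subtree is flattened with the node itself as its tail, so the result
   is an in-order list threaded through the `right' pointers with every
   `left' pointer zeroed.  All names here are invented for the
   example.  */
#if 0
#include <stdio.h>

struct tnode { int val; struct tnode *left, *right; };

static struct tnode *
tree2list (struct tnode *node, struct tnode *right)
{
  struct tnode *left;

  if (node->right)
    right = tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return tree2list (left, node);
    }

  return node;
}

int
main ()
{
  struct tnode n1 = { 1, 0, 0 }, n3 = { 3, 0, 0 };
  struct tnode n2 = { 2, &n1, &n3 };	/* the tree 1 <- 2 -> 3 */
  struct tnode *p;

  for (p = tree2list (&n2, 0); p; p = p->right)
    printf ("%d ", p->val);		/* prints "1 2 3" */
  printf ("\n");
  return 0;
}
#endif
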
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
			       0, label);
    }
}
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xcalloc (129, sizeof (short))) + 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    cost_table[i] = 16;
	  else if (ISPUNCT (i))
	    cost_table[i] = 8;
	  else if (ISCNTRL (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
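
/* Illustrative sketch, not part of this file: the text heuristic
   above, restated over plain char values and kept under #if 0.  A
   switch is treated as scanning text only if every case value lies in
   [-1, 127] and no case covers an odd control character (weight -1 in
   the table).  The function and array names are invented for the
   example; the weights mirror the assignments above.  */
#if 0
#include <ctype.h>

static short weights[129];		/* index range shifted by 1 */
static short *weight = weights + 1;	/* so weight[-1] is a valid slot */

static void
init_weights (void)
{
  int i;

  for (i = 0; i < 128; i++)
    {
      if (isalnum (i))
	weight[i] = 16;
      else if (ispunct (i))
	weight[i] = 8;
      else if (iscntrl (i))
	weight[i] = -1;			/* strange control character */
    }

  weight[' '] = 8;
  weight['\t'] = 4;
  weight['\0'] = 4;
  weight['\n'] = 2;
  weight['\f'] = weight['\v'] = weight['\b'] = 1;
}

/* Return 1 if every case value in [lo, hi] looks like text.  */
static int
looks_like_text (int lo, int hi)
{
  int i;

  if (lo < -1 || hi > 127)
    return 0;
  for (i = lo; i <= hi; i++)
    if (weight[i] < 0)
      return 0;
  return 1;
}
#endif
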
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      rtx lb2;
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
		 || (lb != 0 && lb2 != 0
		     && simplejump_p (lb)
		     && simplejump_p (lb2)
		     && rtx_equal_p (SET_SRC (PATTERN (lb)),
				     SET_SRC (PATTERN (lb2)))))
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
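
/* Illustrative sketch, not part of this file: the grouping pass above
   on an array of int ranges, kept under #if 0.  Two neighbours merge
   when they dispatch to the same target and the second begins exactly
   one past where the first ends; testing against INT_MAX guards the
   high + 1 computation against overflow, playing the role of the
   "overflow is not consecutive" fold check above.  All names are
   invented for the example.  */
#if 0
#include <limits.h>

struct crange { int low, high, target; };

/* Merge consecutive entries in place; return the new count.  */
static int
group_ranges (struct crange *r, int n)
{
  int out = 0, i;

  for (i = 0; i < n; i++)
    {
      if (out > 0
	  && r[out - 1].target == r[i].target
	  && r[out - 1].high != INT_MAX	/* high + 1 must not wrap */
	  && r[out - 1].high + 1 == r[i].low)
	r[out - 1].high = r[i].high;	/* extend the previous run */
      else
	r[out++] = r[i];
    }
  return out;
}
#endif
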
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on this branch.  Also count the
	 ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost; here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total
		 cost, where ranges count as 2; here I gets half the total
		 cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
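
/* Illustrative sketch, not part of this file: the unweighted split
   that balance_case_nodes performs, restated on a sorted array and
   kept under #if 0.  Picking the middle element as the pivot and
   recursing on the two halves yields a search tree of depth about
   log2 (n), which is the shape the emitted compare-and-branch code
   follows.  All names are invented for the example.  */
#if 0
#include <stdlib.h>

struct bnode { int val; struct bnode *left, *right; };

static struct bnode *
build_balanced (const int *vals, int n)
{
  int mid;
  struct bnode *np;

  if (n == 0)
    return 0;

  mid = n / 2;				/* the pivot bisects the list */
  np = (struct bnode *) malloc (sizeof *np);
  np->val = vals[mid];
  np->left = build_balanced (vals, mid);
  np->right = build_balanced (vals + mid + 1, n - mid - 1);
  return np;
}
#endif
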
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node checks the range ending at the current
   node's value minus one, the current node is bounded at its lower
   end.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node checks the range starting at the current
   node's value plus one, the current node is bounded at its upper
   end.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
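
/* Illustrative sketch, not part of this file: the redundancy rule the
   two functions above implement, simplified to plain ints and kept
   under #if 0.  A test `index >= low' is redundant when some ancestor
   already branched on low - 1: control can only reach this node with
   index > low - 1.  The sketch drops the left-subtree bailout the real
   code performs; all names are invented for the example.  */
#if 0
#include <limits.h>

struct pnode { int low, high; struct pnode *parent; };

static int
low_bound_redundant (const struct pnode *node, int type_min)
{
  const struct pnode *p;

  if (node->low == type_min)		/* nothing below the type's minimum */
    return 1;
  if (node->low == INT_MIN)		/* low - 1 would wrap; can't verify */
    return 0;
  for (p = node->parent; p; p = p->parent)
    if (p->high == node->low - 1)	/* an ancestor tested low - 1 */
      return 1;
  return 0;
}
#endif
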
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinate's associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would protect only
	     a single right child; it costs too much space to save so little
	     time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index, expand_expr (node->high,
							       NULL_RTX,
							       VOIDmode, 0),
					   LT, NULL_RTX, mode, unsignedp, 0,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index, expand_expr (node->high,
							       NULL_RTX,
							       VOIDmode, 0),
					   GT, NULL_RTX, mode, unsignedp, 0,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
				     GT, NULL_RTX, mode, unsignedp, 0,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
						       VOIDmode, 0),
				   GE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
							   VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
						       VOIDmode, 0),
				   LE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
						       VOIDmode, 0),
				   GE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
							   VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
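
/* Illustrative sketch, not part of this file: the decision tree that
   emit_case_nodes encodes into compare-and-branch insns, written as a
   direct recursive lookup and kept under #if 0.  At each node the
   index is tested against the node's range, then steered left or
   right by one comparison -- the same three-way pattern the emitted
   RTL performs, minus the redundant-test pruning.  All names are
   invented for the example.  */
#if 0
struct dnode { int low, high, target; struct dnode *left, *right; };

static int
dispatch (const struct dnode *node, int index, int deflt)
{
  while (node)
    {
      if (index >= node->low && index <= node->high)
	return node->target;		/* matches this node's range */
      node = index < node->low ? node->left : node->right;
    }
  return deflt;				/* fell off the tree */
}
#endif
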
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

void
find_loop_tree_blocks ()
{
  identify_blocks (DECL_INITIAL (current_function_decl), get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block, get_insns ());
}