/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
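
/* For illustration only: a minimal sketch, under simplified argument
   conventions, of how a front end might drive these routines to expand
   `if (cond) A; else B;'.  See expand_start_cond, expand_start_else
   and expand_end_cond for the real interfaces.  */
#if 0
static void
example_expand_if (cond, then_stmt, else_stmt)
     tree cond, then_stmt, else_stmt;
{
  expand_start_cond (cond, 0);  /* emit the test of COND */
  expand_expr_stmt (then_stmt); /* then-clause */
  if (else_stmt != NULL_TREE)
    {
      expand_start_else ();     /* jump past the else-clause */
      expand_expr_stmt (else_stmt);
    }
  expand_end_cond ();           /* emit the ending label */
}
#endif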
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
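
/* For illustration: a range label such as the GNU C extension
   `case 1 ... 3:' becomes a single case_node with LOW = 1 and
   HIGH = 3, while a plain `case 5:' becomes a node with
   LOW = HIGH = 5.  Once the AVL tree is flattened, the chain
   through the RIGHT fields for these two labels (values are
   hypothetical) would be:

	{low=1, high=3}  ->  {low=5, high=5}  ->  0  */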
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
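
/* For illustration only: a typical expand_end_WHATEVER routine in this
   file unwinds its construct with POPSTACK, roughly (simplified):

	struct nesting *thisloop = loop_stack;
	... emit the loop's ending code ...
	POPSTACK (loop_stack);

   which frees every nesting object down to and including THISLOOP and
   keeps all the per-construct stacks consistent along the way.  */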

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following label in the chain.  */
  struct label_chain *next;
  tree label;
};


/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences		PROTO((int, const char *));
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static rtx expand_nl_handler_label	PROTO((rtx, rtx));
static void expand_nl_goto_receiver	PROTO((void));
static void expand_nl_goto_receivers	PROTO((struct nesting *));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void check_seenlabel		PROTO((void));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->nonlocal_goto_handler_slots;
      for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
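
/* For illustration only: the caller's side of a local forward goto,
   under simplified assumptions.  LBL is a LABEL_DECL built by the
   front end.  */
#if 0
static void
example_expand_goto_and_label (lbl)
     tree lbl;
{
  expand_goto (lbl);   /* label not defined yet: may record a fixup */
  /* ... expand the intervening statements ... */
  expand_label (lbl);  /* define the label's position; its fixups are
			  finalized as enclosing contours are exited */
}
#endif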

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;

	start_sequence ();
	pushlevel (0);
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following element corresponds to our containing block
	     then the element must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
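
/* For illustration: given the GNU C statement

	asm volatile ("foo %1,%0" : "=r" (x) : "g" (y));

   the front end calls this function with STRING == "foo %1,%0",
   OUTPUTS == a one-element list whose TREE_PURPOSE is the constraint
   string "=r" and whose TREE_VALUE is the expression `x', INPUTS
   likewise holding "g" and `y', CLOBBERS == NULL_TREE, and VOL == 1.  */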

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }
	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */
      switch (c_len)
	{
	default:
	  if ((p = strchr (constraint, '=')) != NULL)
	    break;
	  if ((p = strchr (constraint, '+')) != NULL)
	    break;
	case 0:
	  error ("output operand constraint lacks `='");
	  return;
	}

      if (p != constraint)
	{
	  j = *p;
	  bcopy (constraint, constraint+1, p-constraint);
	  *constraint = j;

	  warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
	}

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':  case '=':  case '&':
	    if (constraint == orig_constraint)
	      {
		error ("input operand constraint contains `%c'", constraint[j]);
		return;
	      }
	    break;

	  case '%':
	    if (constraint == orig_constraint
		&& i + 1 == ninputs - ninout)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	  case '?':  case '!':  case '*':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (constraint[j] >= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* Try and find the real constraint for this dup.  */
	    if ((j == 0 && c_len == 1)
		|| (j == 1 && c_len == 2 && constraint[0] == '%'))
	      {
		tree o = outputs;
		for (j = constraint[j] - '0'; j > 0; --j)
		  o = TREE_CHAIN (o);

		c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
		constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
		j = 0;
		break;
	      }

	    /* ... fall through ...  */

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;

	  case 'g':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;
	  }

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints", i);
	  else if (CONSTANT_P (op))
	    op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				  op);
	  else if (GET_CODE (op) == REG
		   || GET_CODE (op) == SUBREG
		   || GET_CODE (op) == CONCAT)
	    {
	      tree type = TREE_TYPE (TREE_VALUE (tail));
	      rtx memloc = assign_temp (type, 1, 1, 1);

	      emit_move_insn (memloc, op);
	      op = memloc;
	    }
	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
	    /* We won't recognize volatile memory as an available
	       memory_operand at this point.  Ignore it.  */
	    ;
	  else if (queued_subexp_p (op))
	    ;
	  else
	    /* ??? Leave this only until we have experience with what
	       happens in combine and elsewhere when constraints are
	       not satisfied.  */
	    warning ("asm operand %d probably doesn't match constraints", i);
	}
      XVECEXP (body, 3, i) = op;

      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     orig_constraint);
      i++;
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)	/* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)	/* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS (VOIDmode,
						 TREE_STRING_POINTER (string),
						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
						 i, argvec, constraints,
						 filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM (BLKmode,
						    gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}

/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  last_expr_value = expand_expr (exp,
				 (expr_stmts_for_value
				  ? NULL_RTX : const0_rtx),
				 VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
				   expand_expr (TYPE_SIZE (last_expr_type),
						NULL_RTX, VOIDmode, 0),
				   BLKmode, 0,
				   TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
				   lab);
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
1710 /* Warn if EXP contains any computations whose results are not used.
1711 Return 1 if a warning is printed; 0 otherwise. */
1714 warn_if_unused_value (exp)
1715 tree exp;
1717 if (TREE_USED (exp))
1718 return 0;
1720 switch (TREE_CODE (exp))
1722 case PREINCREMENT_EXPR:
1723 case POSTINCREMENT_EXPR:
1724 case PREDECREMENT_EXPR:
1725 case POSTDECREMENT_EXPR:
1726 case MODIFY_EXPR:
1727 case INIT_EXPR:
1728 case TARGET_EXPR:
1729 case CALL_EXPR:
1730 case METHOD_CALL_EXPR:
1731 case RTL_EXPR:
1732 case TRY_CATCH_EXPR:
1733 case WITH_CLEANUP_EXPR:
1734 case EXIT_EXPR:
1735 /* We don't warn about COND_EXPR because it may be a useful
1736 construct if either arm contains a side effect. */
1737 case COND_EXPR:
1738 return 0;
1740 case BIND_EXPR:
1741 /* For a binding, warn if no side effect within it. */
1742 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1744 case SAVE_EXPR:
1745 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1747 case TRUTH_ORIF_EXPR:
1748 case TRUTH_ANDIF_EXPR:
1749 /* In && or ||, warn if 2nd operand has no side effect. */
1750 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1752 case COMPOUND_EXPR:
1753 if (TREE_NO_UNUSED_WARNING (exp))
1754 return 0;
1755 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1756 return 1;
1757 /* Let people do `(foo (), 0)' without a warning. */
1758 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1759 return 0;
1760 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1762 case NOP_EXPR:
1763 case CONVERT_EXPR:
1764 case NON_LVALUE_EXPR:
1765 /* Don't warn about values cast to void. */
1766 if (TREE_TYPE (exp) == void_type_node)
1767 return 0;
1768 /* Don't warn about conversions not explicit in the user's program. */
1769 if (TREE_NO_UNUSED_WARNING (exp))
1770 return 0;
1771 /* Assignment to a cast usually results in a cast of a modify.
1772 Don't complain about that. There can be an arbitrary number of
1773 casts before the modify, so we must loop until we find the first
1774 non-cast expression and then test to see if that is a modify. */
1776 tree tem = TREE_OPERAND (exp, 0);
1778 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1779 tem = TREE_OPERAND (tem, 0);
1781 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1782 || TREE_CODE (tem) == CALL_EXPR)
1783 return 0;
1785 goto warn;
1787 case INDIRECT_REF:
1788 /* Don't warn about automatic dereferencing of references, since
1789 the user cannot control it. */
1790 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1791 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1792 /* ... fall through ... */
1794 default:
1795 /* Referencing a volatile value is a side effect, so don't warn. */
1796 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1797 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1798 && TREE_THIS_VOLATILE (exp))
1799 return 0;
1800 warn:
1801 warning_with_file_and_line (emit_filename, emit_lineno,
1802 "value computed is not used");
1803 return 1;
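/* For illustration only: a statement such as

     x + 1;

   reaches the `warn' label above and gets "value computed is not used",
   whereas

     (void) (x + 1);

   is a conversion to void type and is deliberately not warned about.  */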
1807 /* Clear out the memory of the last expression evaluated. */
1809 void
1810 clear_last_expr ()
1812 last_expr_type = 0;
1815 /* Begin a statement which will return a value.
1816 Return the RTL_EXPR for this statement expr.
1817 The caller must save that value and pass it to expand_end_stmt_expr. */
1819 tree
1820 expand_start_stmt_expr ()
1822 int momentary;
1823 tree t;
1825 /* Make the RTL_EXPR node temporary, not momentary,
1826 so that rtl_expr_chain doesn't become garbage. */
1827 momentary = suspend_momentary ();
1828 t = make_node (RTL_EXPR);
1829 resume_momentary (momentary);
1830 do_pending_stack_adjust ();
1831 start_sequence_for_rtl_expr (t);
1832 NO_DEFER_POP;
1833 expr_stmts_for_value++;
1834 return t;
1837 /* Restore the previous state at the end of a statement that returns a value.
1838 Returns a tree node representing the statement's value and the
1839 insns to compute the value.
1841 The nodes of that expression have been freed by now, so we cannot use them.
1842 But we don't want to do that anyway; the expression has already been
1843 evaluated and now we just want to use the value. So generate an RTL_EXPR
1844 with the proper type and RTL value.
1846 If the last substatement was not an expression,
1847 return something with type `void'. */
1849 tree
1850 expand_end_stmt_expr (t)
1851 tree t;
1853 OK_DEFER_POP;
1855 if (last_expr_type == 0)
1857 last_expr_type = void_type_node;
1858 last_expr_value = const0_rtx;
1860 else if (last_expr_value == 0)
1861 /* There are some cases where this can happen, such as when the
1862 statement is of void type. */
1863 last_expr_value = const0_rtx;
1864 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1865 /* Remove any possible QUEUED. */
1866 last_expr_value = protect_from_queue (last_expr_value, 0);
1868 emit_queue ();
1870 TREE_TYPE (t) = last_expr_type;
1871 RTL_EXPR_RTL (t) = last_expr_value;
1872 RTL_EXPR_SEQUENCE (t) = get_insns ();
1874 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1876 end_sequence ();
1878 /* Don't consider deleting this expr or containing exprs at tree level. */
1879 TREE_SIDE_EFFECTS (t) = 1;
1880 /* Propagate volatility of the actual RTL expr. */
1881 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1883 last_expr_type = 0;
1884 expr_stmts_for_value--;
1886 return t;
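/* For illustration only: expand_start_stmt_expr and expand_end_stmt_expr
   bracket a GNU C statement expression such as

     int y = ({ int t = f (); t * 2; });

   The last expr-stmt (here t * 2) supplies the RTL_EXPR's type and RTL
   value, which is why expr_stmts_for_value is nonzero while the group
   is being expanded.  */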
1889 /* Generate RTL for the start of an if-then. COND is the expression
1890 whose truth should be tested.
1892 If EXITFLAG is nonzero, this conditional is visible to
1893 `exit_something'. */
1895 void
1896 expand_start_cond (cond, exitflag)
1897 tree cond;
1898 int exitflag;
1900 struct nesting *thiscond = ALLOC_NESTING ();
1902 /* Make an entry on cond_stack for the cond we are entering. */
1904 thiscond->next = cond_stack;
1905 thiscond->all = nesting_stack;
1906 thiscond->depth = ++nesting_depth;
1907 thiscond->data.cond.next_label = gen_label_rtx ();
1908 /* Before we encounter an `else', we don't need a separate exit label
1909 unless there are supposed to be exit statements
1910 to exit this conditional. */
1911 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1912 thiscond->data.cond.endif_label = thiscond->exit_label;
1913 cond_stack = thiscond;
1914 nesting_stack = thiscond;
1916 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1919 /* Generate RTL between then-clause and the elseif-clause
1920 of an if-then-elseif-.... */
1922 void
1923 expand_start_elseif (cond)
1924 tree cond;
1926 if (cond_stack->data.cond.endif_label == 0)
1927 cond_stack->data.cond.endif_label = gen_label_rtx ();
1928 emit_jump (cond_stack->data.cond.endif_label);
1929 emit_label (cond_stack->data.cond.next_label);
1930 cond_stack->data.cond.next_label = gen_label_rtx ();
1931 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1934 /* Generate RTL between the then-clause and the else-clause
1935 of an if-then-else. */
1937 void
1938 expand_start_else ()
1940 if (cond_stack->data.cond.endif_label == 0)
1941 cond_stack->data.cond.endif_label = gen_label_rtx ();
1943 emit_jump (cond_stack->data.cond.endif_label);
1944 emit_label (cond_stack->data.cond.next_label);
1945 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1948 /* After calling expand_start_else, turn this "else" into an "else if"
1949 by providing another condition. */
1951 void
1952 expand_elseif (cond)
1953 tree cond;
1955 cond_stack->data.cond.next_label = gen_label_rtx ();
1956 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1959 /* Generate RTL for the end of an if-then.
1960 Pop the record for it off of cond_stack. */
1962 void
1963 expand_end_cond ()
1965 struct nesting *thiscond = cond_stack;
1967 do_pending_stack_adjust ();
1968 if (thiscond->data.cond.next_label)
1969 emit_label (thiscond->data.cond.next_label);
1970 if (thiscond->data.cond.endif_label)
1971 emit_label (thiscond->data.cond.endif_label);
1973 POPSTACK (cond_stack);
1974 last_expr_type = 0;
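/* For illustration only, a sketch (not actual front-end code) of the
   calling sequence a front end would use for

     if (a) foo (); else if (b) bar (); else baz ();

   namely

     expand_start_cond (a, 0);    ... expand foo () ...
     expand_start_elseif (b);     ... expand bar () ...
     expand_start_else ();        ... expand baz () ...
     expand_end_cond ();  */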
1979 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1980 loop should be exited by `exit_something'. This is a loop for which
1981 `expand_continue' will jump to the top of the loop.
1983 Make an entry on loop_stack to record the labels associated with
1984 this loop. */
1986 struct nesting *
1987 expand_start_loop (exit_flag)
1988 int exit_flag;
1990 register struct nesting *thisloop = ALLOC_NESTING ();
1992 /* Make an entry on loop_stack for the loop we are entering. */
1994 thisloop->next = loop_stack;
1995 thisloop->all = nesting_stack;
1996 thisloop->depth = ++nesting_depth;
1997 thisloop->data.loop.start_label = gen_label_rtx ();
1998 thisloop->data.loop.end_label = gen_label_rtx ();
1999 thisloop->data.loop.alt_end_label = 0;
2000 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2001 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2002 loop_stack = thisloop;
2003 nesting_stack = thisloop;
2005 do_pending_stack_adjust ();
2006 emit_queue ();
2007 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2008 emit_label (thisloop->data.loop.start_label);
2010 return thisloop;
2013 /* Like expand_start_loop but for a loop where the continuation point
2014 (for expand_continue_loop) will be specified explicitly. */
2016 struct nesting *
2017 expand_start_loop_continue_elsewhere (exit_flag)
2018 int exit_flag;
2020 struct nesting *thisloop = expand_start_loop (exit_flag);
2021 loop_stack->data.loop.continue_label = gen_label_rtx ();
2022 return thisloop;
2025 /* Specify the continuation point for a loop started with
2026 expand_start_loop_continue_elsewhere.
2027 Use this at the point in the code to which a continue statement
2028 should jump. */
2030 void
2031 expand_loop_continue_here ()
2033 do_pending_stack_adjust ();
2034 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2035 emit_label (loop_stack->data.loop.continue_label);
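/* For illustration only, a sketch of how a front end might expand

     for (init; cond; incr) body;

   using the functions above:

     ... expand init ...
     expand_start_loop_continue_elsewhere (1);
     ... expand cond, calling expand_exit_loop_if_false ...
     ... expand body ...
     expand_loop_continue_here ();
     ... expand incr ...
     expand_end_loop ();

   so that `continue' inside the body jumps to the increment step
   instead of to the top of the loop.  */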
2038 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2039 Pop the block off of loop_stack. */
2041 void
2042 expand_end_loop ()
2044 rtx start_label = loop_stack->data.loop.start_label;
2045 rtx insn = get_last_insn ();
2046 int needs_end_jump = 1;
2048 /* Mark the continue-point at the top of the loop if none elsewhere. */
2049 if (start_label == loop_stack->data.loop.continue_label)
2050 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2052 do_pending_stack_adjust ();
2054 /* If optimizing, perhaps reorder the loop.
2055 First, try to use a condjump near the end.
2056 expand_exit_loop_if_false ends loops with unconditional jumps,
2057 like this:
2059 if (test) goto label;
2060 optional: cleanup
2061 goto loop_stack->data.loop.end_label
2062 barrier
2063 label:
2065 If we find such a pattern, we can end the loop earlier. */
2067 if (optimize
2068 && GET_CODE (insn) == CODE_LABEL
2069 && LABEL_NAME (insn) == NULL
2070 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2072 rtx label = insn;
2073 rtx jump = PREV_INSN (PREV_INSN (label));
2075 if (GET_CODE (jump) == JUMP_INSN
2076 && GET_CODE (PATTERN (jump)) == SET
2077 && SET_DEST (PATTERN (jump)) == pc_rtx
2078 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2079 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2080 == loop_stack->data.loop.end_label))
2082 rtx prev;
2084 /* The test might be complex and reference LABEL multiple times,
2085 like the loop in loop_iterations to set vtop. To handle this,
2086 we move LABEL. */
2087 insn = PREV_INSN (label);
2088 reorder_insns (label, label, start_label);
2090 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2092 /* We ignore line number notes, but if we see any other note,
2093 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2094 NOTE_INSN_LOOP_*, we disable this optimization. */
2095 if (GET_CODE (prev) == NOTE)
2097 if (NOTE_LINE_NUMBER (prev) < 0)
2098 break;
2099 continue;
2101 if (GET_CODE (prev) == CODE_LABEL)
2102 break;
2103 if (GET_CODE (prev) == JUMP_INSN)
2105 if (GET_CODE (PATTERN (prev)) == SET
2106 && SET_DEST (PATTERN (prev)) == pc_rtx
2107 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2108 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2109 == LABEL_REF)
2110 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2112 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2113 = start_label;
2114 emit_note_after (NOTE_INSN_LOOP_END, prev);
2115 needs_end_jump = 0;
2117 break;
2123 /* If the loop starts with a loop exit, roll that to the end where
2124 it will optimize together with the jump back.
2126 We look for the conditional branch to the exit, except that once
2127 we find such a branch, we don't look past 30 instructions.
2129 In more detail, if the loop presently looks like this (in pseudo-C):
2131 start_label:
2132 if (test) goto end_label;
2133 body;
2134 goto start_label;
2135 end_label:
2137 transform it to look like:
2139 goto start_label;
2140 newstart_label:
2141 body;
2142 start_label:
2143 if (test) goto end_label;
2144 goto newstart_label;
2145 end_label:
2147 Here, the `test' may actually consist of some reasonably complex
2148 code, terminating in a test. */
2150 if (optimize
2151 && needs_end_jump
2152 &&
2153 ! (GET_CODE (insn) == JUMP_INSN
2154 && GET_CODE (PATTERN (insn)) == SET
2155 && SET_DEST (PATTERN (insn)) == pc_rtx
2156 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2158 int eh_regions = 0;
2159 int num_insns = 0;
2160 rtx last_test_insn = NULL_RTX;
2162 /* Scan insns from the top of the loop looking for a qualified
2163 conditional exit. */
2164 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2165 insn = NEXT_INSN (insn))
2167 if (GET_CODE (insn) == NOTE)
2169 if (optimize < 2
2170 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2171 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2172 /* The code that actually moves the exit test will
2173 carefully leave BLOCK notes in their original
2174 location. That means, however, that we can't debug
2175 the exit test itself. So, we refuse to move code
2176 containing BLOCK notes at low optimization levels. */
2177 break;
2179 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2180 ++eh_regions;
2181 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2183 --eh_regions;
2184 if (eh_regions < 0)
2185 /* We've come to the end of an EH region, but
2186 never saw the beginning of that region. That
2187 means that an EH region begins before the top
2188 of the loop, and ends in the middle of it. The
2189 existence of such a situation violates a basic
2190 assumption in this code, since that would imply
2191 that even when EH_REGIONS is zero, we might
2192 move code out of an exception region. */
2193 abort ();
2196 /* We must not walk into a nested loop. */
2197 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2198 break;
2200 /* We already know this INSN is a NOTE, so there's no
2201 point in looking at it to see if it's a JUMP. */
2202 continue;
2205 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2206 num_insns++;
2208 if (last_test_insn && num_insns > 30)
2209 break;
2211 if (eh_regions > 0)
2212 /* We don't want to move a partial EH region. Consider:
2214 while ( ( { try {
2215 if (cond ()) 0;
2216 else {
2217 bar();
2220 } catch (...) {
2222 } )) {
2223 body;
2226 This isn't legal C++, but here's what it's supposed to
2227 mean: if cond() is true, stop looping. Otherwise,
2228 call bar, and keep looping. In addition, if cond
2229 throws an exception, catch it and keep looping. Such
2230 constructs are certainly legal in LISP.
2232 We should not move the `if (cond()) 0' test since then
2233 the EH-region for the try-block would be broken up.
2234 (In this case we would move the EH_BEG note for the `try'
2235 and `if cond()' but not the call to bar() or the
2236 EH_END note.)
2238 So we don't look for tests within an EH region. */
2239 continue;
2241 if (GET_CODE (insn) == JUMP_INSN
2242 && GET_CODE (PATTERN (insn)) == SET
2243 && SET_DEST (PATTERN (insn)) == pc_rtx)
2245 /* This is indeed a jump. */
2246 rtx dest1 = NULL_RTX;
2247 rtx dest2 = NULL_RTX;
2248 rtx potential_last_test;
2249 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2251 /* A conditional jump. */
2252 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2253 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2254 potential_last_test = insn;
2256 else
2258 /* An unconditional jump. */
2259 dest1 = SET_SRC (PATTERN (insn));
2260 /* Include the BARRIER after the JUMP. */
2261 potential_last_test = NEXT_INSN (insn);
2264 do {
2265 if (dest1 && GET_CODE (dest1) == LABEL_REF
2266 && ((XEXP (dest1, 0)
2267 == loop_stack->data.loop.alt_end_label)
2268 || (XEXP (dest1, 0)
2269 == loop_stack->data.loop.end_label)))
2271 last_test_insn = potential_last_test;
2272 break;
2275 /* If this was a conditional jump, there may be
2276 another label at which we should look. */
2277 dest1 = dest2;
2278 dest2 = NULL_RTX;
2279 } while (dest1);
2283 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2285 /* We found one. Move everything from there up
2286 to the end of the loop, and add a jump into the loop
2287 to jump to there. */
2288 register rtx newstart_label = gen_label_rtx ();
2289 register rtx start_move = start_label;
2290 rtx next_insn;
2292 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2293 then we want to move this note also. */
2294 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2295 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2296 == NOTE_INSN_LOOP_CONT))
2297 start_move = PREV_INSN (start_move);
2299 emit_label_after (newstart_label, PREV_INSN (start_move));
2301 /* Actually move the insns. Start at the beginning, and
2302 keep copying insns until we've copied the
2303 last_test_insn. */
2304 for (insn = start_move; insn; insn = next_insn)
2306 /* Figure out which insn comes after this one. We have
2307 to do this before we move INSN. */
2308 if (insn == last_test_insn)
2309 /* We've moved all the insns. */
2310 next_insn = NULL_RTX;
2311 else
2312 next_insn = NEXT_INSN (insn);
2314 if (GET_CODE (insn) == NOTE
2315 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2316 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2317 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2318 NOTE_INSN_BLOCK_ENDs because the correct generation
2319 of debugging information depends on these appearing
2320 in the same order in the RTL and in the tree
2321 structure, where they are represented as BLOCKs.
2322 So, we don't move block notes. Of course, moving
2323 the code inside the block is likely to make it
2324 impossible to debug the instructions in the exit
2325 test, but such is the price of optimization. */
2326 continue;
2328 /* Move the INSN. */
2329 reorder_insns (insn, insn, get_last_insn ());
2332 emit_jump_insn_after (gen_jump (start_label),
2333 PREV_INSN (newstart_label));
2334 emit_barrier_after (PREV_INSN (newstart_label));
2335 start_label = newstart_label;
2339 if (needs_end_jump)
2341 emit_jump (start_label);
2342 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2344 emit_label (loop_stack->data.loop.end_label);
2346 POPSTACK (loop_stack);
2348 last_expr_type = 0;
2351 /* Generate a jump to the current loop's continue-point.
2352 This is usually the top of the loop, but may be specified
2353 explicitly elsewhere. If not currently inside a loop,
2354 return 0 and do nothing; caller will print an error message. */
2357 expand_continue_loop (whichloop)
2358 struct nesting *whichloop;
2360 last_expr_type = 0;
2361 if (whichloop == 0)
2362 whichloop = loop_stack;
2363 if (whichloop == 0)
2364 return 0;
2365 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2366 NULL_RTX);
2367 return 1;
2370 /* Generate a jump to exit the current loop. If not currently inside a loop,
2371 return 0 and do nothing; caller will print an error message. */
2374 expand_exit_loop (whichloop)
2375 struct nesting *whichloop;
2377 last_expr_type = 0;
2378 if (whichloop == 0)
2379 whichloop = loop_stack;
2380 if (whichloop == 0)
2381 return 0;
2382 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2383 return 1;
2386 /* Generate a conditional jump to exit the current loop if COND
2387 evaluates to zero. If not currently inside a loop,
2388 return 0 and do nothing; caller will print an error message. */
2391 expand_exit_loop_if_false (whichloop, cond)
2392 struct nesting *whichloop;
2393 tree cond;
2395 rtx label = gen_label_rtx ();
2396 rtx last_insn;
2397 last_expr_type = 0;
2399 if (whichloop == 0)
2400 whichloop = loop_stack;
2401 if (whichloop == 0)
2402 return 0;
2403 /* In order to handle fixups, we actually create a conditional jump
2404 around an unconditional branch to exit the loop. If fixups are
2405 necessary, they go before the unconditional branch. */
2408 do_jump (cond, NULL_RTX, label);
2409 last_insn = get_last_insn ();
2410 if (GET_CODE (last_insn) == CODE_LABEL)
2411 whichloop->data.loop.alt_end_label = last_insn;
2412 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2413 NULL_RTX);
2414 emit_label (label);
2416 return 1;
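/* For illustration only: the jump-around arrangement above means a loop
   exit test expands to

     if (cond) goto label;      (conditional jump around the exit)
     [fixup insns, if needed]
     goto end_label;            (unconditional exit from the loop)
     barrier
     label:

   which is exactly the shape that the first optimization in
   expand_end_loop looks for.  */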
2419 /* Return nonzero if the loop nest is empty. Else return zero. */
2422 stmt_loop_nest_empty ()
2424 return (loop_stack == NULL);
2427 /* Return non-zero if we should preserve sub-expressions as separate
2428 pseudos. We never do so if we aren't optimizing. We always do so
2429 if -fexpensive-optimizations.
2431 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2432 the loop may still be a small one. */
2435 preserve_subexpressions_p ()
2437 rtx insn;
2439 if (flag_expensive_optimizations)
2440 return 1;
2442 if (optimize == 0 || loop_stack == 0)
2443 return 0;
2445 insn = get_last_insn_anywhere ();
2447 return (insn
2448 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2449 < n_non_fixed_regs * 3));
2453 /* Generate a jump to exit the current loop, conditional, binding contour
2454 or case statement. Not all such constructs are visible to this function,
2455 only those started with EXIT_FLAG nonzero. Individual languages use
2456 the EXIT_FLAG parameter to control which kinds of constructs you can
2457 exit this way.
2459 If not currently inside anything that can be exited,
2460 return 0 and do nothing; caller will print an error message. */
2463 expand_exit_something ()
2465 struct nesting *n;
2466 last_expr_type = 0;
2467 for (n = nesting_stack; n; n = n->all)
2468 if (n->exit_label != 0)
2470 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2471 return 1;
2474 return 0;
2477 /* Generate RTL to return from the current function, with no value.
2478 (That is, we do not do anything about returning any value.) */
2480 void
2481 expand_null_return ()
2483 struct nesting *block = block_stack;
2484 rtx last_insn = 0;
2486 /* Does any pending block have cleanups? */
2488 while (block && block->data.block.cleanups == 0)
2489 block = block->next;
2491 /* If yes, use a goto to return, since that runs cleanups. */
2493 expand_null_return_1 (last_insn, block != 0);
2496 /* Generate RTL to return from the current function, with value VAL. */
2498 static void
2499 expand_value_return (val)
2500 rtx val;
2502 struct nesting *block = block_stack;
2503 rtx last_insn = get_last_insn ();
2504 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2506 /* Copy the value to the return location
2507 unless it's already there. */
2509 if (return_reg != val)
2511 #ifdef PROMOTE_FUNCTION_RETURN
2512 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2513 int unsignedp = TREE_UNSIGNED (type);
2514 enum machine_mode mode
2515 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2516 &unsignedp, 1);
2518 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2519 convert_move (return_reg, val, unsignedp);
2520 else
2521 #endif
2522 emit_move_insn (return_reg, val);
2524 if (GET_CODE (return_reg) == REG
2525 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2526 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2527 /* Handle calls that return values in multiple non-contiguous locations.
2528 The Irix 6 ABI has examples of this. */
2529 else if (GET_CODE (return_reg) == PARALLEL)
2531 int i;
2533 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2535 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2537 if (GET_CODE (x) == REG
2538 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2539 emit_insn (gen_rtx_USE (VOIDmode, x));
2543 /* Does any pending block have cleanups? */
2545 while (block && block->data.block.cleanups == 0)
2546 block = block->next;
2548 /* If yes, use a goto to return, since that runs cleanups.
2549 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2551 expand_null_return_1 (last_insn, block != 0);
2554 /* Output a return with no value. If LAST_INSN is nonzero,
2555 pretend that the return takes place after LAST_INSN.
2556 If USE_GOTO is nonzero then don't use a return instruction;
2557 go to the return label instead. This causes any cleanups
2558 of pending blocks to be executed normally. */
2560 static void
2561 expand_null_return_1 (last_insn, use_goto)
2562 rtx last_insn;
2563 int use_goto;
2565 rtx end_label = cleanup_label ? cleanup_label : return_label;
2567 clear_pending_stack_adjust ();
2568 do_pending_stack_adjust ();
2569 last_expr_type = 0;
2571 /* PCC-struct return always uses an epilogue. */
2572 if (current_function_returns_pcc_struct || use_goto)
2574 if (end_label == 0)
2575 end_label = return_label = gen_label_rtx ();
2576 expand_goto_internal (NULL_TREE, end_label, last_insn);
2577 return;
2580 /* Otherwise output a simple return-insn if one is available,
2581 unless it won't do the job. */
2582 #ifdef HAVE_return
2583 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2585 emit_jump_insn (gen_return ());
2586 emit_barrier ();
2587 return;
2589 #endif
2591 /* Otherwise jump to the epilogue. */
2592 expand_goto_internal (NULL_TREE, end_label, last_insn);
2595 /* Generate RTL to evaluate the expression RETVAL and return it
2596 from the current function. */
2598 void
2599 expand_return (retval)
2600 tree retval;
2602 /* If there are any cleanups to be performed, then they will
2603 be inserted following LAST_INSN. It is desirable
2604 that the last_insn, for such purposes, should be the
2605 last insn before computing the return value. Otherwise, cleanups
2606 which call functions can clobber the return value. */
2607 /* ??? rms: I think that is erroneous, because in C++ it would
2608 run destructors on variables that might be used in the subsequent
2609 computation of the return value. */
2610 rtx last_insn = 0;
2611 register rtx val = 0;
2612 register rtx op0;
2613 tree retval_rhs;
2614 int cleanups;
2616 /* If function wants no value, give it none. */
2617 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2619 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2620 emit_queue ();
2621 expand_null_return ();
2622 return;
2625 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2626 /* This is not sufficient. We also need to watch for cleanups of the
2627 expression we are about to expand. Unfortunately, we cannot know
2628 if it has cleanups until we expand it, and we want to change how we
2629 expand it depending upon whether we need cleanups. We can't win. */
2630 #if 0
2631 cleanups = any_pending_cleanups (1);
2632 #else
2633 cleanups = 1;
2634 #endif
2636 if (TREE_CODE (retval) == RESULT_DECL)
2637 retval_rhs = retval;
2638 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2639 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2640 retval_rhs = TREE_OPERAND (retval, 1);
2641 else if (TREE_TYPE (retval) == void_type_node)
2642 /* Recognize tail-recursive call to void function. */
2643 retval_rhs = retval;
2644 else
2645 retval_rhs = NULL_TREE;
2647 /* Only use `last_insn' if there are cleanups which must be run. */
2648 if (cleanups || cleanup_label != 0)
2649 last_insn = get_last_insn ();
2651 /* Distribute return down conditional expr if either of the sides
2652 may involve tail recursion (see test below). This enhances the number
2653 of tail recursions we see. Don't do this unconditionally, since it can
2654 produce sub-optimal code in some cases, and we already distribute
2655 assignments into conditional expressions when that would help. */
2657 if (optimize && retval_rhs != 0
2658 && frame_offset == 0
2659 && TREE_CODE (retval_rhs) == COND_EXPR
2660 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2661 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2663 rtx label = gen_label_rtx ();
2664 tree expr;
2666 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2667 start_cleanup_deferral ();
2668 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2669 DECL_RESULT (current_function_decl),
2670 TREE_OPERAND (retval_rhs, 1));
2671 TREE_SIDE_EFFECTS (expr) = 1;
2672 expand_return (expr);
2673 emit_label (label);
2675 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2676 DECL_RESULT (current_function_decl),
2677 TREE_OPERAND (retval_rhs, 2));
2678 TREE_SIDE_EFFECTS (expr) = 1;
2679 expand_return (expr);
2680 end_cleanup_deferral ();
2681 return;
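/* For illustration only: the distribution above turns

     return cond ? f (x) : g (x);

   into the equivalent of

     if (cond) return f (x); else return g (x);

   so that each arm can be recognized as a potential tail call.  */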
2684 /* Attempt to optimize the call if it is tail recursive. */
2685 if (optimize_tail_recursion (retval_rhs, last_insn))
2686 return;
2688 #ifdef HAVE_return
2689 /* This optimization is safe if there are local cleanups
2690 because expand_null_return takes care of them.
2691 ??? I think it should also be safe when there is a cleanup label,
2692 because expand_null_return takes care of them, too.
2693 Any reason why not? */
2694 if (HAVE_return && cleanup_label == 0
2695 && ! current_function_returns_pcc_struct
2696 && BRANCH_COST <= 1)
2698 /* If this is return x == y; then generate
2699 if (x == y) return 1; else return 0;
2700 if we can do it with explicit return insns and branches are cheap,
2701 but not if we have the corresponding scc insn. */
2702 int has_scc = 0;
2703 if (retval_rhs)
2704 switch (TREE_CODE (retval_rhs))
2706 case EQ_EXPR:
2707 #ifdef HAVE_seq
2708 has_scc = HAVE_seq;
2709 #endif
2710 case NE_EXPR:
2711 #ifdef HAVE_sne
2712 has_scc = HAVE_sne;
2713 #endif
2714 case GT_EXPR:
2715 #ifdef HAVE_sgt
2716 has_scc = HAVE_sgt;
2717 #endif
2718 case GE_EXPR:
2719 #ifdef HAVE_sge
2720 has_scc = HAVE_sge;
2721 #endif
2722 case LT_EXPR:
2723 #ifdef HAVE_slt
2724 has_scc = HAVE_slt;
2725 #endif
2726 case LE_EXPR:
2727 #ifdef HAVE_sle
2728 has_scc = HAVE_sle;
2729 #endif
2730 case TRUTH_ANDIF_EXPR:
2731 case TRUTH_ORIF_EXPR:
2732 case TRUTH_AND_EXPR:
2733 case TRUTH_OR_EXPR:
2734 case TRUTH_NOT_EXPR:
2735 case TRUTH_XOR_EXPR:
2736 if (! has_scc)
2738 op0 = gen_label_rtx ();
2739 jumpifnot (retval_rhs, op0);
2740 expand_value_return (const1_rtx);
2741 emit_label (op0);
2742 expand_value_return (const0_rtx);
2743 return;
2745 break;
2747 default:
2748 break;
2751 #endif /* HAVE_return */
2753 /* If the result is an aggregate that is being returned in one (or more)
2754 registers, load the registers here. The compiler currently can't handle
2755 copying a BLKmode value into registers. We could put this code in a
2756 more general area (for use by everyone instead of just function
2757 call/return), but until this feature is generally usable it is kept here
2758 (and in expand_call). The value must go into a pseudo in case there
2759 are cleanups that will clobber the real return register. */
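/* For illustration only (a hypothetical ABI layout chosen just for the
   example): a function such as

     struct six { char c[6]; };
     struct six f (void);

   may have a BLKmode return value that the ABI nevertheless returns in
   registers; the loop below copies it into word_mode pseudos one
   bitfield at a time.  */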
2761 if (retval_rhs != 0
2762 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2763 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2765 int i, bitpos, xbitpos;
2766 int big_endian_correction = 0;
2767 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2768 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2769 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2770 (unsigned int)BITS_PER_WORD);
2771 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2772 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2773 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2774 enum machine_mode tmpmode, result_reg_mode;
2776 /* Structures whose size is not a multiple of a word are aligned
2777 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2778 machine, this means we must skip the empty high order bytes when
2779 calculating the bit offset. */
2780 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2781 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2782 * BITS_PER_UNIT));
2784 /* Copy the structure BITSIZE bits at a time. */
2785 for (bitpos = 0, xbitpos = big_endian_correction;
2786 bitpos < bytes * BITS_PER_UNIT;
2787 bitpos += bitsize, xbitpos += bitsize)
2789 /* We need a new destination pseudo each time xbitpos is
2790 on a word boundary and when xbitpos == big_endian_correction
2791 (the first time through). */
2792 if (xbitpos % BITS_PER_WORD == 0
2793 || xbitpos == big_endian_correction)
2795 /* Generate an appropriate register. */
2796 dst = gen_reg_rtx (word_mode);
2797 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2799 /* Clobber the destination before we move anything into it. */
2800 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2803 /* We need a new source operand each time bitpos is on a word
2804 boundary. */
2805 if (bitpos % BITS_PER_WORD == 0)
2806 src = operand_subword_force (result_val,
2807 bitpos / BITS_PER_WORD,
2808 BLKmode);
2810 /* Use bitpos for the source extraction (left justified) and
2811 xbitpos for the destination store (right justified). */
2812 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2813 extract_bit_field (src, bitsize,
2814 bitpos % BITS_PER_WORD, 1,
2815 NULL_RTX, word_mode,
2816 word_mode,
2817 bitsize / BITS_PER_UNIT,
2818 BITS_PER_WORD),
2819 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2822 /* Find the smallest integer mode large enough to hold the
2823 entire structure and use that mode instead of BLKmode
2824 on the USE insn for the return register. */
2825 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2826 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2827 tmpmode != MAX_MACHINE_MODE;
2828 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2830 /* Have we found a large enough mode? */
2831 if (GET_MODE_SIZE (tmpmode) >= bytes)
2832 break;
2835 /* No suitable mode found. */
2836 if (tmpmode == MAX_MACHINE_MODE)
2837 abort ();
2839 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2841 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2842 result_reg_mode = word_mode;
2843 else
2844 result_reg_mode = tmpmode;
2845 result_reg = gen_reg_rtx (result_reg_mode);
2847 emit_queue ();
2848 for (i = 0; i < n_regs; i++)
2849 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2850 result_pseudos[i]);
2852 if (tmpmode != result_reg_mode)
2853 result_reg = gen_lowpart (tmpmode, result_reg);
2855 expand_value_return (result_reg);
2857 else if (cleanups
2858 && retval_rhs != 0
2859 && TREE_TYPE (retval_rhs) != void_type_node
2860 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2862 /* Calculate the return value into a pseudo reg. */
2863 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2864 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2865 val = force_not_mem (val);
2866 emit_queue ();
2867 /* Return the calculated value, doing cleanups first. */
2868 expand_value_return (val);
2870 else
2872 /* No cleanups or no hard reg used;
2873 calculate value into hard return reg. */
2874 expand_expr (retval, const0_rtx, VOIDmode, 0);
2875 emit_queue ();
2876 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2880 /* Return 1 if the end of the generated RTX is not a barrier.
2881 This means code already compiled can drop through. */
2884 drop_through_at_end_p ()
2886 rtx insn = get_last_insn ();
2887 while (insn && GET_CODE (insn) == NOTE)
2888 insn = PREV_INSN (insn);
2889 return insn && GET_CODE (insn) != BARRIER;
2892 /* Test CALL_EXPR to determine if it is a potential tail recursion call
2893 and emit code to optimize the tail recursion. LAST_INSN indicates where
2894 to place the jump to the tail recursion label. Return TRUE if the
2895 call was optimized into a goto.
2897 This is only used by expand_return, but expand_call is expected to
2898 use it soon. */
2901 optimize_tail_recursion (call_expr, last_insn)
2902 tree call_expr;
2903 rtx last_insn;
2905 /* For tail-recursive call to current function,
2906 just jump back to the beginning.
2907 It's unsafe if any auto variable in this function
2908 has its address taken; for simplicity,
2909 require stack frame to be empty. */
2910 if (optimize && call_expr != 0
2911 && frame_offset == 0
2912 && TREE_CODE (call_expr) == CALL_EXPR
2913 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2914 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2915 /* Finish checking validity, and if valid emit code
2916 to set the argument variables for the new call. */
2917 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2918 DECL_ARGUMENTS (current_function_decl)))
2920 if (tail_recursion_label == 0)
2922 tail_recursion_label = gen_label_rtx ();
2923 emit_label_after (tail_recursion_label,
2924 tail_recursion_reentry);
2926 emit_queue ();
2927 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2928 emit_barrier ();
2929 return 1;
2932 return 0;
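/* For illustration only: in a function like

     int sum (int n, int acc)
     {
       if (n == 0)
         return acc;
       return sum (n - 1, acc + n);
     }

   the second return satisfies the tests above, so it compiles into
   assignments to the parameter registers followed by a jump to
   tail_recursion_label, with no actual call.  */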
2935 /* Emit code to alter this function's formal parms for a tail-recursive call.
2936 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2937 FORMALS is the chain of decls of formals.
2938 Return 1 if this can be done;
2939 otherwise return 0 and do not emit any code. */
2941 static int
2942 tail_recursion_args (actuals, formals)
2943 tree actuals, formals;
2945 register tree a = actuals, f = formals;
2946 register int i;
2947 register rtx *argvec;
2949 /* Check that number and types of actuals are compatible
2950 with the formals. This is not always true in valid C code.
2951 Also check that no formal needs to be addressable
2952 and that all formals are scalars. */
2954 /* Also count the args. */
2956 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2958 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2959 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2960 return 0;
2961 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2962 return 0;
2964 if (a != 0 || f != 0)
2965 return 0;
2967 /* Compute all the actuals. */
2969 argvec = (rtx *) alloca (i * sizeof (rtx));
2971 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2972 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2974 /* Find which actual values refer to current values of previous formals.
2975 Copy each of them now, before any formal is changed. */
2977 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2979 int copy = 0;
2980 register int j;
2981 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2982 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2983 { copy = 1; break; }
2984 if (copy)
2985 argvec[i] = copy_to_reg (argvec[i]);
2988 /* Store the values of the actuals into the formals. */
2990 for (f = formals, a = actuals, i = 0; f;
2991 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2993 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2994 emit_move_insn (DECL_RTL (f), argvec[i]);
2995 else
2996 convert_move (DECL_RTL (f), argvec[i],
2997 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3000 free_temp_slots ();
3001 return 1;
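/* For illustration only: the pre-copying above matters for a call like

     int f (int a, int b) { ... return f (b, a); ... }

   where the actual for the second formal is the first formal `a'; its
   value is copied to a fresh register before `a' is overwritten with
   `b''s value, so the first store cannot clobber a value that is still
   needed.  */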
3004 /* Generate the RTL code for entering a binding contour.
3005 The variables are declared one by one, by calls to `expand_decl'.
3007 EXIT_FLAG is nonzero if this construct should be visible to
3008 `exit_something'. */
3010 void
3011 expand_start_bindings (exit_flag)
3012 int exit_flag;
3014 struct nesting *thisblock = ALLOC_NESTING ();
3015 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3017 /* Make an entry on block_stack for the block we are entering. */
3019 thisblock->next = block_stack;
3020 thisblock->all = nesting_stack;
3021 thisblock->depth = ++nesting_depth;
3022 thisblock->data.block.stack_level = 0;
3023 thisblock->data.block.cleanups = 0;
3024 thisblock->data.block.function_call_count = 0;
3025 thisblock->data.block.exception_region = 0;
3026 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3028 thisblock->data.block.conditional_code = 0;
3029 thisblock->data.block.last_unconditional_cleanup = note;
3030 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3032 if (block_stack
3033 && !(block_stack->data.block.cleanups == NULL_TREE
3034 && block_stack->data.block.outer_cleanups == NULL_TREE))
3035 thisblock->data.block.outer_cleanups
3036 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3037 block_stack->data.block.outer_cleanups);
3038 else
3039 thisblock->data.block.outer_cleanups = 0;
3040 thisblock->data.block.label_chain = 0;
3041 thisblock->data.block.innermost_stack_block = stack_block_stack;
3042 thisblock->data.block.first_insn = note;
3043 thisblock->data.block.block_start_count = ++block_start_count;
3044 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3045 block_stack = thisblock;
3046 nesting_stack = thisblock;
3048 /* Make a new level for allocating stack slots. */
3049 push_temp_slots ();
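/* For illustration only, a sketch (not actual front-end code) of the
   calling sequence for a braced block such as

     { int v = f (); use (v); }

   namely expand_start_bindings (0), then expand_decl and
   expand_decl_init for `v', the statements of the block, and finally
   expand_end_bindings.  */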
3052 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3053 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3054 expand_expr are made. After we end the region, we know that all
3055 space for all temporaries that were created by TARGET_EXPRs will be
3056 destroyed and their space freed for reuse. */
3058 void
3059 expand_start_target_temps ()
3061 /* This is so that even if the result is preserved, the space
3062 allocated will be freed, as we know that it is no longer in use. */
3063 push_temp_slots ();
3065 /* Start a new binding layer that will keep track of all cleanup
3066 actions to be performed. */
3067 expand_start_bindings (0);
3069 target_temp_slot_level = temp_slot_level;
3072 void
3073 expand_end_target_temps ()
3075 expand_end_bindings (NULL_TREE, 0, 0);
3077 /* This is so that even if the result is preserved, the space
3078 allocated will be freed, as we know that it is no longer in use. */
3079 pop_temp_slots ();
3082 /* Mark top block of block_stack as an implicit binding for an
3083 exception region. This is used to prevent infinite recursion when
3084 ending a binding with expand_end_bindings. It is only ever called
3085 by expand_eh_region_start, as that is the only way to create a
3086 block stack for an exception region. */
3088 void
3089 mark_block_as_eh_region ()
3091 block_stack->data.block.exception_region = 1;
3092 if (block_stack->next
3093 && block_stack->next->data.block.conditional_code)
3095 block_stack->data.block.conditional_code
3096 = block_stack->next->data.block.conditional_code;
3097 block_stack->data.block.last_unconditional_cleanup
3098 = block_stack->next->data.block.last_unconditional_cleanup;
3099 block_stack->data.block.cleanup_ptr
3100 = block_stack->next->data.block.cleanup_ptr;
3104 /* True if we are currently emitting insns in an area of output code
3105 that is controlled by a conditional expression. This is used by
3106 the cleanup handling code to generate conditional cleanup actions. */
3109 conditional_context ()
3111 return block_stack && block_stack->data.block.conditional_code;
3114 /* Mark top block of block_stack as not for an implicit binding for an
3115 exception region. This is only ever done by expand_eh_region_end
3116 to let expand_end_bindings know that it is being called explicitly
3117 to end just the binding layer associated with the exception region;
3118 otherwise expand_end_bindings would try to end all implicit binding
3119 layers for exception regions, and then one normal binding
3120 layer. */
3122 void
3123 mark_block_as_not_eh_region ()
3125 block_stack->data.block.exception_region = 0;
3128 /* True if the top block of block_stack was marked as for an exception
3129 region by mark_block_as_eh_region. */
3132 is_eh_region ()
3134 return block_stack && block_stack->data.block.exception_region;
3137 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3138 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3139 BLOCK node. */
3141 void
3142 remember_end_note (block)
3143 register tree block;
3145 BLOCK_END_NOTE (block) = last_block_end_note;
3146 last_block_end_note = NULL_RTX;
3149 /* Emit a handler label for a nonlocal goto handler.
3150 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3152 static rtx
3153 expand_nl_handler_label (slot, before_insn)
3154 rtx slot, before_insn;
3156 rtx insns;
3157 rtx handler_label = gen_label_rtx ();
3159 /* Don't let jump_optimize delete the handler. */
3160 LABEL_PRESERVE_P (handler_label) = 1;
3162 start_sequence ();
3163 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3164 insns = get_insns ();
3165 end_sequence ();
3166 emit_insns_before (insns, before_insn);
3168 emit_label (handler_label);
3170 return handler_label;
3173 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3174 handler. */
3175 static void
3176 expand_nl_goto_receiver ()
3178 #ifdef HAVE_nonlocal_goto
3179 if (! HAVE_nonlocal_goto)
3180 #endif
3181 /* First adjust our frame pointer to its actual value. It was
3182 previously set to the start of the virtual area corresponding to
3183 the stacked variables when we branched here and now needs to be
3184 adjusted to the actual hardware fp value.
3186 Assignments to virtual registers are converted by
3187 instantiate_virtual_regs into the corresponding assignment
3188 to the underlying register (fp in this case) that makes
3189 the original assignment true.
3190 So the following insn will actually be
3191 decrementing fp by STARTING_FRAME_OFFSET. */
3192 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3194 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3195 if (fixed_regs[ARG_POINTER_REGNUM])
3197 #ifdef ELIMINABLE_REGS
3198 /* If the argument pointer can be eliminated in favor of the
3199 frame pointer, we don't need to restore it. We assume here
3200 that if such an elimination is present, it can always be used.
3201 This is the case on all known machines; if we don't make this
3202 assumption, we do unnecessary saving on many machines. */
3203 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3204 size_t i;
3206 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3207 if (elim_regs[i].from == ARG_POINTER_REGNUM
3208 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3209 break;
3211 if (i == sizeof elim_regs / sizeof elim_regs [0])
3212 #endif
3214 /* Now restore our arg pointer from the address at which it
3215 was saved in our stack frame.
3216 If there hasn't been space allocated for it yet, make
3217 some now. */
3218 if (arg_pointer_save_area == 0)
3219 arg_pointer_save_area
3220 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3221 emit_move_insn (virtual_incoming_args_rtx,
3222 /* We need a pseudo here, or else
3223 instantiate_virtual_regs_1 complains. */
3224 copy_to_reg (arg_pointer_save_area));
3227 #endif
3229 #ifdef HAVE_nonlocal_goto_receiver
3230 if (HAVE_nonlocal_goto_receiver)
3231 emit_insn (gen_nonlocal_goto_receiver ());
3232 #endif
3235 /* Make handlers for nonlocal gotos taking place in the function calls in
3236 block THISBLOCK. */
3238 static void
3239 expand_nl_goto_receivers (thisblock)
3240 struct nesting *thisblock;
3242 tree link;
3243 rtx afterward = gen_label_rtx ();
3244 rtx insns, slot;
3245 rtx label_list;
3246 int any_invalid;
3248 /* Record the handler address in the stack slot for that purpose,
3249 during this block, saving and restoring the outer value. */
3250 if (thisblock->next != 0)
3251 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3253 rtx save_receiver = gen_reg_rtx (Pmode);
3254 emit_move_insn (XEXP (slot, 0), save_receiver);
3256 start_sequence ();
3257 emit_move_insn (save_receiver, XEXP (slot, 0));
3258 insns = get_insns ();
3259 end_sequence ();
3260 emit_insns_before (insns, thisblock->data.block.first_insn);
3263 /* Jump around the handlers; they run only when specially invoked. */
3264 emit_jump (afterward);
3266 /* Make a separate handler for each label. */
3267 link = nonlocal_labels;
3268 slot = nonlocal_goto_handler_slots;
3269 label_list = NULL_RTX;
3270 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3271 /* Skip any labels we shouldn't be able to jump to from here;
3272 we generate one special handler for all of them below, which
3273 just calls abort. */
3274 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3276 rtx lab;
3277 lab = expand_nl_handler_label (XEXP (slot, 0),
3278 thisblock->data.block.first_insn);
3279 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3281 expand_nl_goto_receiver ();
3283 /* Jump to the "real" nonlocal label. */
3284 expand_goto (TREE_VALUE (link));
3287 /* A second pass over all nonlocal labels; this time we handle those
3288 we should not be able to jump to at this point. */
3289 link = nonlocal_labels;
3290 slot = nonlocal_goto_handler_slots;
3291 any_invalid = 0;
3292 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3293 if (DECL_TOO_LATE (TREE_VALUE (link)))
3295 rtx lab;
3296 lab = expand_nl_handler_label (XEXP (slot, 0),
3297 thisblock->data.block.first_insn);
3298 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3299 any_invalid = 1;
3302 if (any_invalid)
3304 expand_nl_goto_receiver ();
3305 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3306 VOIDmode, 0);
3307 emit_barrier ();
3310 nonlocal_goto_handler_labels = label_list;
3311 emit_label (afterward);
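/* For illustration only: these receivers serve GNU C nested functions
   that jump to a label in their containing function, e.g.

     void outer (void)
     {
       __label__ done;
       void inner (void) { goto done; }
       walk (inner);
      done:
       ;
     }

   Each label reachable this way gets a handler that restores the frame
   and argument pointers before jumping to the real label (`walk' is a
   hypothetical function taking a function pointer).  */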
3314 /* Generate RTL code to terminate a binding contour.
3316 VARS is the chain of VAR_DECL nodes for the variables bound in this
3317 contour. There may actually be other nodes in this chain, but any
3318 nodes other than VAR_DECLS are ignored.
3320 MARK_ENDS is nonzero if we should put a note at the beginning
3321 and end of this binding contour.
3323 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3324 (That is true automatically if the contour has a saved stack level.) */
3326 void
3327 expand_end_bindings (vars, mark_ends, dont_jump_in)
3328 tree vars;
3329 int mark_ends;
3330 int dont_jump_in;
3332 register struct nesting *thisblock;
3333 register tree decl;
3335 while (block_stack->data.block.exception_region)
3337 /* Because we don't need or want a new temporary level and
3338 because we didn't create one in expand_eh_region_start,
3339 create a fake one now to avoid removing one in
3340 expand_end_bindings. */
3341 push_temp_slots ();
3343 block_stack->data.block.exception_region = 0;
3345 expand_end_bindings (NULL_TREE, 0, 0);
3348 /* Since expand_eh_region_start does an expand_start_bindings, we
3349 have to first end all the bindings that were created by
3350 expand_eh_region_start. */
3352 thisblock = block_stack;
3354 if (warn_unused)
3355 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3356 if (TREE_CODE (decl) == VAR_DECL
3357 && ! TREE_USED (decl)
3358 && ! DECL_IN_SYSTEM_HEADER (decl)
3359 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3360 warning_with_decl (decl, "unused variable `%s'");
3362 if (thisblock->exit_label)
3364 do_pending_stack_adjust ();
3365 emit_label (thisblock->exit_label);
3368 /* If necessary, make handlers for nonlocal gotos taking
3369 place in the function calls in this block. */
3370 if (function_call_count != thisblock->data.block.function_call_count
3371 && nonlocal_labels
3372 /* Make handler for outermost block
3373 if there were any nonlocal gotos to this function. */
3374 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3375 /* Make handler for inner block if it has something
3376 special to do when you jump out of it. */
3377 : (thisblock->data.block.cleanups != 0
3378 || thisblock->data.block.stack_level != 0)))
3379 expand_nl_goto_receivers (thisblock);
3381 /* Don't allow jumping into a block that has a stack level.
3382 Cleanups are allowed, though. */
3383 if (dont_jump_in
3384 || thisblock->data.block.stack_level != 0)
3386 struct label_chain *chain;
3388 /* Any labels in this block are no longer valid to go to.
3389 Mark them to cause an error message. */
3390 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3392 DECL_TOO_LATE (chain->label) = 1;
3393 /* If any goto without a fixup came to this label,
3394 that must be an error, because gotos without fixups
3395 come from outside all saved stack-levels. */
3396 if (TREE_ADDRESSABLE (chain->label))
3397 error_with_decl (chain->label,
3398 "label `%s' used before containing binding contour");
3402 /* Restore stack level in effect before the block
3403 (only if variable-size objects allocated). */
3404 /* Perform any cleanups associated with the block. */
3406 if (thisblock->data.block.stack_level != 0
3407 || thisblock->data.block.cleanups != 0)
3409 /* Only clean up here if this point can actually be reached. */
3410 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3412 /* Don't let cleanups affect ({...}) constructs. */
3413 int old_expr_stmts_for_value = expr_stmts_for_value;
3414 rtx old_last_expr_value = last_expr_value;
3415 tree old_last_expr_type = last_expr_type;
3416 expr_stmts_for_value = 0;
3418 /* Do the cleanups. */
3419 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3420 if (reachable)
3421 do_pending_stack_adjust ();
3423 expr_stmts_for_value = old_expr_stmts_for_value;
3424 last_expr_value = old_last_expr_value;
3425 last_expr_type = old_last_expr_type;
3427 /* Restore the stack level. */
3429 if (reachable && thisblock->data.block.stack_level != 0)
3431 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3432 thisblock->data.block.stack_level, NULL_RTX);
3433 if (nonlocal_goto_handler_slots != 0)
3434 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3435 NULL_RTX);
3438 /* Any gotos out of this block must also do these things.
3439 Also report any gotos with fixups that came to labels in this
3440 level. */
3441 fixup_gotos (thisblock,
3442 thisblock->data.block.stack_level,
3443 thisblock->data.block.cleanups,
3444 thisblock->data.block.first_insn,
3445 dont_jump_in);
3448 /* Mark the beginning and end of the scope if requested.
3449 We do this now, after running cleanups on the variables
3450 just going out of scope, so they are in scope for their cleanups. */
3452 if (mark_ends)
3453 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3454 else
3455 /* Get rid of the beginning-mark if we don't make an end-mark. */
3456 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3458 /* If doing stupid register allocation, make sure lives of all
3459 register variables declared here extend thru end of scope. */
3461 if (obey_regdecls)
3462 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3463 if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
3464 use_variable (DECL_RTL (decl));
3466 /* Restore the temporary level of TARGET_EXPRs. */
3467 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3469 /* Restore block_stack level for containing block. */
3471 stack_block_stack = thisblock->data.block.innermost_stack_block;
3472 POPSTACK (block_stack);
3474 /* Pop the stack slot nesting and free any slots at this level. */
3475 pop_temp_slots ();
3478 /* Generate RTL for the automatic variable declaration DECL.
3479 (Other kinds of declarations are simply ignored if seen here.) */
3481 void
3482 expand_decl (decl)
3483 register tree decl;
3485 struct nesting *thisblock = block_stack;
3486 tree type;
3488 type = TREE_TYPE (decl);
3490 /* Only automatic variables need any expansion done.
3491 Static and external variables, and external functions,
3492 will be handled by `assemble_variable' (called from finish_decl).
3493 TYPE_DECL and CONST_DECL require nothing.
3494 PARM_DECLs are handled in `assign_parms'. */
3496 if (TREE_CODE (decl) != VAR_DECL)
3497 return;
3498 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3499 return;
3501 /* Create the RTL representation for the variable. */
3503 if (type == error_mark_node)
3504 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3505 else if (DECL_SIZE (decl) == 0)
3506 /* Variable with incomplete type. */
3508 if (DECL_INITIAL (decl) == 0)
3509 /* Error message was already done; now avoid a crash. */
3510 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3511 else
3512 /* An initializer is going to decide the size of this array.
3513 Until we know the size, represent its address with a reg. */
3514 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3515 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3517 else if (DECL_MODE (decl) != BLKmode
3518 /* If -ffloat-store, don't put explicit float vars
3519 into regs. */
3520 && !(flag_float_store
3521 && TREE_CODE (type) == REAL_TYPE)
3522 && ! TREE_THIS_VOLATILE (decl)
3523 && ! TREE_ADDRESSABLE (decl)
3524 && (DECL_REGISTER (decl) || ! obey_regdecls)
3525 /* if -fcheck-memory-usage, check all variables. */
3526 && ! current_function_check_memory_usage)
3528 /* Automatic variable that can go in a register. */
3529 int unsignedp = TREE_UNSIGNED (type);
3530 enum machine_mode reg_mode
3531 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3533 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3534 mark_user_reg (DECL_RTL (decl));
3536 if (POINTER_TYPE_P (type))
3537 mark_reg_pointer (DECL_RTL (decl),
3538 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3539 / BITS_PER_UNIT));
3542 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3543 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3544 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3545 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3546 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3548 /* Variable of fixed size that goes on the stack. */
3549 rtx oldaddr = 0;
3550 rtx addr;
3552 /* If we previously made RTL for this decl, it must be an array
3553 whose size was determined by the initializer.
3554 The old address was a register; set that register now
3555 to the proper address. */
3556 if (DECL_RTL (decl) != 0)
3558 if (GET_CODE (DECL_RTL (decl)) != MEM
3559 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3560 abort ();
3561 oldaddr = XEXP (DECL_RTL (decl), 0);
3564 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3565 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3566 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3568 /* Set alignment we actually gave this decl. */
3569 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3570 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3572 if (oldaddr)
3574 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3575 if (addr != oldaddr)
3576 emit_move_insn (oldaddr, addr);
3579 /* If this is a memory ref that contains aggregate components,
3580 mark it as such for cse and loop optimize. */
3581 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3582 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3583 #if 0
3584 /* If this is in memory because of -ffloat-store,
3585 set the volatile bit, to prevent optimizations from
3586 undoing the effects. */
3587 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3588 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3589 #endif
3591 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3593 else
3594 /* Dynamic-size object: must push space on the stack. */
3596 rtx address, size;
3598 /* Record the stack pointer on entry to the block, if we have
3599 not already done so. */
3600 if (thisblock->data.block.stack_level == 0)
3602 do_pending_stack_adjust ();
3603 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3604 &thisblock->data.block.stack_level,
3605 thisblock->data.block.first_insn);
3606 stack_block_stack = thisblock;
3609 /* Compute the variable's size, in bytes. */
3610 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3611 DECL_SIZE (decl),
3612 size_int (BITS_PER_UNIT)),
3613 NULL_RTX, VOIDmode, 0);
3614 free_temp_slots ();
3616 /* Allocate space on the stack for the variable. Note that
3617 DECL_ALIGN says how the variable is to be aligned and we
3618 cannot use it to conclude anything about the alignment of
3619 the size. */
3620 address = allocate_dynamic_stack_space (size, NULL_RTX,
3621 TYPE_ALIGN (TREE_TYPE (decl)));
3623 /* Reference the variable indirectly through that rtx. */
3624 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3626 /* If this is a memory ref that contains aggregate components,
3627 mark it as such for cse and loop optimize. */
3628 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3629 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3631 /* Indicate the alignment we actually gave this variable. */
3632 #ifdef STACK_BOUNDARY
3633 DECL_ALIGN (decl) = STACK_BOUNDARY;
3634 #else
3635 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3636 #endif
3639 if (TREE_THIS_VOLATILE (decl))
3640 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3641 #if 0 /* A variable is not necessarily unchanging
3642 just because it is const. RTX_UNCHANGING_P
3643 means no change in the function,
3644 not merely no change in the variable's scope.
3645 It is correct to set RTX_UNCHANGING_P if the variable's scope
3646 is the whole function. There's no convenient way to test that. */
3647 if (TREE_READONLY (decl))
3648 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3649 #endif
3651 /* If doing stupid register allocation, make sure the life of any
3652 register variable starts here, at the start of its scope. */
3654 if (obey_regdecls)
3655 use_variable (DECL_RTL (decl));
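/* Editorial illustration (hedged sketch, not part of the compiler):
   user-level GNU C showing the three allocation paths taken by
   expand_decl above.  The function and variable names are
   hypothetical.  */
#if 0
void
example (int n)
{
  int counter = 0;	/* Non-volatile, non-addressable scalar: may be
			   promoted and given a pseudo register via
			   gen_reg_rtx.  */
  char buffer[64];	/* Fixed INTEGER_CST size: a stack slot from
			   assign_temp.  */
  char vla[n];		/* Dynamic size: stack space pushed at run time
			   by allocate_dynamic_stack_space.  */
  vla[0] = buffer[0] = (char) counter;
}
#endif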
3660 /* Emit code to perform the initialization of a declaration DECL. */
3662 void
3663 expand_decl_init (decl)
3664 tree decl;
3666 int was_used = TREE_USED (decl);
3668 /* If this is a CONST_DECL, we don't have to generate any code, but
3669 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3670 to be set while in the obstack containing the constant. If we don't
3671 do this, we can lose if we have functions nested three deep and the middle
3672 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3673 the innermost function is the first to expand that STRING_CST. */
3674 if (TREE_CODE (decl) == CONST_DECL)
3676 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3677 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3678 EXPAND_INITIALIZER);
3679 return;
3682 if (TREE_STATIC (decl))
3683 return;
3685 /* Compute and store the initial value now. */
3687 if (DECL_INITIAL (decl) == error_mark_node)
3689 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3691 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3692 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3693 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3694 0, 0);
3695 emit_queue ();
3697 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3699 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3700 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3701 emit_queue ();
3704 /* Don't let the initialization count as "using" the variable. */
3705 TREE_USED (decl) = was_used;
3707 /* Free any temporaries we made while initializing the decl. */
3708 preserve_temp_slots (NULL_RTX);
3709 free_temp_slots ();
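/* Editorial illustration (hedged): what expand_decl_init does for a
   simple automatic initialization.  The names are hypothetical.  */
#if 0
int get_value (void);
void
example (void)
{
  int i = get_value ();	/* DECL_INITIAL (i) is the CALL_EXPR; it is
			   expanded as the assignment `i = get_value ()'
			   followed by emit_queue, and TREE_USED (i) is
			   restored so the initialization alone does not
			   count as a use.  */
}
#endif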
3712 /* CLEANUP is an expression to be executed at exit from this binding contour;
3713 for example, in C++, it might call the destructor for this variable.
3715 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3716 CLEANUP multiple times, and have the correct semantics. This
3717 happens in exception handling, for gotos, returns, breaks that
3718 leave the current scope.
3720 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3721 that is not associated with any particular variable. */
3724 expand_decl_cleanup (decl, cleanup)
3725 tree decl, cleanup;
3727 struct nesting *thisblock = block_stack;
3729 /* Error if we are not in any block. */
3730 if (thisblock == 0)
3731 return 0;
3733 /* Record the cleanup if there is one. */
3735 if (cleanup != 0)
3737 tree t;
3738 rtx seq;
3739 tree *cleanups = &thisblock->data.block.cleanups;
3740 int cond_context = conditional_context ();
3742 if (cond_context)
3744 rtx flag = gen_reg_rtx (word_mode);
3745 rtx set_flag_0;
3746 tree cond;
3748 start_sequence ();
3749 emit_move_insn (flag, const0_rtx);
3750 set_flag_0 = get_insns ();
3751 end_sequence ();
3753 thisblock->data.block.last_unconditional_cleanup
3754 = emit_insns_after (set_flag_0,
3755 thisblock->data.block.last_unconditional_cleanup);
3757 emit_move_insn (flag, const1_rtx);
3759 /* All cleanups must be on the function_obstack. */
3760 push_obstacks_nochange ();
3761 resume_temporary_allocation ();
3763 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3764 DECL_RTL (cond) = flag;
3766 /* Conditionalize the cleanup. */
3767 cleanup = build (COND_EXPR, void_type_node,
3768 truthvalue_conversion (cond),
3769 cleanup, integer_zero_node);
3770 cleanup = fold (cleanup);
3772 pop_obstacks ();
3774 cleanups = thisblock->data.block.cleanup_ptr;
3777 /* All cleanups must be on the function_obstack. */
3778 push_obstacks_nochange ();
3779 resume_temporary_allocation ();
3780 cleanup = unsave_expr (cleanup);
3781 pop_obstacks ();
3783 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3785 if (! cond_context)
3786 /* If this block has a cleanup, it belongs in stack_block_stack. */
3787 stack_block_stack = thisblock;
3789 if (cond_context)
3791 start_sequence ();
3794 /* If this was optimized so that there is no exception region for the
3795 cleanup, then mark the TREE_LIST node, so that we can later tell
3796 if we need to call expand_eh_region_end. */
3797 if (! using_eh_for_cleanups_p
3798 || expand_eh_region_start_tree (decl, cleanup))
3799 TREE_ADDRESSABLE (t) = 1;
3800 /* If that started a new EH region, we're in a new block. */
3801 thisblock = block_stack;
3803 if (cond_context)
3805 seq = get_insns ();
3806 end_sequence ();
3807 if (seq)
3808 thisblock->data.block.last_unconditional_cleanup
3809 = emit_insns_after (seq,
3810 thisblock->data.block.last_unconditional_cleanup);
3812 else
3814 thisblock->data.block.last_unconditional_cleanup
3815 = get_last_insn ();
3816 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3819 return 1;
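/* Editorial sketch (hedged): the flag technique used above for a
   cleanup registered in a conditional context, written as roughly
   equivalent C.  `flag', `obj' and the functions are hypothetical.  */
#if 0
void construct_obj (int *);
void destroy_obj (int *);
void
example (int cond)
{
  int flag = 0;		/* set_flag_0, inserted at the last
			   unconditional point.  */
  int obj;
  if (cond)
    {
      construct_obj (&obj);
      flag = 1;		/* emitted where the cleanup is registered.  */
    }
  /* ... */
  if (flag)		/* the COND_EXPR wrapped around the cleanup.  */
    destroy_obj (&obj);
}
#endif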
3822 /* Like expand_decl_cleanup, but suppress generating an exception handler
3823 to perform the cleanup. */
3826 expand_decl_cleanup_no_eh (decl, cleanup)
3827 tree decl, cleanup;
3829 int save_eh = using_eh_for_cleanups_p;
3830 int result;
3832 using_eh_for_cleanups_p = 0;
3833 result = expand_decl_cleanup (decl, cleanup);
3834 using_eh_for_cleanups_p = save_eh;
3836 return result;
3839 /* Arrange for the top element of the dynamic cleanup chain to be
3840 popped if we exit the current binding contour. DECL is the
3841 associated declaration, if any, otherwise NULL_TREE. If the
3842 current contour is left via an exception, then __sjthrow will pop
3843 the top element off the dynamic cleanup chain. The code that, in
3844 the exceptional case, avoids performing the action we push onto the
3845 cleanup chain is contained in expand_cleanups.
3847 This routine is only used by expand_eh_region_start, and that is
3848 the only way in which an exception region should be started. This
3849 routine is only used when using the setjmp/longjmp codegen method
3850 for exception handling. */
3853 expand_dcc_cleanup (decl)
3854 tree decl;
3856 struct nesting *thisblock = block_stack;
3857 tree cleanup;
3859 /* Error if we are not in any block. */
3860 if (thisblock == 0)
3861 return 0;
3863 /* Record the cleanup for the dynamic handler chain. */
3865 /* All cleanups must be on the function_obstack. */
3866 push_obstacks_nochange ();
3867 resume_temporary_allocation ();
3868 cleanup = make_node (POPDCC_EXPR);
3869 pop_obstacks ();
3871 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3872 thisblock->data.block.cleanups
3873 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3875 /* If this block has a cleanup, it belongs in stack_block_stack. */
3876 stack_block_stack = thisblock;
3877 return 1;
3880 /* Arrange for the top element of the dynamic handler chain to be
3881 popped if we exit the current binding contour. DECL is the
3882 associated declaration, if any, otherwise NULL_TREE. If the current
3883 contour is left via an exception, then __sjthrow will pop the top
3884 element off the dynamic handler chain. The code that, in the
3885 exceptional case, avoids performing the action we push onto the
3886 handler chain is contained in expand_cleanups.
3888 This routine is only used by expand_eh_region_start, and that is
3889 the only way in which an exception region should be started. This
3890 routine is only used when using the setjmp/longjmp codegen method
3891 for exception handling. */
3894 expand_dhc_cleanup (decl)
3895 tree decl;
3897 struct nesting *thisblock = block_stack;
3898 tree cleanup;
3900 /* Error if we are not in any block. */
3901 if (thisblock == 0)
3902 return 0;
3904 /* Record the cleanup for the dynamic handler chain. */
3906 /* All cleanups must be on the function_obstack. */
3907 push_obstacks_nochange ();
3908 resume_temporary_allocation ();
3909 cleanup = make_node (POPDHC_EXPR);
3910 pop_obstacks ();
3912 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3913 thisblock->data.block.cleanups
3914 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3916 /* If this block has a cleanup, it belongs in stack_block_stack. */
3917 stack_block_stack = thisblock;
3918 return 1;
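/* Editorial sketch (hedged): with the setjmp/longjmp scheme, the
   dynamic handler chain is conceptually a global linked list of
   handler contexts.  The POPDHC_EXPR recorded above pops it on a
   normal exit from the contour; __sjthrow performs the same pop on an
   exceptional exit.  The declarations below are a hypothetical
   simplification, not the real runtime layout.  */
#if 0
struct sjlj_handler { struct sjlj_handler *next; /* jmp_buf, ... */ };
extern struct sjlj_handler *dynamic_handler_chain;
/* Effect of the recorded cleanup on normal exit:
   dynamic_handler_chain = dynamic_handler_chain->next;  */
#endif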
3921 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3922 DECL_ELTS is the list of elements that belong to DECL's type.
3923 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3925 void
3926 expand_anon_union_decl (decl, cleanup, decl_elts)
3927 tree decl, cleanup, decl_elts;
3929 struct nesting *thisblock = block_stack;
3930 rtx x;
3932 expand_decl (decl);
3933 expand_decl_cleanup (decl, cleanup);
3934 x = DECL_RTL (decl);
3936 while (decl_elts)
3938 tree decl_elt = TREE_VALUE (decl_elts);
3939 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3940 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3942 /* Propagate the union's alignment to the elements. */
3943 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3945 /* If the element has BLKmode and the union doesn't, the union is
3946 aligned such that the element doesn't need to have BLKmode, so
3947 change the element's mode to the appropriate one for its size. */
3948 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3949 DECL_MODE (decl_elt) = mode
3950 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3951 MODE_INT, 1);
3953 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3954 instead create a new MEM rtx with the proper mode. */
3955 if (GET_CODE (x) == MEM)
3957 if (mode == GET_MODE (x))
3958 DECL_RTL (decl_elt) = x;
3959 else
3961 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3962 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
3963 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3966 else if (GET_CODE (x) == REG)
3968 if (mode == GET_MODE (x))
3969 DECL_RTL (decl_elt) = x;
3970 else
3971 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3973 else
3974 abort ();
3976 /* Record the cleanup if there is one. */
3978 if (cleanup != 0)
3979 thisblock->data.block.cleanups
3980 = temp_tree_cons (decl_elt, cleanup_elt,
3981 thisblock->data.block.cleanups);
3983 decl_elts = TREE_CHAIN (decl_elts);
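/* Editorial illustration (hedged): the construct this function
   expands, in C++-style source.  Both members share the union's rtx,
   re-expressed in each member's own machine mode.  Names are
   hypothetical.  */
#if 0
void
example (void)
{
  union { int i; float f; };	/* one rtx for the whole union */
  i = 0;	/* DECL_RTL (i): the union's rtx in an integer mode */
  f = 1.0f;	/* DECL_RTL (f): the same rtx in a float mode */
}
#endif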
3987 /* Expand a list of cleanups LIST.
3988 Elements may be expressions or may be nested lists.
3990 If DONT_DO is nonnull, then any list-element
3991 whose TREE_PURPOSE matches DONT_DO is omitted.
3992 This is sometimes used to avoid a cleanup associated with
3993 a value that is being returned out of the scope.
3995 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3996 goto and handle protection regions specially in that case.
3998 If REACHABLE, we emit code; otherwise we just inform the exception
3999 handling code about this finalization. */
4001 static void
4002 expand_cleanups (list, dont_do, in_fixup, reachable)
4003 tree list;
4004 tree dont_do;
4005 int in_fixup;
4006 int reachable;
4008 tree tail;
4009 for (tail = list; tail; tail = TREE_CHAIN (tail))
4010 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4012 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4013 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4014 else
4016 if (! in_fixup)
4018 tree cleanup = TREE_VALUE (tail);
4020 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4021 if (TREE_CODE (cleanup) != POPDHC_EXPR
4022 && TREE_CODE (cleanup) != POPDCC_EXPR
4023 /* See expand_eh_region_start_tree for this case. */
4024 && ! TREE_ADDRESSABLE (tail))
4026 cleanup = protect_with_terminate (cleanup);
4027 expand_eh_region_end (cleanup);
4031 if (reachable)
4033 /* Cleanups may be run multiple times. For example,
4034 when exiting a binding contour, we expand the
4035 cleanups associated with that contour. When a goto
4036 within that binding contour has a target outside that
4037 contour, it will expand all cleanups from its scope to
4038 the target. Though the cleanups are expanded multiple
4039 times, the control paths are non-overlapping so the
4040 cleanups will not be executed twice. */
4042 /* We may need to protect fixups with rethrow regions. */
4043 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4045 if (protect)
4046 expand_fixup_region_start ();
4048 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4049 if (protect)
4050 expand_fixup_region_end (TREE_VALUE (tail));
4051 free_temp_slots ();
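/* Editorial illustration (hedged): why a cleanup can be expanded more
   than once.  In the hypothetical C++ fragment below, the destructor
   for `t' is expanded both on the goto path and at the normal end of
   the contour; the control paths do not overlap, so it still runs
   exactly once at run time.  */
#if 0
struct T { ~T (); };
void
example (int cond)
{
  {
    T t;
    if (cond)
      goto out;		/* cleanup for `t' expanded on this path...  */
  }			/* ...and again at the end of the contour.  */
 out:
  ;
}
#endif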
4057 /* Mark the context we are emitting RTL for as a conditional
4058 context, so that any cleanup actions we register with
4059 expand_decl_cleanup will be properly conditionalized when those
4060 cleanup actions are later performed. Must be called before any
4061 expression (tree) is expanded that is within a conditional context. */
4063 void
4064 start_cleanup_deferral ()
4066 /* block_stack can be NULL if we are inside the parameter list. It is
4067 OK to do nothing, because cleanups aren't possible here. */
4068 if (block_stack)
4069 ++block_stack->data.block.conditional_code;
4072 /* Mark the end of a conditional region of code. Because cleanup
4073 deferrals may be nested, we may still be in a conditional region
4074 after we end the currently deferred cleanups; only after we end all
4075 deferred cleanups are we back in unconditional code. */
4077 void
4078 end_cleanup_deferral ()
4080 /* block_stack can be NULL if we are inside the parameter list. It is
4081 OK to do nothing, because cleanups aren't possible here. */
4082 if (block_stack)
4083 --block_stack->data.block.conditional_code;
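/* Editorial illustration (hedged): a conditional context.  When the
   front end expands the arms of the hypothetical expression below, it
   brackets each arm with start_cleanup_deferral/end_cleanup_deferral,
   so a temporary created in one arm gets a conditionalized cleanup
   rather than an unconditional one.  */
#if 0
r = cond ? use_temp (make_temp ()) : other ();	/* make_temp's cleanup
						   must run only when the
						   first arm was taken.  */
#endif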
4086 /* Move all cleanups from the current block_stack
4087 to the containing block_stack, where they are assumed to
4088 have been created. If anything can cause a temporary to
4089 be created, but not expanded for more than one level of
4090 block_stacks, then this code will have to change. */
4092 void
4093 move_cleanups_up ()
4095 struct nesting *block = block_stack;
4096 struct nesting *outer = block->next;
4098 outer->data.block.cleanups
4099 = chainon (block->data.block.cleanups,
4100 outer->data.block.cleanups);
4101 block->data.block.cleanups = 0;
4104 tree
4105 last_cleanup_this_contour ()
4107 if (block_stack == 0)
4108 return 0;
4110 return block_stack->data.block.cleanups;
4113 /* Return 1 if there are any pending cleanups at this point.
4114 If THIS_CONTOUR is nonzero, check the current contour as well.
4115 Otherwise, look only at the contours that enclose this one. */
4118 any_pending_cleanups (this_contour)
4119 int this_contour;
4121 struct nesting *block;
4123 if (block_stack == 0)
4124 return 0;
4126 if (this_contour && block_stack->data.block.cleanups != NULL)
4127 return 1;
4128 if (block_stack->data.block.cleanups == 0
4129 && block_stack->data.block.outer_cleanups == 0)
4130 return 0;
4132 for (block = block_stack->next; block; block = block->next)
4133 if (block->data.block.cleanups != 0)
4134 return 1;
4136 return 0;
4139 /* Enter a case (Pascal) or switch (C) statement.
4140 Push a block onto case_stack and nesting_stack
4141 to accumulate the case-labels that are seen
4142 and to record the labels generated for the statement.
4144 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4145 Otherwise, this construct is transparent for `exit_something'.
4147 EXPR is the index-expression to be dispatched on.
4148 TYPE is its nominal type. We could simply convert EXPR to this type,
4149 but instead we take short cuts. */
4151 void
4152 expand_start_case (exit_flag, expr, type, printname)
4153 int exit_flag;
4154 tree expr;
4155 tree type;
4156 const char *printname;
4158 register struct nesting *thiscase = ALLOC_NESTING ();
4160 /* Make an entry on case_stack for the case we are entering. */
4162 thiscase->next = case_stack;
4163 thiscase->all = nesting_stack;
4164 thiscase->depth = ++nesting_depth;
4165 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4166 thiscase->data.case_stmt.case_list = 0;
4167 thiscase->data.case_stmt.index_expr = expr;
4168 thiscase->data.case_stmt.nominal_type = type;
4169 thiscase->data.case_stmt.default_label = 0;
4170 thiscase->data.case_stmt.num_ranges = 0;
4171 thiscase->data.case_stmt.printname = printname;
4172 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4173 case_stack = thiscase;
4174 nesting_stack = thiscase;
4176 do_pending_stack_adjust ();
4178 /* Make sure case_stmt.start points to something that won't
4179 need any transformation before expand_end_case. */
4180 if (GET_CODE (get_last_insn ()) != NOTE)
4181 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4183 thiscase->data.case_stmt.start = get_last_insn ();
4185 start_cleanup_deferral ();
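/* Editorial sketch (hedged): roughly the sequence of calls a front end
   makes for `switch (x) { case 1: ...; default: ...; }'.  The trees
   x_tree, label1, default_label and dup are hypothetical.  */
#if 0
expand_start_case (1, x_tree, TREE_TYPE (x_tree), "switch statement");
pushcase (build_int_2 (1, 0), convert, label1, &dup);
/* ... expand the body of `case 1' ... */
pushcase (NULL_TREE, convert, default_label, &dup);	/* default */
/* ... expand the body of `default' ... */
expand_end_case (x_tree);
#endif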
4189 /* Start a "dummy case statement" within which case labels are invalid
4190 and are not connected to any larger real case statement.
4191 This can be used if you don't want to let a case statement jump
4192 into the middle of certain kinds of constructs. */
4194 void
4195 expand_start_case_dummy ()
4197 register struct nesting *thiscase = ALLOC_NESTING ();
4199 /* Make an entry on case_stack for the dummy. */
4201 thiscase->next = case_stack;
4202 thiscase->all = nesting_stack;
4203 thiscase->depth = ++nesting_depth;
4204 thiscase->exit_label = 0;
4205 thiscase->data.case_stmt.case_list = 0;
4206 thiscase->data.case_stmt.start = 0;
4207 thiscase->data.case_stmt.nominal_type = 0;
4208 thiscase->data.case_stmt.default_label = 0;
4209 thiscase->data.case_stmt.num_ranges = 0;
4210 case_stack = thiscase;
4211 nesting_stack = thiscase;
4212 start_cleanup_deferral ();
4215 /* End a dummy case statement. */
4217 void
4218 expand_end_case_dummy ()
4220 end_cleanup_deferral ();
4221 POPSTACK (case_stack);
4224 /* Return the data type of the index-expression
4225 of the innermost case statement, or null if none. */
4227 tree
4228 case_index_expr_type ()
4230 if (case_stack)
4231 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4232 return 0;
4235 static void
4236 check_seenlabel ()
4238 /* If this is the first label, warn if any insns have been emitted. */
4239 if (case_stack->data.case_stmt.line_number_status >= 0)
4241 rtx insn;
4243 restore_line_number_status
4244 (case_stack->data.case_stmt.line_number_status);
4245 case_stack->data.case_stmt.line_number_status = -1;
4247 for (insn = case_stack->data.case_stmt.start;
4248 insn;
4249 insn = NEXT_INSN (insn))
4251 if (GET_CODE (insn) == CODE_LABEL)
4252 break;
4253 if (GET_CODE (insn) != NOTE
4254 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4257 insn = PREV_INSN (insn);
4258 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4260 /* If insn is zero, then there must have been a syntax error. */
4261 if (insn)
4262 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4263 NOTE_LINE_NUMBER (insn),
4264 "unreachable code at beginning of %s",
4265 case_stack->data.case_stmt.printname);
4266 break;
4272 /* Accumulate one case or default label inside a case or switch statement.
4273 VALUE is the value of the case (a null pointer, for a default label).
4274 The function CONVERTER, when applied to arguments T and V,
4275 converts the value V to the type T.
4277 If not currently inside a case or switch statement, return 1 and do
4278 nothing. The caller will print a language-specific error message.
4279 If VALUE is a duplicate or overlaps, return 2 and do nothing
4280 except store the (first) duplicate node in *DUPLICATE.
4281 If VALUE is out of range, return 3 and do nothing.
4282 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4283 Return 0 on success.
4285 Extended to handle range statements. */
4288 pushcase (value, converter, label, duplicate)
4289 register tree value;
4290 tree (*converter) PROTO((tree, tree));
4291 register tree label;
4292 tree *duplicate;
4294 tree index_type;
4295 tree nominal_type;
4297 /* Fail if not inside a real case statement. */
4298 if (! (case_stack && case_stack->data.case_stmt.start))
4299 return 1;
4301 if (stack_block_stack
4302 && stack_block_stack->depth > case_stack->depth)
4303 return 5;
4305 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4306 nominal_type = case_stack->data.case_stmt.nominal_type;
4308 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4309 if (index_type == error_mark_node)
4310 return 0;
4312 /* Convert VALUE to the type in which the comparisons are nominally done. */
4313 if (value != 0)
4314 value = (*converter) (nominal_type, value);
4316 check_seenlabel ();
4318 /* Fail if this value is out of range for the actual type of the index
4319 (which may be narrower than NOMINAL_TYPE). */
4320 if (value != 0 && ! int_fits_type_p (value, index_type))
4321 return 3;
4323 /* Fail if this is a duplicate or overlaps another entry. */
4324 if (value == 0)
4326 if (case_stack->data.case_stmt.default_label != 0)
4328 *duplicate = case_stack->data.case_stmt.default_label;
4329 return 2;
4331 case_stack->data.case_stmt.default_label = label;
4333 else
4334 return add_case_node (value, value, label, duplicate);
4336 expand_label (label);
4337 return 0;
4340 /* Like pushcase but this case applies to all values between VALUE1 and
4341 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4342 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4343 starts at VALUE1 and ends at the highest value of the index type.
4344 If both are NULL, this case applies to all values.
4346 The return value is the same as that of pushcase but there is one
4347 additional error code: 4 means the specified range was empty. */
4350 pushcase_range (value1, value2, converter, label, duplicate)
4351 register tree value1, value2;
4352 tree (*converter) PROTO((tree, tree));
4353 register tree label;
4354 tree *duplicate;
4356 tree index_type;
4357 tree nominal_type;
4359 /* Fail if not inside a real case statement. */
4360 if (! (case_stack && case_stack->data.case_stmt.start))
4361 return 1;
4363 if (stack_block_stack
4364 && stack_block_stack->depth > case_stack->depth)
4365 return 5;
4367 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4368 nominal_type = case_stack->data.case_stmt.nominal_type;
4370 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4371 if (index_type == error_mark_node)
4372 return 0;
4374 check_seenlabel ();
4376 /* Convert VALUEs to type in which the comparisons are nominally done
4377 and replace any unspecified value with the corresponding bound. */
4378 if (value1 == 0)
4379 value1 = TYPE_MIN_VALUE (index_type);
4380 if (value2 == 0)
4381 value2 = TYPE_MAX_VALUE (index_type);
4383 /* Fail if the range is empty. Do this before any conversion since
4384 we want to allow out-of-range empty ranges. */
4385 if (value2 && tree_int_cst_lt (value2, value1))
4386 return 4;
4388 value1 = (*converter) (nominal_type, value1);
4390 /* If the max was unbounded, use the max of the nominal_type we are
4391 converting to. Do this after the < check above to suppress false
4392 positives. */
4393 if (!value2)
4394 value2 = TYPE_MAX_VALUE (nominal_type);
4395 value2 = (*converter) (nominal_type, value2);
4397 /* Fail if these values are out of range. */
4398 if (TREE_CONSTANT_OVERFLOW (value1)
4399 || ! int_fits_type_p (value1, index_type))
4400 return 3;
4402 if (TREE_CONSTANT_OVERFLOW (value2)
4403 || ! int_fits_type_p (value2, index_type))
4404 return 3;
4406 return add_case_node (value1, value2, label, duplicate);
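/* Editorial illustration (hedged): the GNU C case-range extension is
   the construct that reaches pushcase_range; an omitted bound arrives
   here as a null VALUE1 or VALUE2.  */
#if 0
void
example (int c)
{
  switch (c)
    {
    case 'a' ... 'z':		/* pushcase_range ('a', 'z', ...)  */
      break;
    default:
      break;
    }
}
#endif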
4409 /* Do the actual insertion of a case label for pushcase and pushcase_range
4410 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4411 slowdown for large switch statements. */
4413 static int
4414 add_case_node (low, high, label, duplicate)
4415 tree low, high;
4416 tree label;
4417 tree *duplicate;
4419 struct case_node *p, **q, *r;
4421 q = &case_stack->data.case_stmt.case_list;
4422 p = *q;
4424 while ((r = *q))
4426 p = r;
4428 /* Keep going past elements distinctly greater than HIGH. */
4429 if (tree_int_cst_lt (high, p->low))
4430 q = &p->left;
4432 /* or distinctly less than LOW. */
4433 else if (tree_int_cst_lt (p->high, low))
4434 q = &p->right;
4436 else
4438 /* We have an overlap; this is an error. */
4439 *duplicate = p->code_label;
4440 return 2;
4444 /* Add this label to the chain, and succeed.
4445 Copy LOW, HIGH so they are on the temporary rather than the momentary
4446 obstack and will thus survive till the end of the case statement. */
4448 r = (struct case_node *) oballoc (sizeof (struct case_node));
4449 r->low = copy_node (low);
4451 /* If the bounds are equal, turn this into the one-value case. */
4453 if (tree_int_cst_equal (low, high))
4454 r->high = r->low;
4455 else
4457 r->high = copy_node (high);
4458 case_stack->data.case_stmt.num_ranges++;
4461 r->code_label = label;
4462 expand_label (label);
4464 *q = r;
4465 r->parent = p;
4466 r->left = 0;
4467 r->right = 0;
4468 r->balance = 0;
4470 while (p)
4472 struct case_node *s;
4474 if (r == p->left)
4476 int b;
4478 if (! (b = p->balance))
4479 /* Growth propagation from left side. */
4480 p->balance = -1;
4481 else if (b < 0)
4483 if (r->balance < 0)
4485 /* R-Rotation */
4486 if ((p->left = s = r->right))
4487 s->parent = p;
4489 r->right = p;
4490 p->balance = 0;
4491 r->balance = 0;
4492 s = p->parent;
4493 p->parent = r;
4495 if ((r->parent = s))
4497 if (s->left == p)
4498 s->left = r;
4499 else
4500 s->right = r;
4502 else
4503 case_stack->data.case_stmt.case_list = r;
4505 else
4506 /* r->balance == +1 */
4508 /* LR-Rotation */
4510 int b2;
4511 struct case_node *t = r->right;
4513 if ((p->left = s = t->right))
4514 s->parent = p;
4516 t->right = p;
4517 if ((r->right = s = t->left))
4518 s->parent = r;
4520 t->left = r;
4521 b = t->balance;
4522 b2 = b < 0;
4523 p->balance = b2;
4524 b2 = -b2 - b;
4525 r->balance = b2;
4526 t->balance = 0;
4527 s = p->parent;
4528 p->parent = t;
4529 r->parent = t;
4531 if ((t->parent = s))
4533 if (s->left == p)
4534 s->left = t;
4535 else
4536 s->right = t;
4538 else
4539 case_stack->data.case_stmt.case_list = t;
4541 break;
4544 else
4546 /* p->balance == +1; growth of left side balances the node. */
4547 p->balance = 0;
4548 break;
4551 else
4552 /* r == p->right */
4554 int b;
4556 if (! (b = p->balance))
4557 /* Growth propagation from right side. */
4558 p->balance++;
4559 else if (b > 0)
4561 if (r->balance > 0)
4563 /* L-Rotation */
4565 if ((p->right = s = r->left))
4566 s->parent = p;
4568 r->left = p;
4569 p->balance = 0;
4570 r->balance = 0;
4571 s = p->parent;
4572 p->parent = r;
4573 if ((r->parent = s))
4575 if (s->left == p)
4576 s->left = r;
4577 else
4578 s->right = r;
4581 else
4582 case_stack->data.case_stmt.case_list = r;
4585 else
4586 /* r->balance == -1 */
4588 /* RL-Rotation */
4589 int b2;
4590 struct case_node *t = r->left;
4592 if ((p->right = s = t->left))
4593 s->parent = p;
4595 t->left = p;
4597 if ((r->left = s = t->right))
4598 s->parent = r;
4600 t->right = r;
4601 b = t->balance;
4602 b2 = b < 0;
4603 r->balance = b2;
4604 b2 = -b2 - b;
4605 p->balance = b2;
4606 t->balance = 0;
4607 s = p->parent;
4608 p->parent = t;
4609 r->parent = t;
4611 if ((t->parent = s))
4613 if (s->left == p)
4614 s->left = t;
4615 else
4616 s->right = t;
4619 else
4620 case_stack->data.case_stmt.case_list = t;
4622 break;
4624 else
4626 /* p->balance == -1; growth of right side balances the node. */
4627 p->balance = 0;
4628 break;
4632 r = p;
4633 p = p->parent;
4636 return 0;
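/* Editorial illustration (hedged): the R-rotation performed above when
   the left subtree outgrows the left-leaning node P (both balances
   negative).  S, R's right subtree, keeps its relative order:

	     P                R
	    / \              / \
	   R   c     =>     a   P
	  / \                  / \
	 a   S                S   c

   Inserting single-value labels 3, 2, 1 in that order (hypothetical
   trees and labels) triggers exactly this case:  */
#if 0
add_case_node (three, three, label3, &dup);
add_case_node (two, two, label2, &dup);
add_case_node (one, one, label1, &dup);	/* rotation: 2 becomes the root */
#endif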
4640 /* Returns the number of possible values of TYPE.
4641 Returns -1 if the number is unknown or variable.
4642 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4643 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4644 do not increase monotonically (there may be duplicates);
4645 to 1 if the values increase monotonically, but not always by 1;
4646 otherwise sets it to 0. */
4648 HOST_WIDE_INT
4649 all_cases_count (type, spareness)
4650 tree type;
4651 int *spareness;
4653 HOST_WIDE_INT count;
4654 *spareness = 0;
4656 switch (TREE_CODE (type))
4658 tree t;
4659 case BOOLEAN_TYPE:
4660 count = 2;
4661 break;
4662 case CHAR_TYPE:
4663 count = 1 << BITS_PER_UNIT;
4664 break;
4665 default:
4666 case INTEGER_TYPE:
4667 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4668 || TYPE_MAX_VALUE (type) == NULL
4669 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4670 return -1;
4671 else
4673 /* count
4674 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4675 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4676 but with overflow checking. */
4677 tree mint = TYPE_MIN_VALUE (type);
4678 tree maxt = TYPE_MAX_VALUE (type);
4679 HOST_WIDE_INT lo, hi;
4680 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4681 &lo, &hi);
4682 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4683 lo, hi, &lo, &hi);
4684 add_double (lo, hi, 1, 0, &lo, &hi);
4685 if (hi != 0 || lo < 0)
4686 return -2;
4687 count = lo;
4689 break;
4690 case ENUMERAL_TYPE:
4691 count = 0;
4692 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4694 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4695 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4696 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4697 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4698 *spareness = 1;
4699 count++;
4701 if (*spareness == 1)
4703 tree prev = TREE_VALUE (TYPE_VALUES (type));
4704 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4706 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4708 *spareness = 2;
4709 break;
4711 prev = TREE_VALUE (t);
4716 return count;
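/* Editorial worked example (hedged): for a `signed char' index type
   the computation above is count = -min + max + 1
   = -(-128) + 127 + 1 = 256, carried out in two HOST_WIDE_INT halves
   by neg_double and add_double; unless the final hi is 0 and lo is
   non-negative, -2 is returned instead.  */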
4720 #define BITARRAY_TEST(ARRAY, INDEX) \
4721 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4722 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4723 #define BITARRAY_SET(ARRAY, INDEX) \
4724 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4725 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
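/* Editorial usage sketch (hedged): one bit per possible selector
   value, indexed by the value's offset from the minimum.  */
#if 0
void
example (void)
{
  unsigned char seen[4] = { 0 };	/* room for 32 case offsets */
  BITARRAY_SET (seen, 5);
  if (BITARRAY_TEST (seen, 5))
    ;					/* offset 5 has been matched */
}
#endif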
4727 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4728 with the case values we have seen, assuming the case expression
4729 has the given TYPE.
4730 SPARSENESS is as determined by all_cases_count.
4732 The time needed is proportional to COUNT, unless
4733 SPARSENESS is 2, in which case quadratic time is needed. */
4735 void
4736 mark_seen_cases (type, cases_seen, count, sparseness)
4737 tree type;
4738 unsigned char *cases_seen;
4739 long count;
4740 int sparseness;
4742 tree next_node_to_try = NULL_TREE;
4743 long next_node_offset = 0;
4745 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4746 tree val = make_node (INTEGER_CST);
4747 TREE_TYPE (val) = type;
4748 if (! root)
4749 ; /* Do nothing */
4750 else if (sparseness == 2)
4752 tree t;
4753 HOST_WIDE_INT xlo;
4755 /* This less efficient loop is only needed to handle
4756 duplicate case values (multiple enum constants
4757 with the same value). */
4758 TREE_TYPE (val) = TREE_TYPE (root->low);
4759 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4760 t = TREE_CHAIN (t), xlo++)
4762 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4763 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4764 n = root;
4767 /* Keep going past elements distinctly greater than VAL. */
4768 if (tree_int_cst_lt (val, n->low))
4769 n = n->left;
4771 /* or distinctly less than VAL. */
4772 else if (tree_int_cst_lt (n->high, val))
4773 n = n->right;
4775 else
4777 /* We have found a matching range. */
4778 BITARRAY_SET (cases_seen, xlo);
4779 break;
4782 while (n);
4785 else
4787 if (root->left)
4788 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4789 for (n = root; n; n = n->right)
4791 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4792 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4793 while ( ! tree_int_cst_lt (n->high, val))
4795 /* Calculate (into xlo) the "offset" of the integer (val).
4796 The element with lowest value has offset 0, the next smallest
4797 element has offset 1, etc. */
4799 HOST_WIDE_INT xlo, xhi;
4800 tree t;
4801 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4803 /* The TYPE_VALUES will be in increasing order, so
4804 start searching where we last ended. */
4805 t = next_node_to_try;
4806 xlo = next_node_offset;
4807 xhi = 0;
4808 for (;;)
4810 if (t == NULL_TREE)
4812 t = TYPE_VALUES (type);
4813 xlo = 0;
4815 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4817 next_node_to_try = TREE_CHAIN (t);
4818 next_node_offset = xlo + 1;
4819 break;
4821 xlo++;
4822 t = TREE_CHAIN (t);
4823 if (t == next_node_to_try)
4825 xlo = -1;
4826 break;
4830 else
4832 t = TYPE_MIN_VALUE (type);
4833 if (t)
4834 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4835 &xlo, &xhi);
4836 else
4837 xlo = xhi = 0;
4838 add_double (xlo, xhi,
4839 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4840 &xlo, &xhi);
4843 if (xhi == 0 && xlo >= 0 && xlo < count)
4844 BITARRAY_SET (cases_seen, xlo);
4845 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4846 1, 0,
4847 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4853 /* Called when the index of a switch statement is an enumerated type
4854 and there is no default label.
4856 Checks that all enumeration literals are covered by the case
4857 expressions of a switch. Also, warn if there are any extra
4858 switch cases that are *not* elements of the enumerated type.
4860 If all enumeration literals were covered by the case expressions,
4861 turn one of the expressions into the default expression since it should
4862 not be possible to fall through such a switch. */
4864 void
4865 check_for_full_enumeration_handling (type)
4866 tree type;
4868 register struct case_node *n;
4869 register tree chain;
4870 #if 0 /* variable used by 'if 0'ed code below. */
4871 register struct case_node **l;
4872 int all_values = 1;
4873 #endif
4875 /* True iff the selector type is a numbered set mode. */
4876 int sparseness = 0;
4878 /* The number of possible selector values. */
4879 HOST_WIDE_INT size;
4881 /* For each possible selector value, a one iff it has been matched
4882 by a case value alternative. */
4883 unsigned char *cases_seen;
4885 /* The allocated size of cases_seen, in chars. */
4886 long bytes_needed;
4888 if (! warn_switch)
4889 return;
4891 size = all_cases_count (type, &sparseness);
4892 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4894 if (size > 0 && size < 600000
4895 /* We deliberately use malloc here - not xmalloc. */
4896 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4898 long i;
4899 tree v = TYPE_VALUES (type);
4900 bzero (cases_seen, bytes_needed);
4902 /* The time complexity of this code is normally O(N), where
4903 N is the number of members in the enumerated type.
4904 However, if type is an ENUMERAL_TYPE whose values do not
4905 increase monotonically, O(N*log(N)) time may be needed. */
4907 mark_seen_cases (type, cases_seen, size, sparseness);
4909 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4911 if (BITARRAY_TEST (cases_seen, i) == 0)
4912 warning ("enumeration value `%s' not handled in switch",
4913 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4916 free (cases_seen);
4919 /* Now we go the other way around; we warn if there are case
4920 expressions that don't correspond to enumerators. This can
4921 occur since C and C++ don't enforce type-checking of
4922 assignments to enumeration variables. */
4924 if (case_stack->data.case_stmt.case_list
4925 && case_stack->data.case_stmt.case_list->left)
4926 case_stack->data.case_stmt.case_list
4927 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4928 if (warn_switch)
4929 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4931 for (chain = TYPE_VALUES (type);
4932 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4933 chain = TREE_CHAIN (chain))
4936 if (!chain)
4938 if (TYPE_NAME (type) == 0)
4939 warning ("case value `%ld' not in enumerated type",
4940 (long) TREE_INT_CST_LOW (n->low));
4941 else
4942 warning ("case value `%ld' not in enumerated type `%s'",
4943 (long) TREE_INT_CST_LOW (n->low),
4944 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4945 == IDENTIFIER_NODE)
4946 ? TYPE_NAME (type)
4947 : DECL_NAME (TYPE_NAME (type))));
4949 if (!tree_int_cst_equal (n->low, n->high))
4951 for (chain = TYPE_VALUES (type);
4952 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4953 chain = TREE_CHAIN (chain))
4956 if (!chain)
4958 if (TYPE_NAME (type) == 0)
4959 warning ("case value `%ld' not in enumerated type",
4960 (long) TREE_INT_CST_LOW (n->high));
4961 else
4962 warning ("case value `%ld' not in enumerated type `%s'",
4963 (long) TREE_INT_CST_LOW (n->high),
4964 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4965 == IDENTIFIER_NODE)
4966 ? TYPE_NAME (type)
4967 : DECL_NAME (TYPE_NAME (type))));
4972 #if 0
4973 /* ??? This optimization is disabled because it causes valid programs to
4974 fail. ANSI C does not guarantee that an expression with enum type
4975 will have a value that is the same as one of the enumeration literals. */
4977 /* If all values were found as case labels, make one of them the default
4978 label. Thus, this switch will never fall through. We arbitrarily pick
4979 the last one to make the default since this is likely the most
4980 efficient choice. */
4982 if (all_values)
4984 for (l = &case_stack->data.case_stmt.case_list;
4985 (*l)->right != 0;
4986 l = &(*l)->right)
4989 case_stack->data.case_stmt.default_label = (*l)->code_label;
4990 *l = 0;
4992 #endif /* 0 */
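/* Editorial illustration (hedged): with -Wswitch and no default label,
   the hypothetical fragment below draws both warnings issued above.  */
#if 0
enum e { ea, eb, ec };
void
example (enum e x)
{
  switch (x)
    {
    case ea: break;
    case eb: break;
    case 99: break;	/* "case value `99' not in enumerated type `e'" */
    }			/* "enumeration value `ec' not handled in switch" */
}
#endif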
4996 /* Terminate a case (Pascal) or switch (C) statement
4997 in which ORIG_INDEX is the expression to be tested.
4998 Generate the code to test it and jump to the right place. */
5000 void
5001 expand_end_case (orig_index)
5002 tree orig_index;
5004 tree minval = NULL_TREE, maxval = NULL_TREE, range, orig_minval;
5005 rtx default_label = 0;
5006 register struct case_node *n;
5007 unsigned int count;
5008 rtx index;
5009 rtx table_label;
5010 int ncases;
5011 rtx *labelvec;
5012 register int i;
5013 rtx before_case;
5014 register struct nesting *thiscase = case_stack;
5015 tree index_expr, index_type;
5016 int unsignedp;
5018 table_label = gen_label_rtx ();
5019 index_expr = thiscase->data.case_stmt.index_expr;
5020 index_type = TREE_TYPE (index_expr);
5021 unsignedp = TREE_UNSIGNED (index_type);
5023 do_pending_stack_adjust ();
5025 /* This might get a spurious warning in the presence of a syntax error;
5026 it could be fixed by moving the call to check_seenlabel after the
5027 check for error_mark_node, and copying the code of check_seenlabel that
5028 deals with case_stack->data.case_stmt.line_number_status /
5029 restore_line_number_status in front of the call to end_cleanup_deferral.
5030 However, this might miss some useful warnings in the presence of
5031 non-syntax errors. */
5032 check_seenlabel ();
5034 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5035 if (index_type != error_mark_node)
5037 /* If switch expression was an enumerated type, check that all
5038 enumeration literals are covered by the cases.
5039 No sense trying this if there's a default case, however. */
5041 if (!thiscase->data.case_stmt.default_label
5042 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5043 && TREE_CODE (index_expr) != INTEGER_CST)
5044 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5046 /* If we don't have a default-label, create one here,
5047 after the body of the switch. */
5048 if (thiscase->data.case_stmt.default_label == 0)
5050 thiscase->data.case_stmt.default_label
5051 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5052 expand_label (thiscase->data.case_stmt.default_label);
5054 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5056 before_case = get_last_insn ();
5058 if (thiscase->data.case_stmt.case_list
5059 && thiscase->data.case_stmt.case_list->left)
5060 thiscase->data.case_stmt.case_list
5061 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5063 /* Simplify the case-list before we count it. */
5064 group_case_nodes (thiscase->data.case_stmt.case_list);
5066 /* Get upper and lower bounds of case values.
5067 Also convert all the case values to the index expr's data type. */
5069 count = 0;
5070 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5072 /* Check low and high label values are integers. */
5073 if (TREE_CODE (n->low) != INTEGER_CST)
5074 abort ();
5075 if (TREE_CODE (n->high) != INTEGER_CST)
5076 abort ();
5078 n->low = convert (index_type, n->low);
5079 n->high = convert (index_type, n->high);
5081 /* Count the elements and track the largest and smallest
5082 of them (treating them as signed even if they are not). */
5083 if (count++ == 0)
5085 minval = n->low;
5086 maxval = n->high;
5088 else
5090 if (INT_CST_LT (n->low, minval))
5091 minval = n->low;
5092 if (INT_CST_LT (maxval, n->high))
5093 maxval = n->high;
5095 /* A range counts double, since it requires two compares. */
5096 if (! tree_int_cst_equal (n->low, n->high))
5097 count++;
5100 orig_minval = minval;
5102 /* Compute span of values. */
5103 if (count != 0)
5104 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5106 end_cleanup_deferral ();
5108 if (count == 0)
5110 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5111 emit_queue ();
5112 emit_jump (default_label);
5115 /* If the range of values is much bigger than the number of values,
5116 make a sequence of conditional branches instead of a dispatch table.
5117 If the switch-index is a constant, do it this way
5118 because we can optimize it. */
5120 #ifndef CASE_VALUES_THRESHOLD
5121 #ifdef HAVE_casesi
5122 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5123 #else
5124 /* If the machine does not have a case insn that compares the
5125 bounds, this means extra overhead for dispatch tables
5126 which raises the threshold for using them. */
5127 #define CASE_VALUES_THRESHOLD 5
5128 #endif /* HAVE_casesi */
5129 #endif /* CASE_VALUES_THRESHOLD */
5131 else if (TREE_INT_CST_HIGH (range) != 0
5132 || count < (unsigned int) CASE_VALUES_THRESHOLD
5133 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5134 > 10 * count)
5135 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5136 || flag_pic
5137 #endif
5138 || TREE_CODE (index_expr) == INTEGER_CST
5139 /* These will reduce to a constant. */
5140 || (TREE_CODE (index_expr) == CALL_EXPR
5141 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5142 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5143 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5144 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5145 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5147 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5149 /* If the index is a short or char for which we do not have
5150 an insn to handle comparisons directly, convert it to
5151 a full integer now, rather than letting each comparison
5152 generate the conversion. */
5154 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5155 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5156 == CODE_FOR_nothing))
5158 enum machine_mode wider_mode;
5159 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5160 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5161 if (cmp_optab->handlers[(int) wider_mode].insn_code
5162 != CODE_FOR_nothing)
5164 index = convert_to_mode (wider_mode, index, unsignedp);
5165 break;
5169 emit_queue ();
5170 do_pending_stack_adjust ();
5172 index = protect_from_queue (index, 0);
5173 if (GET_CODE (index) == MEM)
5174 index = copy_to_reg (index);
5175 if (GET_CODE (index) == CONST_INT
5176 || TREE_CODE (index_expr) == INTEGER_CST)
5178 /* Make a tree node with the proper constant value
5179 if we don't already have one. */
5180 if (TREE_CODE (index_expr) != INTEGER_CST)
5182 index_expr
5183 = build_int_2 (INTVAL (index),
5184 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5185 index_expr = convert (index_type, index_expr);
5188 /* For constant index expressions we need only
5189 issue an unconditional branch to the appropriate
5190 target code. The job of removing any unreachable
5191 code is left to the optimization phase if the
5192 "-O" option is specified. */
5193 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5194 if (! tree_int_cst_lt (index_expr, n->low)
5195 && ! tree_int_cst_lt (n->high, index_expr))
5196 break;
5198 if (n)
5199 emit_jump (label_rtx (n->code_label));
5200 else
5201 emit_jump (default_label);
5203 else
5205 /* If the index expression is not constant we generate
5206 a binary decision tree to select the appropriate
5207 target code. This is done as follows:
5209 The list of cases is rearranged into a binary tree,
5210 nearly optimal assuming equal probability for each case.
5212 The tree is transformed into RTL, eliminating
5213 redundant test conditions at the same time.
5215 If program flow could reach the end of the
5216 decision tree an unconditional jump to the
5217 default code is emitted. */
5219 use_cost_table
5220 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5221 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5222 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5223 NULL_PTR);
5224 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5225 default_label, index_type);
5226 emit_jump_if_reachable (default_label);
5229 else
5231 int win = 0;
5232 #ifdef HAVE_casesi
5233 if (HAVE_casesi)
5235 enum machine_mode index_mode = SImode;
5236 int index_bits = GET_MODE_BITSIZE (index_mode);
5237 rtx op1, op2;
5238 enum machine_mode op_mode;
5240 /* Convert the index to SImode. */
5241 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5242 > GET_MODE_BITSIZE (index_mode))
5244 enum machine_mode omode = TYPE_MODE (index_type);
5245 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5247 /* We must handle the endpoints in the original mode. */
5248 index_expr = build (MINUS_EXPR, index_type,
5249 index_expr, minval);
5250 minval = integer_zero_node;
5251 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5252 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5253 omode, 1, 0, default_label);
5254 /* Now we can safely truncate. */
5255 index = convert_to_mode (index_mode, index, 0);
5257 else
5259 if (TYPE_MODE (index_type) != index_mode)
5261 index_expr = convert (type_for_size (index_bits, 0),
5262 index_expr);
5263 index_type = TREE_TYPE (index_expr);
5266 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5268 emit_queue ();
5269 index = protect_from_queue (index, 0);
5270 do_pending_stack_adjust ();
5272 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5273 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5274 (index, op_mode))
5275 index = copy_to_mode_reg (op_mode, index);
5277 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5279 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5280 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5281 (op1, op_mode))
5282 op1 = copy_to_mode_reg (op_mode, op1);
5284 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5286 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5287 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5288 (op2, op_mode))
5289 op2 = copy_to_mode_reg (op_mode, op2);
5291 emit_jump_insn (gen_casesi (index, op1, op2,
5292 table_label, default_label));
5293 win = 1;
5295 #endif
5296 #ifdef HAVE_tablejump
5297 if (! win && HAVE_tablejump)
5299 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5300 fold (build (MINUS_EXPR, index_type,
5301 index_expr, minval)));
5302 index_type = TREE_TYPE (index_expr);
5303 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5304 emit_queue ();
5305 index = protect_from_queue (index, 0);
5306 do_pending_stack_adjust ();
5308 do_tablejump (index, TYPE_MODE (index_type),
5309 expand_expr (range, NULL_RTX, VOIDmode, 0),
5310 table_label, default_label);
5311 win = 1;
5313 #endif
5314 if (! win)
5315 abort ();
5317 /* Get table of labels to jump to, in order of case index. */
5319 ncases = TREE_INT_CST_LOW (range) + 1;
5320 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5321 bzero ((char *) labelvec, ncases * sizeof (rtx));
5323 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5325 register HOST_WIDE_INT i
5326 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5328 while (1)
5330 labelvec[i]
5331 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5332 if (i + TREE_INT_CST_LOW (orig_minval)
5333 == TREE_INT_CST_LOW (n->high))
5334 break;
5335 i++;
5339 /* Fill in the gaps with the default. */
5340 for (i = 0; i < ncases; i++)
5341 if (labelvec[i] == 0)
5342 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5344 /* Output the table. */
5345 emit_label (table_label);
5347 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5348 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5349 gen_rtx_LABEL_REF (Pmode, table_label),
5350 gen_rtvec_v (ncases, labelvec),
5351 const0_rtx, const0_rtx, 0));
5352 else
5353 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5354 gen_rtvec_v (ncases, labelvec)));
5356 /* If the case insn drops through the table,
5357 after the table we must jump to the default-label.
5358 Otherwise record no drop-through after the table. */
5359 #ifdef CASE_DROPS_THROUGH
5360 emit_jump (default_label);
5361 #else
5362 emit_barrier ();
5363 #endif
5366 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5367 reorder_insns (before_case, get_last_insn (),
5368 thiscase->data.case_stmt.start);
5370 else
5371 end_cleanup_deferral ();
5373 if (thiscase->exit_label)
5374 emit_label (thiscase->exit_label);
5376 POPSTACK (case_stack);
5378 free_temp_slots ();
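/* Editorial worked example (hedged): how the heuristics above choose
   between a dispatch table and a branch tree.  The exact threshold
   depends on HAVE_casesi and CASE_VALUES_THRESHOLD for the target.  */
#if 0
int
dense (int x)		/* 5 distinct targets, range 4 <= 10 * count:
			   eligible for casesi/tablejump dispatch.  */
{
  switch (x)
    {
    case 0: return 10;
    case 1: return 11;
    case 2: return 12;
    case 3: return 13;
    case 4: return 14;
    default: return -1;
    }
}
int
sparse (int x)		/* few labels and range 2000 > 10 * count:
			   a binary tree of compares instead.  */
{
  switch (x)
    {
    case 0: return 1;
    case 1000: return 2;
    case 2000: return 3;
    default: return -1;
    }
}
#endif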
5381 /* Convert the tree NODE into a list linked by the right field, with the left
5382 field zeroed. RIGHT is used for recursion; it is a list to be placed
5383 rightmost in the resulting list. */
5385 static struct case_node *
5386 case_tree2list (node, right)
5387 struct case_node *node, *right;
5389 struct case_node *left;
5391 if (node->right)
5392 right = case_tree2list (node->right, right);
5394 node->right = right;
5395 if ((left = node->left))
5397 node->left = 0;
5398 return case_tree2list (left, node);
5401 return node;
5404 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5406 static void
5407 do_jump_if_equal (op1, op2, label, unsignedp)
5408 rtx op1, op2, label;
5409 int unsignedp;
5411 if (GET_CODE (op1) == CONST_INT
5412 && GET_CODE (op2) == CONST_INT)
5414 if (INTVAL (op1) == INTVAL (op2))
5415 emit_jump (label);
5417 else
5419 enum machine_mode mode = GET_MODE (op1);
5420 if (mode == VOIDmode)
5421 mode = GET_MODE (op2);
5422 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5423 0, label);
5427 /* Not all case values are encountered equally. This function
5428 uses a heuristic to weight case labels, in cases where that
5429 looks like a reasonable thing to do.
5431 Right now, all we try to guess is text, and we establish the
5432 following weights:
5434 chars above space: 16
5435 digits: 16
5436 default: 12
5437 space, punct: 8
5438 tab: 4
5439 newline: 2
5440 other "\" chars: 1
5441 remaining chars: 0
5443 If we find any cases in the switch that are not either -1 or in the range
5444 of valid ASCII characters, or are control characters other than those
5445 commonly used with "\", don't treat this switch as scanning text.
5447 Return 1 if these nodes are suitable for cost estimation, otherwise
5448 return 0. */
5450 static int
5451 estimate_case_costs (node)
5452 case_node_ptr node;
5454 tree min_ascii = build_int_2 (-1, -1);
5455 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5456 case_node_ptr n;
5457 int i;
5459 /* If we haven't already made the cost table, make it now. Note that the
5460 lower bound of the table is -1, not zero. */
5462 if (cost_table == NULL)
5464 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5465 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5467 for (i = 0; i < 128; i++)
5469 if (ISALNUM (i))
5470 cost_table[i] = 16;
5471 else if (ISPUNCT (i))
5472 cost_table[i] = 8;
5473 else if (ISCNTRL (i))
5474 cost_table[i] = -1;
5477 cost_table[' '] = 8;
5478 cost_table['\t'] = 4;
5479 cost_table['\0'] = 4;
5480 cost_table['\n'] = 2;
5481 cost_table['\f'] = 1;
5482 cost_table['\v'] = 1;
5483 cost_table['\b'] = 1;
5486 /* See if all the case expressions look like text. It is text if the
5487 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5488 as signed arithmetic since we don't want to ever access cost_table with a
5489 value less than -1. Also check that none of the constants in a range
5490 are strange control characters. */
5492 for (n = node; n; n = n->right)
5494 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5495 return 0;
5497 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5498 if (cost_table[i] < 0)
5499 return 0;
5502 /* All interesting values are within the range of interesting
5503 ASCII characters. */
5504 return 1;
5507 /* Scan an ordered list of case nodes
5508 combining those with consecutive values or ranges.
5510 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5512 static void
5513 group_case_nodes (head)
5514 case_node_ptr head;
5516 case_node_ptr node = head;
5518 while (node)
5520 rtx lb = next_real_insn (label_rtx (node->code_label));
5521 rtx lb2;
5522 case_node_ptr np = node;
5524 /* Try to group the successors of NODE with NODE. */
5525 while (((np = np->right) != 0)
5526 /* Do they jump to the same place? */
5527 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5528 || (lb != 0 && lb2 != 0
5529 && simplejump_p (lb)
5530 && simplejump_p (lb2)
5531 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5532 SET_SRC (PATTERN (lb2)))))
5533 /* Are their ranges consecutive? */
5534 && tree_int_cst_equal (np->low,
5535 fold (build (PLUS_EXPR,
5536 TREE_TYPE (node->high),
5537 node->high,
5538 integer_one_node)))
5539 /* An overflow is not consecutive. */
5540 && tree_int_cst_lt (node->high,
5541 fold (build (PLUS_EXPR,
5542 TREE_TYPE (node->high),
5543 node->high,
5544 integer_one_node))))
5546 node->high = np->high;
5548 /* NP is the first node after NODE which can't be grouped with it.
5549 Delete the nodes in between, and move on to that node. */
5550 node->right = np;
5551 node = np;
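/* A standalone sketch of the grouping idea on a simplified node type
   (hypothetical names; the jump-target comparison above is omitted
   and all labels are assumed to match): the consecutive singletons
   1:, 2:, 3: collapse into the single range 1..3.  */
#if 0
#include <limits.h>

struct toy_node { int low, high; struct toy_node *right; };

static void
toy_group (head)
     struct toy_node *head;
{
  struct toy_node *node, *np;

  for (node = head; node; node = np)
    {
      /* Absorb every successor whose range starts just past ours,
	 refusing to wrap around as the real overflow check does.  */
      for (np = node->right;
	   np != 0 && node->high != INT_MAX && np->low == node->high + 1;
	   np = np->right)
	node->high = np->high;
      node->right = np;
    }
}
#endif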
5555 /* Take an ordered list of case nodes
5556 and transform them into a near optimal binary tree,
5557 on the assumption that any target code selection value is as
5558 likely as any other.
5560 The transformation is performed by splitting the ordered
5561 list into two equal sections plus a pivot. The parts are
5562 then attached to the pivot as left and right branches. Each
5563 branch is then transformed recursively. */
5565 static void
5566 balance_case_nodes (head, parent)
5567 case_node_ptr *head;
5568 case_node_ptr parent;
5570 register case_node_ptr np;
5572 np = *head;
5573 if (np)
5575 int cost = 0;
5576 int i = 0;
5577 int ranges = 0;
5578 register case_node_ptr *npp;
5579 case_node_ptr left;
5581 /* Count the number of entries on branch. Also count the ranges. */
5583 while (np)
5585 if (!tree_int_cst_equal (np->low, np->high))
5587 ranges++;
5588 if (use_cost_table)
5589 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5592 if (use_cost_table)
5593 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5595 i++;
5596 np = np->right;
5599 if (i > 2)
5601 /* Split this list if it is long enough for that to help. */
5602 npp = head;
5603 left = *npp;
5604 if (use_cost_table)
5606 /* Find the place in the list that bisects the list's total cost;
5607 here I gets half the total cost. */
5608 int n_moved = 0;
5609 i = (cost + 1) / 2;
5610 while (1)
5612 /* Skip nodes while their cost does not reach that amount. */
5613 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5614 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5615 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5616 if (i <= 0)
5617 break;
5618 npp = &(*npp)->right;
5619 n_moved += 1;
5621 if (n_moved == 0)
5623 /* Leave this branch lopsided, but optimize left-hand
5624 side and fill in `parent' fields for right-hand side. */
5625 np = *head;
5626 np->parent = parent;
5627 balance_case_nodes (&np->left, np);
5628 for (; np->right; np = np->right)
5629 np->right->parent = np;
5630 return;
5633 /* If there are just three nodes, split at the middle one. */
5634 else if (i == 3)
5635 npp = &(*npp)->right;
5636 else
5638 /* Find the place in the list that bisects the list's total cost,
5639 where ranges count as 2.
5640 Here I gets half the total cost. */
5641 i = (i + ranges + 1) / 2;
5642 while (1)
5644 /* Skip nodes while their cost does not reach that amount. */
5645 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5646 i--;
5647 i--;
5648 if (i <= 0)
5649 break;
5650 npp = &(*npp)->right;
5653 *head = np = *npp;
5654 *npp = 0;
5655 np->parent = parent;
5656 np->left = left;
5658 /* Optimize each of the two split parts. */
5659 balance_case_nodes (&np->left, np);
5660 balance_case_nodes (&np->right, np);
5662 else
5664 /* Else leave this branch as one level,
5665 but fill in `parent' fields. */
5666 np = *head;
5667 np->parent = parent;
5668 for (; np->right; np = np->right)
5669 np->right->parent = np;
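/* A standalone sketch (hypothetical names) of the same balancing step
   with the cost-table and range refinements stripped out: pick the
   middle node of the ordered list as the pivot, hang the first half
   off its left, and recurse on both halves.  */
#if 0
struct toy_node { struct toy_node *left, *right, *parent; };

static void
toy_balance (head, parent)
     struct toy_node **head;
     struct toy_node *parent;
{
  struct toy_node **npp, *np, *left;
  int n = 0, i;

  for (np = *head; np; np = np->right)
    n++;			/* Count the entries on this branch.  */

  if (n <= 2)
    {
      /* Too short to split; just fill in the `parent' fields.  */
      for (np = *head; np; np = np->right)
	{
	  np->parent = parent;
	  parent = np;
	}
      return;
    }

  left = *head;
  npp = head;
  for (i = n / 2; i > 0; i--)
    npp = &(*npp)->right;	/* Walk halfway down the list.  */

  *head = np = *npp;		/* The middle node becomes the pivot.  */
  *npp = 0;			/* Terminate the left-hand half.  */
  np->parent = parent;
  np->left = left;

  toy_balance (&np->left, np);
  toy_balance (&np->right, np);
}
#endif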
5674 /* Search the parent sections of the case node tree
5675 to see if a test for the lower bound of NODE would be redundant.
5676 INDEX_TYPE is the type of the index expression.
5678 The instructions to generate the case decision tree are
5679 output in the same order as nodes are processed, so it is
5680 known that if a parent node checks the range of the current
5681 node minus one, the current node is bounded at its lower
5682 span. Thus the test would be redundant. */
5684 static int
5685 node_has_low_bound (node, index_type)
5686 case_node_ptr node;
5687 tree index_type;
5689 tree low_minus_one;
5690 case_node_ptr pnode;
5692 /* If the lower bound of this node is the lowest value in the index type,
5693 we need not test it. */
5695 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5696 return 1;
5698 /* If this node has a left branch, the value at the left must be less
5699 than that at this node, so it cannot be bounded at the bottom and
5700 we need not bother testing any further. */
5702 if (node->left)
5703 return 0;
5705 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5706 node->low, integer_one_node));
5708 /* If the subtraction above overflowed, we can't verify anything.
5709 Otherwise, look for a parent that tests our value - 1. */
5711 if (! tree_int_cst_lt (low_minus_one, node->low))
5712 return 0;
5714 for (pnode = node->parent; pnode; pnode = pnode->parent)
5715 if (tree_int_cst_equal (low_minus_one, pnode->high))
5716 return 1;
5718 return 0;
5721 /* Search the parent sections of the case node tree
5722 to see if a test for the upper bound of NODE would be redundant.
5723 INDEX_TYPE is the type of the index expression.
5725 The instructions to generate the case decision tree are
5726 output in the same order as nodes are processed, so it is
5727 known that if a parent node checks the range of the current
5728 node plus one, the current node is bounded at its upper
5729 span. Thus the test would be redundant. */
5731 static int
5732 node_has_high_bound (node, index_type)
5733 case_node_ptr node;
5734 tree index_type;
5736 tree high_plus_one;
5737 case_node_ptr pnode;
5739 /* If there is no upper bound, obviously no test is needed. */
5741 if (TYPE_MAX_VALUE (index_type) == NULL)
5742 return 1;
5744 /* If the upper bound of this node is the highest value in the type
5745 of the index expression, we need not test against it. */
5747 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5748 return 1;
5750 /* If this node has a right branch, the value at the right must be greater
5751 than that at this node, so it cannot be bounded at the top and
5752 we need not bother testing any further. */
5754 if (node->right)
5755 return 0;
5757 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5758 node->high, integer_one_node));
5760 /* If the addition above overflowed, we can't verify anything.
5761 Otherwise, look for a parent that tests our value + 1. */
5763 if (! tree_int_cst_lt (node->high, high_plus_one))
5764 return 0;
5766 for (pnode = node->parent; pnode; pnode = pnode->parent)
5767 if (tree_int_cst_equal (high_plus_one, pnode->low))
5768 return 1;
5770 return 0;
5773 /* Search the parent sections of the
5774 case node tree to see if both tests for the upper and lower
5775 bounds of NODE would be redundant. */
5777 static int
5778 node_is_bounded (node, index_type)
5779 case_node_ptr node;
5780 tree index_type;
5782 return (node_has_low_bound (node, index_type)
5783 && node_has_high_bound (node, index_type));
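/* A worked example of the pruning (assuming an unsigned index type):
   given the two ranges 0..9 and 10..19, the node for 0..9 needs no
   lower-bound test, since 0 is the minimum value of the index type,
   and the node for 10..19 needs none either, since its low minus one
   is 9, the high value already tested by its parent.  Only the
   comparisons `index <= 9' and `index > 19' are emitted.  */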
5786 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5788 static void
5789 emit_jump_if_reachable (label)
5790 rtx label;
5792 if (GET_CODE (get_last_insn ()) != BARRIER)
5793 emit_jump (label);
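/* For example, emit_jump itself emits a BARRIER after the jump insn,
   so a jump to DEFAULT_LABEL emitted right behind another
   unconditional jump would be dead code and is skipped here.  */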
5796 /* Emit step-by-step code to select a case for the value of INDEX.
5797 The thus generated decision tree follows the form of the
5798 case-node binary tree NODE, whose nodes represent test conditions.
5799 INDEX_TYPE is the type of the index of the switch.
5801 Care is taken to prune redundant tests from the decision tree
5802 by detecting any boundary conditions already checked by
5803 emitted rtx. (See node_has_high_bound, node_has_low_bound
5804 and node_is_bounded, above.)
5806 Where the test conditions can be shown to be redundant, we emit
5807 an unconditional jump to the target code. As a further
5808 optimization, the subordinates of a tree node are examined to
5809 check for bounded nodes. In this case conditional and/or
5810 unconditional jumps as a result of the boundary check for the
5811 current node are arranged to target the subordinate's associated
5812 code for out-of-bound conditions on the current node.
5814 We can assume that when control reaches the code generated here,
5815 the index value has already been compared with the parents
5816 of this node, and determined to be on the same side of each parent
5817 as this node is. Thus, if this node tests for the value 51,
5818 and a parent tested for 52, we don't need to consider
5819 the possibility of a value greater than 51. If another parent
5820 tests for the value 50, then this node need not test anything. */
5822 static void
5823 emit_case_nodes (index, node, default_label, index_type)
5824 rtx index;
5825 case_node_ptr node;
5826 rtx default_label;
5827 tree index_type;
5829 /* If INDEX has an unsigned type, we must make unsigned branches. */
5830 int unsignedp = TREE_UNSIGNED (index_type);
5831 typedef rtx rtx_fn ();
5832 enum machine_mode mode = GET_MODE (index);
5834 /* See if our parents have already tested everything for us.
5835 If they have, emit an unconditional jump for this node. */
5836 if (node_is_bounded (node, index_type))
5837 emit_jump (label_rtx (node->code_label));
5839 else if (tree_int_cst_equal (node->low, node->high))
5841 /* Node is single valued. First see if the index expression matches
5842 this node and then check our children, if any. */
5844 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5845 label_rtx (node->code_label), unsignedp);
5847 if (node->right != 0 && node->left != 0)
5849 /* This node has children on both sides.
5850 Dispatch to one side or the other
5851 by comparing the index value with this node's value.
5852 If one subtree is bounded, check that one first,
5853 so we can avoid real branches in the tree. */
5855 if (node_is_bounded (node->right, index_type))
5857 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5858 VOIDmode, 0),
5859 GT, NULL_RTX, mode, unsignedp, 0,
5860 label_rtx (node->right->code_label));
5861 emit_case_nodes (index, node->left, default_label, index_type);
5864 else if (node_is_bounded (node->left, index_type))
5866 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5867 VOIDmode, 0),
5868 LT, NULL_RTX, mode, unsignedp, 0,
5869 label_rtx (node->left->code_label));
5870 emit_case_nodes (index, node->right, default_label, index_type);
5873 else
5875 /* Neither node is bounded. First distinguish the two sides;
5876 then emit the code for one side at a time. */
5878 tree test_label
5879 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5881 /* See if the value is on the right. */
5882 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5883 VOIDmode, 0),
5884 GT, NULL_RTX, mode, unsignedp, 0,
5885 label_rtx (test_label));
5887 /* Value must be on the left.
5888 Handle the left-hand subtree. */
5889 emit_case_nodes (index, node->left, default_label, index_type);
5890 /* If left-hand subtree does nothing,
5891 go to default. */
5892 emit_jump_if_reachable (default_label);
5894 /* Code branches here for the right-hand subtree. */
5895 expand_label (test_label);
5896 emit_case_nodes (index, node->right, default_label, index_type);
5900 else if (node->right != 0 && node->left == 0)
5902 /* Here we have a right child but no left, so we issue a conditional
5903 branch to default and process the right child.
5905 Omit the conditional branch to default if it would avoid only one
5906 right child; it costs too much space to save so little time. */
5908 if (node->right->right || node->right->left
5909 || !tree_int_cst_equal (node->right->low, node->right->high))
5911 if (!node_has_low_bound (node, index_type))
5913 emit_cmp_and_jump_insns (index, expand_expr (node->high,
5914 NULL_RTX,
5915 VOIDmode, 0),
5916 LT, NULL_RTX, mode, unsignedp, 0,
5917 default_label);
5920 emit_case_nodes (index, node->right, default_label, index_type);
5922 else
5923 /* We cannot process node->right normally
5924 since we haven't ruled out the numbers less than
5925 this node's value. So handle node->right explicitly. */
5926 do_jump_if_equal (index,
5927 expand_expr (node->right->low, NULL_RTX,
5928 VOIDmode, 0),
5929 label_rtx (node->right->code_label), unsignedp);
5932 else if (node->right == 0 && node->left != 0)
5934 /* Just one subtree, on the left. */
5936 #if 0 /* The following code and comment were formerly part
5937 of the condition here, but they didn't work
5938 and I don't understand what the idea was. -- rms. */
5939 /* If our "most probable entry" is less probable
5940 than the default label, emit a jump to
5941 the default label using condition codes
5942 already lying around. With no right branch,
5943 a branch-greater-than will get us to the default
5944 label correctly. */
5945 if (use_cost_table
5946 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5948 #endif /* 0 */
5949 if (node->left->left || node->left->right
5950 || !tree_int_cst_equal (node->left->low, node->left->high))
5952 if (!node_has_high_bound (node, index_type))
5954 emit_cmp_and_jump_insns (index, expand_expr (node->high,
5955 NULL_RTX,
5956 VOIDmode, 0),
5957 GT, NULL_RTX, mode, unsignedp, 0,
5958 default_label);
5961 emit_case_nodes (index, node->left, default_label, index_type);
5963 else
5964 /* We cannot process node->left normally
5965 since we haven't ruled out the numbers greater than
5966 this node's value. So handle node->left explicitly. */
5967 do_jump_if_equal (index,
5968 expand_expr (node->left->low, NULL_RTX,
5969 VOIDmode, 0),
5970 label_rtx (node->left->code_label), unsignedp);
5973 else
5975 /* Node is a range. These cases are very similar to those for a single
5976 value, except that we do not start by testing whether this node
5977 is the one to branch to. */
5979 if (node->right != 0 && node->left != 0)
5981 /* Node has subtrees on both sides.
5982 If the right-hand subtree is bounded,
5983 test for it first, since we can go straight there.
5984 Otherwise, we need to make a branch in the control structure,
5985 then handle the two subtrees. */
5986 tree test_label = 0;
5989 if (node_is_bounded (node->right, index_type))
5990 /* Right hand node is fully bounded so we can eliminate any
5991 testing and branch directly to the target code. */
5992 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5993 VOIDmode, 0),
5994 GT, NULL_RTX, mode, unsignedp, 0,
5995 label_rtx (node->right->code_label));
5996 else
5998 /* Right hand node requires testing.
5999 Branch to a label where we will handle it later. */
6001 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6002 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6003 VOIDmode, 0),
6004 GT, NULL_RTX, mode, unsignedp, 0,
6005 label_rtx (test_label));
6008 /* Value belongs to this node or to the left-hand subtree. */
6010 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6011 VOIDmode, 0),
6012 GE, NULL_RTX, mode, unsignedp, 0,
6013 label_rtx (node->code_label));
6015 /* Handle the left-hand subtree. */
6016 emit_case_nodes (index, node->left, default_label, index_type);
6018 /* If right node had to be handled later, do that now. */
6020 if (test_label)
6022 /* If the left-hand subtree fell through,
6023 don't let it fall into the right-hand subtree. */
6024 emit_jump_if_reachable (default_label);
6026 expand_label (test_label);
6027 emit_case_nodes (index, node->right, default_label, index_type);
6031 else if (node->right != 0 && node->left == 0)
6033 /* Deal with values to the left of this node,
6034 if they are possible. */
6035 if (!node_has_low_bound (node, index_type))
6037 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6038 VOIDmode, 0),
6039 LT, NULL_RTX, mode, unsignedp, 0,
6040 default_label);
6043 /* Value belongs to this node or to the right-hand subtree. */
6045 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6046 VOIDmode, 0),
6047 LE, NULL_RTX, mode, unsignedp, 0,
6048 label_rtx (node->code_label));
6050 emit_case_nodes (index, node->right, default_label, index_type);
6053 else if (node->right == 0 && node->left != 0)
6055 /* Deal with values to the right of this node,
6056 if they are possible. */
6057 if (!node_has_high_bound (node, index_type))
6059 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6060 VOIDmode, 0),
6061 GT, NULL_RTX, mode, unsignedp, 0,
6062 default_label);
6065 /* Value belongs to this node or to the left-hand subtree. */
6067 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6068 VOIDmode, 0),
6069 GE, NULL_RTX, mode, unsignedp, 0,
6070 label_rtx (node->code_label));
6072 emit_case_nodes (index, node->left, default_label, index_type);
6075 else
6077 /* Node has no children so we check low and high bounds to remove
6078 redundant tests. Only one of the bounds can exist,
6079 since otherwise this node is bounded--a case tested already. */
6081 if (!node_has_high_bound (node, index_type))
6083 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6084 VOIDmode, 0),
6085 GT, NULL_RTX, mode, unsignedp, 0,
6086 default_label);
6089 if (!node_has_low_bound (node, index_type))
6091 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6092 VOIDmode, 0),
6093 LT, NULL_RTX, mode, unsignedp, 0,
6094 default_label);
6097 emit_jump (label_rtx (node->code_label));
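/* As a worked example, for the balanced single-valued tree with 2 at
   the root and leaves 1 and 3 (and an index type whose bounds prune
   nothing), the code emitted above amounts to (labels hypothetical):

       if (index == 2) goto L2;
       if (index > 2) goto test_label;
       if (index == 1) goto L1;
       goto default_label;
     test_label:
       if (index == 3) goto L3;

   with the final jump to DEFAULT_LABEL supplied by the caller.  */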
6102 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6103 so that the debugging info will be correct for the unrolled loop. */
6105 /* Indexed by block number, contains a pointer to the N'th block node.
6107 Allocated by the call to identify_blocks, then released after the call
6108 to reorder_blocks in the function unroll_block_trees. */
6110 static tree *block_vector;
6112 void
6113 find_loop_tree_blocks ()
6115 tree block = DECL_INITIAL (current_function_decl);
6117 block_vector = identify_blocks (block, get_insns ());
6120 void
6121 unroll_block_trees ()
6123 tree block = DECL_INITIAL (current_function_decl);
6125 reorder_blocks (block_vector, block, get_insns ());
6127 /* Release any memory allocated by identify_blocks. */
6128 if (block_vector)
6129 free (block_vector);
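/* A sketch of the intended calling sequence (the actual caller is the
   loop unroller; names of the surrounding steps are informal):

       find_loop_tree_blocks ();
       ... unroll the loop, copying insns and their block notes ...
       unroll_block_trees ();

   so that block_vector is alive exactly for the duration of one
   unrolling pass.  */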